Added fields option to explain api. #2203

Martijn van Groningen 2012-08-24 16:21:27 +02:00 committed by Shay Banon
parent cd0e1226e1
commit 9b29950997
8 changed files with 280 additions and 103 deletions
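For context, a minimal sketch of how the new option is used from the Java client, based on the integration test added at the end of this commit (the index, type, id and field names are the test fixtures):

    ExplainResponse response = client.prepareExplain("test", "test", "1")
            .setQuery(QueryBuilders.matchAllQuery())
            .setFields("obj1.field1")                    // new in this commit: fields to return for the explained document
            .execute().actionGet();
    if (response.exists() && response.match()) {
        Explanation explanation = response.explanation();   // scoring explanation, as before
        GetResult get = response.getResult();                // new in this commit: the requested fields, as in the get API
        Object value = get.fields().get("obj1.field1").getValue();
    }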

ExplainRequest.java

@ -43,6 +43,7 @@ public class ExplainRequest extends SingleShardOperationRequest {
private String routing;
private String preference;
private BytesReference source;
private String[] fields;
private boolean sourceUnsafe;
private String[] filteringAlias = Strings.EMPTY_ARRAY;
@ -125,6 +126,15 @@ public class ExplainRequest extends SingleShardOperationRequest {
return this;
}
public String[] fields() {
return fields;
}
public ExplainRequest fields(String[] fields) {
this.fields = fields;
return this;
}
public String[] filteringAlias() {
return filteringAlias;
}
@ -182,6 +192,9 @@ public class ExplainRequest extends SingleShardOperationRequest {
source = in.readBytesReference();
sourceUnsafe = false;
filteringAlias = in.readStringArray();
if (in.readBoolean()) {
fields = in.readStringArray();
}
}
@Override
@ -193,5 +206,11 @@ public class ExplainRequest extends SingleShardOperationRequest {
out.writeOptionalString(preference);
out.writeBytesReference(source);
out.writeStringArray(filteringAlias);
if (fields != null) {
out.writeBoolean(true);
out.writeStringArray(fields);
} else {
out.writeBoolean(false);
}
}
}

ExplainRequestBuilder.java

@ -107,6 +107,14 @@ public class ExplainRequestBuilder extends BaseRequestBuilder<ExplainRequest, Ex
return this;
}
/**
* Explicitly specify the fields that will be returned for the explained document. By default, nothing is returned.
*/
public ExplainRequestBuilder setFields(String... fields) {
request.fields(fields);
return this;
}
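Two usage notes, both grounded elsewhere in this commit: a fields array containing only "_source" is special-cased in TransportExplainAction so that just the document source is loaded, and script-style names such as "_source.obj1" are resolved against the source (see the added test). Given a builder obtained from client.prepareExplain(...):

    builder.setFields("_source");        // special case: returns only the stored _source
    builder.setFields("_source.obj1");   // extracts a sub-object of the source, as exercised by the new test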
/**
* Sets the full source of the explain request (for example, wrapping an actual query).
*/

ExplainResponse.java

@ -23,6 +23,7 @@ import org.apache.lucene.search.Explanation;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.index.get.GetResult;
import java.io.IOException;
@ -34,8 +35,9 @@ import static org.elasticsearch.common.lucene.Lucene.writeExplanation;
*/
public class ExplainResponse implements ActionResponse {
private boolean exists;
private Explanation explanation;
private GetResult getResult;
ExplainResponse() {
}
@ -49,6 +51,12 @@ public class ExplainResponse implements ActionResponse {
this.explanation = explanation;
}
public ExplainResponse(boolean exists, Explanation explanation, GetResult getResult) {
this.exists = exists;
this.explanation = explanation;
this.getResult = getResult;
}
public Explanation getExplanation() {
return explanation();
}
@ -77,11 +85,22 @@ public class ExplainResponse implements ActionResponse {
return exists();
}
public GetResult getResult() {
return getResult;
}
public GetResult getGetResult() {
return getResult();
}
public void readFrom(StreamInput in) throws IOException {
exists = in.readBoolean();
if (in.readBoolean()) {
explanation = readExplanation(in);
}
if (in.readBoolean()) {
getResult = GetResult.readGetResult(in);
}
}
public void writeTo(StreamOutput out) throws IOException {
@ -92,5 +111,11 @@ public class ExplainResponse implements ActionResponse {
out.writeBoolean(true);
writeExplanation(out, explanation);
}
if (getResult == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
getResult.writeTo(out);
}
}
}

TransportExplainAction.java

@ -34,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.ParsedQuery;
@ -106,7 +107,18 @@ public class TransportExplainAction extends TransportShardSingleOperationAction<
int topLevelDocId = result.docIdAndVersion().docId + result.docIdAndVersion().docStart;
Explanation explanation = context.searcher().explain(context.query(), topLevelDocId);
if (request.fields() != null) {
if (request.fields().length == 1 && "_source".equals(request.fields()[0])) {
request.fields(null); // a null fields array makes the get service load just the _source field
}
// The advantage is that we don't open a second searcher to retrieve the _source. Also, because
// we work against the same searcher that the engine get result holds, we can be sure the doc
// isn't deleted between the initial get and this call.
GetResult getResult = indexShard.getService().get(result, request.id(), request.type(), request.fields());
return new ExplainResponse(true, explanation, getResult);
} else {
return new ExplainResponse(true, explanation);
}
} catch (IOException e) {
throw new ElasticSearchException("Could not explain", e);
} finally {
@ -154,7 +166,8 @@ public class TransportExplainAction extends TransportShardSingleOperationAction<
}
protected ShardIterator shards(ClusterState state, ExplainRequest request) throws ElasticSearchException {
return clusterService.operationRouting().getShards(
clusterService.state(), request.index(), request.type(), request.id(), request.routing(), request.preference()
);
}
}

RobinEngine.java

@ -343,12 +343,12 @@ public class RobinEngine extends AbstractIndexShardComponent implements Engine {
UnicodeUtil.UTF8Result utf8 = Unicode.fromStringAsUtf8(get.uid().text());
for (int i = 0; i < searcher.searcher().subReaders().length; i++) {
IndexReader subReader = searcher.searcher().subReaders()[i];
BloomFilter filter = bloomCache.filter(subReader, UidFieldMapper.NAME, asyncLoadBloomFilter);
// we know that its not there...
if (!filter.isPresent(utf8.result, 0, utf8.length)) {
continue;
}
int docStart = searcher.searcher().docStarts()[i];
UidField.DocIdAndVersion docIdAndVersion = UidField.loadDocIdAndVersion(subReader, docStart, get.uid());
if (docIdAndVersion != null && docIdAndVersion.docId != Lucene.NO_DOC) {
return new GetResult(searcher, docIdAndVersion);

ShardGetService.java

@ -102,6 +102,39 @@ public class ShardGetService extends AbstractIndexShardComponent {
}
}
/**
* Returns {@link GetResult} based on the specified {@link Engine.GetResult} argument.
* This method loads the specified fields for the document referenced by the given engineGetResult.
* The fields are loaded from the Lucene index rather than from the transaction log, so the result is not realtime.
* <p>
* Note: the caller <b>must</b> release the engine searcher associated with the engineGetResult!
*/
public GetResult get(Engine.GetResult engineGetResult, String id, String type, String[] fields) {
if (!engineGetResult.exists()) {
return new GetResult(shardId.index().name(), type, id, -1, false, null, null);
}
currentMetric.inc();
try {
long now = System.nanoTime();
DocumentMapper docMapper = mapperService.documentMapper(type);
if (docMapper == null) {
missingMetric.inc(System.nanoTime() - now);
return new GetResult(shardId.index().name(), type, id, -1, false, null, null);
}
GetResult getResult = innerGetLoadFromStoredFields(type, id, fields, engineGetResult, docMapper);
if (getResult.exists()) {
existsMetric.inc(System.nanoTime() - now);
} else {
missingMetric.inc(System.nanoTime() - now); // This shouldn't happen...
}
return getResult;
} finally {
currentMetric.dec();
}
}
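A caller-side sketch of this new overload, mirroring how TransportExplainAction above uses it; the release() call on Engine.GetResult is an assumption based on the Javadoc note (the transport action's finally block is truncated in this diff):

    // engineGetResult was previously obtained from the shard's engine for the same uid
    try {
        GetResult getResult = indexShard.getService().get(engineGetResult, id, type, fields);
        // ... read getResult.fields() and getResult.source() ...
    } finally {
        engineGetResult.release();   // assumed API; the engine searcher must be released by the caller
    }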
public GetResult innerGet(String type, String id, String[] gFields, boolean realtime) throws ElasticSearchException {
boolean loadSource = gFields == null || gFields.length > 0;
Engine.GetResult get = null;
@ -139,101 +172,7 @@ public class ShardGetService extends AbstractIndexShardComponent {
try {
// break between having loaded it from translog (so we only have _source), and having a document to load
if (get.docIdAndVersion() != null) {
return innerGetLoadFromStoredFields(type, id, gFields, get, docMapper);
} else {
Translog.Source source = get.source();
@ -334,6 +273,104 @@ public class ShardGetService extends AbstractIndexShardComponent {
}
}
private GetResult innerGetLoadFromStoredFields(String type, String id, String[] gFields, Engine.GetResult get, DocumentMapper docMapper) {
Map<String, GetField> fields = null;
byte[] source = null;
UidField.DocIdAndVersion docIdAndVersion = get.docIdAndVersion();
ResetFieldSelector fieldSelector = buildFieldSelectors(docMapper, gFields);
if (fieldSelector != null) {
fieldSelector.reset();
Document doc;
try {
doc = docIdAndVersion.reader.document(docIdAndVersion.docId, fieldSelector);
} catch (IOException e) {
throw new ElasticSearchException("Failed to get type [" + type + "] and id [" + id + "]", e);
}
source = extractSource(doc, docMapper);
for (Object oField : doc.getFields()) {
Fieldable field = (Fieldable) oField;
String name = field.name();
Object value = null;
FieldMappers fieldMappers = docMapper.mappers().indexName(field.name());
if (fieldMappers != null) {
FieldMapper mapper = fieldMappers.mapper();
if (mapper != null) {
name = mapper.names().fullName();
value = mapper.valueForSearch(field);
}
}
if (value == null) {
if (field.isBinary()) {
value = new BytesArray(field.getBinaryValue(), field.getBinaryOffset(), field.getBinaryLength());
} else {
value = field.stringValue();
}
}
if (fields == null) {
fields = newHashMapWithExpectedSize(2);
}
GetField getField = fields.get(name);
if (getField == null) {
getField = new GetField(name, new ArrayList<Object>(2));
fields.put(name, getField);
}
getField.values().add(value);
}
}
// now, go and do the script thingy if needed
if (gFields != null && gFields.length > 0) {
SearchLookup searchLookup = null;
for (String field : gFields) {
Object value = null;
if (field.contains("_source.") || field.contains("doc[")) {
if (searchLookup == null) {
searchLookup = new SearchLookup(mapperService, indexCache.fieldData(), new String[]{type});
}
SearchScript searchScript = scriptService.search(searchLookup, "mvel", field, null);
searchScript.setNextReader(docIdAndVersion.reader);
searchScript.setNextDocId(docIdAndVersion.docId);
try {
value = searchScript.run();
} catch (RuntimeException e) {
if (logger.isTraceEnabled()) {
logger.trace("failed to execute get request script field [{}]", e, field);
}
// ignore
}
} else {
FieldMappers x = docMapper.mappers().smartName(field);
if (x == null || !x.mapper().stored()) {
if (searchLookup == null) {
searchLookup = new SearchLookup(mapperService, indexCache.fieldData(), new String[]{type});
searchLookup.setNextReader(docIdAndVersion.reader);
searchLookup.setNextDocId(docIdAndVersion.docId);
}
value = searchLookup.source().extractValue(field);
}
}
if (value != null) {
if (fields == null) {
fields = newHashMapWithExpectedSize(2);
}
GetField getField = fields.get(field);
if (getField == null) {
getField = new GetField(field, new ArrayList<Object>(2));
fields.put(field, getField);
}
getField.values().add(value);
}
}
}
return new GetResult(shardId.index().name(), type, id, get.version(), get.exists(), source == null ? null : new BytesArray(source), fields);
}
private static ResetFieldSelector buildFieldSelectors(DocumentMapper docMapper, String... fields) {
if (fields == null) {
return docMapper.sourceMapper().fieldSelector();

RestExplainAction.java

@ -26,11 +26,13 @@ import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.explain.ExplainSourceBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryStringQueryBuilder;
import org.elasticsearch.rest.*;
@ -88,6 +90,14 @@ public class RestExplainAction extends BaseRestHandler {
explainRequest.source(explainSourceBuilder);
}
String sField = request.param("fields");
if (sField != null) {
String[] sFields = Strings.splitStringByCommaToArray(sField);
if (sFields != null) {
explainRequest.fields(sFields);
}
}
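Over REST the same option is exposed as a comma-separated fields request parameter (for example fields=obj1.field1,obj1.field2); the values are split with Strings.splitStringByCommaToArray and handed straight to ExplainRequest.fields(), so they behave exactly like setFields on the Java client builder.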
client.explain(explainRequest, new ActionListener<ExplainResponse>() {
@Override
@ -95,13 +105,23 @@ public class RestExplainAction extends BaseRestHandler {
try {
XContentBuilder builder = restContentBuilder(request);
builder.startObject();
builder.field(Fields.OK, response.exists())
.field(Fields._INDEX, explainRequest.index())
.field(Fields._TYPE, explainRequest.type())
.field(Fields._ID, explainRequest.id())
.field(Fields.MATCHED, response.match());
if (response.hasExplanation()) {
builder.startObject(Fields.EXPLANATION);
buildExplanation(builder, response.explanation());
builder.endObject();
}
GetResult getResult = response.getResult();
if (getResult != null) {
builder.startObject(Fields.GET);
response.getResult().toXContentEmbedded(builder, request);
builder.endObject();
}
builder.endObject();
channel.sendResponse(new XContentRestResponse(request, response.exists() ? OK : NOT_FOUND, builder));
} catch (Exception e) {
@ -137,10 +157,15 @@ public class RestExplainAction extends BaseRestHandler {
static class Fields {
static final XContentBuilderString OK = new XContentBuilderString("ok");
static final XContentBuilderString MATCHES = new XContentBuilderString("matches");
static final XContentBuilderString _INDEX = new XContentBuilderString("_index");
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _ID = new XContentBuilderString("_id");
static final XContentBuilderString MATCHED = new XContentBuilderString("matched");
static final XContentBuilderString EXPLANATION = new XContentBuilderString("explanation");
static final XContentBuilderString VALUE = new XContentBuilderString("value");
static final XContentBuilderString DESCRIPTION = new XContentBuilderString("description");
static final XContentBuilderString DETAILS = new XContentBuilderString("details");
static final XContentBuilderString GET = new XContentBuilderString("get");
}
}

ExplainActionTests.java

@ -22,6 +22,7 @@ package org.elasticsearch.test.integration.explain;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.index.get.GetField;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.IndexMissingException;
@ -30,6 +31,9 @@ import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.testng.Assert.assertFalse;
@ -118,6 +122,52 @@ public class ExplainActionTests extends AbstractNodesTests {
assertFalse(response.match());
}
@Test
public void testExplainWithFields() throws Exception {
try {
client.admin().indices().prepareDelete("test").execute().actionGet();
} catch (IndexMissingException e) {}
client.admin().indices().prepareCreate("test").execute().actionGet();
client.admin().cluster().prepareHealth("test").setWaitForGreenStatus().execute().actionGet();
client.prepareIndex("test", "test", "1")
.setSource(
jsonBuilder().startObject()
.startObject("obj1")
.field("field1", "value1")
.field("field2", "value2")
.endObject()
.endObject()
).execute().actionGet();
client.admin().indices().prepareRefresh("test").execute().actionGet();
ExplainResponse response = client.prepareExplain("test", "test", "1")
.setQuery(QueryBuilders.matchAllQuery())
.setFields("obj1.field1")
.execute().actionGet();
assertNotNull(response);
assertTrue(response.match());
assertNotNull(response.explanation());
assertTrue(response.explanation().isMatch());
assertThat(response.explanation().getValue(), equalTo(1.0f));
assertThat(response.getResult().exists(), equalTo(true));
assertThat(response.getResult().id(), equalTo("1"));
assertThat(response.getResult().fields().size(), equalTo(1));
assertThat(response.getResult().fields().get("obj1.field1").getValue().toString(), equalTo("value1"));
response = client.prepareExplain("test", "test", "1")
.setQuery(QueryBuilders.matchAllQuery())
.setFields("_source.obj1")
.execute().actionGet();
assertNotNull(response);
assertTrue(response.match());
assertThat(response.getResult().fields().size(), equalTo(1));
Map<String, String> fields = (Map<String, String>) response.getResult().field("_source.obj1").getValue();
assertThat(fields.size(), equalTo(2));
assertThat(fields.get("field1"), equalTo("value1"));
assertThat(fields.get("field2"), equalTo("value2"));
}
@Test
public void testExplainWithAlias() throws Exception {
try {