HLRC GraphClient and associated tests (#32366)

GraphClient for the high level REST client and associated tests. Part of #29827 work

commit 38bdf9ce32 (parent 92076497e5)
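
For orientation, a minimal usage sketch of the client added by this change (it assumes an already-constructed RestHighLevelClient named `client`; the index, field and term names are the ones used by the tests further down):

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse;
import org.elasticsearch.protocol.xpack.graph.Hop;
import org.elasticsearch.protocol.xpack.graph.Vertex;
import org.elasticsearch.protocol.xpack.graph.VertexRequest;

// Seed the exploration with a query and ask for terms of the "num" field as vertices.
GraphExploreRequest request = new GraphExploreRequest();
request.indices("index1", "index2");
request.useSignificance(false);
Hop hop1 = request.createNextHop(new TermQueryBuilder("const.keyword", "start"));
VertexRequest vr = hop1.addVertexRequest("num");
vr.minDocCount(1);

// Blocking call through the new GraphClient exposed as client.graph().
GraphExploreResponse response = client.graph().explore(request, RequestOptions.DEFAULT);
for (Vertex vertex : response.getVertices()) {
    System.out.println(vertex.getField() + ":" + vertex.getTerm() + " @ hop " + vertex.getHopDepth());
}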
@@ -0,0 +1,63 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse;

import java.io.IOException;

import static java.util.Collections.emptySet;


public class GraphClient {
    private final RestHighLevelClient restHighLevelClient;

    GraphClient(RestHighLevelClient restHighLevelClient) {
        this.restHighLevelClient = restHighLevelClient;
    }

    /**
     * Executes an exploration request using the Graph API.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/graph-explore-api.html">Graph API
     * on elastic.co</a>.
     */
    public final GraphExploreResponse explore(GraphExploreRequest graphExploreRequest,
                                              RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(graphExploreRequest, RequestConverters::xPackGraphExplore,
            options, GraphExploreResponse::fromXContext, emptySet());
    }

    /**
     * Asynchronously executes an exploration request using the Graph API.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/graph-explore-api.html">Graph API
     * on elastic.co</a>.
     */
    public final void exploreAsync(GraphExploreRequest graphExploreRequest,
                                   RequestOptions options,
                                   ActionListener<GraphExploreResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(graphExploreRequest, RequestConverters::xPackGraphExplore,
            options, GraphExploreResponse::fromXContext, listener, emptySet());
    }

}
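
For completeness, a hedged sketch of the asynchronous variant shown above (reusing the `request` and `client` from the earlier sketch; the callbacks are the standard ActionListener pair):

client.graph().exploreAsync(request, RequestOptions.DEFAULT, new ActionListener<GraphExploreResponse>() {
    @Override
    public void onResponse(GraphExploreResponse response) {
        // inspect response.getVertices() / response.getConnections() here
    }

    @Override
    public void onFailure(Exception e) {
        // the exploration could not be executed or its response could not be parsed
    }
});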

@@ -114,6 +114,7 @@ import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
import org.elasticsearch.script.mustache.SearchTemplateRequest;

@@ -1124,6 +1125,13 @@ final class RequestConverters {
        return request;
    }

    static Request xPackGraphExplore(GraphExploreRequest exploreRequest) throws IOException {
        String endpoint = endpoint(exploreRequest.indices(), exploreRequest.types(), "_xpack/graph/_explore");
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
        request.setEntity(createEntity(exploreRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request xPackWatcherPutWatch(PutWatchRequest putWatchRequest) {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_xpack")
@@ -209,6 +209,7 @@ public class RestHighLevelClient implements Closeable {
    private final TasksClient tasksClient = new TasksClient(this);
    private final XPackClient xPackClient = new XPackClient(this);
    private final WatcherClient watcherClient = new WatcherClient(this);
    private final GraphClient graphClient = new GraphClient(this);
    private final LicenseClient licenseClient = new LicenseClient(this);
    private final MigrationClient migrationClient = new MigrationClient(this);
    private final MachineLearningClient machineLearningClient = new MachineLearningClient(this);

@@ -324,6 +325,16 @@ public class RestHighLevelClient implements Closeable {
     * Watcher APIs on elastic.co</a> for more information.
     */
    public WatcherClient watcher() { return watcherClient; }

    /**
     * Provides methods for accessing the Elastic Licensed Graph explore API that
     * is shipped with the default distribution of Elasticsearch. All of
     * these APIs will 404 if run against the OSS distribution of Elasticsearch.
     * <p>
     * See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/graph-explore-api.html">
     * Graph API on elastic.co</a> for more information.
     */
    public GraphClient graph() { return graphClient; }

    /**
     * Provides methods for accessing the Elastic Licensed Licensing APIs that
@@ -0,0 +1,139 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client;

import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse;
import org.elasticsearch.protocol.xpack.graph.Hop;
import org.elasticsearch.protocol.xpack.graph.Vertex;
import org.elasticsearch.protocol.xpack.graph.VertexRequest;
import org.hamcrest.Matchers;
import org.junit.Before;

import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

public class GraphIT extends ESRestHighLevelClientTestCase {

    @Before
    public void indexDocuments() throws IOException {
        // Create chain of doc IDs across indices 1->2->3
        Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/type/1");
        doc1.setJsonEntity("{ \"num\":[1], \"const\":\"start\"}");
        client().performRequest(doc1);

        Request doc2 = new Request(HttpPut.METHOD_NAME, "/index2/type/1");
        doc2.setJsonEntity("{\"num\":[1,2], \"const\":\"foo\"}");
        client().performRequest(doc2);

        Request doc3 = new Request(HttpPut.METHOD_NAME, "/index2/type/2");
        doc3.setJsonEntity("{\"num\":[2,3], \"const\":\"foo\"}");
        client().performRequest(doc3);

        Request doc4 = new Request(HttpPut.METHOD_NAME, "/index_no_field_data/type/2");
        doc4.setJsonEntity("{\"num\":\"string\", \"const\":\"foo\"}");
        client().performRequest(doc4);

        Request doc5 = new Request(HttpPut.METHOD_NAME, "/index_no_field_data/type/2");
        doc5.setJsonEntity("{\"num\":[2,4], \"const\":\"foo\"}");
        client().performRequest(doc5);


        client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh"));
    }

    public void testCleanExplore() throws Exception {
        GraphExploreRequest graphExploreRequest = new GraphExploreRequest();
        graphExploreRequest.indices("index1", "index2");
        graphExploreRequest.useSignificance(false);
        int numHops = 3;
        for (int i = 0; i < numHops; i++) {
            QueryBuilder guidingQuery = null;
            if (i == 0) {
                guidingQuery = new TermQueryBuilder("const.keyword", "start");
            } else if (randomBoolean()) {
                guidingQuery = new TermQueryBuilder("const.keyword", "foo");
            }
            Hop hop = graphExploreRequest.createNextHop(guidingQuery);
            VertexRequest vr = hop.addVertexRequest("num");
            vr.minDocCount(1);
        }
        Map<String, Integer> expectedTermsAndDepths = new HashMap<>();
        expectedTermsAndDepths.put("1", 0);
        expectedTermsAndDepths.put("2", 1);
        expectedTermsAndDepths.put("3", 2);

        GraphExploreResponse exploreResponse = highLevelClient().graph().explore(graphExploreRequest, RequestOptions.DEFAULT);
        Map<String, Integer> actualTermsAndDepths = new HashMap<>();
        Collection<Vertex> v = exploreResponse.getVertices();
        for (Vertex vertex : v) {
            actualTermsAndDepths.put(vertex.getTerm(), vertex.getHopDepth());
        }
        assertEquals(expectedTermsAndDepths, actualTermsAndDepths);
        assertThat(exploreResponse.isTimedOut(), Matchers.is(false));
        ShardOperationFailedException[] failures = exploreResponse.getShardFailures();
        assertThat(failures.length, Matchers.equalTo(0));

    }

    public void testBadExplore() throws Exception {
        // Explore indices where lack of fielddata=true on one index leads to partial failures
        GraphExploreRequest graphExploreRequest = new GraphExploreRequest();
        graphExploreRequest.indices("index1", "index2", "index_no_field_data");
        graphExploreRequest.useSignificance(false);
        int numHops = 3;
        for (int i = 0; i < numHops; i++) {
            QueryBuilder guidingQuery = null;
            if (i == 0) {
                guidingQuery = new TermQueryBuilder("const.keyword", "start");
            } else if (randomBoolean()) {
                guidingQuery = new TermQueryBuilder("const.keyword", "foo");
            }
            Hop hop = graphExploreRequest.createNextHop(guidingQuery);
            VertexRequest vr = hop.addVertexRequest("num");
            vr.minDocCount(1);
        }
        Map<String, Integer> expectedTermsAndDepths = new HashMap<>();
        expectedTermsAndDepths.put("1", 0);
        expectedTermsAndDepths.put("2", 1);
        expectedTermsAndDepths.put("3", 2);

        GraphExploreResponse exploreResponse = highLevelClient().graph().explore(graphExploreRequest, RequestOptions.DEFAULT);
        Map<String, Integer> actualTermsAndDepths = new HashMap<>();
        Collection<Vertex> v = exploreResponse.getVertices();
        for (Vertex vertex : v) {
            actualTermsAndDepths.put(vertex.getTerm(), vertex.getHopDepth());
        }
        assertEquals(expectedTermsAndDepths, actualTermsAndDepths);
        assertThat(exploreResponse.isTimedOut(), Matchers.is(false));
        ShardOperationFailedException[] failures = exploreResponse.getShardFailures();
        assertThat(failures.length, Matchers.equalTo(1));
        assertTrue(failures[0].reason().contains("Fielddata is disabled"));

    }


}

@@ -118,6 +118,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.RandomCreateIndexGenerator;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.rankeval.PrecisionAtK;

@@ -128,6 +129,8 @@ import org.elasticsearch.index.rankeval.RestRankEvalAction;
import org.elasticsearch.protocol.xpack.XPackInfoRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
import org.elasticsearch.protocol.xpack.graph.Hop;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
import org.elasticsearch.repositories.fs.FsRepository;
import org.elasticsearch.rest.action.search.RestSearchAction;

@@ -2598,6 +2601,35 @@ public class RequestConvertersTests extends ESTestCase {
        request.getEntity().writeTo(bos);
        assertThat(bos.toString("UTF-8"), is(body));
    }

    public void testGraphExplore() throws Exception {
        Map<String, String> expectedParams = new HashMap<>();

        GraphExploreRequest graphExploreRequest = new GraphExploreRequest();
        graphExploreRequest.sampleDiversityField("diversity");
        graphExploreRequest.indices("index1", "index2");
        graphExploreRequest.types("type1", "type2");
        int timeout = randomIntBetween(10000, 20000);
        graphExploreRequest.timeout(TimeValue.timeValueMillis(timeout));
        graphExploreRequest.useSignificance(randomBoolean());
        int numHops = randomIntBetween(1, 5);
        for (int i = 0; i < numHops; i++) {
            int hopNumber = i + 1;
            QueryBuilder guidingQuery = null;
            if (randomBoolean()) {
                guidingQuery = new TermQueryBuilder("field" + hopNumber, "value" + hopNumber);
            }
            Hop hop = graphExploreRequest.createNextHop(guidingQuery);
            hop.addVertexRequest("field" + hopNumber);
            hop.getVertexRequest(0).addInclude("value" + hopNumber, hopNumber);
        }
        Request request = RequestConverters.xPackGraphExplore(graphExploreRequest);
        assertEquals(HttpGet.METHOD_NAME, request.getMethod());
        assertEquals("/index1,index2/type1,type2/_xpack/graph/_explore", request.getEndpoint());
        assertEquals(expectedParams, request.getParameters());
        assertThat(request.getEntity().getContentType().getValue(), is(XContentType.JSON.mediaTypeWithoutParameters()));
        assertToXContentBody(graphExploreRequest, request.getEntity());
    }
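
For comparison, an editor's sketch of the equivalent call issued directly through the low-level REST client, using the endpoint shape asserted in the test above (`restClient` and the JSON body are illustrative assumptions; the real body is whatever GraphExploreRequest serializes to via toXContent):

    Request rawRequest = new Request(HttpGet.METHOD_NAME, "/index1,index2/type1,type2/_xpack/graph/_explore");
    rawRequest.setJsonEntity("{\"controls\":{\"use_significance\":true}}"); // illustrative body only
    Response rawResponse = restClient.performRequest(rawRequest);           // restClient is an assumed low-level RestClient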

    public void testXPackDeleteWatch() {
        DeleteWatchRequest deleteWatchRequest = new DeleteWatchRequest();

@@ -758,6 +758,7 @@ public class RestHighLevelClientTests extends ESTestCase {
                apiName.startsWith("license.") == false &&
                apiName.startsWith("machine_learning.") == false &&
                apiName.startsWith("watcher.") == false &&
                apiName.startsWith("graph.") == false &&
                apiName.startsWith("migration.") == false) {
                apiNotFound.add(apiName);
            }
@@ -0,0 +1,125 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.documentation;

import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.protocol.xpack.graph.Connection;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse;
import org.elasticsearch.protocol.xpack.graph.Hop;
import org.elasticsearch.protocol.xpack.graph.Vertex;
import org.elasticsearch.protocol.xpack.graph.VertexRequest;
import org.junit.Before;

import java.io.IOException;
import java.util.Collection;

public class GraphDocumentationIT extends ESRestHighLevelClientTestCase {


    @Before
    public void indexDocuments() throws IOException {
        // Create chain of doc IDs across indices 1->2->3
        Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/type/1");
        doc1.setJsonEntity("{ \"participants\":[1,2], \"text\":\"let's start projectx\", \"attachment_md5\":\"324FHDGHFDG4564\"}");
        client().performRequest(doc1);

        Request doc2 = new Request(HttpPut.METHOD_NAME, "/index2/type/2");
        doc2.setJsonEntity("{\"participants\":[2,3,4], \"text\":\"got something you both may be interested in\"}");
        client().performRequest(doc2);

        client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh"));
    }

    @SuppressForbidden(reason = "system out is ok for a documentation example")
    public void testExplore() throws Exception {
        RestHighLevelClient client = highLevelClient();


        // tag::x-pack-graph-explore-request
        GraphExploreRequest request = new GraphExploreRequest();
        request.indices("index1", "index2");
        request.useSignificance(false);
        TermQueryBuilder startingQuery = new TermQueryBuilder("text", "projectx");

        Hop hop1 = request.createNextHop(startingQuery); // <1>
        VertexRequest people = hop1.addVertexRequest("participants"); // <2>
        people.minDocCount(1);
        VertexRequest files = hop1.addVertexRequest("attachment_md5");
        files.minDocCount(1);

        Hop hop2 = request.createNextHop(null); // <3>
        VertexRequest vr2 = hop2.addVertexRequest("participants");
        vr2.minDocCount(5);

        GraphExploreResponse exploreResponse = client.graph().explore(request, RequestOptions.DEFAULT); // <4>
        // end::x-pack-graph-explore-request


        // tag::x-pack-graph-explore-response
        Collection<Vertex> v = exploreResponse.getVertices();
        Collection<Connection> c = exploreResponse.getConnections();
        for (Vertex vertex : v) {
            System.out.println(vertex.getField() + ":" + vertex.getTerm() + // <1>
                " discovered at hop depth " + vertex.getHopDepth());
        }
        for (Connection link : c) {
            System.out.println(link.getFrom() + " -> " + link.getTo() // <2>
                + " evidenced by " + link.getDocCount() + " docs");
        }
        // end::x-pack-graph-explore-response


        Collection<Vertex> initialVertices = exploreResponse.getVertices();

        // tag::x-pack-graph-explore-expand
        GraphExploreRequest expandRequest = new GraphExploreRequest();
        expandRequest.indices("index1", "index2");


        Hop expandHop1 = expandRequest.createNextHop(null); // <1>
        VertexRequest fromPeople = expandHop1.addVertexRequest("participants"); // <2>
        for (Vertex vertex : initialVertices) {
            if (vertex.getField().equals("participants")) {
                fromPeople.addInclude(vertex.getTerm(), 1f);
            }
        }

        Hop expandHop2 = expandRequest.createNextHop(null);
        VertexRequest newPeople = expandHop2.addVertexRequest("participants"); // <3>
        for (Vertex vertex : initialVertices) {
            if (vertex.getField().equals("participants")) {
                newPeople.addExclude(vertex.getTerm());
            }
        }

        GraphExploreResponse expandResponse = client.graph().explore(expandRequest, RequestOptions.DEFAULT);
        // end::x-pack-graph-explore-expand

    }

}

docs/java-rest/high-level/graph/explore.asciidoc (new file)
@@ -0,0 +1,53 @@
[[java-rest-high-x-pack-graph-explore]]
=== X-Pack Graph explore API

[[java-rest-high-x-pack-graph-explore-execution]]
==== Initial request

Graph queries are executed using the `explore()` method:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/GraphDocumentationIT.java[x-pack-graph-explore-request]
--------------------------------------------------
<1> In this example we seed the exploration with a query to find messages mentioning the mysterious `projectx`.
<2> What we want to discover in these messages are the ids of `participants` in the communications and the md5 hashes
of any attached files. In each case, we want to find people or files that have had at least one document connecting them
to projectx.
<3> The next "hop" in the graph exploration is to find the people who have shared several messages with the people or files
discovered in the previous hop (the projectx conspirators). The `minDocCount` control is used here to ensure the people
discovered have had at least 5 communications with projectx entities. Note we could also supply a "guiding query" here, e.g. a
date range to consider only recent communications, but we pass null to consider all connections.
<4> Finally we call the graph explore API with the GraphExploreRequest object.


==== Response

Graph responses consist of Vertex and Connection objects (aka "nodes" and "edges" respectively):

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/GraphDocumentationIT.java[x-pack-graph-explore-response]
--------------------------------------------------
<1> Each Vertex is a unique term (a combination of fieldname and term value). The "hopDepth" property tells us at which point in the
requested exploration this term was first discovered.
<2> Each Connection is a pair of Vertex objects and includes a docCount property telling us how many times these two
Vertex terms have been sighted together.


[[java-rest-high-x-pack-graph-expand-execution]]
==== Expanding a client-side Graph

Typically, once an application has rendered an initial GraphExploreResponse as a collection of vertices and connecting lines (graph visualization toolkits such as D3, sigma.js or Keylines help here), the next step a user may want to take is to "expand". This involves finding new vertices that might be connected to the existing ones currently shown.

To do this we use the same `explore` method, but our request contains details about which vertices to expand from and which vertices to avoid re-discovering.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/GraphDocumentationIT.java[x-pack-graph-explore-expand]
--------------------------------------------------
<1> Unlike the initial request we do not need to pass a starting query.
<2> In the first hop, which represents our "from" vertices, we explicitly list the terms that we already have on-screen and want to expand by using the `addInclude` filter.
We can supply a boost for those terms that are considered more important to follow than others, but here we select a common value of 1 for all.
<3> When defining the second hop, which represents the "to" vertices we hope to discover, we explicitly list the terms that we already know about using the `addExclude` filter.
@@ -231,3 +231,11 @@ The Java High Level REST Client supports the following Watcher APIs:

include::watcher/put-watch.asciidoc[]
include::watcher/delete-watch.asciidoc[]

== Graph APIs

The Java High Level REST Client supports the following Graph APIs:

* <<java-rest-high-x-pack-graph-explore>>

include::graph/explore.asciidoc[]
@@ -1,141 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.core.graph.action;

import com.carrotsearch.hppc.ObjectIntHashMap;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.ToXContent.Params;
import org.elasticsearch.xpack.core.graph.action.Vertex.VertexId;

import java.io.IOException;
import java.util.Map;

/**
 * A Connection links exactly two {@link Vertex} objects. The basis of a
 * connection is one or more documents have been found that contain
 * this pair of terms and the strength of the connection is recorded
 * as a weight.
 */
public class Connection {
    Vertex from;
    Vertex to;
    double weight;
    long docCount;

    public Connection(Vertex from, Vertex to, double weight, long docCount) {
        this.from = from;
        this.to = to;
        this.weight = weight;
        this.docCount = docCount;
    }

    void readFrom(StreamInput in, Map<VertexId, Vertex> vertices) throws IOException {
        from = vertices.get(new VertexId(in.readString(), in.readString()));
        to = vertices.get(new VertexId(in.readString(), in.readString()));
        weight = in.readDouble();
        docCount = in.readVLong();
    }

    Connection() {
    }

    void writeTo(StreamOutput out) throws IOException {
        out.writeString(from.getField());
        out.writeString(from.getTerm());
        out.writeString(to.getField());
        out.writeString(to.getTerm());
        out.writeDouble(weight);
        out.writeVLong(docCount);
    }

    public ConnectionId getId() {
        return new ConnectionId(from.getId(), to.getId());
    }

    public Vertex getFrom() {
        return from;
    }

    public Vertex getTo() {
        return to;
    }

    /**
     * @return a measure of the relative connectedness between a pair of {@link Vertex} objects
     */
    public double getWeight() {
        return weight;
    }

    /**
     * @return the number of documents in the sampled set that contained this
     * pair of {@link Vertex} objects.
     */
    public long getDocCount() {
        return docCount;
    }

    void toXContent(XContentBuilder builder, Params params, ObjectIntHashMap<Vertex> vertexNumbers) throws IOException {
        builder.field("source", vertexNumbers.get(from));
        builder.field("target", vertexNumbers.get(to));
        builder.field("weight", weight);
        builder.field("doc_count", docCount);
    }

    /**
     * An identifier (implements hashcode and equals) that represents a
     * unique key for a {@link Connection}
     */
    public static class ConnectionId {
        private final VertexId source;
        private final VertexId target;

        public ConnectionId(VertexId source, VertexId target) {
            this.source = source;
            this.target = target;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o)
                return true;
            if (o == null || getClass() != o.getClass())
                return false;

            ConnectionId vertexId = (ConnectionId) o;

            if (source != null ? !source.equals(vertexId.source) : vertexId.source != null)
                return false;
            if (target != null ? !target.equals(vertexId.target) : vertexId.target != null)
                return false;

            return true;
        }

        @Override
        public int hashCode() {
            int result = source != null ? source.hashCode() : 0;
            result = 31 * result + (target != null ? target.hashCode() : 0);
            return result;
        }

        public VertexId getSource() {
            return source;
        }

        public VertexId getTarget() {
            return target;
        }

        @Override
        public String toString() {
            return getSource() + "->" + getTarget();
        }
    }
}
@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.core.graph.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse;

public class GraphExploreAction extends Action<GraphExploreResponse> {


@@ -11,6 +11,9 @@ import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse;
import org.elasticsearch.protocol.xpack.graph.Hop;
import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;

@@ -24,6 +24,15 @@ import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.license.LicenseUtils;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.protocol.xpack.graph.Connection;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse;
import org.elasticsearch.protocol.xpack.graph.Hop;
import org.elasticsearch.protocol.xpack.graph.Vertex;
import org.elasticsearch.protocol.xpack.graph.VertexRequest;
import org.elasticsearch.protocol.xpack.graph.Connection.ConnectionId;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest.TermBoost;
import org.elasticsearch.protocol.xpack.graph.Vertex.VertexId;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder;

@@ -39,16 +48,7 @@ import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.xpack.core.graph.action.Connection;
import org.elasticsearch.xpack.core.graph.action.Connection.ConnectionId;
import org.elasticsearch.xpack.core.graph.action.GraphExploreAction;
import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest;
import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest.TermBoost;
import org.elasticsearch.xpack.core.graph.action.GraphExploreResponse;
import org.elasticsearch.xpack.core.graph.action.Hop;
import org.elasticsearch.xpack.core.graph.action.Vertex;
import org.elasticsearch.xpack.core.graph.action.Vertex.VertexId;
import org.elasticsearch.xpack.core.graph.action.VertexRequest;

import java.util.ArrayList;
import java.util.HashMap;

@@ -12,14 +12,14 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
import org.elasticsearch.protocol.xpack.graph.Hop;
import org.elasticsearch.protocol.xpack.graph.VertexRequest;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest.TermBoost;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.core.XPackClient;
import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest;
import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest.TermBoost;
import org.elasticsearch.xpack.core.graph.action.Hop;
import org.elasticsearch.xpack.core.graph.action.VertexRequest;
import org.elasticsearch.xpack.core.rest.XPackRestHandler;

import java.io.IOException;

@@ -17,6 +17,11 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.ScriptQueryBuilder;
import org.elasticsearch.license.LicenseService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse;
import org.elasticsearch.protocol.xpack.graph.Hop;
import org.elasticsearch.protocol.xpack.graph.Vertex;
import org.elasticsearch.protocol.xpack.graph.VertexRequest;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;

@@ -24,12 +29,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.xpack.core.XPackPlugin;
import org.elasticsearch.xpack.graph.Graph;
import org.elasticsearch.xpack.core.graph.action.GraphExploreAction;
import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest;
import org.elasticsearch.xpack.core.graph.action.GraphExploreRequestBuilder;
import org.elasticsearch.xpack.core.graph.action.GraphExploreResponse;
import org.elasticsearch.xpack.core.graph.action.Hop;
import org.elasticsearch.xpack.core.graph.action.Vertex;
import org.elasticsearch.xpack.core.graph.action.VertexRequest;

import java.util.Collection;
import java.util.Collections;

@@ -25,9 +25,9 @@ import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
import org.elasticsearch.transport.RemoteClusterAware;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest;
import org.elasticsearch.xpack.core.security.authz.IndicesAndAliasesResolverField;

import java.util.ArrayList;

@@ -46,12 +46,12 @@ import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
import org.elasticsearch.search.internal.ShardSearchTransportRequest;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.xpack.core.graph.action.GraphExploreAction;
import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest;
import org.elasticsearch.xpack.core.security.authc.DefaultAuthenticationFailureHandler;
import org.elasticsearch.xpack.core.security.authz.IndicesAndAliasesResolverField;
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;

@@ -0,0 +1,229 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.protocol.xpack.graph;

import com.carrotsearch.hppc.ObjectIntHashMap;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent.Params;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.protocol.xpack.graph.Vertex.VertexId;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

/**
 * A Connection links exactly two {@link Vertex} objects. The basis of a
 * connection is one or more documents have been found that contain
 * this pair of terms and the strength of the connection is recorded
 * as a weight.
 */
public class Connection {
    private Vertex from;
    private Vertex to;
    private double weight;
    private long docCount;

    public Connection(Vertex from, Vertex to, double weight, long docCount) {
        this.from = from;
        this.to = to;
        this.weight = weight;
        this.docCount = docCount;
    }

    public Connection(StreamInput in, Map<VertexId, Vertex> vertices) throws IOException {
        from = vertices.get(new VertexId(in.readString(), in.readString()));
        to = vertices.get(new VertexId(in.readString(), in.readString()));
        weight = in.readDouble();
        docCount = in.readVLong();
    }

    Connection() {
    }

    void writeTo(StreamOutput out) throws IOException {
        out.writeString(from.getField());
        out.writeString(from.getTerm());
        out.writeString(to.getField());
        out.writeString(to.getTerm());
        out.writeDouble(weight);
        out.writeVLong(docCount);
    }

    public ConnectionId getId() {
        return new ConnectionId(from.getId(), to.getId());
    }

    public Vertex getFrom() {
        return from;
    }

    public Vertex getTo() {
        return to;
    }

    /**
     * @return a measure of the relative connectedness between a pair of {@link Vertex} objects
     */
    public double getWeight() {
        return weight;
    }

    /**
     * @return the number of documents in the sampled set that contained this
     * pair of {@link Vertex} objects.
     */
    public long getDocCount() {
        return docCount;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        Connection other = (Connection) obj;
        return docCount == other.docCount &&
            weight == other.weight &&
            Objects.equals(to, other.to) &&
            Objects.equals(from, other.from);
    }

    @Override
    public int hashCode() {
        return Objects.hash(docCount, weight, from, to);
    }


    private static final ParseField SOURCE = new ParseField("source");
    private static final ParseField TARGET = new ParseField("target");
    private static final ParseField WEIGHT = new ParseField("weight");
    private static final ParseField DOC_COUNT = new ParseField("doc_count");


    void toXContent(XContentBuilder builder, Params params, ObjectIntHashMap<Vertex> vertexNumbers) throws IOException {
        builder.field(SOURCE.getPreferredName(), vertexNumbers.get(from));
        builder.field(TARGET.getPreferredName(), vertexNumbers.get(to));
        builder.field(WEIGHT.getPreferredName(), weight);
        builder.field(DOC_COUNT.getPreferredName(), docCount);
    }

    // When deserializing from XContent we need to wait for all vertices to be loaded before
    // Connection objects can be created that reference them. This class provides the interim
    // state for connections.
    static class UnresolvedConnection {
        int fromIndex;
        int toIndex;
        double weight;
        long docCount;
        UnresolvedConnection(int fromIndex, int toIndex, double weight, long docCount) {
            super();
            this.fromIndex = fromIndex;
            this.toIndex = toIndex;
            this.weight = weight;
            this.docCount = docCount;
        }
        public Connection resolve(List<Vertex> vertices) {
            return new Connection(vertices.get(fromIndex), vertices.get(toIndex), weight, docCount);
        }

        private static final ConstructingObjectParser<UnresolvedConnection, Void> PARSER = new ConstructingObjectParser<>(
            "ConnectionParser", true,
            args -> {
                int source = (Integer) args[0];
                int target = (Integer) args[1];
                double weight = (Double) args[2];
                long docCount = (Long) args[3];
                return new UnresolvedConnection(source, target, weight, docCount);
            });

        static {
            PARSER.declareInt(constructorArg(), SOURCE);
            PARSER.declareInt(constructorArg(), TARGET);
            PARSER.declareDouble(constructorArg(), WEIGHT);
            PARSER.declareLong(constructorArg(), DOC_COUNT);
        }
        static UnresolvedConnection fromXContent(XContentParser parser) throws IOException {
            return PARSER.apply(parser, null);
        }
    }


    /**
     * An identifier (implements hashcode and equals) that represents a
     * unique key for a {@link Connection}
     */
    public static class ConnectionId {
        private final VertexId source;
        private final VertexId target;

        public ConnectionId(VertexId source, VertexId target) {
            this.source = source;
            this.target = target;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o)
                return true;
            if (o == null || getClass() != o.getClass())
                return false;

            ConnectionId vertexId = (ConnectionId) o;

            if (source != null ? !source.equals(vertexId.source) : vertexId.source != null)
                return false;
            if (target != null ? !target.equals(vertexId.target) : vertexId.target != null)
                return false;

            return true;
        }

        @Override
        public int hashCode() {
            int result = source != null ? source.hashCode() : 0;
            result = 31 * result + (target != null ? target.hashCode() : 0);
            return result;
        }

        public VertexId getSource() {
            return source;
        }

        public VertexId getTarget() {
            return target;
        }

        @Override
        public String toString() {
            return getSource() + "->" + getTarget();
        }
    }
}
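
To make the two-phase parsing described in the UnresolvedConnection comment concrete, a rough sketch of how it is intended to be driven from the response parser (the XContentParser position and the already-parsed vertex list are assumptions for illustration; both helpers are package-private, so this only works from within the same package, as GraphExploreResponse does):

    // Sketch: 'parser' is positioned on one connection object and 'parsedVertices' holds the
    // Vertex objects already read from the response, in document order.
    Connection.UnresolvedConnection pending = Connection.UnresolvedConnection.fromXContent(parser);
    Connection connection = pending.resolve(parsedVertices); // source/target indices are looked up in the vertex list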

@@ -1,9 +1,22 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.xpack.core.graph.action;
package org.elasticsearch.protocol.xpack.graph;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;

@@ -14,6 +27,8 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;

@@ -29,7 +44,7 @@ import java.util.List;
 * Holds the criteria required to guide the exploration of connected terms which
 * can be returned as a graph.
 */
public class GraphExploreRequest extends ActionRequest implements IndicesRequest.Replaceable {
public class GraphExploreRequest extends ActionRequest implements IndicesRequest.Replaceable, ToXContentObject {

    public static final String NO_HOPS_ERROR_MESSAGE = "Graph explore request must have at least one hop";
    public static final String NO_VERTICES_ERROR_MESSAGE = "Graph explore hop must have at least one VertexRequest";

@@ -51,8 +66,8 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest
    }

    /**
     * Constructs a new graph request to run against the provided
     * indices. No indices means it will run against all indices.
     * Constructs a new graph request to run against the provided indices. No
     * indices means it will run against all indices.
     */
    public GraphExploreRequest(String... indices) {
        this.indices = indices;

@@ -75,7 +90,6 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest
        return this.indices;
    }


    @Override
    public GraphExploreRequest indices(String... indices) {
        this.indices = indices;

@@ -123,10 +137,14 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest
    }

    /**
     * Graph exploration can be set to timeout after the given period. Search operations involved in
     * each hop are limited to the remaining time available but can still overrun due to the nature
     * of their "best efforts" timeout support. When a timeout occurs partial results are returned.
     * @param timeout a {@link TimeValue} object which determines the maximum length of time to spend exploring
     * Graph exploration can be set to timeout after the given period. Search
     * operations involved in each hop are limited to the remaining time
     * available but can still overrun due to the nature of their "best efforts"
     * timeout support. When a timeout occurs partial results are returned.
     *
     * @param timeout
     *            a {@link TimeValue} object which determines the maximum length
     *            of time to spend exploring
     */
    public GraphExploreRequest timeout(TimeValue timeout) {
        if (timeout == null) {

@@ -153,10 +171,10 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest
        sampleSize = in.readInt();
        sampleDiversityField = in.readOptionalString();
        maxDocsPerDiversityValue = in.readInt();


        useSignificance = in.readBoolean();
        returnDetailedInfo = in.readBoolean();


        int numHops = in.readInt();
        Hop parentHop = null;
        for (int i = 0; i < numHops; i++) {

@@ -180,7 +198,7 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest
        out.writeInt(sampleSize);
        out.writeOptionalString(sampleDiversityField);
        out.writeInt(maxDocsPerDiversityValue);


        out.writeBoolean(useSignificance);
        out.writeBoolean(returnDetailedInfo);
        out.writeInt(hops.size());

@@ -196,18 +214,21 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest
    }

    /**
     * The number of top-matching documents that are considered during each hop (default is
     * {@link SamplerAggregationBuilder#DEFAULT_SHARD_SAMPLE_SIZE}
     * Very small values (less than 50) may not provide sufficient weight-of-evidence to identify
     * significant connections between terms.
     * <p> Very large values (many thousands) are not recommended with loosely defined queries (fuzzy queries or those
     * with many OR clauses).
     * This is because any useful signals in the best documents are diluted with irrelevant noise from low-quality matches.
     * Performance is also typically better with smaller samples as there are less look-ups required for background frequencies
     * of terms found in the documents
     * The number of top-matching documents that are considered during each hop
     * (default is {@link SamplerAggregationBuilder#DEFAULT_SHARD_SAMPLE_SIZE}
     * Very small values (less than 50) may not provide sufficient
     * weight-of-evidence to identify significant connections between terms.
     * <p>
     * Very large values (many thousands) are not recommended with loosely
     * defined queries (fuzzy queries or those with many OR clauses). This is
     * because any useful signals in the best documents are diluted with
     * irrelevant noise from low-quality matches. Performance is also typically
     * better with smaller samples as there are less look-ups required for
     * background frequencies of terms found in the documents
     * </p>
     *
     * @param maxNumberOfDocsPerHop shard-level sample size in documents
     * @param maxNumberOfDocsPerHop
     *            shard-level sample size in documents
     */
    public void sampleSize(int maxNumberOfDocsPerHop) {
        sampleSize = maxNumberOfDocsPerHop;

@@ -242,10 +263,13 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest
    }

    /**
     * Controls the choice of algorithm used to select interesting terms. The default
     * value is true which means terms are selected based on significance (see the {@link SignificantTerms}
     * aggregation) rather than popularity (using the {@link TermsAggregator}).
     * @param value true if the significant_terms algorithm should be used.
     * Controls the choice of algorithm used to select interesting terms. The
     * default value is true which means terms are selected based on
     * significance (see the {@link SignificantTerms} aggregation) rather than
     * popularity (using the {@link TermsAggregator}).
     *
     * @param value
     *            true if the significant_terms algorithm should be used.
     */
    public void useSignificance(boolean value) {
        this.useSignificance = value;

@@ -254,32 +278,37 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest
    public boolean useSignificance() {
        return useSignificance;
    }


    /**
     * Return detailed information about vertex frequencies as part of JSON results - defaults to false
     * @param value true if detailed information is required in JSON responses
     * Return detailed information about vertex frequencies as part of JSON
     * results - defaults to false
     *
     * @param value
     *            true if detailed information is required in JSON responses
     */
    public void returnDetailedInfo(boolean value) {
        this.returnDetailedInfo = value;
    }
    }

    public boolean returnDetailedInfo() {
        return returnDetailedInfo;
    }


    /**
     * Add a stage in the graph exploration. Each hop represents a stage of
     * querying elasticsearch to identify terms which can then be connnected
     * to other terms in a subsequent hop.
     * @param guidingQuery optional choice of query which influences which documents
     *            are considered in this stage
     * @return a {@link Hop} object that holds settings for a stage in the graph exploration
     * Add a stage in the graph exploration. Each hop represents a stage of
     * querying elasticsearch to identify terms which can then be connnected to
     * other terms in a subsequent hop.
     *
     * @param guidingQuery
     *            optional choice of query which influences which documents are
     *            considered in this stage
     * @return a {@link Hop} object that holds settings for a stage in the graph
     *         exploration
     */
    public Hop createNextHop(QueryBuilder guidingQuery) {
        Hop parent = null;
        if (hops.size() > 0) {
            parent = hops.get(hops.size() - 1);
            parent = hops.get(hops.size() - 1);
        }
        Hop newHop = new Hop(parent);
        newHop.guidingQuery = guidingQuery;

@@ -330,6 +359,43 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest
        }

    }


    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();

        builder.startObject("controls");
        {
            if (sampleSize != SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE) {
                builder.field("sample_size", sampleSize);
            }
            if (sampleDiversityField != null) {
                builder.startObject("sample_diversity");
                builder.field("field", sampleDiversityField);
                builder.field("max_docs_per_value", maxDocsPerDiversityValue);
                builder.endObject();
            }
            builder.field("use_significance", useSignificance);
            if (returnDetailedInfo) {
                builder.field("return_detailed_stats", returnDetailedInfo);
            }
        }
        builder.endObject();

        for (Hop hop : hops) {
            if (hop.parentHop != null) {
                builder.startObject("connections");
            }
            hop.toXContent(builder, params);
        }
        for (Hop hop : hops) {
            if (hop.parentHop != null) {
                builder.endObject();
            }
        }
        builder.endObject();

        return builder;
    }

}
|
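A hedged sketch of rendering the request body produced by the toXContent method above, reusing the request built in the earlier sketch; the JSON shown in the comment is only indicative of the shape, with each hop after the first nested under a "connections" object next to the top-level "controls" section.

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

XContentBuilder body = XContentFactory.jsonBuilder();
request.toXContent(body, ToXContent.EMPTY_PARAMS);   // writes "controls", then the hop chain
String json = Strings.toString(body);                 // e.g. {"controls":{...},"vertices":[...],"connections":{...}}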
@ -1,28 +1,49 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.xpack.core.graph.action;
package org.elasticsearch.protocol.xpack.graph;

import com.carrotsearch.hppc.ObjectIntHashMap;

import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.graph.action.Connection.ConnectionId;
import org.elasticsearch.xpack.core.graph.action.Vertex.VertexId;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.protocol.xpack.graph.Connection.ConnectionId;
import org.elasticsearch.protocol.xpack.graph.Connection.UnresolvedConnection;
import org.elasticsearch.protocol.xpack.graph.Vertex.VertexId;

import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

/**
 * Graph explore response holds a graph of {@link Vertex} and {@link Connection} objects
@ -100,8 +121,7 @@ public class GraphExploreResponse extends ActionResponse implements ToXContentOb

        connections = new HashMap<>();
        for (int i = 0; i < size; i++) {
            Connection e = new Connection();
            e.readFrom(in, vertices);
            Connection e = new Connection(in, vertices);
            connections.put(e.getId(), e);
        }

@ -146,23 +166,19 @@ public class GraphExploreResponse extends ActionResponse implements ToXContentOb

    }

    static final class Fields {
        static final String TOOK = "took";
        static final String TIMED_OUT = "timed_out";
        static final String INDICES = "_indices";
        static final String FAILURES = "failures";
        static final String VERTICES = "vertices";
        static final String CONNECTIONS = "connections";

    }
    private static final ParseField TOOK = new ParseField("took");
    private static final ParseField TIMED_OUT = new ParseField("timed_out");
    private static final ParseField VERTICES = new ParseField("vertices");
    private static final ParseField CONNECTIONS = new ParseField("connections");
    private static final ParseField FAILURES = new ParseField("failures");

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Fields.TOOK, tookInMillis);
        builder.field(Fields.TIMED_OUT, timedOut);
        builder.field(TOOK.getPreferredName(), tookInMillis);
        builder.field(TIMED_OUT.getPreferredName(), timedOut);

        builder.startArray(Fields.FAILURES);
        builder.startArray(FAILURES.getPreferredName());
        if (shardFailures != null) {
            for (ShardOperationFailedException shardFailure : shardFailures) {
                builder.startObject();
@ -178,7 +194,7 @@ public class GraphExploreResponse extends ActionResponse implements ToXContentOb
        extraParams.put(RETURN_DETAILED_INFO_PARAM, Boolean.toString(returnDetailedInfo));
        Params extendedParams = new DelegatingMapParams(extraParams, params);

        builder.startArray(Fields.VERTICES);
        builder.startArray(VERTICES.getPreferredName());
        for (Vertex vertex : vertices.values()) {
            builder.startObject();
            vertexNumbers.put(vertex, vertexNumbers.size());
@ -187,7 +203,7 @@ public class GraphExploreResponse extends ActionResponse implements ToXContentOb
        }
        builder.endArray();

        builder.startArray(Fields.CONNECTIONS);
        builder.startArray(CONNECTIONS.getPreferredName());
        for (Connection connection : connections.values()) {
            builder.startObject();
            connection.toXContent(builder, extendedParams, vertexNumbers);
@ -198,5 +214,48 @@ public class GraphExploreResponse extends ActionResponse implements ToXContentOb
        return builder;
    }

    private static final ConstructingObjectParser<GraphExploreResponse, Void> PARSER = new ConstructingObjectParser<>(
            "GraphExploreResponsenParser", true,
            args -> {
                GraphExploreResponse result = new GraphExploreResponse();
                result.vertices = new HashMap<>();
                result.connections = new HashMap<>();

                result.tookInMillis = (Long) args[0];
                result.timedOut = (Boolean) args[1];

                @SuppressWarnings("unchecked")
                List<Vertex> vertices = (List<Vertex>) args[2];
                @SuppressWarnings("unchecked")
                List<UnresolvedConnection> unresolvedConnections = (List<UnresolvedConnection>) args[3];
                @SuppressWarnings("unchecked")
                List<ShardSearchFailure> failures = (List<ShardSearchFailure>) args[4];
                for (Vertex vertex : vertices) {
                    // reverse-engineer if detailed stats were requested -
                    // mainly here for testing framework's equality tests
                    result.returnDetailedInfo = result.returnDetailedInfo || vertex.getFg() > 0;
                    result.vertices.put(vertex.getId(), vertex);
                }
                for (UnresolvedConnection unresolvedConnection : unresolvedConnections) {
                    Connection resolvedConnection = unresolvedConnection.resolve(vertices);
                    result.connections.put(resolvedConnection.getId(), resolvedConnection);
                }
                if (failures.size() > 0) {
                    result.shardFailures = failures.toArray(new ShardSearchFailure[failures.size()]);
                }
                return result;
            });

    static {
        PARSER.declareLong(constructorArg(), TOOK);
        PARSER.declareBoolean(constructorArg(), TIMED_OUT);
        PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> Vertex.fromXContent(p), VERTICES);
        PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> UnresolvedConnection.fromXContent(p), CONNECTIONS);
        PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ShardSearchFailure.fromXContent(p), FAILURES);
    }

    public static GraphExploreResponse fromXContext(XContentParser parser) throws IOException {
        return PARSER.apply(parser, null);
    }

}
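As a rough, hedged sketch of exercising the new parser outside the transport layer; the JSON literal and the empty registry are illustrative choices, not part of this change.

import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

String json = "{\"took\":5,\"timed_out\":false,\"vertices\":[],\"connections\":[],\"failures\":[]}";
try (XContentParser parser = JsonXContent.jsonXContent.createParser(
        NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
    GraphExploreResponse response = GraphExploreResponse.fromXContext(parser);
    // took and timed_out come straight from the parsed fields; connections are
    // resolved against the vertex list via UnresolvedConnection.resolve(...).
}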
@ -1,14 +1,29 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.xpack.core.graph.action;
package org.elasticsearch.protocol.xpack.graph;

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

@ -41,7 +56,7 @@ import java.util.List;
 * </p>
 *
 */
public class Hop {
public class Hop implements ToXContentFragment{
    final Hop parentHop;
    List<VertexRequest> vertices = null;
    QueryBuilder guidingQuery = null;
@ -139,4 +154,20 @@ public class Hop {
    public VertexRequest getVertexRequest(int requestNumber) {
        return getEffectiveVertexRequests().get(requestNumber);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (guidingQuery != null) {
            builder.field("query");
            guidingQuery.toXContent(builder, params);
        }
        if(vertices != null && vertices.size()>0) {
            builder.startArray("vertices");
            for (VertexRequest vertexRequest : vertices) {
                vertexRequest.toXContent(builder, params);
            }
            builder.endArray();
        }
        return builder;
    }
}
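Because Hop renders itself as an XContent fragment rather than a complete object, the caller is expected to open and close the enclosing object; a small hedged sketch, reusing firstHop and the XContent helpers from the earlier sketches:

XContentBuilder hopBuilder = XContentFactory.jsonBuilder().startObject();
firstHop.toXContent(hopBuilder, ToXContent.EMPTY_PARAMS);   // emits "query" (if a guiding query was set) and "vertices"
hopBuilder.endObject();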
@ -1,16 +1,36 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.xpack.core.graph.action;
package org.elasticsearch.protocol.xpack.graph;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Objects;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

/**
 * A vertex in a graph response represents a single term (a field and value pair)
@ -27,6 +47,13 @@ public class Vertex implements ToXContentFragment {
    private final int depth;
    private final long bg;
    private long fg;
    private static final ParseField FIELD = new ParseField("field");
    private static final ParseField TERM = new ParseField("term");
    private static final ParseField WEIGHT = new ParseField("weight");
    private static final ParseField DEPTH = new ParseField("depth");
    private static final ParseField FG = new ParseField("fg");
    private static final ParseField BG = new ParseField("bg");


    public Vertex(String field, String term, double weight, int depth, long bg, long fg) {
        super();
@ -50,20 +77,72 @@ public class Vertex implements ToXContentFragment {
        out.writeVLong(bg);
        out.writeVLong(fg);
    }

    @Override
    public int hashCode() {
        return Objects.hash(field, term, weight, depth, bg, fg);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        Vertex other = (Vertex) obj;
        return depth == other.depth &&
               weight == other.weight &&
               bg == other.bg &&
               fg == other.fg &&
               Objects.equals(field, other.field) &&
               Objects.equals(term, other.term);

    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        boolean returnDetailedInfo = params.paramAsBoolean(GraphExploreResponse.RETURN_DETAILED_INFO_PARAM, false);
        builder.field("field", field);
        builder.field("term", term);
        builder.field("weight", weight);
        builder.field("depth", depth);
        builder.field(FIELD.getPreferredName(), field);
        builder.field(TERM.getPreferredName(), term);
        builder.field(WEIGHT.getPreferredName(), weight);
        builder.field(DEPTH.getPreferredName(), depth);
        if (returnDetailedInfo) {
            builder.field("fg", fg);
            builder.field("bg", bg);
            builder.field(FG.getPreferredName(), fg);
            builder.field(BG.getPreferredName(), bg);
        }
        return builder;
    }


    private static final ConstructingObjectParser<Vertex, Void> PARSER = new ConstructingObjectParser<>(
            "VertexParser", true,
            args -> {
                String field = (String) args[0];
                String term = (String) args[1];
                double weight = (Double) args[2];
                int depth = (Integer) args[3];
                Long optionalBg = (Long) args[4];
                Long optionalFg = (Long) args[5];
                long bg = optionalBg == null ? 0 : optionalBg;
                long fg = optionalFg == null ? 0 : optionalFg;
                return new Vertex(field, term, weight, depth, bg, fg);
            });

    static {
        PARSER.declareString(constructorArg(), FIELD);
        PARSER.declareString(constructorArg(), TERM);
        PARSER.declareDouble(constructorArg(), WEIGHT);
        PARSER.declareInt(constructorArg(), DEPTH);
        PARSER.declareLong(optionalConstructorArg(), BG);
        PARSER.declareLong(optionalConstructorArg(), FG);
    }

    static Vertex fromXContent(XContentParser parser) throws IOException {
        return PARSER.apply(parser, null);
    }


    /**
     * @return a {@link VertexId} object that uniquely identifies this Vertex
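A hedged sketch of consuming vertices from the response parsed in the earlier sketch; the accessor names are assumed to follow the existing Vertex getters, and fg/bg only carry values when detailed stats were requested.

for (Vertex vertex : response.getVertices()) {
    double weight = vertex.getWeight();   // significance or popularity score
    long fg = vertex.getFg();             // foreground frequency; 0 unless return_detailed_stats was set
    long bg = vertex.getBg();             // background frequency; 0 unless return_detailed_stats was set
}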
@ -1,13 +1,28 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.xpack.core.graph.action;
package org.elasticsearch.protocol.xpack.graph;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest.TermBoost;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest.TermBoost;

import java.io.IOException;
import java.util.HashMap;
@ -21,9 +36,10 @@ import java.util.Set;
 * inclusion list to filter which terms are considered.
 *
 */
public class VertexRequest {
public class VertexRequest implements ToXContentObject {
    private String fieldName;
    private int size = 5;
    private int size = DEFAULT_SIZE;
    public static final int DEFAULT_SIZE = 5;
    private Map<String, TermBoost> includes;
    private Set<String> excludes;
    public static final int DEFAULT_MIN_DOC_COUNT = 3;
@ -195,4 +211,38 @@ public class VertexRequest {
        return this;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field("field", fieldName);
        if (size != DEFAULT_SIZE) {
            builder.field("size", size);
        }
        if (minDocCount != DEFAULT_MIN_DOC_COUNT) {
            builder.field("min_doc_count", minDocCount);
        }
        if (shardMinDocCount != DEFAULT_SHARD_MIN_DOC_COUNT) {
            builder.field("shard_min_doc_count", shardMinDocCount);
        }
        if(includes!=null) {
            builder.startArray("include");
            for (TermBoost tb : includes.values()) {
                builder.startObject();
                builder.field("term", tb.term);
                builder.field("boost", tb.boost);
                builder.endObject();
            }
            builder.endArray();
        }
        if(excludes!=null) {
            builder.startArray("exclude");
            for (String value : excludes) {
                builder.value(value);
            }
            builder.endArray();
        }
        builder.endObject();
        return builder;
    }

}
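To tie the serialization above back to the request builders, a hedged sketch of configuring a vertex request; the setter names are assumed from the existing VertexRequest API, and the terms and boosts are invented for illustration.

VertexRequest people = firstHop.addVertexRequest("sender");   // from the earlier hop sketch
people.size(10);                                // emitted as "size" only when it differs from the default of 5
people.minDocCount(5);                          // emitted as "min_doc_count" when non-default
people.addInclude("alice@example.com", 2.0f);   // appears in the "include" array with its boost
people.addExclude("noreply@example.com");       // appears in the "exclude" array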
@ -0,0 +1,24 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/**
 * Request and Response objects for the default distribution's Graph
 * APIs.
 */
package org.elasticsearch.protocol.xpack.graph;
@ -0,0 +1,131 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.protocol.xpack.graph;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Predicate;
import java.util.function.Supplier;

import static org.hamcrest.Matchers.equalTo;

public class GraphExploreResponseTests extends AbstractXContentTestCase< GraphExploreResponse> {

    @Override
    protected GraphExploreResponse createTestInstance() {
        return createInstance(0);
    }
    private static GraphExploreResponse createInstance(int numFailures) {
        int numItems = randomIntBetween(4, 128);
        boolean timedOut = randomBoolean();
        boolean showDetails = randomBoolean();
        long overallTookInMillis = randomNonNegativeLong();
        Map<Vertex.VertexId, Vertex> vertices = new HashMap<>();
        Map<Connection.ConnectionId, Connection> connections = new HashMap<>();
        ShardOperationFailedException [] failures = new ShardOperationFailedException [numFailures];
        for (int i = 0; i < failures.length; i++) {
            failures[i] = new ShardSearchFailure(new ElasticsearchException("an error"));
        }

        //Create random set of vertices
        for (int i = 0; i < numItems; i++) {
            Vertex v = new Vertex("field1", randomAlphaOfLength(5), randomDouble(), 0,
                    showDetails?randomIntBetween(100, 200):0,
                    showDetails?randomIntBetween(1, 100):0);
            vertices.put(v.getId(), v);
        }

        //Wire up half the vertices randomly
        Vertex[] vs = vertices.values().toArray(new Vertex[vertices.size()]);
        for (int i = 0; i < numItems/2; i++) {
            Vertex v1 = vs[randomIntBetween(0, vs.length-1)];
            Vertex v2 = vs[randomIntBetween(0, vs.length-1)];
            if(v1 != v2) {
                Connection conn = new Connection(v1, v2, randomDouble(), randomLongBetween(1, 10));
                connections.put(conn.getId(), conn);
            }
        }
        return new GraphExploreResponse(overallTookInMillis, timedOut, failures, vertices, connections, showDetails);
    }


    private static GraphExploreResponse createTestInstanceWithFailures() {
        return createInstance(randomIntBetween(1, 128));
    }

    @Override
    protected GraphExploreResponse doParseInstance(XContentParser parser) throws IOException {
        return GraphExploreResponse.fromXContext(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    protected Predicate<String> getRandomFieldsExcludeFilterWhenResultHasErrors() {
        return field -> field.startsWith("responses");
    }

    @Override
    protected void assertEqualInstances( GraphExploreResponse expectedInstance, GraphExploreResponse newInstance) {
        assertThat(newInstance.getTook(), equalTo(expectedInstance.getTook()));
        assertThat(newInstance.isTimedOut(), equalTo(expectedInstance.isTimedOut()));

        Connection[] newConns = newInstance.getConnections().toArray(new Connection[0]);
        Connection[] expectedConns = expectedInstance.getConnections().toArray(new Connection[0]);
        assertArrayEquals(expectedConns, newConns);

        Vertex[] newVertices = newInstance.getVertices().toArray(new Vertex[0]);
        Vertex[] expectedVertices = expectedInstance.getVertices().toArray(new Vertex[0]);
        assertArrayEquals(expectedVertices, newVertices);

        ShardOperationFailedException[] newFailures = newInstance.getShardFailures();
        ShardOperationFailedException[] expectedFailures = expectedInstance.getShardFailures();
        assertEquals(expectedFailures.length, newFailures.length);

    }

    /**
     * Test parsing {@link GraphExploreResponse} with inner failures as they don't support asserting on xcontent equivalence, given
     * exceptions are not parsed back as the same original class. We run the usual {@link AbstractXContentTestCase#testFromXContent()}
     * without failures, and this other test with failures where we disable asserting on xcontent equivalence at the end.
     */
    public void testFromXContentWithFailures() throws IOException {
        Supplier< GraphExploreResponse> instanceSupplier = GraphExploreResponseTests::createTestInstanceWithFailures;
        //with random fields insertion in the inner exceptions, some random stuff may be parsed back as metadata,
        //but that does not bother our assertions, as we only want to test that we don't break.
        boolean supportsUnknownFields = true;
        //exceptions are not of the same type whenever parsed back
        boolean assertToXContentEquivalence = false;
        AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields, Strings.EMPTY_ARRAY,
                getRandomFieldsExcludeFilterWhenResultHasErrors(), this::createParser, this::doParseInstance,
                this::assertEqualInstances, assertToXContentEquivalence, ToXContent.EMPTY_PARAMS);
    }

}