Fix roles parsing in client nodes sniffer (#52888)
We made roles pluggable, but never updated the client to account for this. This means that when speaking to a modern cluster, application logs are spammed with warning messages about unrecognized roles. This commit addresses this by accounting for the fact that roles can now extend beyond master/data/ingest.
parent 19a6c5d980
commit d568345f1a
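As a quick illustration of the reworked API, here is a minimal sketch of how the Set-based Node.Roles behaves after this change (the RolesDemo harness is illustrative only; the constructor and accessors are the ones shown in the diff below). Roles are kept as a sorted set of arbitrary strings, so roles the client does not know about, such as ml, are simply carried along instead of producing warnings while sniffing.

import java.util.Arrays;
import java.util.TreeSet;

import org.elasticsearch.client.Node;

public class RolesDemo {
    public static void main(String[] args) {
        // Roles are now an arbitrary set of strings rather than three booleans,
        // so roles unknown to the client (e.g. "ml") are preserved as-is.
        Node.Roles roles = new Node.Roles(new TreeSet<>(Arrays.asList("master", "ingest", "ml")));

        System.out.println(roles.isMasterEligible()); // true  -> the set contains "master"
        System.out.println(roles.isData());           // false -> the set does not contain "data"
        System.out.println(roles.isIngest());         // true  -> the set contains "ingest"
        System.out.println(roles);                    // ingest,master,ml (sorted, comma-joined)
    }
}
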
@@ -19,12 +19,13 @@

 package org.elasticsearch.client;

+import org.apache.http.HttpHost;
+
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
-
-import org.apache.http.HttpHost;
+import java.util.TreeSet;

 /**
  * Metadata about an {@link HttpHost} running Elasticsearch.
@@ -175,42 +176,35 @@ public class Node {
      * Role information about an Elasticsearch process.
      */
     public static final class Roles {
-        private final boolean masterEligible;
-        private final boolean data;
-        private final boolean ingest;
-
-        public Roles(boolean masterEligible, boolean data, boolean ingest) {
-            this.masterEligible = masterEligible;
-            this.data = data;
-            this.ingest = ingest;
+
+        private final Set<String> roles;
+
+        public Roles(final Set<String> roles) {
+            this.roles = new TreeSet<>(roles);
         }

         /**
          * Returns whether or not the node <strong>could</strong> be elected master.
          */
         public boolean isMasterEligible() {
-            return masterEligible;
+            return roles.contains("master");
         }
         /**
          * Returns whether or not the node stores data.
          */
         public boolean isData() {
-            return data;
+            return roles.contains("data");
         }
         /**
          * Returns whether or not the node runs ingest pipelines.
          */
         public boolean isIngest() {
-            return ingest;
+            return roles.contains("ingest");
         }

         @Override
         public String toString() {
-            StringBuilder result = new StringBuilder(3);
-            if (masterEligible) result.append('m');
-            if (data) result.append('d');
-            if (ingest) result.append('i');
-            return result.toString();
+            return String.join(",", roles);
         }

         @Override
@@ -219,14 +213,13 @@ public class Node {
                 return false;
             }
             Roles other = (Roles) obj;
-            return masterEligible == other.masterEligible
-                && data == other.data
-                && ingest == other.ingest;
+            return roles.equals(other.roles);
         }

         @Override
         public int hashCode() {
-            return Objects.hash(masterEligible, data, ingest);
+            return roles.hashCode();
         }
+
     }
 }

@@ -27,6 +27,8 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;

 import static java.util.Collections.singletonList;
 import static java.util.Collections.singletonMap;
@@ -51,9 +53,19 @@ public class HasAttributeNodeSelectorTests extends RestClientTestCase {
     }

     private static Node dummyNode(Map<String, List<String>> attributes) {
+        final Set<String> roles = new TreeSet<>();
+        if (randomBoolean()) {
+            roles.add("master");
+        }
+        if (randomBoolean()) {
+            roles.add("data");
+        }
+        if (randomBoolean()) {
+            roles.add("ingest");
+        }
         return new Node(new HttpHost("dummy"), Collections.<HttpHost>emptySet(),
                 randomAsciiAlphanumOfLength(5), randomAsciiAlphanumOfLength(5),
-                new Roles(randomBoolean(), randomBoolean(), randomBoolean()),
+                new Roles(roles),
                 attributes);
     }
 }

@@ -25,6 +25,8 @@ import org.elasticsearch.client.Node.Roles;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Set;
+import java.util.TreeSet;

 import static org.junit.Assert.assertEquals;

@@ -64,9 +66,19 @@ public class NodeSelectorTests extends RestClientTestCase {
     }

     private static Node dummyNode(boolean master, boolean data, boolean ingest) {
+        final Set<String> roles = new TreeSet<>();
+        if (master) {
+            roles.add("master");
+        }
+        if (data) {
+            roles.add("data");
+        }
+        if (ingest) {
+            roles.add("ingest");
+        }
         return new Node(new HttpHost("dummy"), Collections.<HttpHost>emptySet(),
                 randomAsciiAlphanumOfLength(5), randomAsciiAlphanumOfLength(5),
-                new Roles(master, data, ingest),
+                new Roles(roles),
                 Collections.<String, List<String>>emptyMap());
     }
 }

@@ -23,10 +23,12 @@ import org.apache.http.HttpHost;
 import org.elasticsearch.client.Node.Roles;

 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.TreeSet;

 import static java.util.Collections.singleton;
 import static java.util.Collections.singletonList;
@@ -43,8 +45,8 @@ public class NodeTests extends RestClientTestCase {
         assertEquals("[host=http://1]", new Node(new HttpHost("1")).toString());
         assertEquals("[host=http://1, attributes={foo=[bar], baz=[bort, zoom]}]",
                 new Node(new HttpHost("1"), null, null, null, null, attributes).toString());
-        assertEquals("[host=http://1, roles=mdi]", new Node(new HttpHost("1"),
-                null, null, null, new Roles(true, true, true), null).toString());
+        assertEquals("[host=http://1, roles=data,ingest,master]", new Node(new HttpHost("1"),
+                null, null, null, new Roles(new TreeSet<>(Arrays.asList("master", "data", "ingest"))), null).toString());
         assertEquals("[host=http://1, version=ver]", new Node(new HttpHost("1"),
                 null, null, "ver", null, null).toString());
         assertEquals("[host=http://1, name=nam]", new Node(new HttpHost("1"),
@@ -52,10 +54,10 @@ public class NodeTests extends RestClientTestCase {
         assertEquals("[host=http://1, bound=[http://1, http://2]]", new Node(new HttpHost("1"),
                 new HashSet<>(Arrays.asList(new HttpHost("1"), new HttpHost("2"))), null, null, null, null).toString());
         assertEquals(
-                "[host=http://1, bound=[http://1, http://2], name=nam, version=ver, roles=m, attributes={foo=[bar], baz=[bort, zoom]}]",
+                "[host=http://1, bound=[http://1, http://2], "
+                        + "name=nam, version=ver, roles=master, attributes={foo=[bar], baz=[bort, zoom]}]",
                 new Node(new HttpHost("1"), new HashSet<>(Arrays.asList(new HttpHost("1"), new HttpHost("2"))),
-                        "nam", "ver", new Roles(true, false, false), attributes).toString());
+                        "nam", "ver", new Roles(Collections.singleton("master")), attributes).toString());
     }

     public void testEqualsAndHashCode() {
@@ -64,7 +66,7 @@ public class NodeTests extends RestClientTestCase {
                 randomBoolean() ? null : singleton(host),
                 randomBoolean() ? null : randomAsciiAlphanumOfLength(5),
                 randomBoolean() ? null : randomAsciiAlphanumOfLength(5),
-                randomBoolean() ? null : new Roles(true, true, true),
+                randomBoolean() ? null : new Roles(new TreeSet<>(Arrays.asList("master", "data", "ingest"))),
                 randomBoolean() ? null : singletonMap("foo", singletonList("bar")));
         assertFalse(node.equals(null));
         assertTrue(node.equals(node));
@@ -82,7 +84,7 @@ public class NodeTests extends RestClientTestCase {
         assertFalse(node.equals(new Node(host, node.getBoundHosts(), node.getName(),
                 node.getVersion() + "changed", node.getRoles(), node.getAttributes())));
         assertFalse(node.equals(new Node(host, node.getBoundHosts(), node.getName(),
-                node.getVersion(), new Roles(false, false, false), node.getAttributes())));
+                node.getVersion(), new Roles(Collections.emptySet()), node.getAttributes())));
         assertFalse(node.equals(new Node(host, node.getBoundHosts(), node.getName(),
                 node.getVersion(), node.getRoles(), singletonMap("bort", singletonList("bing")))));
     }

@@ -27,6 +27,8 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;

 import static java.util.Collections.singletonList;
 import static java.util.Collections.singletonMap;
@@ -61,9 +63,19 @@ public class PreferHasAttributeNodeSelectorTests extends RestClientTestCase {
     }

     private static Node dummyNode(Map<String, List<String>> attributes) {
+        final Set<String> roles = new TreeSet<>();
+        if (randomBoolean()) {
+            roles.add("master");
+        }
+        if (randomBoolean()) {
+            roles.add("data");
+        }
+        if (randomBoolean()) {
+            roles.add("ingest");
+        }
         return new Node(new HttpHost("dummy"), Collections.<HttpHost>emptySet(),
                 randomAsciiAlphanumOfLength(5), randomAsciiAlphanumOfLength(5),
-                new Roles(randomBoolean(), randomBoolean(), randomBoolean()),
+                new Roles(roles),
                 attributes);
     }
 }

@@ -27,11 +27,13 @@ import org.junit.After;

 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Set;
+import java.util.TreeSet;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;

@@ -271,7 +273,9 @@ public class RestClientMultipleHostsTests extends RestClientTestCase {
         RestClient restClient = createRestClient(NodeSelector.SKIP_DEDICATED_MASTERS);
         List<Node> newNodes = new ArrayList<>(nodes.size());
         for (int i = 0; i < nodes.size(); i++) {
-            Node.Roles roles = i == 0 ? new Node.Roles(false, true, true) : new Node.Roles(true, false, false);
+            Node.Roles roles = i == 0 ?
+                new Node.Roles(new TreeSet<>(Arrays.asList("data", "ingest"))) :
+                new Node.Roles(new TreeSet<>(Arrays.asList("master")));
             newNodes.add(new Node(nodes.get(i).getHost(), null, null, null, roles, null));
         }
         restClient.setNodes(newNodes);

@@ -27,10 +27,10 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.http.HttpEntity;
 import org.apache.http.HttpHost;
 import org.elasticsearch.client.Node;
+import org.elasticsearch.client.Node.Roles;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.RestClient;
-import org.elasticsearch.client.Node.Roles;

 import java.io.IOException;
 import java.io.InputStream;
@@ -42,6 +42,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
+import java.util.TreeSet;
 import java.util.concurrent.TimeUnit;

 import static java.util.Collections.singletonList;
@@ -152,9 +153,7 @@ public final class ElasticsearchNodesSniffer implements NodesSniffer {
         final Map<String, String> protoAttributes = new HashMap<String, String>();

         boolean sawRoles = false;
-        boolean master = false;
-        boolean data = false;
-        boolean ingest = false;
+        final Set<String> roles = new TreeSet<>();

         String fieldName = null;
         while (parser.nextToken() != JsonToken.END_OBJECT) {
@@ -207,19 +206,7 @@ public final class ElasticsearchNodesSniffer implements NodesSniffer {
                 if ("roles".equals(fieldName)) {
                     sawRoles = true;
                     while (parser.nextToken() != JsonToken.END_ARRAY) {
-                        switch (parser.getText()) {
-                            case "master":
-                                master = true;
-                                break;
-                            case "data":
-                                data = true;
-                                break;
-                            case "ingest":
-                                ingest = true;
-                                break;
-                            default:
-                                logger.warn("unknown role [" + parser.getText() + "] on node [" + nodeId + "]");
-                        }
+                        roles.add(parser.getText());
                     }
                 } else {
                     parser.skipChildren();
@@ -268,15 +255,19 @@ public final class ElasticsearchNodesSniffer implements NodesSniffer {
             boolean clientAttribute = v2RoleAttributeValue(realAttributes, "client", false);
             Boolean masterAttribute = v2RoleAttributeValue(realAttributes, "master", null);
             Boolean dataAttribute = v2RoleAttributeValue(realAttributes, "data", null);
-            master = masterAttribute == null ? false == clientAttribute : masterAttribute;
-            data = dataAttribute == null ? false == clientAttribute : dataAttribute;
+            if ((masterAttribute == null && false == clientAttribute) || masterAttribute) {
+                roles.add("master");
+            }
+            if ((dataAttribute == null && false == clientAttribute) || dataAttribute) {
+                roles.add("data");
+            }
         } else {
             assert sawRoles : "didn't see roles for [" + nodeId + "]";
         }
         assert boundHosts.contains(publishedHost) :
                 "[" + nodeId + "] doesn't make sense! publishedHost should be in boundHosts";
         logger.trace("adding node [" + nodeId + "]");
-        return new Node(publishedHost, boundHosts, name, version, new Roles(master, data, ingest),
+        return new Node(publishedHost, boundHosts, name, version, new Roles(roles),
                 unmodifiableMap(realAttributes));
     }

@@ -19,13 +19,14 @@

 package org.elasticsearch.client.sniff;

+import com.fasterxml.jackson.core.JsonFactory;
 import org.apache.http.HttpEntity;
 import org.apache.http.HttpHost;
 import org.apache.http.entity.ContentType;
 import org.apache.http.entity.InputStreamEntity;
 import org.elasticsearch.client.Node;
-import org.elasticsearch.client.RestClientTestCase;
 import org.elasticsearch.client.Node.Roles;
+import org.elasticsearch.client.RestClientTestCase;
 import org.elasticsearch.client.sniff.ElasticsearchNodesSniffer.Scheme;

 import java.io.IOException;
@@ -36,8 +37,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-
-import com.fasterxml.jackson.core.JsonFactory;
+import java.util.TreeSet;

 import static java.util.Collections.singletonList;
 import static org.hamcrest.Matchers.hasSize;
@@ -49,6 +49,7 @@ import static org.junit.Assert.assertThat;
  * versions of Elasticsearch.
  */
 public class ElasticsearchNodesSnifferParseTests extends RestClientTestCase {
+
     private void checkFile(String file, Node... expected) throws IOException {
         InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream(file);
         if (in == null) {
@@ -107,6 +108,18 @@ public class ElasticsearchNodesSnifferParseTests extends RestClientTestCase {
             node(9207, "c2", "6.0.0", false, false, true));
     }

+    public void test7x() throws IOException {
+        checkFile("7.3.0_nodes_http.json",
+            node(9200, "m1", "7.3.0", "master", "ingest"),
+            node(9201, "m2", "7.3.0", "master", "data", "ingest"),
+            node(9202, "m3", "7.3.0", "master", "ingest"),
+            node(9203, "d1", "7.3.0", "data", "ingest", "ml"),
+            node(9204, "d2", "7.3.0", "data", "ingest"),
+            node(9205, "d3", "7.3.0", "data", "ingest"),
+            node(9206, "c1", "7.3.0", "ingest"),
+            node(9207, "c2", "7.3.0", "ingest"));
+    }
+
     public void testParsingPublishAddressWithPreES7Format() throws IOException {
         InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("es6_nodes_publication_address_format.json");

@@ -130,6 +143,24 @@ public class ElasticsearchNodesSnifferParseTests extends RestClientTestCase {
     }

     private Node node(int port, String name, String version, boolean master, boolean data, boolean ingest) {
+        final Set<String> roles = new TreeSet<>();
+        if (master) {
+            roles.add("master");
+        }
+        if (data) {
+            roles.add("data");
+        }
+        if (ingest) {
+            roles.add("ingest");
+        }
+        return node(port, name, version, roles);
+    }
+
+    private Node node(int port, String name, String version, String... roles) {
+        return node(port, name, version, new TreeSet<>(Arrays.asList(roles)));
+    }
+
+    private Node node(int port, String name, String version, Set<String> roles) {
         HttpHost host = new HttpHost("127.0.0.1", port);
         Set<HttpHost> boundHosts = new HashSet<>(2);
         boundHosts.add(host);
@@ -138,6 +169,7 @@ public class ElasticsearchNodesSnifferParseTests extends RestClientTestCase {
         attributes.put("dummy", singletonList("everyone_has_me"));
         attributes.put("number", singletonList(name.substring(1)));
         attributes.put("array", Arrays.asList(name.substring(0, 1), name.substring(1)));
-        return new Node(host, boundHosts, name, version, new Roles(master, data, ingest), attributes);
+        return new Node(host, boundHosts, name, version, new Roles(new TreeSet<>(roles)), attributes);
     }
+
 }

@@ -52,6 +52,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeSet;

 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.equalTo;
@@ -211,9 +212,20 @@ public class ElasticsearchNodesSnifferTests extends RestClientTestCase {
                 attributes.put("attr" + j, values);
             }

+            final Set<String> nodeRoles = new TreeSet<>();
+            if (randomBoolean()) {
+                nodeRoles.add("master");
+            }
+            if (randomBoolean()) {
+                nodeRoles.add("data");
+            }
+            if (randomBoolean()) {
+                nodeRoles.add("ingest");
+            }
+
             Node node = new Node(publishHost, boundHosts, randomAsciiAlphanumOfLength(5),
                     randomAsciiAlphanumOfLength(5),
-                    new Node.Roles(randomBoolean(), randomBoolean(), randomBoolean()),
+                    new Node.Roles(nodeRoles),
                     attributes);

             generator.writeObjectFieldStart(nodeId);

@@ -0,0 +1,218 @@
+{
+  "_nodes": {
+    "total": 8,
+    "successful": 8,
+    "failed": 0
+  },
+  "cluster_name": "elasticsearch",
+  "nodes": {
+    "ikXK_skVTfWkhONhldnbkw": {
+      "name": "m1",
+      "transport_address": "127.0.0.1:9300",
+      "host": "127.0.0.1",
+      "ip": "127.0.0.1",
+      "version": "7.3.0",
+      "build_hash": "8f0685b",
+      "roles": [
+        "master",
+        "ingest"
+      ],
+      "attributes": {
+        "dummy": "everyone_has_me",
+        "number": "1",
+        "array.0": "m",
+        "array.1": "1"
+      },
+      "http": {
+        "bound_address": [
+          "[::1]:9200",
+          "127.0.0.1:9200"
+        ],
+        "publish_address": "127.0.0.1:9200",
+        "max_content_length_in_bytes": 104857600
+      }
+    },
+    "TMHa34w4RqeuYoHCfJGXZg": {
+      "name": "m2",
+      "transport_address": "127.0.0.1:9301",
+      "host": "127.0.0.1",
+      "ip": "127.0.0.1",
+      "version": "7.3.0",
+      "build_hash": "8f0685b",
+      "roles": [
+        "master",
+        "data",
+        "ingest"
+      ],
+      "attributes": {
+        "dummy": "everyone_has_me",
+        "number": "2",
+        "array.0": "m",
+        "array.1": "2"
+      },
+      "http": {
+        "bound_address": [
+          "[::1]:9201",
+          "127.0.0.1:9201"
+        ],
+        "publish_address": "127.0.0.1:9201",
+        "max_content_length_in_bytes": 104857600
+      }
+    },
+    "lzaMRJTVT166sgVZdQ5thA": {
+      "name": "m3",
+      "transport_address": "127.0.0.1:9302",
+      "host": "127.0.0.1",
+      "ip": "127.0.0.1",
+      "version": "7.3.0",
+      "build_hash": "8f0685b",
+      "roles": [
+        "master",
+        "ingest"
+      ],
+      "attributes": {
+        "dummy": "everyone_has_me",
+        "number": "3",
+        "array.0": "m",
+        "array.1": "3"
+      },
+      "http": {
+        "bound_address": [
+          "[::1]:9202",
+          "127.0.0.1:9202"
+        ],
+        "publish_address": "127.0.0.1:9202",
+        "max_content_length_in_bytes": 104857600
+      }
+    },
+    "tGP5sUecSd6BLTWk1NWF8Q": {
+      "name": "d1",
+      "transport_address": "127.0.0.1:9303",
+      "host": "127.0.0.1",
+      "ip": "127.0.0.1",
+      "version": "7.3.0",
+      "build_hash": "8f0685b",
+      "roles": [
+        "data",
+        "ingest",
+        "ml"
+      ],
+      "attributes": {
+        "dummy": "everyone_has_me",
+        "number": "1",
+        "array.0": "d",
+        "array.1": "1"
+      },
+      "http": {
+        "bound_address": [
+          "[::1]:9203",
+          "127.0.0.1:9203"
+        ],
+        "publish_address": "127.0.0.1:9203",
+        "max_content_length_in_bytes": 104857600
+      }
+    },
+    "c1UgW5ROTkSa2YnM_T56tw": {
+      "name": "d2",
+      "transport_address": "127.0.0.1:9304",
+      "host": "127.0.0.1",
+      "ip": "127.0.0.1",
+      "version": "7.3.0",
+      "build_hash": "8f0685b",
+      "roles": [
+        "data",
+        "ingest"
+      ],
+      "attributes": {
+        "dummy": "everyone_has_me",
+        "number": "2",
+        "array.0": "d",
+        "array.1": "2"
+      },
+      "http": {
+        "bound_address": [
+          "[::1]:9204",
+          "127.0.0.1:9204"
+        ],
+        "publish_address": "127.0.0.1:9204",
+        "max_content_length_in_bytes": 104857600
+      }
+    },
+    "QM9yjqjmS72MstpNYV_trg": {
+      "name": "d3",
+      "transport_address": "127.0.0.1:9305",
+      "host": "127.0.0.1",
+      "ip": "127.0.0.1",
+      "version": "7.3.0",
+      "build_hash": "8f0685b",
+      "roles": [
+        "data",
+        "ingest"
+      ],
+      "attributes": {
+        "dummy": "everyone_has_me",
+        "number": "3",
+        "array.0": "d",
+        "array.1": "3"
+      },
+      "http": {
+        "bound_address": [
+          "[::1]:9205",
+          "127.0.0.1:9205"
+        ],
+        "publish_address": "127.0.0.1:9205",
+        "max_content_length_in_bytes": 104857600
+      }
+    },
+    "wLtzAssoQYeX_4TstgCj0Q": {
+      "name": "c1",
+      "transport_address": "127.0.0.1:9306",
+      "host": "127.0.0.1",
+      "ip": "127.0.0.1",
+      "version": "7.3.0",
+      "build_hash": "8f0685b",
+      "roles": [
+        "ingest"
+      ],
+      "attributes": {
+        "dummy": "everyone_has_me",
+        "number": "1",
+        "array.0": "c",
+        "array.1": "1"
+      },
+      "http": {
+        "bound_address": [
+          "[::1]:9206",
+          "127.0.0.1:9206"
+        ],
+        "publish_address": "127.0.0.1:9206",
+        "max_content_length_in_bytes": 104857600
+      }
+    },
+    "ONOzpst8TH-ZebG7fxGwaA": {
+      "name": "c2",
+      "transport_address": "127.0.0.1:9307",
+      "host": "127.0.0.1",
+      "ip": "127.0.0.1",
+      "version": "7.3.0",
+      "build_hash": "8f0685b",
+      "roles": [
+        "ingest"
+      ],
+      "attributes": {
+        "dummy": "everyone_has_me",
+        "number": "2",
+        "array.0": "c",
+        "array.1": "2"
+      },
+      "http": {
+        "bound_address": [
+          "[::1]:9207",
+          "127.0.0.1:9207"
+        ],
+        "publish_address": "127.0.0.1:9207",
+        "max_content_length_in_bytes": 104857600
+      }
+    }
+  }
+}