Core: Default node.name to the hostname (#33677)
Changes the default of the `node.name` setting to the hostname of the machine on which Elasticsearch is running. Previously it was the first 8 characters of the node id. This had the advantage of producing a unique name even when the node name isn't configured but the disadvantage of being unrecognizable and not being available until fairly late in the startup process. Of particular interest is that it isn't available until after logging is configured. This forces us to use a volatile read whenever we add the node name to the log. Using the hostname is available immediately on startup and is generally recognizable but has the disadvantage of not being unique when run on machines that don't set their hostname or when multiple Elasticsearch processes are run on the same host. I believe that, taken together, it is better to default to the hostname. 1. Running multiple copies of Elasticsearch on the same node is a fairly advanced feature. We do it all the time as part of the Elasticsearch build for testing, but we make sure to set the node name then. 2. That the node.name defaults to some flavor of "localhost" on an unconfigured box feels like it isn't going to come up too much in production. I expect most production deployments to at least set the hostname. As a bonus, production deployments need no longer set the node name in most cases. At least in my experience most folks set it to the hostname anyway.
This commit is contained in:
parent
3df285d9f0
commit
26c4f1fb6c
|
@ -20,6 +20,7 @@
|
||||||
package org.elasticsearch.test.rest;
|
package org.elasticsearch.test.rest;
|
||||||
|
|
||||||
import org.elasticsearch.common.logging.NodeNameInLogsIntegTestCase;
|
import org.elasticsearch.common.logging.NodeNameInLogsIntegTestCase;
|
||||||
|
import org.hamcrest.Matcher;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.BufferedReader;
|
import java.io.BufferedReader;
|
||||||
|
@ -29,9 +30,16 @@ import java.nio.file.Path;
|
||||||
import java.security.AccessController;
|
import java.security.AccessController;
|
||||||
import java.security.PrivilegedAction;
|
import java.security.PrivilegedAction;
|
||||||
|
|
||||||
|
import static org.hamcrest.Matchers.is;
|
||||||
|
|
||||||
public class NodeNameInLogsIT extends NodeNameInLogsIntegTestCase {
|
public class NodeNameInLogsIT extends NodeNameInLogsIntegTestCase {
|
||||||
@Override
|
@Override
|
||||||
protected BufferedReader openReader(Path logFile) throws IOException {
|
protected Matcher<String> nodeNameMatcher() {
|
||||||
|
return is("node-0");
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected BufferedReader openReader(Path logFile) {
|
||||||
return AccessController.doPrivileged((PrivilegedAction<BufferedReader>) () -> {
|
return AccessController.doPrivileged((PrivilegedAction<BufferedReader>) () -> {
|
||||||
try {
|
try {
|
||||||
return Files.newBufferedReader(logFile, StandardCharsets.UTF_8);
|
return Files.newBufferedReader(logFile, StandardCharsets.UTF_8);
|
||||||
|
|
|
@ -236,29 +236,36 @@ If everything goes well with installation, you should see a bunch of messages th
|
||||||
|
|
||||||
["source","sh",subs="attributes,callouts"]
|
["source","sh",subs="attributes,callouts"]
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
[2016-09-16T14:17:51,251][INFO ][o.e.n.Node ] [] initializing ...
|
[2018-09-13T12:20:01,766][INFO ][o.e.e.NodeEnvironment ] [localhost.localdomain] using [1] data paths, mounts [[/home (/dev/mapper/fedora-home)]], net usable_space [335.3gb], net total_space [410.3gb], types [ext4]
|
||||||
[2016-09-16T14:17:51,329][INFO ][o.e.e.NodeEnvironment ] [6-bjhwl] using [1] data paths, mounts [[/ (/dev/sda1)]], net usable_space [317.7gb], net total_space [453.6gb], spins? [no], types [ext4]
|
[2018-09-13T12:20:01,772][INFO ][o.e.e.NodeEnvironment ] [localhost.localdomain] heap size [990.7mb], compressed ordinary object pointers [true]
|
||||||
[2016-09-16T14:17:51,330][INFO ][o.e.e.NodeEnvironment ] [6-bjhwl] heap size [1.9gb], compressed ordinary object pointers [true]
|
[2018-09-13T12:20:01,774][INFO ][o.e.n.Node ] [localhost.localdomain] node name [localhost.localdomain], node ID [B0aEHNagTiWx7SYj-l4NTw]
|
||||||
[2016-09-16T14:17:51,333][INFO ][o.e.n.Node ] [6-bjhwl] node name [6-bjhwl] derived from node ID; set [node.name] to override
|
[2018-09-13T12:20:01,775][INFO ][o.e.n.Node ] [localhost.localdomain] version[{version}], pid[13030], build[oss/zip/77fc20e/2018-09-13T15:37:57.478402Z], OS[Linux/4.16.11-100.fc26.x86_64/amd64], JVM["Oracle Corporation"/OpenJDK 64-Bit Server VM/10/10+46]
|
||||||
[2016-09-16T14:17:51,334][INFO ][o.e.n.Node ] [6-bjhwl] version[{version}], pid[21261], build[f5daa16/2016-09-16T09:12:24.346Z], OS[Linux/4.4.0-36-generic/amd64], JVM[Oracle Corporation/Java HotSpot(TM) 64-Bit Server VM/1.8.0_60/25.60-b23]
|
[2018-09-13T12:20:01,775][INFO ][o.e.n.Node ] [localhost.localdomain] JVM arguments [-Xms1g, -Xmx1g, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -XX:+AlwaysPreTouch, -Xss1m, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djna.nosys=true, -XX:-OmitStackTraceInFastThrow, -Dio.netty.noUnsafe=true, -Dio.netty.noKeySetOptimization=true, -Dio.netty.recycler.maxCapacityPerThread=0, -Dlog4j.shutdownHookEnabled=false, -Dlog4j2.disable.jmx=true, -Djava.io.tmpdir=/tmp/elasticsearch.LN1ctLCi, -XX:+HeapDumpOnOutOfMemoryError, -XX:HeapDumpPath=data, -XX:ErrorFile=logs/hs_err_pid%p.log, -Xlog:gc*,gc+age=trace,safepoint:file=logs/gc.log:utctime,pid,tags:filecount=32,filesize=64m, -Djava.locale.providers=COMPAT, -XX:UseAVX=2, -Dio.netty.allocator.type=unpooled, -Des.path.home=/home/manybubbles/Workspaces/Elastic/master/elasticsearch/qa/unconfigured-node-name/build/cluster/integTestCluster node0/elasticsearch-7.0.0-alpha1-SNAPSHOT, -Des.path.conf=/home/manybubbles/Workspaces/Elastic/master/elasticsearch/qa/unconfigured-node-name/build/cluster/integTestCluster node0/elasticsearch-7.0.0-alpha1-SNAPSHOT/config, -Des.distribution.flavor=oss, -Des.distribution.type=zip]
|
||||||
[2016-09-16T14:17:51,967][INFO ][o.e.p.PluginsService ] [6-bjhwl] loaded module [aggs-matrix-stats]
|
[2018-09-13T12:20:02,543][INFO ][o.e.p.PluginsService ] [localhost.localdomain] loaded module [aggs-matrix-stats]
|
||||||
[2016-09-16T14:17:51,967][INFO ][o.e.p.PluginsService ] [6-bjhwl] loaded module [ingest-common]
|
[2018-09-13T12:20:02,543][INFO ][o.e.p.PluginsService ] [localhost.localdomain] loaded module [analysis-common]
|
||||||
[2016-09-16T14:17:51,967][INFO ][o.e.p.PluginsService ] [6-bjhwl] loaded module [lang-expression]
|
[2018-09-13T12:20:02,543][INFO ][o.e.p.PluginsService ] [localhost.localdomain] loaded module [ingest-common]
|
||||||
[2016-09-16T14:17:51,967][INFO ][o.e.p.PluginsService ] [6-bjhwl] loaded module [lang-mustache]
|
[2018-09-13T12:20:02,544][INFO ][o.e.p.PluginsService ] [localhost.localdomain] loaded module [lang-expression]
|
||||||
[2016-09-16T14:17:51,967][INFO ][o.e.p.PluginsService ] [6-bjhwl] loaded module [lang-painless]
|
[2018-09-13T12:20:02,544][INFO ][o.e.p.PluginsService ] [localhost.localdomain] loaded module [lang-mustache]
|
||||||
[2016-09-16T14:17:51,967][INFO ][o.e.p.PluginsService ] [6-bjhwl] loaded module [percolator]
|
[2018-09-13T12:20:02,544][INFO ][o.e.p.PluginsService ] [localhost.localdomain] loaded module [lang-painless]
|
||||||
[2016-09-16T14:17:51,968][INFO ][o.e.p.PluginsService ] [6-bjhwl] loaded module [reindex]
|
[2018-09-13T12:20:02,544][INFO ][o.e.p.PluginsService ] [localhost.localdomain] loaded module [mapper-extras]
|
||||||
[2016-09-16T14:17:51,968][INFO ][o.e.p.PluginsService ] [6-bjhwl] loaded module [transport-netty3]
|
[2018-09-13T12:20:02,544][INFO ][o.e.p.PluginsService ] [localhost.localdomain] loaded module [parent-join]
|
||||||
[2016-09-16T14:17:51,968][INFO ][o.e.p.PluginsService ] [6-bjhwl] loaded module [transport-netty4]
|
[2018-09-13T12:20:02,544][INFO ][o.e.p.PluginsService ] [localhost.localdomain] loaded module [percolator]
|
||||||
[2016-09-16T14:17:51,968][INFO ][o.e.p.PluginsService ] [6-bjhwl] loaded plugin [mapper-murmur3]
|
[2018-09-13T12:20:02,544][INFO ][o.e.p.PluginsService ] [localhost.localdomain] loaded module [rank-eval]
|
||||||
[2016-09-16T14:17:53,521][INFO ][o.e.n.Node ] [6-bjhwl] initialized
|
[2018-09-13T12:20:02,544][INFO ][o.e.p.PluginsService ] [localhost.localdomain] loaded module [reindex]
|
||||||
[2016-09-16T14:17:53,521][INFO ][o.e.n.Node ] [6-bjhwl] starting ...
|
[2018-09-13T12:20:02,545][INFO ][o.e.p.PluginsService ] [localhost.localdomain] loaded module [repository-url]
|
||||||
[2016-09-16T14:17:53,671][INFO ][o.e.t.TransportService ] [6-bjhwl] publish_address {192.168.8.112:9300}, bound_addresses {{192.168.8.112:9300}
|
[2018-09-13T12:20:02,545][INFO ][o.e.p.PluginsService ] [localhost.localdomain] loaded module [transport-netty4]
|
||||||
[2016-09-16T14:17:53,676][WARN ][o.e.b.BootstrapCheck ] [6-bjhwl] max virtual memory areas vm.max_map_count [65530] likely too low, increase to at least [262144]
|
[2018-09-13T12:20:02,545][INFO ][o.e.p.PluginsService ] [localhost.localdomain] no plugins loaded
|
||||||
[2016-09-16T14:17:56,718][INFO ][o.e.c.s.ClusterService ] [6-bjhwl] new_master {6-bjhwl}{6-bjhwl4TkajjoD2oEipnQ}{8m3SNKoFR6yQl1I0JUfPig}{192.168.8.112}{192.168.8.112:9300}, reason: zen-disco-elected-as-master ([0] nodes joined)
|
[2018-09-13T12:20:04,657][INFO ][o.e.d.DiscoveryModule ] [localhost.localdomain] using discovery type [zen]
|
||||||
[2016-09-16T14:17:56,731][INFO ][o.e.h.HttpServer ] [6-bjhwl] publish_address {192.168.8.112:9200}, bound_addresses {[::1]:9200}, {192.168.8.112:9200}
|
[2018-09-13T12:20:05,006][INFO ][o.e.n.Node ] [localhost.localdomain] initialized
|
||||||
[2016-09-16T14:17:56,732][INFO ][o.e.g.GatewayService ] [6-bjhwl] recovered [0] indices into cluster_state
|
[2018-09-13T12:20:05,007][INFO ][o.e.n.Node ] [localhost.localdomain] starting ...
|
||||||
[2016-09-16T14:17:56,748][INFO ][o.e.n.Node ] [6-bjhwl] started
|
[2018-09-13T12:20:05,202][INFO ][o.e.t.TransportService ] [localhost.localdomain] publish_address {127.0.0.1:9300}, bound_addresses {[::1]:9300}, {127.0.0.1:9300}
|
||||||
|
[2018-09-13T12:20:05,221][WARN ][o.e.b.BootstrapChecks ] [localhost.localdomain] max file descriptors [4096] for elasticsearch process is too low, increase to at least [65536]
|
||||||
|
[2018-09-13T12:20:05,221][WARN ][o.e.b.BootstrapChecks ] [localhost.localdomain] max virtual memory areas vm.max_map_count [65530] is too low, increase to at least [262144]
|
||||||
|
[2018-09-13T12:20:08,355][INFO ][o.e.c.s.MasterService ] [localhost.localdomain] zen-disco-elected-as-master ([0] nodes joined)[, ], reason: master node changed {previous [], current [{localhost.localdomain}{B0aEHNagTiWx7SYj-l4NTw}{hzsQz6CVQMCTpMCVLM4IHg}{127.0.0.1}{127.0.0.1:9300}{testattr=test}]}
|
||||||
|
[2018-09-13T12:20:08,360][INFO ][o.e.c.s.ClusterApplierService] [localhost.localdomain] master node changed {previous [], current [{localhost.localdomain}{B0aEHNagTiWx7SYj-l4NTw}{hzsQz6CVQMCTpMCVLM4IHg}{127.0.0.1}{127.0.0.1:9300}{testattr=test}]}, reason: apply cluster state (from master [master {localhost.localdomain}{B0aEHNagTiWx7SYj-l4NTw}{hzsQz6CVQMCTpMCVLM4IHg}{127.0.0.1}{127.0.0.1:9300}{testattr=test} committed version [1] source [zen-disco-elected-as-master ([0] nodes joined)[, ]]])
|
||||||
|
[2018-09-13T12:20:08,384][INFO ][o.e.h.n.Netty4HttpServerTransport] [localhost.localdomain] publish_address {127.0.0.1:9200}, bound_addresses {[::1]:9200}, {127.0.0.1:9200}
|
||||||
|
[2018-09-13T12:20:08,384][INFO ][o.e.n.Node ] [localhost.localdomain] started
|
||||||
|
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
|
|
||||||
Without going too much into detail, we can see that our node named "6-bjhwl" (which will be a different set of characters in your case) has started and elected itself as a master in a single cluster. Don't worry yet at the moment what master means. The main thing that is important here is that we have started one node within one cluster.
|
Without going too much into detail, we can see that our node named "localhost.localdomain" (which will be the hostname of your machine, so it will likely be different in your case) has started and elected itself as a master in a single cluster. Don't worry for the moment about what master means. The main thing that is important here is that we have started one node within one cluster.
|
||||||
|
|
|
@ -2,6 +2,12 @@
|
||||||
|
|
||||||
=== Settings changes
|
=== Settings changes
|
||||||
|
|
||||||
|
==== The default for `node.name` is now the hostname
|
||||||
|
|
||||||
|
`node.name` now defaults to the hostname at the time when Elasticsearch
|
||||||
|
is started. Previously the default node name was the first eight characters
|
||||||
|
of the node id. It can still be configured explicitly in `elasticsearch.yml`.
|
||||||
|
|
||||||
==== Percolator
|
==== Percolator
|
||||||
|
|
||||||
* The deprecated `index.percolator.map_unmapped_fields_as_string` setting has been removed in favour of
|
* The deprecated `index.percolator.map_unmapped_fields_as_string` setting has been removed in favour of
|
||||||
|
|
|
@ -1,22 +1,13 @@
|
||||||
[[node.name]]
|
[[node.name]]
|
||||||
=== `node.name`
|
=== `node.name`
|
||||||
|
|
||||||
By default, Elasticsearch will use the first seven characters of the randomly
|
Elasticsearch uses `node.name` as a human-readable identifier for a
|
||||||
generated UUID as the node id. Note that the node id is persisted and does
|
particular instance of Elasticsearch so it is included in the response
|
||||||
not change when a node restarts and therefore the default node name will also
|
of many APIs. It defaults to the hostname that the machine has when
|
||||||
not change.
|
Elasticsearch starts but can be configured explicitly in
|
||||||
|
`elasticsearch.yml` as follows:
|
||||||
It is worth configuring a more meaningful name which will also have the
|
|
||||||
advantage of persisting after restarting the node:
|
|
||||||
|
|
||||||
[source,yaml]
|
[source,yaml]
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
node.name: prod-data-2
|
node.name: prod-data-2
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
|
|
||||||
The `node.name` can also be set to the server's HOSTNAME as follows:
|
|
||||||
|
|
||||||
[source,yaml]
|
|
||||||
--------------------------------------------------
|
|
||||||
node.name: ${HOSTNAME}
|
|
||||||
--------------------------------------------------
|
|
||||||
|
|
|
@ -4,10 +4,9 @@
|
||||||
Elasticsearch uses https://logging.apache.org/log4j/2.x/[Log4j 2] for
|
Elasticsearch uses https://logging.apache.org/log4j/2.x/[Log4j 2] for
|
||||||
logging. Log4j 2 can be configured using the log4j2.properties
|
logging. Log4j 2 can be configured using the log4j2.properties
|
||||||
file. Elasticsearch exposes three properties, `${sys:es.logs.base_path}`,
|
file. Elasticsearch exposes three properties, `${sys:es.logs.base_path}`,
|
||||||
`${sys:es.logs.cluster_name}`, and `${sys:es.logs.node_name}` (if the node name
|
`${sys:es.logs.cluster_name}`, and `${sys:es.logs.node_name}` that can be
|
||||||
is explicitly set via `node.name`) that can be referenced in the configuration
|
referenced in the configuration file to determine the location of the log
|
||||||
file to determine the location of the log files. The property
|
files. The property `${sys:es.logs.base_path}` will resolve to the log directory,
|
||||||
`${sys:es.logs.base_path}` will resolve to the log directory,
|
|
||||||
`${sys:es.logs.cluster_name}` will resolve to the cluster name (used as the
|
`${sys:es.logs.cluster_name}` will resolve to the cluster name (used as the
|
||||||
prefix of log filenames in the default configuration), and
|
prefix of log filenames in the default configuration), and
|
||||||
`${sys:es.logs.node_name}` will resolve to the node name (if the node name is
|
`${sys:es.logs.node_name}` will resolve to the node name (if the node name is
|
||||||
|
|
|
@ -340,26 +340,22 @@ public class EvilLoggerTests extends ESTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testProperties() throws IOException, UserException {
|
public void testProperties() throws IOException, UserException {
|
||||||
final Settings.Builder builder = Settings.builder().put("cluster.name", randomAlphaOfLength(16));
|
final Settings settings = Settings.builder()
|
||||||
if (randomBoolean()) {
|
.put("cluster.name", randomAlphaOfLength(16))
|
||||||
builder.put("node.name", randomAlphaOfLength(16));
|
.put("node.name", randomAlphaOfLength(16))
|
||||||
}
|
.build();
|
||||||
final Settings settings = builder.build();
|
|
||||||
setupLogging("minimal", settings);
|
setupLogging("minimal", settings);
|
||||||
|
|
||||||
assertNotNull(System.getProperty("es.logs.base_path"));
|
assertNotNull(System.getProperty("es.logs.base_path"));
|
||||||
|
|
||||||
assertThat(System.getProperty("es.logs.cluster_name"), equalTo(ClusterName.CLUSTER_NAME_SETTING.get(settings).value()));
|
assertThat(System.getProperty("es.logs.cluster_name"), equalTo(ClusterName.CLUSTER_NAME_SETTING.get(settings).value()));
|
||||||
if (Node.NODE_NAME_SETTING.exists(settings)) {
|
assertThat(System.getProperty("es.logs.node_name"), equalTo(Node.NODE_NAME_SETTING.get(settings)));
|
||||||
assertThat(System.getProperty("es.logs.node_name"), equalTo(Node.NODE_NAME_SETTING.get(settings)));
|
|
||||||
} else {
|
|
||||||
assertNull(System.getProperty("es.logs.node_name"));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testNoNodeNameInPatternWarning() throws IOException, UserException {
|
public void testNoNodeNameInPatternWarning() throws IOException, UserException {
|
||||||
|
String nodeName = randomAlphaOfLength(16);
|
||||||
|
LogConfigurator.setNodeName(nodeName);
|
||||||
setupLogging("no_node_name");
|
setupLogging("no_node_name");
|
||||||
|
|
||||||
final String path =
|
final String path =
|
||||||
System.getProperty("es.logs.base_path") +
|
System.getProperty("es.logs.base_path") +
|
||||||
System.getProperty("file.separator") +
|
System.getProperty("file.separator") +
|
||||||
|
@ -368,10 +364,10 @@ public class EvilLoggerTests extends ESTestCase {
|
||||||
assertThat(events.size(), equalTo(2));
|
assertThat(events.size(), equalTo(2));
|
||||||
final String location = "org.elasticsearch.common.logging.LogConfigurator";
|
final String location = "org.elasticsearch.common.logging.LogConfigurator";
|
||||||
// the first message is a warning for unsupported configuration files
|
// the first message is a warning for unsupported configuration files
|
||||||
assertLogLine(events.get(0), Level.WARN, location, "\\[unknown\\] Some logging configurations have %marker but don't "
|
assertLogLine(events.get(0), Level.WARN, location, "\\[" + nodeName + "\\] Some logging configurations have "
|
||||||
+ "have %node_name. We will automatically add %node_name to the pattern to ease the migration for users "
|
+ "%marker but don't have %node_name. We will automatically add %node_name to the pattern to ease the "
|
||||||
+ "who customize log4j2.properties but will stop this behavior in 7.0. You should manually replace "
|
+ "migration for users who customize log4j2.properties but will stop this behavior in 7.0. You should "
|
||||||
+ "`%node_name` with `\\[%node_name\\]%marker ` in these locations:");
|
+ "manually replace `%node_name` with `\\[%node_name\\]%marker ` in these locations:");
|
||||||
if (Constants.WINDOWS) {
|
if (Constants.WINDOWS) {
|
||||||
assertThat(events.get(1), endsWith("no_node_name\\log4j2.properties"));
|
assertThat(events.get(1), endsWith("no_node_name\\log4j2.properties"));
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@ -52,7 +52,7 @@ public class NodeEnvironmentEvilTests extends ESTestCase {
|
||||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
|
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
|
||||||
.putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
|
.putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
|
||||||
IOException ioException = expectThrows(IOException.class, () -> {
|
IOException ioException = expectThrows(IOException.class, () -> {
|
||||||
new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {});
|
new NodeEnvironment(build, TestEnvironment.newEnvironment(build));
|
||||||
});
|
});
|
||||||
assertTrue(ioException.getMessage(), ioException.getMessage().startsWith(path.toString()));
|
assertTrue(ioException.getMessage(), ioException.getMessage().startsWith(path.toString()));
|
||||||
}
|
}
|
||||||
|
@ -72,7 +72,7 @@ public class NodeEnvironmentEvilTests extends ESTestCase {
|
||||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
|
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
|
||||||
.putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
|
.putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
|
||||||
IOException ioException = expectThrows(IOException.class, () -> {
|
IOException ioException = expectThrows(IOException.class, () -> {
|
||||||
new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {});
|
new NodeEnvironment(build, TestEnvironment.newEnvironment(build));
|
||||||
});
|
});
|
||||||
assertTrue(ioException.getMessage(), ioException.getMessage().startsWith("failed to test writes in data directory"));
|
assertTrue(ioException.getMessage(), ioException.getMessage().startsWith("failed to test writes in data directory"));
|
||||||
}
|
}
|
||||||
|
@ -97,7 +97,7 @@ public class NodeEnvironmentEvilTests extends ESTestCase {
|
||||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
|
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
|
||||||
.putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
|
.putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
|
||||||
IOException ioException = expectThrows(IOException.class, () -> {
|
IOException ioException = expectThrows(IOException.class, () -> {
|
||||||
new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {});
|
new NodeEnvironment(build, TestEnvironment.newEnvironment(build));
|
||||||
});
|
});
|
||||||
assertTrue(ioException.getMessage(), ioException.getMessage().startsWith("failed to test writes in data directory"));
|
assertTrue(ioException.getMessage(), ioException.getMessage().startsWith("failed to test writes in data directory"));
|
||||||
}
|
}
|
||||||
|
|
|
@ -19,8 +19,8 @@
|
||||||
|
|
||||||
package org.elasticsearch.unconfigured_node_name;
|
package org.elasticsearch.unconfigured_node_name;
|
||||||
|
|
||||||
import org.elasticsearch.bootstrap.BootstrapInfo;
|
|
||||||
import org.elasticsearch.common.logging.NodeNameInLogsIntegTestCase;
|
import org.elasticsearch.common.logging.NodeNameInLogsIntegTestCase;
|
||||||
|
import org.hamcrest.Matcher;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.BufferedReader;
|
import java.io.BufferedReader;
|
||||||
|
@ -30,11 +30,16 @@ import java.nio.file.Path;
|
||||||
import java.security.AccessController;
|
import java.security.AccessController;
|
||||||
import java.security.PrivilegedAction;
|
import java.security.PrivilegedAction;
|
||||||
|
|
||||||
|
import static org.hamcrest.Matchers.not;
|
||||||
|
|
||||||
public class NodeNameInLogsIT extends NodeNameInLogsIntegTestCase {
|
public class NodeNameInLogsIT extends NodeNameInLogsIntegTestCase {
|
||||||
@Override
|
@Override
|
||||||
protected BufferedReader openReader(Path logFile) throws IOException {
|
protected Matcher<String> nodeNameMatcher() {
|
||||||
assumeTrue("We log a line without the node name if we can't install the seccomp filters",
|
return not("");
|
||||||
BootstrapInfo.isSystemCallFilterInstalled());
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected BufferedReader openReader(Path logFile) {
|
||||||
return AccessController.doPrivileged((PrivilegedAction<BufferedReader>) () -> {
|
return AccessController.doPrivileged((PrivilegedAction<BufferedReader>) () -> {
|
||||||
try {
|
try {
|
||||||
return Files.newBufferedReader(logFile, StandardCharsets.UTF_8);
|
return Files.newBufferedReader(logFile, StandardCharsets.UTF_8);
|
||||||
|
@ -43,11 +48,4 @@ public class NodeNameInLogsIT extends NodeNameInLogsIntegTestCase {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testDummy() {
|
|
||||||
/* Dummy test case so that when we run this test on a platform that
|
|
||||||
* does not support our syscall filters and we skip the test above
|
|
||||||
* we don't fail the entire test run because we skipped all the tests.
|
|
||||||
*/
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -216,11 +216,6 @@ final class Bootstrap {
|
||||||
final BoundTransportAddress boundTransportAddress, List<BootstrapCheck> checks) throws NodeValidationException {
|
final BoundTransportAddress boundTransportAddress, List<BootstrapCheck> checks) throws NodeValidationException {
|
||||||
BootstrapChecks.check(context, boundTransportAddress, checks);
|
BootstrapChecks.check(context, boundTransportAddress, checks);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
protected void registerDerivedNodeNameWithLogger(String nodeName) {
|
|
||||||
LogConfigurator.setNodeName(nodeName);
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -260,7 +255,9 @@ final class Bootstrap {
|
||||||
if (secureSettings != null) {
|
if (secureSettings != null) {
|
||||||
builder.setSecureSettings(secureSettings);
|
builder.setSecureSettings(secureSettings);
|
||||||
}
|
}
|
||||||
return InternalSettingsPreparer.prepareEnvironment(builder.build(), Collections.emptyMap(), configPath);
|
return InternalSettingsPreparer.prepareEnvironment(builder.build(), Collections.emptyMap(), configPath,
|
||||||
|
// HOSTNAME is set by elasticsearch-env and elasticsearch-env.bat so it is always available
|
||||||
|
() -> System.getenv("HOSTNAME"));
|
||||||
}
|
}
|
||||||
|
|
||||||
private void start() throws NodeValidationException {
|
private void start() throws NodeValidationException {
|
||||||
|
@ -293,9 +290,7 @@ final class Bootstrap {
|
||||||
final SecureSettings keystore = loadSecureSettings(initialEnv);
|
final SecureSettings keystore = loadSecureSettings(initialEnv);
|
||||||
final Environment environment = createEnvironment(pidFile, keystore, initialEnv.settings(), initialEnv.configFile());
|
final Environment environment = createEnvironment(pidFile, keystore, initialEnv.settings(), initialEnv.configFile());
|
||||||
|
|
||||||
if (Node.NODE_NAME_SETTING.exists(environment.settings())) {
|
LogConfigurator.setNodeName(Node.NODE_NAME_SETTING.get(environment.settings()));
|
||||||
LogConfigurator.setNodeName(Node.NODE_NAME_SETTING.get(environment.settings()));
|
|
||||||
}
|
|
||||||
try {
|
try {
|
||||||
LogConfigurator.configure(environment);
|
LogConfigurator.configure(environment);
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
|
|
|
@ -92,7 +92,10 @@ public abstract class EnvironmentAwareCommand extends Command {
|
||||||
if (esPathConf == null) {
|
if (esPathConf == null) {
|
||||||
throw new UserException(ExitCodes.CONFIG, "the system property [es.path.conf] must be set");
|
throw new UserException(ExitCodes.CONFIG, "the system property [es.path.conf] must be set");
|
||||||
}
|
}
|
||||||
return InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, settings, getConfigPath(esPathConf));
|
return InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, settings,
|
||||||
|
getConfigPath(esPathConf),
|
||||||
|
// HOSTNAME is set by elasticsearch-env and elasticsearch-env.bat so it is always available
|
||||||
|
() -> System.getenv("HOSTNAME"));
|
||||||
}
|
}
|
||||||
|
|
||||||
@SuppressForbidden(reason = "need path to construct environment")
|
@SuppressForbidden(reason = "need path to construct environment")
|
||||||
|
|
|
@ -279,8 +279,7 @@ public class LogConfigurator {
|
||||||
* {@code es.logs.cluster_name} the cluster name, used as the prefix of log filenames in the default configuration
|
* {@code es.logs.cluster_name} the cluster name, used as the prefix of log filenames in the default configuration
|
||||||
* </li>
|
* </li>
|
||||||
* <li>
|
* <li>
|
||||||
* {@code es.logs.node_name} the node name, can be used as part of log filenames (only exposed if {@link Node#NODE_NAME_SETTING} is
|
* {@code es.logs.node_name} the node name, can be used as part of log filenames
|
||||||
* explicitly set)
|
|
||||||
* </li>
|
* </li>
|
||||||
* </ul>
|
* </ul>
|
||||||
*
|
*
|
||||||
|
@ -291,9 +290,7 @@ public class LogConfigurator {
|
||||||
private static void setLogConfigurationSystemProperty(final Path logsPath, final Settings settings) {
|
private static void setLogConfigurationSystemProperty(final Path logsPath, final Settings settings) {
|
||||||
System.setProperty("es.logs.base_path", logsPath.toString());
|
System.setProperty("es.logs.base_path", logsPath.toString());
|
||||||
System.setProperty("es.logs.cluster_name", ClusterName.CLUSTER_NAME_SETTING.get(settings).value());
|
System.setProperty("es.logs.cluster_name", ClusterName.CLUSTER_NAME_SETTING.get(settings).value());
|
||||||
if (Node.NODE_NAME_SETTING.exists(settings)) {
|
System.setProperty("es.logs.node_name", Node.NODE_NAME_SETTING.get(settings));
|
||||||
System.setProperty("es.logs.node_name", Node.NODE_NAME_SETTING.get(settings));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -57,21 +57,22 @@ public final class NodeNamePatternConverter extends LogEventPatternConverter {
|
||||||
throw new IllegalArgumentException("no options supported but options provided: "
|
throw new IllegalArgumentException("no options supported but options provided: "
|
||||||
+ Arrays.toString(options));
|
+ Arrays.toString(options));
|
||||||
}
|
}
|
||||||
return new NodeNamePatternConverter();
|
String nodeName = NODE_NAME.get();
|
||||||
|
if (nodeName == null) {
|
||||||
|
throw new IllegalStateException("the node name hasn't been set");
|
||||||
|
}
|
||||||
|
return new NodeNamePatternConverter(nodeName);
|
||||||
}
|
}
|
||||||
|
|
||||||
private NodeNamePatternConverter() {
|
private final String nodeName;
|
||||||
|
|
||||||
|
private NodeNamePatternConverter(String nodeName) {
|
||||||
super("NodeName", "node_name");
|
super("NodeName", "node_name");
|
||||||
|
this.nodeName = nodeName;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void format(LogEvent event, StringBuilder toAppendTo) {
|
public void format(LogEvent event, StringBuilder toAppendTo) {
|
||||||
/*
|
toAppendTo.append(nodeName);
|
||||||
* We're not thrilled about this volatile read on every line logged but
|
|
||||||
* the alternatives are slightly terrifying and/or don't work with the
|
|
||||||
* security manager.
|
|
||||||
*/
|
|
||||||
String nodeName = NODE_NAME.get();
|
|
||||||
toAppendTo.append(nodeName == null ? "unknown" : nodeName);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -78,7 +78,6 @@ import java.util.concurrent.Semaphore;
|
||||||
import java.util.concurrent.TimeUnit;
|
import java.util.concurrent.TimeUnit;
|
||||||
import java.util.concurrent.atomic.AtomicBoolean;
|
import java.util.concurrent.atomic.AtomicBoolean;
|
||||||
import java.util.concurrent.atomic.AtomicReference;
|
import java.util.concurrent.atomic.AtomicReference;
|
||||||
import java.util.function.Consumer;
|
|
||||||
|
|
||||||
import static java.util.Collections.unmodifiableSet;
|
import static java.util.Collections.unmodifiableSet;
|
||||||
|
|
||||||
|
@ -234,18 +233,14 @@ public final class NodeEnvironment implements Closeable {
|
||||||
/**
|
/**
|
||||||
* Setup the environment.
|
* Setup the environment.
|
||||||
* @param settings settings from elasticsearch.yml
|
* @param settings settings from elasticsearch.yml
|
||||||
* @param nodeIdConsumer called as soon as the node id is available to the
|
|
||||||
* node name in log messages if it wasn't loaded from
|
|
||||||
* elasticsearch.yml
|
|
||||||
*/
|
*/
|
||||||
public NodeEnvironment(Settings settings, Environment environment, Consumer<String> nodeIdConsumer) throws IOException {
|
public NodeEnvironment(Settings settings, Environment environment) throws IOException {
|
||||||
if (!DiscoveryNode.nodeRequiresLocalStorage(settings)) {
|
if (!DiscoveryNode.nodeRequiresLocalStorage(settings)) {
|
||||||
nodePaths = null;
|
nodePaths = null;
|
||||||
sharedDataPath = null;
|
sharedDataPath = null;
|
||||||
locks = null;
|
locks = null;
|
||||||
nodeLockId = -1;
|
nodeLockId = -1;
|
||||||
nodeMetaData = new NodeMetaData(generateNodeId(settings));
|
nodeMetaData = new NodeMetaData(generateNodeId(settings));
|
||||||
nodeIdConsumer.accept(nodeMetaData.nodeId());
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
boolean success = false;
|
boolean success = false;
|
||||||
|
@ -295,7 +290,6 @@ public final class NodeEnvironment implements Closeable {
|
||||||
this.nodePaths = nodeLock.nodePaths;
|
this.nodePaths = nodeLock.nodePaths;
|
||||||
this.nodeLockId = nodeLock.nodeId;
|
this.nodeLockId = nodeLock.nodeId;
|
||||||
this.nodeMetaData = loadOrCreateNodeMetaData(settings, logger, nodePaths);
|
this.nodeMetaData = loadOrCreateNodeMetaData(settings, logger, nodePaths);
|
||||||
nodeIdConsumer.accept(nodeMetaData.nodeId());
|
|
||||||
|
|
||||||
if (logger.isDebugEnabled()) {
|
if (logger.isDebugEnabled()) {
|
||||||
logger.debug("using node location [{}], local_lock_id [{}]", nodePaths, nodeLockId);
|
logger.debug("using node location [{}], local_lock_id [{}]", nodePaths, nodeLockId);
|
||||||
|
|
|
@ -26,14 +26,15 @@ import java.util.ArrayList;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
import java.util.function.Supplier;
|
||||||
import java.util.function.Function;
|
import java.util.function.Function;
|
||||||
|
|
||||||
import org.elasticsearch.Version;
|
import org.elasticsearch.Version;
|
||||||
import org.elasticsearch.cluster.ClusterName;
|
import org.elasticsearch.cluster.ClusterName;
|
||||||
import org.elasticsearch.common.collect.Tuple;
|
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.common.settings.SettingsException;
|
import org.elasticsearch.common.settings.SettingsException;
|
||||||
import org.elasticsearch.env.Environment;
|
import org.elasticsearch.env.Environment;
|
||||||
|
import org.elasticsearch.node.Node;
|
||||||
|
|
||||||
public class InternalSettingsPreparer {
|
public class InternalSettingsPreparer {
|
||||||
|
|
||||||
|
@ -41,34 +42,27 @@ public class InternalSettingsPreparer {
|
||||||
private static final String TEXT_PROMPT_VALUE = "${prompt.text}";
|
private static final String TEXT_PROMPT_VALUE = "${prompt.text}";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Prepares the settings by gathering all elasticsearch system properties and setting defaults.
|
* Prepares settings for the transport client by gathering all
|
||||||
|
* elasticsearch system properties and setting defaults.
|
||||||
*/
|
*/
|
||||||
public static Settings prepareSettings(Settings input) {
|
public static Settings prepareSettings(Settings input) {
|
||||||
Settings.Builder output = Settings.builder();
|
Settings.Builder output = Settings.builder();
|
||||||
initializeSettings(output, input, Collections.emptyMap());
|
initializeSettings(output, input, Collections.emptyMap());
|
||||||
finalizeSettings(output);
|
finalizeSettings(output, () -> null);
|
||||||
return output.build();
|
return output.build();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Prepares the settings by gathering all elasticsearch system properties, optionally loading the configuration settings.
|
|
||||||
*
|
|
||||||
* @param input The custom settings to use. These are not overwritten by settings in the configuration file.
|
|
||||||
* @return the {@link Settings} and {@link Environment} as a {@link Tuple}
|
|
||||||
*/
|
|
||||||
public static Environment prepareEnvironment(Settings input) {
|
|
||||||
return prepareEnvironment(input, Collections.emptyMap(), null);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Prepares the settings by gathering all elasticsearch system properties, optionally loading the configuration settings.
|
* Prepares the settings by gathering all elasticsearch system properties, optionally loading the configuration settings.
|
||||||
*
|
*
|
||||||
* @param input the custom settings to use; these are not overwritten by settings in the configuration file
|
* @param input the custom settings to use; these are not overwritten by settings in the configuration file
|
||||||
* @param properties map of properties key/value pairs (usually from the command-line)
|
* @param properties map of properties key/value pairs (usually from the command-line)
|
||||||
* @param configPath path to config directory; (use null to indicate the default)
|
* @param configPath path to config directory; (use null to indicate the default)
|
||||||
* @return the {@link Settings} and {@link Environment} as a {@link Tuple}
|
* @param defaultNodeName supplier for the default node.name if the setting isn't defined
|
||||||
|
* @return the {@link Environment}
|
||||||
*/
|
*/
|
||||||
public static Environment prepareEnvironment(Settings input, Map<String, String> properties, Path configPath) {
|
public static Environment prepareEnvironment(Settings input, Map<String, String> properties,
|
||||||
|
Path configPath, Supplier<String> defaultNodeName) {
|
||||||
// just create enough settings to build the environment, to get the config dir
|
// just create enough settings to build the environment, to get the config dir
|
||||||
Settings.Builder output = Settings.builder();
|
Settings.Builder output = Settings.builder();
|
||||||
initializeSettings(output, input, properties);
|
initializeSettings(output, input, properties);
|
||||||
|
@ -95,7 +89,7 @@ public class InternalSettingsPreparer {
|
||||||
// re-initialize settings now that the config file has been loaded
|
// re-initialize settings now that the config file has been loaded
|
||||||
initializeSettings(output, input, properties);
|
initializeSettings(output, input, properties);
|
||||||
checkSettingsForTerminalDeprecation(output);
|
checkSettingsForTerminalDeprecation(output);
|
||||||
finalizeSettings(output);
|
finalizeSettings(output, defaultNodeName);
|
||||||
|
|
||||||
environment = new Environment(output.build(), configPath);
|
environment = new Environment(output.build(), configPath);
|
||||||
|
|
||||||
|
@ -140,7 +134,7 @@ public class InternalSettingsPreparer {
|
||||||
/**
|
/**
|
||||||
* Finish preparing settings by replacing forced settings and any defaults that need to be added.
|
* Finish preparing settings by replacing forced settings and any defaults that need to be added.
|
||||||
*/
|
*/
|
||||||
private static void finalizeSettings(Settings.Builder output) {
|
private static void finalizeSettings(Settings.Builder output, Supplier<String> defaultNodeName) {
|
||||||
// allow to force set properties based on configuration of the settings provided
|
// allow to force set properties based on configuration of the settings provided
|
||||||
List<String> forcedSettings = new ArrayList<>();
|
List<String> forcedSettings = new ArrayList<>();
|
||||||
for (String setting : output.keys()) {
|
for (String setting : output.keys()) {
|
||||||
|
@ -154,9 +148,12 @@ public class InternalSettingsPreparer {
|
||||||
}
|
}
|
||||||
output.replacePropertyPlaceholders();
|
output.replacePropertyPlaceholders();
|
||||||
|
|
||||||
// put the cluster name
|
// put the cluster and node name if they aren't set
|
||||||
if (output.get(ClusterName.CLUSTER_NAME_SETTING.getKey()) == null) {
|
if (output.get(ClusterName.CLUSTER_NAME_SETTING.getKey()) == null) {
|
||||||
output.put(ClusterName.CLUSTER_NAME_SETTING.getKey(), ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY).value());
|
output.put(ClusterName.CLUSTER_NAME_SETTING.getKey(), ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY).value());
|
||||||
}
|
}
|
||||||
|
if (output.get(Node.NODE_NAME_SETTING.getKey()) == null) {
|
||||||
|
output.put(Node.NODE_NAME_SETTING.getKey(), defaultNodeName.get());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -185,9 +185,7 @@ import static java.util.stream.Collectors.toList;
|
||||||
* A node represent a node within a cluster ({@code cluster.name}). The {@link #client()} can be used
|
* A node represent a node within a cluster ({@code cluster.name}). The {@link #client()} can be used
|
||||||
* in order to use a {@link Client} to perform actions/operations against the cluster.
|
* in order to use a {@link Client} to perform actions/operations against the cluster.
|
||||||
*/
|
*/
|
||||||
public abstract class Node implements Closeable {
|
public class Node implements Closeable {
|
||||||
|
|
||||||
|
|
||||||
public static final Setting<Boolean> WRITE_PORTS_FILE_SETTING =
|
public static final Setting<Boolean> WRITE_PORTS_FILE_SETTING =
|
||||||
Setting.boolSetting("node.portsfile", false, Property.NodeScope);
|
Setting.boolSetting("node.portsfile", false, Property.NodeScope);
|
||||||
public static final Setting<Boolean> NODE_DATA_SETTING = Setting.boolSetting("node.data", true, Property.NodeScope);
|
public static final Setting<Boolean> NODE_DATA_SETTING = Setting.boolSetting("node.data", true, Property.NodeScope);
|
||||||
|
@ -251,15 +249,6 @@ public abstract class Node implements Closeable {
|
||||||
private final LocalNodeFactory localNodeFactory;
|
private final LocalNodeFactory localNodeFactory;
|
||||||
private final NodeService nodeService;
|
private final NodeService nodeService;
|
||||||
|
|
||||||
/**
|
|
||||||
* Constructs a node with the given settings.
|
|
||||||
*
|
|
||||||
* @param preparedSettings Base settings to configure the node with
|
|
||||||
*/
|
|
||||||
public Node(Settings preparedSettings) {
|
|
||||||
this(InternalSettingsPreparer.prepareEnvironment(preparedSettings));
|
|
||||||
}
|
|
||||||
|
|
||||||
public Node(Environment environment) {
|
public Node(Environment environment) {
|
||||||
this(environment, Collections.emptyList(), true);
|
this(environment, Collections.emptyList(), true);
|
||||||
}
|
}
|
||||||
|
@ -282,33 +271,10 @@ public abstract class Node implements Closeable {
|
||||||
Settings tmpSettings = Settings.builder().put(environment.settings())
|
Settings tmpSettings = Settings.builder().put(environment.settings())
|
||||||
.put(Client.CLIENT_TYPE_SETTING_S.getKey(), CLIENT_TYPE).build();
|
.put(Client.CLIENT_TYPE_SETTING_S.getKey(), CLIENT_TYPE).build();
|
||||||
|
|
||||||
/*
|
nodeEnvironment = new NodeEnvironment(tmpSettings, environment);
|
||||||
* Create the node environment as soon as possible so we can
|
resourcesToClose.add(nodeEnvironment);
|
||||||
* recover the node id which we might have to use to derive the
|
logger.info("node name [{}], node ID [{}]",
|
||||||
* node name. And it is important to get *that* as soon as possible
|
NODE_NAME_SETTING.get(tmpSettings), nodeEnvironment.nodeId());
|
||||||
* so that log lines can contain it.
|
|
||||||
*/
|
|
||||||
boolean nodeNameExplicitlyDefined = NODE_NAME_SETTING.exists(tmpSettings);
|
|
||||||
try {
|
|
||||||
Consumer<String> nodeIdConsumer = nodeNameExplicitlyDefined ?
|
|
||||||
nodeId -> {} : nodeId -> registerDerivedNodeNameWithLogger(nodeIdToNodeName(nodeId));
|
|
||||||
nodeEnvironment = new NodeEnvironment(tmpSettings, environment, nodeIdConsumer);
|
|
||||||
resourcesToClose.add(nodeEnvironment);
|
|
||||||
} catch (IOException ex) {
|
|
||||||
throw new IllegalStateException("Failed to create node environment", ex);
|
|
||||||
}
|
|
||||||
if (nodeNameExplicitlyDefined) {
|
|
||||||
logger.info("node name [{}], node ID [{}]",
|
|
||||||
NODE_NAME_SETTING.get(tmpSettings), nodeEnvironment.nodeId());
|
|
||||||
} else {
|
|
||||||
tmpSettings = Settings.builder()
|
|
||||||
.put(tmpSettings)
|
|
||||||
.put(NODE_NAME_SETTING.getKey(), nodeIdToNodeName(nodeEnvironment.nodeId()))
|
|
||||||
.build();
|
|
||||||
logger.info("node name derived from node ID [{}]; set [{}] to override",
|
|
||||||
nodeEnvironment.nodeId(), NODE_NAME_SETTING.getKey());
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
final JvmInfo jvmInfo = JvmInfo.jvmInfo();
|
final JvmInfo jvmInfo = JvmInfo.jvmInfo();
|
||||||
logger.info(
|
logger.info(
|
||||||
|
@ -1018,18 +984,6 @@ public abstract class Node implements Closeable {
|
||||||
return networkModule.getHttpServerTransportSupplier().get();
|
return networkModule.getHttpServerTransportSupplier().get();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* If the node name was derived from the node id this is called with the
|
|
||||||
* node name as soon as it is available so that we can register the
|
|
||||||
* node name with the logger. If the node name defined in elasticsearch.yml
|
|
||||||
* this is never called.
|
|
||||||
*/
|
|
||||||
protected abstract void registerDerivedNodeNameWithLogger(String nodeName);
|
|
||||||
|
|
||||||
private String nodeIdToNodeName(String nodeId) {
|
|
||||||
return nodeId.substring(0, 7);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static class LocalNodeFactory implements Function<BoundTransportAddress, DiscoveryNode> {
|
private static class LocalNodeFactory implements Function<BoundTransportAddress, DiscoveryNode> {
|
||||||
private final SetOnce<DiscoveryNode> localNode = new SetOnce<>();
|
private final SetOnce<DiscoveryNode> localNode = new SetOnce<>();
|
||||||
private final String persistentNodeId;
|
private final String persistentNodeId;
|
||||||
|
|
|
@ -80,12 +80,12 @@ public class NodeEnvironmentTests extends ESTestCase {
|
||||||
|
|
||||||
// Reuse the same location and attempt to lock again
|
// Reuse the same location and attempt to lock again
|
||||||
IllegalStateException ex = expectThrows(IllegalStateException.class, () ->
|
IllegalStateException ex = expectThrows(IllegalStateException.class, () ->
|
||||||
new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings), nodeId -> {}));
|
new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings)));
|
||||||
assertThat(ex.getMessage(), containsString("failed to obtain node lock"));
|
assertThat(ex.getMessage(), containsString("failed to obtain node lock"));
|
||||||
|
|
||||||
// Close the environment that holds the lock and make sure we can get the lock after release
|
// Close the environment that holds the lock and make sure we can get the lock after release
|
||||||
env.close();
|
env.close();
|
||||||
env = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings), nodeId -> {});
|
env = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
|
||||||
assertThat(env.nodeDataPaths(), arrayWithSize(dataPaths.size()));
|
assertThat(env.nodeDataPaths(), arrayWithSize(dataPaths.size()));
|
||||||
|
|
||||||
for (int i = 0; i < dataPaths.size(); i++) {
|
for (int i = 0; i < dataPaths.size(); i++) {
|
||||||
|
@ -120,7 +120,7 @@ public class NodeEnvironmentTests extends ESTestCase {
|
||||||
final Settings settings = buildEnvSettings(Settings.builder().put("node.max_local_storage_nodes", 2).build());
|
final Settings settings = buildEnvSettings(Settings.builder().put("node.max_local_storage_nodes", 2).build());
|
||||||
final NodeEnvironment first = newNodeEnvironment(settings);
|
final NodeEnvironment first = newNodeEnvironment(settings);
|
||||||
List<String> dataPaths = Environment.PATH_DATA_SETTING.get(settings);
|
List<String> dataPaths = Environment.PATH_DATA_SETTING.get(settings);
|
||||||
NodeEnvironment second = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings), nodeId -> {});
|
NodeEnvironment second = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
|
||||||
assertEquals(first.nodeDataPaths().length, dataPaths.size());
|
assertEquals(first.nodeDataPaths().length, dataPaths.size());
|
||||||
assertEquals(second.nodeDataPaths().length, dataPaths.size());
|
assertEquals(second.nodeDataPaths().length, dataPaths.size());
|
||||||
for (int i = 0; i < dataPaths.size(); i++) {
|
for (int i = 0; i < dataPaths.size(); i++) {
|
||||||
|
@ -477,7 +477,7 @@ public class NodeEnvironmentTests extends ESTestCase {
|
||||||
@Override
|
@Override
|
||||||
public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException {
|
public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException {
|
||||||
Settings build = buildEnvSettings(settings);
|
Settings build = buildEnvSettings(settings);
|
||||||
return new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {});
|
return new NodeEnvironment(build, TestEnvironment.newEnvironment(build));
|
||||||
}
|
}
|
||||||
|
|
||||||
public Settings buildEnvSettings(Settings settings) {
|
public Settings buildEnvSettings(Settings settings) {
|
||||||
|
@ -492,7 +492,7 @@ public class NodeEnvironmentTests extends ESTestCase {
|
||||||
.put(settings)
|
.put(settings)
|
||||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
|
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
|
||||||
.putList(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
|
.putList(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
|
||||||
return new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {});
|
return new NodeEnvironment(build, TestEnvironment.newEnvironment(build));
|
||||||
}
|
}
|
||||||
|
|
||||||
public NodeEnvironment newNodeEnvironment(String[] dataPaths, String sharedDataPath, Settings settings) throws IOException {
|
public NodeEnvironment newNodeEnvironment(String[] dataPaths, String sharedDataPath, Settings settings) throws IOException {
|
||||||
|
@ -501,6 +501,6 @@ public class NodeEnvironmentTests extends ESTestCase {
|
||||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
|
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
|
||||||
.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), sharedDataPath)
|
.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), sharedDataPath)
|
||||||
.putList(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
|
.putList(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
|
||||||
return new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {});
|
return new NodeEnvironment(build, TestEnvironment.newEnvironment(build));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -135,7 +135,7 @@ public class IndexModuleTests extends ESTestCase {
|
||||||
bigArrays = new BigArrays(pageCacheRecycler, circuitBreakerService);
|
bigArrays = new BigArrays(pageCacheRecycler, circuitBreakerService);
|
||||||
scriptService = new ScriptService(settings, Collections.emptyMap(), Collections.emptyMap());
|
scriptService = new ScriptService(settings, Collections.emptyMap(), Collections.emptyMap());
|
||||||
clusterService = ClusterServiceUtils.createClusterService(threadPool);
|
clusterService = ClusterServiceUtils.createClusterService(threadPool);
|
||||||
nodeEnvironment = new NodeEnvironment(settings, environment, nodeId -> {});
|
nodeEnvironment = new NodeEnvironment(settings, environment);
|
||||||
mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry();
|
mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -178,7 +178,7 @@ public class NewPathForShardTests extends ESTestCase {
|
||||||
Settings settings = Settings.builder()
|
Settings settings = Settings.builder()
|
||||||
.put(Environment.PATH_HOME_SETTING.getKey(), path)
|
.put(Environment.PATH_HOME_SETTING.getKey(), path)
|
||||||
.putList(Environment.PATH_DATA_SETTING.getKey(), paths).build();
|
.putList(Environment.PATH_DATA_SETTING.getKey(), paths).build();
|
||||||
NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings), nodeId -> {});
|
NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
|
||||||
|
|
||||||
// Make sure all our mocking above actually worked:
|
// Make sure all our mocking above actually worked:
|
||||||
NodePath[] nodePaths = nodeEnv.nodePaths();
|
NodePath[] nodePaths = nodeEnv.nodePaths();
|
||||||
|
@ -233,7 +233,7 @@ public class NewPathForShardTests extends ESTestCase {
|
||||||
Settings settings = Settings.builder()
|
Settings settings = Settings.builder()
|
||||||
.put(Environment.PATH_HOME_SETTING.getKey(), path)
|
.put(Environment.PATH_HOME_SETTING.getKey(), path)
|
||||||
.putList(Environment.PATH_DATA_SETTING.getKey(), paths).build();
|
.putList(Environment.PATH_DATA_SETTING.getKey(), paths).build();
|
||||||
NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings), nodeId -> {});
|
NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
|
||||||
|
|
||||||
// Make sure all our mocking above actually worked:
|
// Make sure all our mocking above actually worked:
|
||||||
NodePath[] nodePaths = nodeEnv.nodePaths();
|
NodePath[] nodePaths = nodeEnv.nodePaths();
|
||||||
|
@ -290,7 +290,7 @@ public class NewPathForShardTests extends ESTestCase {
|
||||||
Settings settings = Settings.builder()
|
Settings settings = Settings.builder()
|
||||||
.put(Environment.PATH_HOME_SETTING.getKey(), path)
|
.put(Environment.PATH_HOME_SETTING.getKey(), path)
|
||||||
.putList(Environment.PATH_DATA_SETTING.getKey(), paths).build();
|
.putList(Environment.PATH_DATA_SETTING.getKey(), paths).build();
|
||||||
NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings), nodeId -> {});
|
NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
|
||||||
|
|
||||||
aFileStore.usableSpace = 100000;
|
aFileStore.usableSpace = 100000;
|
||||||
bFileStore.usableSpace = 1000;
|
bFileStore.usableSpace = 1000;
|
||||||
|
@ -315,7 +315,7 @@ public class NewPathForShardTests extends ESTestCase {
|
||||||
Settings settings = Settings.builder()
|
Settings settings = Settings.builder()
|
||||||
.put(Environment.PATH_HOME_SETTING.getKey(), path)
|
.put(Environment.PATH_HOME_SETTING.getKey(), path)
|
||||||
.putList(Environment.PATH_DATA_SETTING.getKey(), paths).build();
|
.putList(Environment.PATH_DATA_SETTING.getKey(), paths).build();
|
||||||
NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings), nodeId -> {});
|
NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
|
||||||
|
|
||||||
// Make sure all our mocking above actually worked:
|
// Make sure all our mocking above actually worked:
|
||||||
NodePath[] nodePaths = nodeEnv.nodePaths();
|
NodePath[] nodePaths = nodeEnv.nodePaths();
|
||||||
|
|
|
@ -399,7 +399,7 @@ public class RemoveCorruptedShardDataCommandTests extends IndexShardTestCase {
|
||||||
|
|
||||||
private void writeIndexState() throws IOException {
|
private void writeIndexState() throws IOException {
|
||||||
// create _state of IndexMetaData
|
// create _state of IndexMetaData
|
||||||
try(NodeEnvironment nodeEnvironment = new NodeEnvironment(environment.settings(), environment, nId -> {})) {
|
try(NodeEnvironment nodeEnvironment = new NodeEnvironment(environment.settings(), environment)) {
|
||||||
final Path[] paths = nodeEnvironment.indexPaths(indexMetaData.getIndex());
|
final Path[] paths = nodeEnvironment.indexPaths(indexMetaData.getIndex());
|
||||||
IndexMetaData.FORMAT.write(indexMetaData, paths);
|
IndexMetaData.FORMAT.write(indexMetaData, paths);
|
||||||
logger.info("--> index metadata persisted to {} ", Arrays.toString(paths));
|
logger.info("--> index metadata persisted to {} ", Arrays.toString(paths));
|
||||||
|
|
|
@ -37,9 +37,14 @@ import java.nio.file.Files;
|
||||||
import java.nio.file.Path;
|
import java.nio.file.Path;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
import java.util.function.Supplier;
|
||||||
|
|
||||||
|
import static java.util.Collections.emptyMap;
|
||||||
|
|
||||||
public class InternalSettingsPreparerTests extends ESTestCase {
|
public class InternalSettingsPreparerTests extends ESTestCase {
|
||||||
|
private static final Supplier<String> DEFAULT_NODE_NAME_SHOULDNT_BE_CALLED = () -> {
|
||||||
|
throw new AssertionError("shouldn't be called");
|
||||||
|
};
|
||||||
|
|
||||||
Path homeDir;
|
Path homeDir;
|
||||||
Settings baseEnvSettings;
|
Settings baseEnvSettings;
|
||||||
|
@ -60,13 +65,14 @@ public class InternalSettingsPreparerTests extends ESTestCase {
|
||||||
|
|
||||||
public void testEmptySettings() {
|
public void testEmptySettings() {
|
||||||
Settings settings = InternalSettingsPreparer.prepareSettings(Settings.EMPTY);
|
Settings settings = InternalSettingsPreparer.prepareSettings(Settings.EMPTY);
|
||||||
assertNull(settings.get("node.name")); // a name was not set
|
assertNull(settings.get("node.name"));
|
||||||
assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); // a cluster name was set
|
assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); // a cluster name was set
|
||||||
int size = settings.names().size();
|
int size = settings.names().size();
|
||||||
|
|
||||||
Environment env = InternalSettingsPreparer.prepareEnvironment(baseEnvSettings);
|
String defaultNodeName = randomAlphaOfLength(8);
|
||||||
|
Environment env = InternalSettingsPreparer.prepareEnvironment(baseEnvSettings, emptyMap(), null, () -> defaultNodeName);
|
||||||
settings = env.settings();
|
settings = env.settings();
|
||||||
assertNull(settings.get("node.name")); // a name was not set
|
assertEquals(defaultNodeName, settings.get("node.name"));
|
||||||
assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); // a cluster name was set
|
assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); // a cluster name was set
|
||||||
assertEquals(settings.toString(), size + 1 /* path.home is in the base settings */, settings.names().size());
|
assertEquals(settings.toString(), size + 1 /* path.home is in the base settings */, settings.names().size());
|
||||||
String home = Environment.PATH_HOME_SETTING.get(baseEnvSettings);
|
String home = Environment.PATH_HOME_SETTING.get(baseEnvSettings);
|
||||||
|
@ -88,9 +94,8 @@ public class InternalSettingsPreparerTests extends ESTestCase {
|
||||||
Path config = home.resolve("config");
|
Path config = home.resolve("config");
|
||||||
Files.createDirectory(config);
|
Files.createDirectory(config);
|
||||||
Files.copy(garbage, config.resolve("elasticsearch.yml"));
|
Files.copy(garbage, config.resolve("elasticsearch.yml"));
|
||||||
InternalSettingsPreparer.prepareEnvironment(Settings.builder()
|
InternalSettingsPreparer.prepareEnvironment(Settings.builder().put(baseEnvSettings).build(),
|
||||||
.put(baseEnvSettings)
|
emptyMap(), null, () -> "default_node_name");
|
||||||
.build());
|
|
||||||
} catch (SettingsException e) {
|
} catch (SettingsException e) {
|
||||||
assertEquals("Failed to load settings from [elasticsearch.yml]", e.getMessage());
|
assertEquals("Failed to load settings from [elasticsearch.yml]", e.getMessage());
|
||||||
}
|
}
|
||||||
|
@ -101,8 +106,8 @@ public class InternalSettingsPreparerTests extends ESTestCase {
|
||||||
Path config = homeDir.resolve("config");
|
Path config = homeDir.resolve("config");
|
||||||
Files.createDirectory(config);
|
Files.createDirectory(config);
|
||||||
Files.copy(yaml, config.resolve("elasticsearch.yaml"));
|
Files.copy(yaml, config.resolve("elasticsearch.yaml"));
|
||||||
SettingsException e = expectThrows(SettingsException.class, () ->
|
SettingsException e = expectThrows(SettingsException.class, () -> InternalSettingsPreparer.prepareEnvironment(
|
||||||
InternalSettingsPreparer.prepareEnvironment(Settings.builder().put(baseEnvSettings).build()));
|
Settings.builder().put(baseEnvSettings).build(), emptyMap(), null, DEFAULT_NODE_NAME_SHOULDNT_BE_CALLED));
|
||||||
assertEquals("elasticsearch.yaml was deprecated in 5.5.0 and must be renamed to elasticsearch.yml", e.getMessage());
|
assertEquals("elasticsearch.yaml was deprecated in 5.5.0 and must be renamed to elasticsearch.yml", e.getMessage());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -111,8 +116,8 @@ public class InternalSettingsPreparerTests extends ESTestCase {
|
||||||
Path config = homeDir.resolve("config");
|
Path config = homeDir.resolve("config");
|
||||||
Files.createDirectory(config);
|
Files.createDirectory(config);
|
||||||
Files.copy(yaml, config.resolve("elasticsearch.json"));
|
Files.copy(yaml, config.resolve("elasticsearch.json"));
|
||||||
SettingsException e = expectThrows(SettingsException.class, () ->
|
SettingsException e = expectThrows(SettingsException.class, () -> InternalSettingsPreparer.prepareEnvironment(
|
||||||
InternalSettingsPreparer.prepareEnvironment(Settings.builder().put(baseEnvSettings).build()));
|
Settings.builder().put(baseEnvSettings).build(), emptyMap(), null, DEFAULT_NODE_NAME_SHOULDNT_BE_CALLED));
|
||||||
assertEquals("elasticsearch.json was deprecated in 5.5.0 and must be converted to elasticsearch.yml", e.getMessage());
|
assertEquals("elasticsearch.json was deprecated in 5.5.0 and must be converted to elasticsearch.yml", e.getMessage());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -120,14 +125,16 @@ public class InternalSettingsPreparerTests extends ESTestCase {
|
||||||
MockSecureSettings secureSettings = new MockSecureSettings();
|
MockSecureSettings secureSettings = new MockSecureSettings();
|
||||||
secureSettings.setString("foo", "secret");
|
secureSettings.setString("foo", "secret");
|
||||||
Settings input = Settings.builder().put(baseEnvSettings).setSecureSettings(secureSettings).build();
|
Settings input = Settings.builder().put(baseEnvSettings).setSecureSettings(secureSettings).build();
|
||||||
Environment env = InternalSettingsPreparer.prepareEnvironment(input);
|
Environment env = InternalSettingsPreparer.prepareEnvironment(input, emptyMap(),
|
||||||
|
null, () -> "default_node_name");
|
||||||
Setting<SecureString> fakeSetting = SecureSetting.secureString("foo", null);
|
Setting<SecureString> fakeSetting = SecureSetting.secureString("foo", null);
|
||||||
assertEquals("secret", fakeSetting.get(env.settings()).toString());
|
assertEquals("secret", fakeSetting.get(env.settings()).toString());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testDefaultPropertiesDoNothing() throws Exception {
|
public void testDefaultPropertiesDoNothing() throws Exception {
|
||||||
Map<String, String> props = Collections.singletonMap("default.setting", "foo");
|
Map<String, String> props = Collections.singletonMap("default.setting", "foo");
|
||||||
Environment env = InternalSettingsPreparer.prepareEnvironment(baseEnvSettings, props, null);
|
Environment env = InternalSettingsPreparer.prepareEnvironment(baseEnvSettings, props,
|
||||||
|
null, () -> "default_node_name");
|
||||||
assertEquals("foo", env.settings().get("default.setting"));
|
assertEquals("foo", env.settings().get("default.setting"));
|
||||||
assertNull(env.settings().get("setting"));
|
assertNull(env.settings().get("setting"));
|
||||||
}
|
}
|
||||||
|
|
|
@ -40,7 +40,6 @@ import java.util.Collections;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.concurrent.atomic.AtomicBoolean;
|
import java.util.concurrent.atomic.AtomicBoolean;
|
||||||
|
|
||||||
import static org.hamcrest.Matchers.equalTo;
|
|
||||||
import static org.mockito.Mockito.mock;
|
import static org.mockito.Mockito.mock;
|
||||||
import static org.mockito.Mockito.reset;
|
import static org.mockito.Mockito.reset;
|
||||||
import static org.mockito.Mockito.verify;
|
import static org.mockito.Mockito.verify;
|
||||||
|
@ -49,22 +48,6 @@ import static org.mockito.Mockito.verifyNoMoreInteractions;
|
||||||
@LuceneTestCase.SuppressFileSystems(value = "ExtrasFS")
|
@LuceneTestCase.SuppressFileSystems(value = "ExtrasFS")
|
||||||
public class NodeTests extends ESTestCase {
|
public class NodeTests extends ESTestCase {
|
||||||
|
|
||||||
public void testNodeName() throws IOException {
|
|
||||||
final String name = randomBoolean() ? randomAlphaOfLength(10) : null;
|
|
||||||
Settings.Builder settings = baseSettings();
|
|
||||||
if (name != null) {
|
|
||||||
settings.put(Node.NODE_NAME_SETTING.getKey(), name);
|
|
||||||
}
|
|
||||||
try (Node node = new MockNode(settings.build(), basePlugins())) {
|
|
||||||
final Settings nodeSettings = randomBoolean() ? node.settings() : node.getEnvironment().settings();
|
|
||||||
if (name == null) {
|
|
||||||
assertThat(Node.NODE_NAME_SETTING.get(nodeSettings), equalTo(node.getNodeEnvironment().nodeId().substring(0, 7)));
|
|
||||||
} else {
|
|
||||||
assertThat(Node.NODE_NAME_SETTING.get(nodeSettings), equalTo(name));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public static class CheckPlugin extends Plugin {
|
public static class CheckPlugin extends Plugin {
|
||||||
public static final BootstrapCheck CHECK = context -> BootstrapCheck.BootstrapCheckResult.success();
|
public static final BootstrapCheck CHECK = context -> BootstrapCheck.BootstrapCheckResult.success();
|
||||||
|
|
||||||
|
|
|
@ -46,12 +46,17 @@ public abstract class NodeNameInLogsIntegTestCase extends ESRestTestCase {
|
||||||
*/
|
*/
|
||||||
private static final int LINES_TO_CHECK = 10;
|
private static final int LINES_TO_CHECK = 10;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The node name to expect in the logs file.
|
||||||
|
*/
|
||||||
|
protected abstract org.hamcrest.Matcher<String> nodeNameMatcher();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Open the log file. This is delegated to subclasses because the test
|
* Open the log file. This is delegated to subclasses because the test
|
||||||
* framework doesn't have permission to read from the log file but
|
* framework doesn't have permission to read from the log file but
|
||||||
* subclasses can grant themselves that permission.
|
* subclasses can grant themselves that permission.
|
||||||
*/
|
*/
|
||||||
protected abstract BufferedReader openReader(Path logFile) throws IOException ;
|
protected abstract BufferedReader openReader(Path logFile);
|
||||||
|
|
||||||
public void testNodeNameIsOnAllLinesOfLog() throws IOException {
|
public void testNodeNameIsOnAllLinesOfLog() throws IOException {
|
||||||
BufferedReader logReader = openReader(getLogFile());
|
BufferedReader logReader = openReader(getLogFile());
|
||||||
|
@ -64,7 +69,7 @@ public abstract class NodeNameInLogsIntegTestCase extends ESRestTestCase {
|
||||||
}
|
}
|
||||||
String nodeName = m.group(1);
|
String nodeName = m.group(1);
|
||||||
|
|
||||||
assertNotEquals("unknown", nodeName);
|
assertThat(nodeName, nodeNameMatcher());
|
||||||
|
|
||||||
int lineNumber = 1;
|
int lineNumber = 1;
|
||||||
while (true) {
|
while (true) {
|
||||||
|
|
|
@ -85,7 +85,7 @@ public class MockNode extends Node {
|
||||||
final Path configPath,
|
final Path configPath,
|
||||||
final boolean forbidPrivateIndexSettings) {
|
final boolean forbidPrivateIndexSettings) {
|
||||||
this(
|
this(
|
||||||
InternalSettingsPreparer.prepareEnvironment(settings, Collections.emptyMap(), configPath),
|
InternalSettingsPreparer.prepareEnvironment(settings, Collections.emptyMap(), configPath, () -> "mock_ node"),
|
||||||
classpathPlugins,
|
classpathPlugins,
|
||||||
forbidPrivateIndexSettings);
|
forbidPrivateIndexSettings);
|
||||||
}
|
}
|
||||||
|
@ -174,9 +174,4 @@ public class MockNode extends Node {
|
||||||
return new MockHttpTransport();
|
return new MockHttpTransport();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
protected void registerDerivedNodeNameWithLogger(String nodeName) {
|
|
||||||
// Nothing to do because test uses the thread name
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -93,6 +93,7 @@ import java.util.function.Function;
|
||||||
import java.util.stream.Stream;
|
import java.util.stream.Stream;
|
||||||
|
|
||||||
import static java.util.Collections.emptyList;
|
import static java.util.Collections.emptyList;
|
||||||
|
import static java.util.Collections.emptyMap;
|
||||||
import static java.util.stream.Collectors.toList;
|
import static java.util.stream.Collectors.toList;
|
||||||
|
|
||||||
public abstract class AbstractBuilderTestCase extends ESTestCase {
|
public abstract class AbstractBuilderTestCase extends ESTestCase {
|
||||||
|
@ -330,7 +331,10 @@ public abstract class AbstractBuilderTestCase extends ESTestCase {
|
||||||
AbstractBuilderTestCase testCase,
|
AbstractBuilderTestCase testCase,
|
||||||
boolean registerType) throws IOException {
|
boolean registerType) throws IOException {
|
||||||
this.nowInMillis = nowInMillis;
|
this.nowInMillis = nowInMillis;
|
||||||
Environment env = InternalSettingsPreparer.prepareEnvironment(nodeSettings);
|
Environment env = InternalSettingsPreparer.prepareEnvironment(nodeSettings, emptyMap(),
|
||||||
|
null, () -> {
|
||||||
|
throw new AssertionError("node.name must be set");
|
||||||
|
});
|
||||||
PluginsService pluginsService;
|
PluginsService pluginsService;
|
||||||
pluginsService = new PluginsService(nodeSettings, null, env.modulesFile(), env.pluginsFile(), plugins);
|
pluginsService = new PluginsService(nodeSettings, null, env.modulesFile(), env.pluginsFile(), plugins);
|
||||||
|
|
||||||
|
|
|
@ -934,7 +934,7 @@ public abstract class ESTestCase extends LuceneTestCase {
|
||||||
.put(settings)
|
.put(settings)
|
||||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath())
|
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath())
|
||||||
.putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build();
|
.putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build();
|
||||||
return new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {});
|
return new NodeEnvironment(build, TestEnvironment.newEnvironment(build));
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Return consistent index settings for the provided index version. */
|
/** Return consistent index settings for the provided index version. */
|
||||||
|
|
|
@ -18,6 +18,7 @@ import org.elasticsearch.common.unit.ByteSizeValue;
|
||||||
import org.elasticsearch.common.xcontent.XContentType;
|
import org.elasticsearch.common.xcontent.XContentType;
|
||||||
import org.elasticsearch.index.query.QueryBuilders;
|
import org.elasticsearch.index.query.QueryBuilders;
|
||||||
import org.elasticsearch.monitor.jvm.JvmInfo;
|
import org.elasticsearch.monitor.jvm.JvmInfo;
|
||||||
|
import org.elasticsearch.node.InternalSettingsPreparer;
|
||||||
import org.elasticsearch.node.MockNode;
|
import org.elasticsearch.node.MockNode;
|
||||||
import org.elasticsearch.node.Node;
|
import org.elasticsearch.node.Node;
|
||||||
import org.elasticsearch.script.Script;
|
import org.elasticsearch.script.Script;
|
||||||
|
@ -94,12 +95,13 @@ public class WatcherScheduleEngineBenchmark {
|
||||||
|
|
||||||
|
|
||||||
// First clean everything and index the watcher (but not via put alert api!)
|
// First clean everything and index the watcher (but not via put alert api!)
|
||||||
try (Node node = new Node(Settings.builder().put(SETTINGS).put("node.data", false).build()) {
|
try (Node node = new Node(InternalSettingsPreparer.prepareEnvironment(
|
||||||
@Override
|
Settings.builder().put(SETTINGS).put("node.data", false).build(),
|
||||||
protected void registerDerivedNodeNameWithLogger(String nodeName) {
|
emptyMap(),
|
||||||
// Nothing to do because test uses the thread name
|
null,
|
||||||
}
|
() -> {
|
||||||
}.start()) {
|
throw new IllegalArgumentException("settings must have [node.name]");
|
||||||
|
})).start()) {
|
||||||
try (Client client = node.client()) {
|
try (Client client = node.client()) {
|
||||||
ClusterHealthResponse response = client.admin().cluster().prepareHealth().setWaitForNodes("2").get();
|
ClusterHealthResponse response = client.admin().cluster().prepareHealth().setWaitForNodes("2").get();
|
||||||
if (response.getNumberOfNodes() != 2 && response.getNumberOfDataNodes() != 1) {
|
if (response.getNumberOfNodes() != 2 && response.getNumberOfDataNodes() != 1) {
|
||||||
|
|
Loading…
Reference in New Issue