[1.x] Backport opensearch-upgrade CLI tool (#1222)
* A CLI tool to assist during an upgrade to OpenSearch. (#846) This change adds the initial version of a new CLI tool `opensearch-upgrade` as part of the OpenSearch distribution. This tool is meant for assisting during an upgrade from an existing Elasticsearch v7.10.2/v6.8.0 node to OpenSearch. It automates the process of importing existing configurations and installing of core plugins. Signed-off-by: Rabi Panda <adnapibar@gmail.com> * Validation for official plugins for upgrade tool (#973) Add validation to check for official plugins during the plugins installation task for the upgrade tool. Signed-off-by: Vacha Shah <vachshah@amazon.com> Co-authored-by: Vacha <vachshah@amazon.com>
This commit is contained in:
parent
ba9c8c0844
commit
6fe24609e0
|
@ -268,7 +268,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
|
|||
* Properties to expand when copying packaging files *
|
||||
*****************************************************************************/
|
||||
configurations {
|
||||
['libs', 'libsPluginCli', 'libsKeystoreCli', 'libsSecurityCli'].each {
|
||||
['libs', 'libsPluginCli', 'libsKeystoreCli', 'libsUpgradeCli'].each {
|
||||
create(it) {
|
||||
canBeConsumed = false
|
||||
canBeResolved = true
|
||||
|
@ -289,6 +289,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
|
|||
|
||||
libsPluginCli project(':distribution:tools:plugin-cli')
|
||||
libsKeystoreCli project(path: ':distribution:tools:keystore-cli')
|
||||
libsUpgradeCli project(path: ':distribution:tools:upgrade-cli')
|
||||
}
|
||||
|
||||
project.ext {
|
||||
|
@ -306,6 +307,9 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
|
|||
into('tools/keystore-cli') {
|
||||
from(configurations.libsKeystoreCli)
|
||||
}
|
||||
into('tools/upgrade-cli') {
|
||||
from(configurations.libsUpgradeCli)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
#!/bin/bash
|
||||
|
||||
OPENSEARCH_MAIN_CLASS=org.opensearch.upgrade.UpgradeCli \
|
||||
OPENSEARCH_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/upgrade-cli \
|
||||
"`dirname "$0"`"/opensearch-cli \
|
||||
"$@"
|
|
@ -0,0 +1,16 @@
|
|||
@echo off
|
||||
|
||||
setlocal enabledelayedexpansion
|
||||
setlocal enableextensions
|
||||
|
||||
set OPENSEARCH_MAIN_CLASS=org.opensearch.upgrade.UpgradeCli
|
||||
set OPENSEARCH_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/upgrade-cli
|
||||
call "%~dp0opensearch-cli.bat" ^
|
||||
%%* ^
|
||||
|| goto exit
|
||||
|
||||
|
||||
endlocal
|
||||
endlocal
|
||||
:exit
|
||||
exit /b %ERRORLEVEL%
|
|
@ -0,0 +1,31 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*
|
||||
*/
|
||||
|
||||
apply plugin: 'opensearch.build'
|
||||
|
||||
archivesBaseName = 'opensearch-upgrade-cli'
|
||||
|
||||
dependencies {
|
||||
compileOnly project(":server")
|
||||
compileOnly project(":libs:opensearch-cli")
|
||||
implementation "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
|
||||
implementation "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
|
||||
implementation "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
|
||||
testImplementation project(":test:framework")
|
||||
testImplementation 'com.google.jimfs:jimfs:1.2'
|
||||
testRuntimeOnly 'com.google.guava:guava:30.1.1-jre'
|
||||
}
|
||||
|
||||
tasks.named("dependencyLicenses").configure {
|
||||
mapping from: /jackson-.*/, to: 'jackson'
|
||||
}
|
||||
|
||||
test {
|
||||
systemProperty 'tests.security.manager', 'false'
|
||||
}
|
|
@ -0,0 +1,8 @@
|
|||
This copy of Jackson JSON processor streaming parser/generator is licensed under the
|
||||
Apache (Software) License, version 2.0 ("the License").
|
||||
See the License for details about distribution rights, and the
|
||||
specific rights regarding derivate works.
|
||||
|
||||
You may obtain a copy of the License at:
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
@ -0,0 +1,20 @@
|
|||
# Jackson JSON processor
|
||||
|
||||
Jackson is a high-performance, Free/Open Source JSON processing library.
|
||||
It was originally written by Tatu Saloranta (tatu.saloranta@iki.fi), and has
|
||||
been in development since 2007.
|
||||
It is currently developed by a community of developers, as well as supported
|
||||
commercially by FasterXML.com.
|
||||
|
||||
## Licensing
|
||||
|
||||
Jackson core and extension components may licensed under different licenses.
|
||||
To find the details that apply to this artifact see the accompanying LICENSE file.
|
||||
For more information, including possible other licensing options, contact
|
||||
FasterXML.com (http://fasterxml.com).
|
||||
|
||||
## Credits
|
||||
|
||||
A list of contributors may be found from CREDITS file, which is included
|
||||
in some artifacts (usually source distributions); but is always available
|
||||
from the source code management (SCM) system project uses.
|
|
@ -0,0 +1 @@
|
|||
2c3f5c079330f3a01726686a078979420f547ae4
|
|
@ -0,0 +1 @@
|
|||
5d9f3d441f99d721b957e3497f0a6465c764fad4
|
|
@ -0,0 +1,31 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
package org.opensearch.common.settings;
|
||||
|
||||
/**
|
||||
* Utility that has package level access to the {@link KeyStoreWrapper} for
|
||||
* saving a setting.
|
||||
*/
|
||||
public final class KeystoreWrapperUtil {
|
||||
/**
|
||||
* No public constructor. Contains only static functions.
|
||||
*/
|
||||
private KeystoreWrapperUtil() {}
|
||||
|
||||
/**
|
||||
* Save a secure setting using the wrapper.
|
||||
*
|
||||
* @param keystore an instance of {@link KeyStoreWrapper}
|
||||
* @param setting setting to save
|
||||
* @param bytes value of the setting in bytes
|
||||
*/
|
||||
public static void saveSetting(KeyStoreWrapper keystore, String setting, byte[] bytes) {
|
||||
keystore.setFile(setting, bytes);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
/**
|
||||
* This exists to get access to the package level methods of KeyStoreWrapper.
|
||||
*/
|
||||
package org.opensearch.common.settings;
|
|
@ -0,0 +1,202 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
package org.opensearch.upgrade;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.opensearch.Version;
|
||||
import org.opensearch.cli.Terminal;
|
||||
import org.opensearch.common.SuppressForbidden;
|
||||
import org.opensearch.common.collect.Tuple;
|
||||
import org.opensearch.common.settings.Settings;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Scanner;
|
||||
|
||||
/**
|
||||
* Looks for an existing elasticsearch installation. First it tries to identify automatically,
|
||||
* and if unsuccessful, asks the user to input the missing details.
|
||||
* <p>
|
||||
* If an elasticsearch installation can not be found, throws a runtime error which fails the
|
||||
* upgrade task.
|
||||
*/
|
||||
class DetectEsInstallationTask implements UpgradeTask {
|
||||
private static final int ES_DEFAULT_PORT = 9200;
|
||||
private static final String ES_CONFIG_ENV = "ES_PATH_CONF";
|
||||
private static final String ES_CONFIG_YML = "elasticsearch.yml";
|
||||
private static final String ES_HOME = "ES_HOME";
|
||||
|
||||
@SuppressForbidden(reason = "We need to read external es config files")
|
||||
@Override
|
||||
public void accept(final Tuple<TaskInput, Terminal> input) {
|
||||
final TaskInput taskInput = input.v1();
|
||||
final Terminal terminal = input.v2();
|
||||
try {
|
||||
terminal.println("Looking for an elasticsearch installation ...");
|
||||
String esHomeEnv = System.getenv(ES_HOME);
|
||||
if (esHomeEnv == null) {
|
||||
esHomeEnv = terminal.readText("Missing ES_HOME env variable, enter the path to elasticsearch home: ");
|
||||
if (esHomeEnv == null || esHomeEnv.isEmpty()) {
|
||||
throw new RuntimeException("Invalid input for path to elasticsearch home directory.");
|
||||
}
|
||||
}
|
||||
taskInput.setEsHome(new File(esHomeEnv).toPath());
|
||||
|
||||
String esConfEnv = System.getenv(ES_CONFIG_ENV);
|
||||
if (esConfEnv == null) {
|
||||
esConfEnv = terminal.readText("Missing ES_PATH_CONF env variable, enter the path to elasticsearch config directory: ");
|
||||
if (esConfEnv == null || esHomeEnv.isEmpty()) {
|
||||
throw new RuntimeException("Invalid input for path to elasticsearch config directory.");
|
||||
}
|
||||
}
|
||||
taskInput.setEsConfig(new File(esConfEnv).toPath());
|
||||
|
||||
final Settings esSettings = Settings.builder().loadFromPath(taskInput.getEsConfig().resolve(ES_CONFIG_YML)).build();
|
||||
final String url = retrieveUrl(esSettings);
|
||||
taskInput.setBaseUrl(url);
|
||||
final boolean running = isRunning(url);
|
||||
taskInput.setRunning(running);
|
||||
if (running) {
|
||||
terminal.println("Found a running instance of elasticsearch at " + url);
|
||||
taskInput.setRunning(true);
|
||||
try {
|
||||
updateTaskInput(taskInput, fetchInfoFromUrl(taskInput.getBaseUrl()));
|
||||
} catch (RuntimeException e) {
|
||||
updateTaskInput(taskInput, fetchInfoFromEsSettings(esSettings));
|
||||
}
|
||||
try {
|
||||
taskInput.setPlugins(fetchPluginsFromUrl(taskInput.getBaseUrl()));
|
||||
} catch (RuntimeException e) {
|
||||
taskInput.setPlugins(detectPluginsFromEsHome(taskInput.getEsHome()));
|
||||
}
|
||||
} else {
|
||||
terminal.println("Did not find a running instance of elasticsearch at " + url);
|
||||
updateTaskInput(taskInput, fetchInfoFromEsSettings(esSettings));
|
||||
taskInput.setPlugins(detectPluginsFromEsHome(taskInput.getEsHome()));
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException("Error detecting existing elasticsearch installation. " + e);
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private void updateTaskInput(TaskInput taskInput, Map<?, ?> response) {
|
||||
final Map<String, String> versionMap = (Map<String, String>) response.get("version");
|
||||
if (versionMap != null) {
|
||||
final String vStr = versionMap.get("number");
|
||||
if (vStr != null) {
|
||||
taskInput.setVersion(Version.fromString(vStr));
|
||||
}
|
||||
}
|
||||
taskInput.setNode((String) response.get("name"));
|
||||
taskInput.setCluster((String) response.get("cluster_name"));
|
||||
}
|
||||
|
||||
// package private for unit testing
|
||||
String retrieveUrl(final Settings esSettings) {
|
||||
final int port = Optional.ofNullable(esSettings.get("http.port")).map(this::extractPort).orElse(ES_DEFAULT_PORT);
|
||||
return "http://localhost:" + port;
|
||||
}
|
||||
|
||||
private Integer extractPort(final String port) {
|
||||
try {
|
||||
return Integer.parseInt(port.trim());
|
||||
} catch (Exception ex) {
|
||||
return ES_DEFAULT_PORT;
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressForbidden(reason = "Need to connect to http endpoint for elasticsearch.")
|
||||
private boolean isRunning(final String url) {
|
||||
try {
|
||||
final URL esUrl = new URL(url);
|
||||
final HttpURLConnection conn = (HttpURLConnection) esUrl.openConnection();
|
||||
conn.setRequestMethod("GET");
|
||||
conn.setConnectTimeout(1000);
|
||||
conn.connect();
|
||||
return conn.getResponseCode() == 200;
|
||||
} catch (IOException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressForbidden(reason = "Retrieve information on the installation.")
|
||||
private Map<?, ?> fetchInfoFromUrl(final String url) {
|
||||
try {
|
||||
final URL esUrl = new URL(url);
|
||||
final HttpURLConnection conn = (HttpURLConnection) esUrl.openConnection();
|
||||
conn.setRequestMethod("GET");
|
||||
conn.setConnectTimeout(1000);
|
||||
conn.connect();
|
||||
|
||||
final StringBuilder json = new StringBuilder();
|
||||
final Scanner scanner = new Scanner(esUrl.openStream());
|
||||
while (scanner.hasNext()) {
|
||||
json.append(scanner.nextLine());
|
||||
}
|
||||
scanner.close();
|
||||
final ObjectMapper mapper = new ObjectMapper();
|
||||
return mapper.readValue(json.toString(), Map.class);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException("Error retrieving elasticsearch cluster info, " + e);
|
||||
}
|
||||
}
|
||||
|
||||
private Map<?, ?> fetchInfoFromEsSettings(final Settings esSettings) throws IOException {
|
||||
final Map<String, String> info = new HashMap<>();
|
||||
final String node = esSettings.get("node.name") != null ? esSettings.get("node.name") : "unknown";
|
||||
final String cluster = esSettings.get("cluster.name") != null ? esSettings.get("cluster.name") : "unknown";
|
||||
info.put("name", node);
|
||||
info.put("cluster_name", cluster);
|
||||
return info;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@SuppressForbidden(reason = "Retrieve information on installed plugins.")
|
||||
private List<String> fetchPluginsFromUrl(final String url) {
|
||||
final List<String> plugins = new ArrayList<>();
|
||||
try {
|
||||
final URL esUrl = new URL(url + "/_cat/plugins?format=json&local=true");
|
||||
final HttpURLConnection conn = (HttpURLConnection) esUrl.openConnection();
|
||||
conn.setRequestMethod("GET");
|
||||
conn.setConnectTimeout(1000);
|
||||
conn.connect();
|
||||
if (conn.getResponseCode() == 200) {
|
||||
final StringBuilder json = new StringBuilder();
|
||||
final Scanner scanner = new Scanner(esUrl.openStream());
|
||||
while (scanner.hasNext()) {
|
||||
json.append(scanner.nextLine());
|
||||
}
|
||||
scanner.close();
|
||||
final ObjectMapper mapper = new ObjectMapper();
|
||||
final Map<String, String>[] response = mapper.readValue(json.toString(), Map[].class);
|
||||
for (Map<String, String> plugin : response) {
|
||||
plugins.add(plugin.get("component"));
|
||||
}
|
||||
}
|
||||
return plugins;
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException("Error retrieving elasticsearch plugin details, " + e);
|
||||
}
|
||||
}
|
||||
|
||||
private List<String> detectPluginsFromEsHome(final Path esHome) {
|
||||
// list out the contents of the plugins directory under esHome
|
||||
return Collections.emptyList();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,50 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
package org.opensearch.upgrade;
|
||||
|
||||
import org.opensearch.cli.Terminal;
|
||||
import org.opensearch.common.collect.Tuple;
|
||||
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* Imports JVM options from an existing elasticsearch installation.
|
||||
*/
|
||||
class ImportJvmOptionsTask implements UpgradeTask {
|
||||
private static final String JVM_OPTIONS_D = "jvm.options.d";
|
||||
|
||||
@Override
|
||||
public void accept(final Tuple<TaskInput, Terminal> input) {
|
||||
final TaskInput taskInput = input.v1();
|
||||
final Terminal terminal = input.v2();
|
||||
try {
|
||||
terminal.println("Importing JVM options ...");
|
||||
final Path jvmOptionsDir = taskInput.getOpenSearchConfig().resolve(JVM_OPTIONS_D);
|
||||
if (!Files.exists(jvmOptionsDir)) {
|
||||
Files.createDirectory(jvmOptionsDir);
|
||||
}
|
||||
|
||||
final Path esJvmOptionsDir = taskInput.getEsConfig().resolve(JVM_OPTIONS_D);
|
||||
if (Files.exists(esJvmOptionsDir) && Files.isDirectory(esJvmOptionsDir)) {
|
||||
final List<Path> esJvmOptionsFiles = Files.list(esJvmOptionsDir).collect(Collectors.toList());
|
||||
for (Path esJvmOptFile : esJvmOptionsFiles) {
|
||||
final Path jvmOptFile = jvmOptionsDir.resolve(esJvmOptFile.getFileName().toString());
|
||||
Files.copy(esJvmOptFile, jvmOptFile, StandardCopyOption.REPLACE_EXISTING);
|
||||
}
|
||||
}
|
||||
terminal.println("Success!" + System.lineSeparator());
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("Error importing JVM options. " + e);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,68 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
package org.opensearch.upgrade;
|
||||
|
||||
import org.opensearch.cli.Terminal;
|
||||
import org.opensearch.common.collect.Tuple;
|
||||
import org.opensearch.common.settings.KeyStoreWrapper;
|
||||
import org.opensearch.common.settings.KeystoreWrapperUtil;
|
||||
import org.opensearch.common.settings.SecureString;
|
||||
|
||||
import java.io.InputStream;
|
||||
|
||||
/**
|
||||
* Imports the secure Keystore settings from an existing elasticsearch installation.
|
||||
*/
|
||||
class ImportKeystoreTask implements UpgradeTask {
|
||||
private static final String OPENSEARCH_KEYSTORE_FILENAME = "opensearch.keystore";
|
||||
private static final String ES_KEYSTORE_FILENAME = "elasticsearch.keystore";
|
||||
|
||||
@Override
|
||||
public void accept(final Tuple<TaskInput, Terminal> input) {
|
||||
final TaskInput taskInput = input.v1();
|
||||
final Terminal terminal = input.v2();
|
||||
SecureString keyStorePassword = new SecureString(new char[0]);
|
||||
try {
|
||||
terminal.println("Importing keystore settings ...");
|
||||
final KeyStoreWrapper esKeystore = KeyStoreWrapper.load(taskInput.getEsConfig(), ES_KEYSTORE_FILENAME);
|
||||
if (esKeystore == null) {
|
||||
terminal.println("No elasticsearch keystore settings to import.");
|
||||
return;
|
||||
}
|
||||
KeyStoreWrapper openSearchKeystore = KeyStoreWrapper.load(
|
||||
taskInput.getOpenSearchConfig().resolve(OPENSEARCH_KEYSTORE_FILENAME)
|
||||
);
|
||||
if (openSearchKeystore == null) {
|
||||
openSearchKeystore = KeyStoreWrapper.create();
|
||||
}
|
||||
if (esKeystore.hasPassword()) {
|
||||
final char[] passwordArray = terminal.readSecret("Enter password for the elasticsearch keystore : ");
|
||||
keyStorePassword = new SecureString(passwordArray);
|
||||
}
|
||||
esKeystore.decrypt(keyStorePassword.getChars());
|
||||
for (String setting : esKeystore.getSettingNames()) {
|
||||
if (setting.equals("keystore.seed")) {
|
||||
continue;
|
||||
}
|
||||
if (!openSearchKeystore.getSettingNames().contains(setting)) {
|
||||
InputStream settingIS = esKeystore.getFile(setting);
|
||||
byte[] bytes = new byte[settingIS.available()];
|
||||
settingIS.read(bytes);
|
||||
KeystoreWrapperUtil.saveSetting(openSearchKeystore, setting, bytes);
|
||||
}
|
||||
}
|
||||
openSearchKeystore.save(taskInput.getOpenSearchConfig(), keyStorePassword.getChars());
|
||||
terminal.println("Success!" + System.lineSeparator());
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("Error importing keystore settings from elasticsearch, " + e);
|
||||
} finally {
|
||||
keyStorePassword.close();
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,81 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
package org.opensearch.upgrade;
|
||||
|
||||
import org.opensearch.cli.Terminal;
|
||||
import org.opensearch.common.collect.Tuple;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
import java.nio.file.StandardOpenOption;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
|
||||
/**
|
||||
* Imports Log4j properties from an existing elasticsearch installation.
|
||||
*/
|
||||
class ImportLog4jPropertiesTask implements UpgradeTask {
|
||||
static final String LOG4J_PROPERTIES = "log4j2.properties";
|
||||
|
||||
@Override
|
||||
public void accept(final Tuple<TaskInput, Terminal> input) {
|
||||
final TaskInput taskInput = input.v1();
|
||||
final Terminal terminal = input.v2();
|
||||
try {
|
||||
terminal.println("Importing log4j.properties ...");
|
||||
final Path log4jPropPath = taskInput.getOpenSearchConfig().resolve(LOG4J_PROPERTIES);
|
||||
if (Files.exists(log4jPropPath)) {
|
||||
Files.copy(
|
||||
log4jPropPath,
|
||||
taskInput.getOpenSearchConfig().resolve(LOG4J_PROPERTIES + ".bkp"),
|
||||
StandardCopyOption.REPLACE_EXISTING
|
||||
);
|
||||
}
|
||||
final Path esLog4jPropPath = taskInput.getEsConfig().resolve(LOG4J_PROPERTIES);
|
||||
try (
|
||||
InputStream esLog4jIs = Files.newInputStream(esLog4jPropPath);
|
||||
OutputStream log4jOs = Files.newOutputStream(log4jPropPath, StandardOpenOption.TRUNCATE_EXISTING)
|
||||
) {
|
||||
final Properties esLog4JProps = new Properties();
|
||||
esLog4JProps.load(esLog4jIs);
|
||||
final Properties log4jProps = renameValues(esLog4JProps);
|
||||
|
||||
log4jProps.store(log4jOs, "This is an auto-generated file imported from an existing elasticsearch installation.");
|
||||
}
|
||||
terminal.println("Success!" + System.lineSeparator());
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException("Error copying log4j properties. " + e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Rename the values for OpenSearch log4j properties to reflect the changed names
|
||||
* for java packages, class names and system variables.
|
||||
*
|
||||
* @param esLog4JProps existing elasticsearch log4j properties.
|
||||
* @return updated properties for OpenSearch.
|
||||
*/
|
||||
private Properties renameValues(Properties esLog4JProps) {
|
||||
final Properties props = new Properties();
|
||||
for (Map.Entry<Object, Object> entry : esLog4JProps.entrySet()) {
|
||||
final String key = (String) entry.getKey();
|
||||
final String value = (String) entry.getValue();
|
||||
final String newKey = key.replaceAll("esmessagefields", "opensearchmessagefields");
|
||||
final String newValue = value.replaceAll("ESJsonLayout", "OpenSearchJsonLayout")
|
||||
.replaceAll("sys:es.logs", "sys:opensearch.logs")
|
||||
.replaceAll("org.elasticsearch", "org.opensearch");
|
||||
props.setProperty(newKey, newValue);
|
||||
}
|
||||
return props;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,108 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
package org.opensearch.upgrade;
|
||||
|
||||
import org.opensearch.cli.Terminal;
|
||||
import org.opensearch.common.collect.Tuple;
|
||||
import org.opensearch.common.settings.Settings;
|
||||
import org.opensearch.common.settings.SettingsException;
|
||||
import org.opensearch.common.xcontent.ToXContent;
|
||||
import org.opensearch.common.xcontent.XContentBuilder;
|
||||
import org.opensearch.common.xcontent.yaml.YamlXContent;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
import java.nio.file.StandardOpenOption;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Imports settings from an existing elasticsearch installation.
|
||||
*/
|
||||
class ImportYmlConfigTask implements UpgradeTask {
|
||||
private static final String ES_CONFIG_FILENAME = "elasticsearch.yml";
|
||||
private static final String OPENSEARCH_CONFIG_FILENAME = "opensearch.yml";
|
||||
static final String HEADER = "# ======================== OpenSearch Configuration =========================\n"
|
||||
+ "# NOTE: The settings in this file are imported from an existing Elasticsearch\n"
|
||||
+ "# installation using the opensearch-upgrade tool. The original file is\n"
|
||||
+ "# backed up in this directory as opensearch.yml.bkp for reference.\n\n"
|
||||
+ "# Please consult the documentation for further information:\n"
|
||||
+ "# https://www.opensearch.org\n"
|
||||
+ "#\n";
|
||||
|
||||
@Override
|
||||
public void accept(final Tuple<TaskInput, Terminal> input) {
|
||||
final TaskInput taskInput = input.v1();
|
||||
final Terminal terminal = input.v2();
|
||||
try {
|
||||
terminal.println("Importing settings from elasticsearch.yml ...");
|
||||
final Path openSearchYmlPath = taskInput.getOpenSearchConfig().resolve(OPENSEARCH_CONFIG_FILENAME);
|
||||
final Path esYamlPath = taskInput.getEsConfig().resolve(ES_CONFIG_FILENAME);
|
||||
final Settings esSettings = Settings.builder().loadFromPath(esYamlPath).build();
|
||||
final Settings settings = Settings.builder().loadFromPath(openSearchYmlPath).build();
|
||||
if (esSettings.size() > 0) {
|
||||
if (settings.size() > 0
|
||||
&& terminal.promptYesNo("Existing settings in opensearch.yml will be overwritten, proceed?", false) == false) {
|
||||
terminal.println("Import settings cancelled by user");
|
||||
}
|
||||
final Path backupYmlPath = taskInput.getOpenSearchConfig().resolve(OPENSEARCH_CONFIG_FILENAME + ".bkp");
|
||||
if (!Files.exists(backupYmlPath)
|
||||
|| terminal.promptYesNo("A backup file for opensearch.yml already exists, overwrite?", false)) {
|
||||
Files.copy(openSearchYmlPath, backupYmlPath, StandardCopyOption.REPLACE_EXISTING);
|
||||
}
|
||||
Files.write(openSearchYmlPath, Collections.singleton(HEADER), StandardOpenOption.TRUNCATE_EXISTING);
|
||||
final Settings mergeSettings = mergeSettings(settings, esSettings);
|
||||
writeSettings(openSearchYmlPath, mergeSettings);
|
||||
}
|
||||
terminal.println("Success!" + System.lineSeparator());
|
||||
} catch (IOException ex) {
|
||||
throw new RuntimeException("Error importing settings from elasticsearch.yml, " + ex);
|
||||
}
|
||||
}
|
||||
|
||||
// package private for unit testing
|
||||
Settings mergeSettings(final Settings first, final Settings second) {
|
||||
Settings.Builder builder = Settings.builder();
|
||||
for (String key : first.keySet()) {
|
||||
builder.copy(key, key, first);
|
||||
}
|
||||
for (String key : second.keySet()) {
|
||||
builder.copy(key, key, second);
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Write settings to the config file on the file system. It uses the {@link XContentBuilder}
|
||||
* to build the YAML content and write it to the output stream.
|
||||
*
|
||||
* @param configYml path to a yml file where config will be written to.
|
||||
* @param settings the settings to write
|
||||
* @throws IOException exception during writing to the output stream.
|
||||
*/
|
||||
private void writeSettings(final Path configYml, final Settings settings) throws IOException {
|
||||
try (
|
||||
OutputStream os = Files.newOutputStream(configYml, StandardOpenOption.APPEND);
|
||||
XContentBuilder builder = new XContentBuilder(YamlXContent.yamlXContent, os)
|
||||
) {
|
||||
builder.startObject();
|
||||
final Map<String, String> params = new HashMap<>();
|
||||
params.put("flat_settings", "true");
|
||||
settings.toXContent(builder, new ToXContent.MapParams(params));
|
||||
builder.endObject();
|
||||
builder.flush();
|
||||
} catch (Exception e) {
|
||||
throw new SettingsException("Failed to write settings to " + configYml.toString(), e);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,118 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
package org.opensearch.upgrade;
|
||||
|
||||
import org.opensearch.cli.Terminal;
|
||||
import org.opensearch.common.collect.Tuple;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* Installs the list of plugins using the opensearch-plugin command.
|
||||
*/
|
||||
class InstallPluginsTask implements UpgradeTask {
|
||||
private static final String ERROR_MSG = "Error installing plugin %s. Please install it manually.";
|
||||
|
||||
/** The list of official plugins that can be installed by the upgrade tool. */
|
||||
static final Set<String> OFFICIAL_PLUGINS;
|
||||
static {
|
||||
try (
|
||||
InputStream stream = InstallPluginsTask.class.getResourceAsStream("/plugins.txt");
|
||||
BufferedReader reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8))
|
||||
) {
|
||||
Set<String> plugins = new HashSet<>();
|
||||
String line = reader.readLine();
|
||||
while (line != null) {
|
||||
plugins.add(line.trim());
|
||||
line = reader.readLine();
|
||||
}
|
||||
OFFICIAL_PLUGINS = Collections.unmodifiableSet(plugins);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void accept(final Tuple<TaskInput, Terminal> input) {
|
||||
final TaskInput taskInput = input.v1();
|
||||
final Terminal terminal = input.v2();
|
||||
if (taskInput.getPlugins() == null || taskInput.getPlugins().isEmpty()) {
|
||||
return;
|
||||
}
|
||||
terminal.println("Installing core plugins ...");
|
||||
List<String> manualPlugins = new ArrayList<>();
|
||||
|
||||
for (String plugin : taskInput.getPlugins()) {
|
||||
if (OFFICIAL_PLUGINS.contains(plugin)) {
|
||||
executeInstallPluginCommand(plugin, taskInput, terminal);
|
||||
} else {
|
||||
manualPlugins.add(plugin);
|
||||
}
|
||||
}
|
||||
if (!manualPlugins.isEmpty()) {
|
||||
terminal.println("Please install the following custom plugins manually: " + manualPlugins);
|
||||
}
|
||||
terminal.println("Success!" + System.lineSeparator());
|
||||
}
|
||||
|
||||
// package private for unit testing
|
||||
void executeInstallPluginCommand(String plugin, TaskInput taskInput, Terminal terminal) {
|
||||
ProcessBuilder processBuilder = getProcessBuilderBasedOnOS(plugin, taskInput);
|
||||
try {
|
||||
final Process process = processBuilder.inheritIO().start();
|
||||
if (process.waitFor() != 0) {
|
||||
terminal.errorPrint(Terminal.Verbosity.NORMAL, String.format(Locale.getDefault(), ERROR_MSG, plugin));
|
||||
}
|
||||
} catch (IOException | InterruptedException e) {
|
||||
terminal.errorPrint(Terminal.Verbosity.NORMAL, String.format(Locale.getDefault(), ERROR_MSG, plugin) + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
// package private for unit testing
|
||||
ProcessBuilder getProcessBuilderBasedOnOS(String plugin, TaskInput taskInput) {
|
||||
final String command = taskInput.getOpenSearchBin().resolve("opensearch-plugin") + " install " + plugin;
|
||||
final ProcessBuilder processBuilder = new ProcessBuilder();
|
||||
if (OS.WINDOWS == OS.current()) {
|
||||
processBuilder.command("cmd.exe", "/c", command);
|
||||
} else {
|
||||
processBuilder.command("sh", "-c", command);
|
||||
}
|
||||
return processBuilder;
|
||||
}
|
||||
|
||||
private enum OS {
|
||||
WINDOWS,
|
||||
MAC,
|
||||
LINUX;
|
||||
|
||||
public static OS current() {
|
||||
final String os = System.getProperty("os.name", "");
|
||||
if (os.startsWith("Windows")) {
|
||||
return OS.WINDOWS;
|
||||
}
|
||||
if (os.startsWith("Linux") || os.startsWith("LINUX")) {
|
||||
return OS.LINUX;
|
||||
}
|
||||
if (os.startsWith("Mac")) {
|
||||
return OS.MAC;
|
||||
}
|
||||
throw new IllegalStateException("Can't determine OS from: " + os);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,108 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
package org.opensearch.upgrade;
|
||||
|
||||
import org.opensearch.Version;
|
||||
import org.opensearch.env.Environment;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
/**
|
||||
* A plain old java object, that contains the information used by tasks
|
||||
* in the upgrade process.
|
||||
*/
|
||||
class TaskInput {
|
||||
private final Environment openSearchEnv;
|
||||
private String node;
|
||||
private String cluster;
|
||||
private String baseUrl;
|
||||
private boolean running;
|
||||
private Version version;
|
||||
private List<String> plugins;
|
||||
private Path esHome;
|
||||
private Path esConfig;
|
||||
|
||||
TaskInput(Environment openSearchEnv) {
|
||||
this.openSearchEnv = openSearchEnv;
|
||||
}
|
||||
|
||||
public String getNode() {
|
||||
return node;
|
||||
}
|
||||
|
||||
public void setNode(String node) {
|
||||
this.node = node;
|
||||
}
|
||||
|
||||
public String getCluster() {
|
||||
return cluster;
|
||||
}
|
||||
|
||||
public void setCluster(String cluster) {
|
||||
this.cluster = cluster;
|
||||
}
|
||||
|
||||
public Optional<Version> getVersion() {
|
||||
return Optional.ofNullable(version);
|
||||
}
|
||||
|
||||
public void setVersion(Version version) {
|
||||
this.version = version;
|
||||
}
|
||||
|
||||
public List<String> getPlugins() {
|
||||
return plugins;
|
||||
}
|
||||
|
||||
public void setPlugins(List<String> plugins) {
|
||||
this.plugins = plugins;
|
||||
}
|
||||
|
||||
public Path getEsConfig() {
|
||||
return esConfig;
|
||||
}
|
||||
|
||||
public void setEsConfig(Path esConfig) {
|
||||
this.esConfig = esConfig;
|
||||
}
|
||||
|
||||
public Path getEsHome() {
|
||||
return esHome;
|
||||
}
|
||||
|
||||
public void setEsHome(Path esHome) {
|
||||
this.esHome = esHome;
|
||||
}
|
||||
|
||||
public Path getOpenSearchConfig() {
|
||||
return openSearchEnv.configFile();
|
||||
}
|
||||
|
||||
public Path getOpenSearchBin() {
|
||||
return openSearchEnv.binFile();
|
||||
}
|
||||
|
||||
public boolean isRunning() {
|
||||
return running;
|
||||
}
|
||||
|
||||
public void setRunning(boolean running) {
|
||||
this.running = running;
|
||||
}
|
||||
|
||||
public String getBaseUrl() {
|
||||
return baseUrl;
|
||||
}
|
||||
|
||||
public void setBaseUrl(String baseUrl) {
|
||||
this.baseUrl = baseUrl;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,67 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
package org.opensearch.upgrade;
|
||||
|
||||
import joptsimple.OptionSet;
|
||||
import org.opensearch.cli.EnvironmentAwareCommand;
|
||||
import org.opensearch.cli.ExitCodes;
|
||||
import org.opensearch.cli.Terminal;
|
||||
import org.opensearch.cli.UserException;
|
||||
import org.opensearch.common.collect.Tuple;
|
||||
import org.opensearch.env.Environment;
|
||||
|
||||
/**
|
||||
* This class extends the existing opensearch-cli and provides the entry
|
||||
* point for the opensearch-upgrade tool.
|
||||
* <p>
|
||||
* This class is agnostic of the actual logic which performs the upgrade
|
||||
* on the node.
|
||||
*/
|
||||
public class UpgradeCli extends EnvironmentAwareCommand {
|
||||
|
||||
/**
|
||||
* Constructor to create an instance of UpgradeCli.
|
||||
*/
|
||||
public UpgradeCli() {
|
||||
super("A CLI tool for upgrading to OpenSearch 1.x from a supported Elasticsearch version.");
|
||||
}
|
||||
|
||||
/**
|
||||
* Entrypoint for the upgrade tool.
|
||||
*
|
||||
* @param args args to main.
|
||||
* @throws Exception exception thrown during the execution of the UpgradeCli.
|
||||
*/
|
||||
public static void main(String[] args) throws Exception {
|
||||
exit(new UpgradeCli().main(args, Terminal.DEFAULT));
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes the upgrade task. This retrieves an instance of {@link UpgradeTask} which is composed
|
||||
* of smaller individual tasks that perform specific operations as part of the overall process.
|
||||
*
|
||||
* @param terminal current terminal the command is running
|
||||
* @param options options supplied to the command
|
||||
* @param env current environment in which this cli tool is running.
|
||||
* @throws UserException if any exception is thrown from the tasks
|
||||
*/
|
||||
@Override
|
||||
protected void execute(final Terminal terminal, final OptionSet options, final Environment env) throws UserException {
|
||||
try {
|
||||
final Tuple<TaskInput, Terminal> input = new Tuple<>(new TaskInput(env), terminal);
|
||||
UpgradeTask.getTask().accept(input);
|
||||
terminal.println("Done!");
|
||||
terminal.println("Next Steps: ");
|
||||
terminal.println(" Stop the running elasticsearch on this node.");
|
||||
terminal.println(" Start OpenSearch on this node.");
|
||||
} catch (RuntimeException ex) {
|
||||
throw new UserException(ExitCodes.DATA_ERROR, ex.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,41 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
package org.opensearch.upgrade;
|
||||
|
||||
import org.opensearch.cli.Terminal;
|
||||
import org.opensearch.common.collect.Tuple;
|
||||
|
||||
import java.util.function.Consumer;
|
||||
|
||||
/**
|
||||
* An interface for an upgrade task, which in this instance is an unit of
|
||||
* operation that is part of the overall upgrade process. This extends the
|
||||
* {@link java.util.function.Consumer} interface.
|
||||
*
|
||||
* The implementing tasks consume and instance of a tuple of {@link TaskInput}
|
||||
* and {@link Terminal} and operate via side effects.
|
||||
*
|
||||
*/
|
||||
interface UpgradeTask extends Consumer<Tuple<TaskInput, Terminal>> {
|
||||
/**
|
||||
* Composes the individual tasks to create a pipeline for the overall upgrade task.
|
||||
*
|
||||
* @return an instance of {@link java.util.function.Consumer} that takes a tuple of
|
||||
* task input and the current terminal. The composed task fails if any of the
|
||||
* individual tasks fails.
|
||||
*/
|
||||
static Consumer<Tuple<TaskInput, Terminal>> getTask() {
|
||||
return new DetectEsInstallationTask().andThen(new ValidateInputTask())
|
||||
.andThen(new ImportYmlConfigTask())
|
||||
.andThen(new ImportJvmOptionsTask())
|
||||
.andThen(new ImportLog4jPropertiesTask())
|
||||
.andThen(new InstallPluginsTask())
|
||||
.andThen(new ImportKeystoreTask());
|
||||
}
|
||||
}
|
|
@ -0,0 +1,87 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
package org.opensearch.upgrade;
|
||||
|
||||
import org.opensearch.LegacyESVersion;
|
||||
import org.opensearch.Version;
|
||||
import org.opensearch.cli.Terminal;
|
||||
import org.opensearch.common.collect.Tuple;
|
||||
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Validates the input {@link TaskInput} for the upgrade.
|
||||
*/
|
||||
class ValidateInputTask implements UpgradeTask {
|
||||
|
||||
@Override
|
||||
public void accept(final Tuple<TaskInput, Terminal> input) {
|
||||
final TaskInput taskInput = input.v1();
|
||||
final Terminal terminal = input.v2();
|
||||
|
||||
terminal.println("Verifying the details ...");
|
||||
// check if the elasticsearch version is supported
|
||||
if (taskInput.getVersion().isPresent()) {
|
||||
final Version version = taskInput.getVersion().get();
|
||||
if (version.equals(LegacyESVersion.V_7_10_2) == false) {
|
||||
throw new RuntimeException(
|
||||
String.format(Locale.getDefault(), "The installed version %s of elasticsearch is not supported.", version)
|
||||
);
|
||||
}
|
||||
} else {
|
||||
terminal.println("Unable to detect installed elasticsearch version.");
|
||||
confirmToProceed(terminal);
|
||||
}
|
||||
// check if the OpenSearch config is set to an external location
|
||||
if (taskInput.getOpenSearchConfig().getParent().equals(taskInput.getOpenSearchBin().getParent())) {
|
||||
terminal.println(
|
||||
"OpenSearch config directory is set inside the installation directory. "
|
||||
+ "It is recommended to use an external config directory and set the environment variable "
|
||||
+ "OPENSEARCH_PATH_CONF to it."
|
||||
);
|
||||
confirmToProceed(terminal);
|
||||
}
|
||||
|
||||
// print summary and confirm with user if everything looks correct.
|
||||
final Map<String, String> fieldsMap = getSummaryFieldsMap(taskInput);
|
||||
final String format = " %-25s | %s";
|
||||
terminal.println("+----------------------- SUMMARY -----------------------+");
|
||||
for (Map.Entry<String, String> entry : fieldsMap.entrySet()) {
|
||||
terminal.println(String.format(Locale.getDefault(), format, entry.getKey(), entry.getValue()));
|
||||
}
|
||||
terminal.println("+-------------------------------------------------------+");
|
||||
terminal.println("Please verify if everything above looks good.");
|
||||
confirmToProceed(terminal);
|
||||
}
|
||||
|
||||
private void confirmToProceed(final Terminal terminal) {
|
||||
terminal.println(System.lineSeparator());
|
||||
if (terminal.promptYesNo("Do you want to proceed?", false) == false) {
|
||||
throw new RuntimeException("Upgrade cancelled by user.");
|
||||
}
|
||||
}
|
||||
|
||||
// package private for unit testing
|
||||
Map<String, String> getSummaryFieldsMap(final TaskInput taskInput) {
|
||||
final String version = taskInput.getVersion().isPresent() ? taskInput.getVersion().get().toString() : "unknown";
|
||||
|
||||
final Map<String, String> fields = new LinkedHashMap<>();
|
||||
fields.put("Cluster", taskInput.getCluster());
|
||||
fields.put("Node", taskInput.getNode());
|
||||
fields.put("Endpoint", taskInput.getBaseUrl());
|
||||
fields.put("Elasticsearch Version", version);
|
||||
fields.put("Elasticsearch Config", taskInput.getEsConfig().toString());
|
||||
fields.put("Elasticsearch Plugins", taskInput.getPlugins() == null ? "[]" : taskInput.getPlugins().toString());
|
||||
fields.put("OpenSearch Config", taskInput.getOpenSearchConfig().toString());
|
||||
|
||||
return fields;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,19 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
/**
|
||||
* This package contains the classes for the upgrade CLI tool.
|
||||
* This tool automates the configuring of a node that is to be upgraded to
|
||||
* OpenSearch from an existing Elasticsearch (v7.10.2 and v6.8.0) installation.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Contains the classes which implement a CLI tool for opensearch-upgrade which is
|
||||
* bundled into the distribution and available inside $ES_HOME/bin.
|
||||
*/
|
||||
package org.opensearch.upgrade;
|
|
@ -0,0 +1,67 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
package org.opensearch.upgrade;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.opensearch.cli.MockTerminal;
|
||||
import org.opensearch.cli.Terminal;
|
||||
import org.opensearch.common.SuppressForbidden;
|
||||
import org.opensearch.common.collect.Tuple;
|
||||
import org.opensearch.common.settings.Settings;
|
||||
import org.opensearch.env.Environment;
|
||||
import org.opensearch.env.TestEnvironment;
|
||||
import org.opensearch.test.OpenSearchTestCase;
|
||||
|
||||
import java.io.File;
|
||||
import java.nio.file.Path;
|
||||
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
public class DetectEsInstallationTaskTests extends OpenSearchTestCase {
|
||||
|
||||
private final MockTerminal terminal = new MockTerminal();
|
||||
private DetectEsInstallationTask task;
|
||||
private Environment env;
|
||||
|
||||
@Before
|
||||
public void setUpTask() {
|
||||
task = new DetectEsInstallationTask();
|
||||
env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", "").build());
|
||||
}
|
||||
|
||||
@SuppressForbidden(reason = "Read config directory from test resources.")
|
||||
public void testTaskExecution() throws Exception {
|
||||
Path esConfig = new File(getClass().getResource("/config").getPath()).toPath();
|
||||
// path for es_home
|
||||
terminal.addTextInput(esConfig.getParent().toString());
|
||||
// path for es_config
|
||||
terminal.addTextInput(esConfig.toString());
|
||||
TaskInput taskInput = new TaskInput(env);
|
||||
Tuple<TaskInput, Terminal> input = new Tuple<>(taskInput, terminal);
|
||||
|
||||
task.accept(input);
|
||||
|
||||
assertThat(taskInput.getEsConfig(), is(esConfig));
|
||||
assertThat(taskInput.getBaseUrl(), is("http://localhost:9200"));
|
||||
assertThat(taskInput.getPlugins(), hasSize(0));
|
||||
assertThat(taskInput.getNode(), is("node-x"));
|
||||
assertThat(taskInput.getCluster(), is("my-cluster"));
|
||||
}
|
||||
|
||||
public void testRetrieveUrlFromSettings() {
|
||||
Settings esSettings = Settings.builder().put("http.port", "9201").build();
|
||||
|
||||
assertThat(task.retrieveUrl(esSettings), is("http://localhost:9201"));
|
||||
}
|
||||
|
||||
public void testRetrieveDefaultUrlFromConfig() {
|
||||
assertThat(task.retrieveUrl(Settings.EMPTY), is("http://localhost:9200"));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,80 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
package org.opensearch.upgrade;
|
||||
|
||||
import com.google.common.jimfs.Configuration;
|
||||
import com.google.common.jimfs.Jimfs;
|
||||
import org.junit.Before;
|
||||
import org.opensearch.cli.MockTerminal;
|
||||
import org.opensearch.common.SuppressForbidden;
|
||||
import org.opensearch.common.collect.Tuple;
|
||||
import org.opensearch.common.io.PathUtilsForTesting;
|
||||
import org.opensearch.common.settings.Settings;
|
||||
import org.opensearch.env.Environment;
|
||||
import org.opensearch.env.TestEnvironment;
|
||||
import org.opensearch.test.OpenSearchTestCase;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.FileSystem;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Properties;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasItem;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
|
||||
public class ImportLog4jPropertiesTaskTests extends OpenSearchTestCase {
|
||||
private final MockTerminal terminal = new MockTerminal();
|
||||
private final List<FileSystem> fileSystems = new ArrayList<>();
|
||||
private ImportLog4jPropertiesTask task;
|
||||
private Environment env;
|
||||
|
||||
@Before
|
||||
public void setUpTask() throws IOException {
|
||||
task = new ImportLog4jPropertiesTask();
|
||||
final Configuration configuration;
|
||||
configuration = Configuration.unix().toBuilder().setAttributeViews("basic", "owner", "posix", "unix").build();
|
||||
FileSystem fs = Jimfs.newFileSystem(configuration);
|
||||
fileSystems.add(fs);
|
||||
PathUtilsForTesting.installMock(fs);
|
||||
Path home = fs.getPath("test-home");
|
||||
Path config = home.resolve("config");
|
||||
Files.createDirectories(config);
|
||||
Files.createFile(config.resolve(ImportLog4jPropertiesTask.LOG4J_PROPERTIES));
|
||||
env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", home).build());
|
||||
}
|
||||
|
||||
@SuppressForbidden(reason = "Read config directory from test resources.")
|
||||
public void testImportLog4jPropertiesTask() throws IOException {
|
||||
TaskInput taskInput = new TaskInput(env);
|
||||
Path esConfig = new File(getClass().getResource("/config").getPath()).toPath();
|
||||
taskInput.setEsConfig(esConfig);
|
||||
task.accept(new Tuple<>(taskInput, terminal));
|
||||
|
||||
Properties properties = new Properties();
|
||||
properties.load(Files.newInputStream(taskInput.getOpenSearchConfig().resolve(ImportLog4jPropertiesTask.LOG4J_PROPERTIES)));
|
||||
assertThat(properties, is(notNullValue()));
|
||||
assertThat(properties.entrySet(), hasSize(137));
|
||||
assertThat(properties.get("appender.rolling.layout.type"), equalTo("OpenSearchJsonLayout"));
|
||||
assertThat(
|
||||
properties.get("appender.deprecation_rolling.fileName"),
|
||||
equalTo("${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_deprecation.json")
|
||||
);
|
||||
assertThat(properties.get("logger.deprecation.name"), equalTo("org.opensearch.deprecation"));
|
||||
assertThat(properties.keySet(), not(hasItem("appender.deprecation_rolling.layout.esmessagefields")));
|
||||
assertThat(properties.keySet(), hasItem("appender.deprecation_rolling.layout.opensearchmessagefields"));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,77 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
package org.opensearch.upgrade;
|
||||
|
||||
import com.google.common.jimfs.Configuration;
|
||||
import com.google.common.jimfs.Jimfs;
|
||||
import org.junit.Before;
|
||||
import org.opensearch.cli.MockTerminal;
|
||||
import org.opensearch.common.SuppressForbidden;
|
||||
import org.opensearch.common.collect.Tuple;
|
||||
import org.opensearch.common.io.PathUtilsForTesting;
|
||||
import org.opensearch.common.settings.Settings;
|
||||
import org.opensearch.env.Environment;
|
||||
import org.opensearch.env.TestEnvironment;
|
||||
import org.opensearch.test.OpenSearchTestCase;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.FileSystem;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
public class ImportYmlConfigTaskTests extends OpenSearchTestCase {
|
||||
private final MockTerminal terminal = new MockTerminal();
|
||||
private final List<FileSystem> fileSystems = new ArrayList<>();
|
||||
private ImportYmlConfigTask task;
|
||||
private Environment env;
|
||||
|
||||
@Before
|
||||
public void setUpTask() throws IOException {
|
||||
task = new ImportYmlConfigTask();
|
||||
final Configuration configuration;
|
||||
configuration = Configuration.unix().toBuilder().setAttributeViews("basic", "owner", "posix", "unix").build();
|
||||
FileSystem fs = Jimfs.newFileSystem(configuration);
|
||||
fileSystems.add(fs);
|
||||
PathUtilsForTesting.installMock(fs);
|
||||
Path home = fs.getPath("test-home");
|
||||
Path config = home.resolve("config");
|
||||
Files.createDirectories(config);
|
||||
Files.createFile(config.resolve("opensearch.yml"));
|
||||
env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", home).build());
|
||||
}
|
||||
|
||||
@SuppressForbidden(reason = "Read config directory from test resources.")
|
||||
public void testImportYmlConfigTask() throws IOException {
|
||||
TaskInput taskInput = new TaskInput(env);
|
||||
Path esConfig = new File(getClass().getResource("/config").getPath()).toPath();
|
||||
taskInput.setEsConfig(esConfig);
|
||||
task.accept(new Tuple<>(taskInput, terminal));
|
||||
Settings settings = Settings.builder().loadFromPath(taskInput.getOpenSearchConfig().resolve("opensearch.yml")).build();
|
||||
assertThat(settings.keySet(), contains("cluster.name", "node.name", "path.data", "path.logs"));
|
||||
assertThat(settings.get("cluster.name"), is("my-cluster"));
|
||||
assertThat(settings.get("node.name"), is("node-x"));
|
||||
assertThat(settings.get("path.data"), is("[/mnt/data_1, /mnt/data_2]"));
|
||||
assertThat(settings.get("path.logs"), is("/var/log/eslogs"));
|
||||
}
|
||||
|
||||
public void testMergeSettings() {
|
||||
Settings first = Settings.builder().put("setting_one", "value_one").build();
|
||||
Settings second = Settings.builder().put("setting_two", "value_two").build();
|
||||
Settings merged = task.mergeSettings(first, second);
|
||||
assertThat(merged.keySet(), contains("setting_one", "setting_two"));
|
||||
assertThat(merged.get("setting_one"), is("value_one"));
|
||||
assertThat(merged.get("setting_two"), is("value_two"));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,77 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.upgrade;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.elasticsearch.mock.orig.Mockito;
import org.junit.Before;
import org.opensearch.cli.MockTerminal;
import org.opensearch.common.collect.Tuple;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
import org.opensearch.env.TestEnvironment;
import org.opensearch.test.OpenSearchTestCase;

import static org.hamcrest.Matchers.containsString;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;

public class InstallPluginsTaskTests extends OpenSearchTestCase {

    private final MockTerminal terminal = new MockTerminal();
    private InstallPluginsTask task;
    private Environment env;

    private static final String OFFICIAL_PLUGIN = "analysis-icu";
    private static final String CUSTOM_PLUGIN = "job-scheduler";

    @Before
    public void setUpTask() throws IOException {
        task = new InstallPluginsTask();
        env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", "").build());
    }

    public void testInstallPluginsTaskWithOfficialPlugin() throws IOException {
        InstallPluginsTask spyTask = spy(task);
        TaskInput taskInput = createTaskInputWithPlugin(OFFICIAL_PLUGIN);
        spyTask.accept(new Tuple<>(taskInput, terminal));

        verify(spyTask, Mockito.atLeast(1)).executeInstallPluginCommand(OFFICIAL_PLUGIN, taskInput, terminal);
    }

    public void testInstallPluginsTaskWithCustomPlugin() throws IOException {
        TaskInput taskInput = createTaskInputWithPlugin(CUSTOM_PLUGIN);
        task.accept(new Tuple<>(taskInput, terminal));

        assertThat(terminal.getOutput(), containsString("Please install the following custom plugins manually"));
    }

    public void testGetCommandsBasedOnOS() {
        TaskInput taskInput = createTaskInputWithPlugin(OFFICIAL_PLUGIN);
        List<String> commandsList = task.getProcessBuilderBasedOnOS(OFFICIAL_PLUGIN, taskInput).command();

        final String os = System.getProperty("os.name", "");
        if (os.startsWith("Windows")) {
            assertEquals("cmd.exe", commandsList.get(0));
        } else {
            assertEquals("sh", commandsList.get(0));
        }
    }

    private TaskInput createTaskInputWithPlugin(String plugin) {
        TaskInput taskInput = new TaskInput(env);
        List<String> pluginsList = new ArrayList<>();
        pluginsList.add(plugin);
        taskInput.setPlugins(pluginsList);
        return taskInput;
    }
}
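testGetCommandsBasedOnOS only checks which shell the task selects. A hypothetical sketch of the OS-dependent command assembly it implies; the real InstallPluginsTask may build the command differently, and the helper name and bin path below are illustrative:

import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Hypothetical helper, consistent with the "cmd.exe" vs "sh" assertion above.
final class PluginInstallCommandSketch {
    static ProcessBuilder installCommand(Path openSearchHome, String plugin) {
        List<String> command = new ArrayList<>();
        if (System.getProperty("os.name", "").startsWith("Windows")) {
            command.addAll(Arrays.asList("cmd.exe", "/c"));
            command.add(openSearchHome.resolve("bin").resolve("opensearch-plugin.bat") + " install " + plugin);
        } else {
            command.addAll(Arrays.asList("sh", "-c"));
            command.add(openSearchHome.resolve("bin").resolve("opensearch-plugin") + " install " + plugin);
        }
        return new ProcessBuilder(command);
    }
}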
@ -0,0 +1,159 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.upgrade;

import com.google.common.jimfs.Configuration;
import com.google.common.jimfs.Jimfs;
import org.junit.After;
import org.junit.Before;
import org.opensearch.cli.Command;
import org.opensearch.cli.CommandTestCase;
import org.opensearch.common.SuppressForbidden;
import org.opensearch.common.io.PathUtilsForTesting;
import org.opensearch.common.settings.KeyStoreWrapper;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.internal.io.IOUtils;
import org.opensearch.env.Environment;
import org.opensearch.env.TestEnvironment;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.is;

public class UpgradeCliTests extends CommandTestCase {
    private final List<FileSystem> fileSystems = new ArrayList<>();
    private Environment env;

    @Before
    public void setupEnv() throws IOException {
        final Configuration configuration;
        configuration = Configuration.unix().toBuilder().setAttributeViews("basic", "owner", "posix", "unix").build();
        FileSystem fs = Jimfs.newFileSystem(configuration);
        fileSystems.add(fs);
        PathUtilsForTesting.installMock(fs);
        Path home = fs.getPath("test-home");
        Path config = home.resolve("config");
        Files.createDirectories(config);
        Files.createFile(config.resolve("opensearch.yml"));
        Files.createDirectory(config.resolve("jvm.options.d"));
        Files.createFile(config.resolve("log4j2.properties"));
        env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", home).build());
    }

    @After
    public void closeMockFileSystems() throws IOException {
        IOUtils.close(fileSystems);
    }

    @Override
    protected Command newCommand() {
        return new UpgradeCli() {
            @Override
            protected Environment createEnv(Map<String, String> settings) {
                return env;
            }
        };
    }

    @SuppressForbidden(reason = "Read config directory from test resources.")
    public void testUpgrade() throws Exception {
        String passwd = "keystorepassword";

        Path esConfig = new File(getClass().getResource("/config").getPath()).toPath();
        // path for es_home
        terminal.addTextInput(esConfig.getParent().toString());
        // path for es_config
        terminal.addTextInput(esConfig.toString());
        // input for prompt 'config directory is inside installation'
        terminal.addTextInput("y");
        // input for prompt 'es version not detected'
        terminal.addTextInput("y");
        // input for prompt 'confirm the details look good'
        terminal.addTextInput("y");
        // as the keystore is password protected, we set it.
        terminal.addSecretInput(passwd);

        execute();

        assertYmlConfigImported();
        assertKeystoreImported(passwd);
        assertJvmOptionsImported();
        assertLog4jPropertiesImported();
    }

    private void assertYmlConfigImported() throws IOException {
        String[] headers = ImportYmlConfigTask.HEADER.split("[\\r\\n]+");
        List<String> expectedSettings = new ArrayList<>();
        expectedSettings.addAll(Arrays.asList(headers));
        // this is the generated flat settings
        expectedSettings.addAll(
            Arrays.asList(
                "---",
                "cluster.name: \"my-cluster\"",
                "node.name: \"node-x\"",
                "path.data:",
                "- \"/mnt/data_1\"",
                "- \"/mnt/data_2\"",
                "path.logs: \"/var/log/eslogs\""
            )
        );
        List<String> actualSettings = Files.readAllLines(env.configFile().resolve("opensearch.yml"))
            .stream()
            .filter(Objects::nonNull)
            .filter(line -> !line.isEmpty())
            .collect(Collectors.toList());

        assertThat(actualSettings, equalTo(expectedSettings));
    }

    private void assertKeystoreImported(String passwd) throws IOException, GeneralSecurityException {
        // assert keystore is created
        KeyStoreWrapper keystore = KeyStoreWrapper.load(env.configFile());
        assertNotNull(keystore);

        // assert all keystore settings are imported
        keystore.decrypt(passwd.toCharArray());
        assertThat(keystore.getSettingNames(), hasItems(KeyStoreWrapper.SEED_SETTING.getKey(), "test.setting.key", "test.setting.file"));
        assertThat(keystore.getString("test.setting.key").toString(), is("test.setting.value"));
        InputStream is = keystore.getFile("test.setting.file");
        byte[] bytes = new byte[is.available()];
        assertThat(is.read(bytes), greaterThan(0));
        String actual = StandardCharsets.UTF_8.decode(ByteBuffer.wrap(bytes)).toString();
        String expected = "{\"some_key\": \"some_val\"}";
        assertThat(actual, is(expected));
    }

    private void assertJvmOptionsImported() throws IOException, GeneralSecurityException {
        Path path = env.configFile().resolve("jvm.options.d");
        assertThat(Files.exists(path), is(true));
        assertThat(Files.isDirectory(path), is(true));
        assertThat(Files.exists(path.resolve("test.options")), is(true));
    }

    private void assertLog4jPropertiesImported() throws IOException, GeneralSecurityException {
        assertThat(Files.exists(env.configFile().resolve("log4j2.properties")), is(true));
    }
}
@ -0,0 +1,69 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.upgrade;

import org.junit.Before;
import org.opensearch.LegacyESVersion;
import org.opensearch.cli.MockTerminal;
import org.opensearch.common.collect.Tuple;
import org.opensearch.common.io.PathUtils;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
import org.opensearch.env.TestEnvironment;
import org.opensearch.test.OpenSearchTestCase;

import java.util.Arrays;
import java.util.Map;

import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;

public class ValidateInputTaskTests extends OpenSearchTestCase {

    private ValidateInputTask task;
    private MockTerminal terminal;
    private Environment env;

    @Before
    public void setTask() {
        task = new ValidateInputTask();
        terminal = new MockTerminal();
        env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", "test_home").build());
    }

    public void testUnsupportedEsVersion() {
        TaskInput taskInput = new TaskInput(env);
        taskInput.setVersion(LegacyESVersion.V_7_10_1);

        final RuntimeException e = expectThrows(RuntimeException.class, () -> task.accept(new Tuple<>(taskInput, terminal)));

        assertTrue(e.getMessage(), e.getMessage().contains("The installed version 7.10.1 of elasticsearch is not supported."));
    }

    public void testGetSummaryFields() {
        TaskInput taskInput = new TaskInput(env);
        taskInput.setEsConfig(PathUtils.get("es_home"));
        taskInput.setCluster("some-cluster");
        taskInput.setNode("some-node");
        taskInput.setVersion(LegacyESVersion.V_7_10_2);
        taskInput.setBaseUrl("some-url");
        taskInput.setPlugins(Arrays.asList("plugin-1", "plugin-2"));

        Map<String, String> summary = task.getSummaryFieldsMap(taskInput);

        assertThat(summary.entrySet(), hasSize(7));
        assertThat(summary.get("Cluster"), is("some-cluster"));
        assertThat(summary.get("Node"), is("some-node"));
        assertThat(summary.get("Endpoint"), is("some-url"));
        assertThat(summary.get("Elasticsearch Version"), is("7.10.2"));
        assertThat(summary.get("Elasticsearch Plugins"), is("[plugin-1, plugin-2]"));
        assertThat(summary.get("Elasticsearch Config"), is("es_home"));
        assertThat(summary.get("OpenSearch Config"), is(env.configFile().toString()));
    }
}
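testUnsupportedEsVersion pins the error message but not the exact rule. A hypothetical sketch of the shape of that check; the real ValidateInputTask may compute the supported versions differently, and treating LegacyESVersion.V_7_10_2 as the only supported baseline here is an assumption for illustration:

import org.opensearch.LegacyESVersion;

final class VersionCheckSketch {
    // Assumption: only 7.10.2 is accepted in this sketch; the actual task may allow other versions.
    static void validate(LegacyESVersion installed) {
        if (installed.equals(LegacyESVersion.V_7_10_2) == false) {
            throw new RuntimeException("The installed version " + installed + " of elasticsearch is not supported.");
        }
    }
}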
Binary file not shown.
@ -0,0 +1,7 @@
cluster.name: my-cluster
node.name: node-x
path:
  data:
    - /mnt/data_1
    - /mnt/data_2
  logs: /var/log/eslogs
@ -0,0 +1,2 @@
-Xms2g
-Xmx2g
@ -0,0 +1,178 @@
status = error

appender.console.type = Console
appender.console.name = console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

######## Server JSON ############################
appender.rolling.type = RollingFile
appender.rolling.name = rolling
appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_server.json
appender.rolling.layout.type = ESJsonLayout
appender.rolling.layout.type_name = server

appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}-%i.json.gz
appender.rolling.policies.type = Policies
appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.rolling.policies.time.interval = 1
appender.rolling.policies.time.modulate = true
appender.rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.rolling.policies.size.size = 128MB
appender.rolling.strategy.type = DefaultRolloverStrategy
appender.rolling.strategy.fileIndex = nomax
appender.rolling.strategy.action.type = Delete
appender.rolling.strategy.action.basepath = ${sys:es.logs.base_path}
appender.rolling.strategy.action.condition.type = IfFileName
appender.rolling.strategy.action.condition.glob = ${sys:es.logs.cluster_name}-*
appender.rolling.strategy.action.condition.nested_condition.type = IfAccumulatedFileSize
appender.rolling.strategy.action.condition.nested_condition.exceeds = 2GB
################################################
######## Server - old style pattern ###########
appender.rolling_old.type = RollingFile
appender.rolling_old.name = rolling_old
appender.rolling_old.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log
appender.rolling_old.layout.type = PatternLayout
appender.rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

appender.rolling_old.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}-%i.log.gz
appender.rolling_old.policies.type = Policies
appender.rolling_old.policies.time.type = TimeBasedTriggeringPolicy
appender.rolling_old.policies.time.interval = 1
appender.rolling_old.policies.time.modulate = true
appender.rolling_old.policies.size.type = SizeBasedTriggeringPolicy
appender.rolling_old.policies.size.size = 128MB
appender.rolling_old.strategy.type = DefaultRolloverStrategy
appender.rolling_old.strategy.fileIndex = nomax
appender.rolling_old.strategy.action.type = Delete
appender.rolling_old.strategy.action.basepath = ${sys:es.logs.base_path}
appender.rolling_old.strategy.action.condition.type = IfFileName
appender.rolling_old.strategy.action.condition.glob = ${sys:es.logs.cluster_name}-*
appender.rolling_old.strategy.action.condition.nested_condition.type = IfAccumulatedFileSize
appender.rolling_old.strategy.action.condition.nested_condition.exceeds = 2GB
################################################

rootLogger.level = info
rootLogger.appenderRef.console.ref = console
rootLogger.appenderRef.rolling.ref = rolling
rootLogger.appenderRef.rolling_old.ref = rolling_old

######## Deprecation JSON #######################
appender.deprecation_rolling.type = RollingFile
appender.deprecation_rolling.name = deprecation_rolling
appender.deprecation_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.json
appender.deprecation_rolling.layout.type = ESJsonLayout
appender.deprecation_rolling.layout.type_name = deprecation
appender.deprecation_rolling.layout.esmessagefields=x-opaque-id
appender.deprecation_rolling.filter.rate_limit.type = RateLimitingFilter

appender.deprecation_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation-%i.json.gz
appender.deprecation_rolling.policies.type = Policies
appender.deprecation_rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.deprecation_rolling.policies.size.size = 1GB
appender.deprecation_rolling.strategy.type = DefaultRolloverStrategy
appender.deprecation_rolling.strategy.max = 4

appender.header_warning.type = HeaderWarningAppender
appender.header_warning.name = header_warning
#################################################
######## Deprecation - old style pattern #######
appender.deprecation_rolling_old.type = RollingFile
appender.deprecation_rolling_old.name = deprecation_rolling_old
appender.deprecation_rolling_old.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log
appender.deprecation_rolling_old.layout.type = PatternLayout
appender.deprecation_rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

appender.deprecation_rolling_old.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
  _deprecation-%i.log.gz
appender.deprecation_rolling_old.policies.type = Policies
appender.deprecation_rolling_old.policies.size.type = SizeBasedTriggeringPolicy
appender.deprecation_rolling_old.policies.size.size = 1GB
appender.deprecation_rolling_old.strategy.type = DefaultRolloverStrategy
appender.deprecation_rolling_old.strategy.max = 4
#################################################
logger.deprecation.name = org.elasticsearch.deprecation
logger.deprecation.level = deprecation
logger.deprecation.appenderRef.deprecation_rolling.ref = deprecation_rolling
logger.deprecation.appenderRef.deprecation_rolling_old.ref = deprecation_rolling_old
logger.deprecation.appenderRef.header_warning.ref = header_warning
logger.deprecation.additivity = false

######## Search slowlog JSON ####################
appender.index_search_slowlog_rolling.type = RollingFile
appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling
appender.index_search_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs\
  .cluster_name}_index_search_slowlog.json
appender.index_search_slowlog_rolling.layout.type = ESJsonLayout
appender.index_search_slowlog_rolling.layout.type_name = index_search_slowlog
appender.index_search_slowlog_rolling.layout.esmessagefields=message,took,took_millis,total_hits,types,stats,search_type,total_shards,source,id

appender.index_search_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs\
  .cluster_name}_index_search_slowlog-%i.json.gz
appender.index_search_slowlog_rolling.policies.type = Policies
appender.index_search_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.index_search_slowlog_rolling.policies.size.size = 1GB
appender.index_search_slowlog_rolling.strategy.type = DefaultRolloverStrategy
appender.index_search_slowlog_rolling.strategy.max = 4
#################################################
######## Search slowlog - old style pattern ####
appender.index_search_slowlog_rolling_old.type = RollingFile
appender.index_search_slowlog_rolling_old.name = index_search_slowlog_rolling_old
appender.index_search_slowlog_rolling_old.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
  _index_search_slowlog.log
appender.index_search_slowlog_rolling_old.layout.type = PatternLayout
appender.index_search_slowlog_rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

appender.index_search_slowlog_rolling_old.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
  _index_search_slowlog-%i.log.gz
appender.index_search_slowlog_rolling_old.policies.type = Policies
appender.index_search_slowlog_rolling_old.policies.size.type = SizeBasedTriggeringPolicy
appender.index_search_slowlog_rolling_old.policies.size.size = 1GB
appender.index_search_slowlog_rolling_old.strategy.type = DefaultRolloverStrategy
appender.index_search_slowlog_rolling_old.strategy.max = 4
#################################################
logger.index_search_slowlog_rolling.name = index.search.slowlog
logger.index_search_slowlog_rolling.level = trace
logger.index_search_slowlog_rolling.appenderRef.index_search_slowlog_rolling.ref = index_search_slowlog_rolling
logger.index_search_slowlog_rolling.appenderRef.index_search_slowlog_rolling_old.ref = index_search_slowlog_rolling_old
logger.index_search_slowlog_rolling.additivity = false

######## Indexing slowlog JSON ##################
appender.index_indexing_slowlog_rolling.type = RollingFile
appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling
appender.index_indexing_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
  _index_indexing_slowlog.json
appender.index_indexing_slowlog_rolling.layout.type = ESJsonLayout
appender.index_indexing_slowlog_rolling.layout.type_name = index_indexing_slowlog
appender.index_indexing_slowlog_rolling.layout.esmessagefields=message,took,took_millis,doc_type,id,routing,source

appender.index_indexing_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
  _index_indexing_slowlog-%i.json.gz
appender.index_indexing_slowlog_rolling.policies.type = Policies
appender.index_indexing_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.index_indexing_slowlog_rolling.policies.size.size = 1GB
appender.index_indexing_slowlog_rolling.strategy.type = DefaultRolloverStrategy
appender.index_indexing_slowlog_rolling.strategy.max = 4
#################################################
######## Indexing slowlog - old style pattern ##
appender.index_indexing_slowlog_rolling_old.type = RollingFile
appender.index_indexing_slowlog_rolling_old.name = index_indexing_slowlog_rolling_old
appender.index_indexing_slowlog_rolling_old.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
  _index_indexing_slowlog.log
appender.index_indexing_slowlog_rolling_old.layout.type = PatternLayout
appender.index_indexing_slowlog_rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

appender.index_indexing_slowlog_rolling_old.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
  _index_indexing_slowlog-%i.log.gz
appender.index_indexing_slowlog_rolling_old.policies.type = Policies
appender.index_indexing_slowlog_rolling_old.policies.size.type = SizeBasedTriggeringPolicy
appender.index_indexing_slowlog_rolling_old.policies.size.size = 1GB
appender.index_indexing_slowlog_rolling_old.strategy.type = DefaultRolloverStrategy
appender.index_indexing_slowlog_rolling_old.strategy.max = 4
#################################################

logger.index_indexing_slowlog.name = index.indexing.slowlog.index
logger.index_indexing_slowlog.level = trace
logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling.ref = index_indexing_slowlog_rolling
logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling_old.ref = index_indexing_slowlog_rolling_old
logger.index_indexing_slowlog.additivity = false
@ -193,7 +193,11 @@ public class KeyStoreWrapper implements SecureSettings {
    /** Returns a path representing the ES keystore in the given config dir. */
    public static Path keystorePath(Path configDir) {
        return configDir.resolve(KEYSTORE_FILENAME);
        return keystorePath(configDir, KEYSTORE_FILENAME);
    }

    private static Path keystorePath(Path configDir, String keystoreFileName) {
        return configDir.resolve(keystoreFileName);
    }

    /** Constructs a new keystore with the given password. */
@ -217,24 +221,27 @@ public class KeyStoreWrapper implements SecureSettings {
        Arrays.fill(characters, (char)0);
    }

    public static KeyStoreWrapper load(Path configDir) throws IOException {
        return load(configDir, KEYSTORE_FILENAME);
    }
    /**
     * Loads information about the OpenSearch keystore from the provided config directory.
     *
     * {@link #decrypt(char[])} must be called before reading or writing any entries.
     * Returns {@code null} if no keystore exists.
     */
    public static KeyStoreWrapper load(Path configDir) throws IOException {
        Path keystoreFile = keystorePath(configDir);
    public static KeyStoreWrapper load(Path configDir, String keystoreFileName) throws IOException {
        Path keystoreFile = keystorePath(configDir, keystoreFileName);
        if (Files.exists(keystoreFile) == false) {
            return null;
        }

        NIOFSDirectory directory = new NIOFSDirectory(configDir);
        try (IndexInput indexInput = directory.openInput(KEYSTORE_FILENAME, IOContext.READONCE)) {
        try (IndexInput indexInput = directory.openInput(keystoreFileName, IOContext.READONCE)) {
            ChecksumIndexInput input = new BufferedChecksumIndexInput(indexInput);
            final int formatVersion;
            try {
                formatVersion = CodecUtil.checkHeader(input, KEYSTORE_FILENAME, MIN_FORMAT_VERSION, FORMAT_VERSION);
                formatVersion = CodecUtil.checkHeader(input, keystoreFileName, MIN_FORMAT_VERSION, FORMAT_VERSION);
            } catch (IndexFormatTooOldException e) {
                throw new IllegalStateException("The OpenSearch keystore [" + keystoreFile + "] format is too old. " +
                    "You should delete and recreate it in order to upgrade.", e);
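The new overload lets a caller read a keystore file with a non-default name, such as a legacy elasticsearch.keystore, out of a config directory. A minimal usage sketch consistent with the javadoc above; the config path and setting name are illustrative only:

import org.opensearch.common.io.PathUtils;
import org.opensearch.common.settings.KeyStoreWrapper;
import org.opensearch.common.settings.SecureString;

public class LegacyKeystoreReadSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative directory; in practice this would come from the upgrade tool's prompts.
        KeyStoreWrapper legacy = KeyStoreWrapper.load(PathUtils.get("/etc/elasticsearch"), "elasticsearch.keystore");
        if (legacy == null) {
            return; // no keystore exists in that directory
        }
        legacy.decrypt(new char[0]); // must decrypt before reading entries; empty password if unprotected
        for (String name : legacy.getSettingNames()) {
            System.out.println(name);
        }
        try (SecureString value = legacy.getString("bootstrap.password")) { // setting name is an example
            // use the value ...
        }
    }
}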
@ -55,6 +55,7 @@ List projects = [
  'distribution:tools:launchers',
  'distribution:tools:plugin-cli',
  'distribution:tools:keystore-cli',
  'distribution:tools:upgrade-cli',
  'server',
  'server:cli',
  'test:framework',