[Rename] Replace more instances of Elasticsearch with OpenSearch. (#432)
This commit replaces further instances of Elasticsearch with OpenSearch wherever the name can safely be changed.

Signed-off-by: Rabi Panda <adnapibar@gmail.com>
This commit is contained in:
parent 5ecb607c9d
commit 8bba6603da
@@ -8,9 +8,8 @@ build-idea/
 out/

 # include shared intellij config
-!.idea/scopes/x_pack.xml
 !.idea/inspectionProfiles/Project_Default.xml
-!.idea/runConfigurations/Debug_Elasticsearch.xml
+!.idea/runConfigurations/Debug_OpenSearch.xml

 # These files are generated in the main tree by IntelliJ
 benchmarks/src/main/generated/*
@@ -1,3 +0,0 @@
-<component name="DependencyValidationManager">
-<scope name="x-pack" pattern="file[*.x-pack]:*/||file[*.x-pack*]:*//*" />
-</component>
@@ -146,6 +146,7 @@ tasks.register("verifyVersions") {
 // Read the list from maven central.
 // Fetch the metadata and parse the xml into Version instances because it's more straight forward here
 // rather than bwcVersion ( VersionCollection ).
+//TODO OpenSearch - Update the maven repo URL for OpenSearch when available.
 new URL('https://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml').openStream().withStream { s ->
 BuildParams.bwcVersions.compareToAuthoritative(
 new XmlParser().parse(s)
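For context on what the `verifyVersions` task consumes, here is a small standalone sketch (not part of the commit) that downloads a `maven-metadata.xml` file and lists its published versions; the URL is the same Elasticsearch one the TODO above notes still needs to be replaced.

```java
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;

// Standalone sketch: fetch maven-metadata.xml and print the <version> entries,
// roughly the data the Gradle snippet above feeds into BuildParams.bwcVersions.
public class MavenMetadataVersions {
    public static void main(String[] args) throws Exception {
        String url = "https://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml";
        try (InputStream in = new URL(url).openStream()) {
            Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in);
            NodeList nodes = doc.getElementsByTagName("version");
            List<String> versions = new ArrayList<>();
            for (int i = 0; i < nodes.getLength(); i++) {
                versions.add(nodes.item(i).getTextContent());
            }
            System.out.println(versions);
        }
    }
}
```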
@@ -66,7 +66,7 @@ public class InternalDistributionDownloadPlugin implements Plugin<Project> {
 /**
 * Registers internal distribution resolutions.
 * <p>
-* Elasticsearch distributions are resolved as project dependencies either representing
+* OpenSearch distributions are resolved as project dependencies either representing
 * the current version pointing to a project either under `:distribution:archives` or :distribution:packages`.
 * <p>
 * BWC versions are resolved as project to projects under `:distribution:bwc`.
@@ -4,7 +4,7 @@ This plugin provides empty REST and transport endpoints for bulk indexing and se

 ### Build Instructions

-Build the plugin with `gradle :client:client-benchmark-noop-api-plugin:assemble` from the Elasticsearch root project directory.
+Build the plugin with `gradle :client:client-benchmark-noop-api-plugin:assemble` from the OpenSearch root project directory.

 ### Installation Instructions

@@ -102,7 +102,7 @@ public class GetAliasesResponseTests extends AbstractXContentTestCase<GetAliases
 assertNull(newInstance.getException());
 }

-public void testFromXContentWithElasticsearchException() throws IOException {
+public void testFromXContentWithOpenSearchException() throws IOException {
 String xContent =
 "{" +
 " \"error\": {" +
@@ -453,7 +453,7 @@ public class SearchDocumentationIT extends OpenSearchRestHighLevelClientTestCase
 searchSourceBuilder.highlighter(highlightBuilder);
 // end::search-request-highlighting
 searchSourceBuilder.query(QueryBuilders.boolQuery()
-.should(matchQuery("title", "Elasticsearch"))
+.should(matchQuery("title", "OpenSearch"))
 .should(matchQuery("user", "kimchy")));
 searchRequest.source(searchSourceBuilder);
 SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
@@ -473,7 +473,7 @@ public class SearchDocumentationIT extends OpenSearchRestHighLevelClientTestCase
 HighlightField highlight = highlightFields.get("title");
 Text[] fragments = highlight.fragments();
 assertEquals(1, fragments.length);
-assertThat(fragments[0].string(), containsString("<em>Elasticsearch</em>"));
+assertThat(fragments[0].string(), containsString("<em>OpenSearch</em>"));
 highlight = highlightFields.get("user");
 fragments = highlight.fragments();
 assertEquals(1, fragments.length);
@@ -11,7 +11,7 @@ have been generated using the following openssl commands.
 [source,shell]
 -----------------------------------------------------------------------------------------------------------
 openssl genrsa -out testRootCA.key 2048
-openssl req -x509 -new -key testRootCA.key -days 1460 -subj "/CN=Elasticsearch Test Root CA/OU=elasticsearch/O=org" -out testRootCA.crt -config ./openssl_config.cnf
+openssl req -x509 -new -key testRootCA.key -days 1460 -subj "/CN=OpenSearch Test Root CA/OU=opensearch/O=org" -out testRootCA.crt -config ./openssl_config.cnf
 -----------------------------------------------------------------------------------------------------------

 == Instructions on generating the Intermediate CA
@@ -20,7 +20,7 @@ The `testIntermediateCA.crt` CA certificate is "issued" by the `testRootCA.crt`.
 [source,shell]
 -----------------------------------------------------------------------------------------------------------
 openssl genrsa -out testIntermediateCA.key 2048
-openssl req -new -key testIntermediateCA.key -subj "/CN=Elasticsearch Test Intermediate CA/OU=Elasticsearch/O=org" -out testIntermediateCA.csr -config ./openssl_config.cnf
+openssl req -new -key testIntermediateCA.key -subj "/CN=OpenSearch Test Intermediate CA/OU=OpenSearch/O=org" -out testIntermediateCA.csr -config ./openssl_config.cnf
 openssl x509 -req -in testIntermediateCA.csr -CA testRootCA.crt -CAkey testRootCA.key -CAcreateserial -out testIntermediateCA.crt -days 1460 -sha256 -extensions v3_ca -extfile ./openssl_config.cnf
 -----------------------------------------------------------------------------------------------------------

@@ -30,6 +30,6 @@ The `testClient.crt` end entity certificate is "issued" by the `testIntermediate
 [source,shell]
 -----------------------------------------------------------------------------------------------------------
 openssl genrsa -out testClient.key 2048
-openssl req -new -key testClient.key -subj "/CN=Elasticsearch Test Client/OU=Elasticsearch/O=org" -out testClient.csr -config ./openssl_config.cnf
+openssl req -new -key testClient.key -subj "/CN=OpenSearch Test Client/OU=OpenSearch/O=org" -out testClient.csr -config ./openssl_config.cnf
 openssl x509 -req -in testClient.csr -CA testIntermediateCA.crt -CAkey testIntermediateCA.key -CAcreateserial -out testClient.crt -days 1460 -sha256 -extensions usr_cert -extfile ./openssl_config.cnf
 -----------------------------------------------------------------------------------------------------------
@@ -85,10 +85,10 @@ string_mask = nombstr
 0.organizationName_default = org

 organizationalUnitName = Organizational Unit Name (eg, section)
-organizationalUnitName_default = elasticsearch
+organizationalUnitName_default = opensearch

 commonName = Common Name (hostname, IP, or your name)
-commonName_default = Elasticsearch Test Certificate
+commonName_default = OpenSearch Test Certificate
 commonName_max = 64

 ####################################################################
@@ -28,7 +28,7 @@ import java.util.Set;
 import java.util.TreeSet;

 /**
-* Metadata about an {@link HttpHost} running Elasticsearch.
+* Metadata about an {@link HttpHost} running OpenSearch.
 */
 public class Node {
 /**
@@ -46,12 +46,12 @@ public class Node {
 */
 private final String name;
 /**
-* Version of Elasticsearch that the node is running or {@code null}
+* Version of OpenSearch that the node is running or {@code null}
 * if we don't know the version.
 */
 private final String version;
 /**
-* Roles that the Elasticsearch process on the host has or {@code null}
+* Roles that the OpenSearch process on the host has or {@code null}
 * if we don't know what roles the node has.
 */
 private final Roles roles;
@@ -109,7 +109,7 @@ public class Node {
 }

 /**
-* Version of Elasticsearch that the node is running or {@code null}
+* Version of OpenSearch that the node is running or {@code null}
 * if we don't know the version.
 */
 public String getVersion() {
@@ -117,7 +117,7 @@ public class Node {
 }

 /**
-* Roles that the Elasticsearch process on the host has or {@code null}
+* Roles that the OpenSearch process on the host has or {@code null}
 * if we don't know what roles the node has.
 */
 public Roles getRoles() {
@@ -173,7 +173,7 @@ public class Node {
 }

 /**
-* Role information about an Elasticsearch process.
+* Role information about an OpenSearch process.
 */
 public static final class Roles {

@@ -33,10 +33,10 @@ import org.apache.http.protocol.HttpContext;
 * any special handling if authentication fails.
 * The default handler in Apache HTTP client mimics standard browser behaviour of clearing authentication
 * credentials if it receives a 401 response from the server. While this can be useful for browser, it is
-* rarely the desired behaviour with the Elasticsearch REST API.
+* rarely the desired behaviour with the OpenSearch REST API.
 * If the code using the REST client has configured credentials for the REST API, then we can and should
 * assume that this is intentional, and those credentials represent the best possible authentication
-* mechanism to the Elasticsearch node.
+* mechanism to the OpenSearch node.
 * If we receive a 401 status, a probably cause is that the authentication mechanism in place was unable
 * to perform the requisite password checks (the node has not yet recovered its state, or an external
 * authentication provider was unavailable).
@@ -30,7 +30,7 @@ import java.util.Objects;
 import static java.util.Collections.unmodifiableMap;

 /**
-* HTTP Request to Elasticsearch.
+* HTTP Request to OpenSearch.
 */
 public final class Request {
 private final String method;
@@ -122,8 +122,8 @@ public final class Request {
 }

 /**
-* Set the portion of an HTTP request to Elasticsearch that can be
-* manipulated without changing Elasticsearch's behavior.
+* Set the portion of an HTTP request to OpenSearch that can be
+* manipulated without changing OpenSearch's behavior.
 */
 public void setOptions(RequestOptions options) {
 Objects.requireNonNull(options, "options cannot be null");
@@ -131,8 +131,8 @@ public final class Request {
 }

 /**
-* Set the portion of an HTTP request to Elasticsearch that can be
-* manipulated without changing Elasticsearch's behavior.
+* Set the portion of an HTTP request to OpenSearch that can be
+* manipulated without changing OpenSearch's behavior.
 */
 public void setOptions(RequestOptions.Builder options) {
 Objects.requireNonNull(options, "options cannot be null");
@@ -140,8 +140,8 @@ public final class Request {
 }

 /**
-* Get the portion of an HTTP request to Elasticsearch that can be
-* manipulated without changing Elasticsearch's behavior.
+* Get the portion of an HTTP request to OpenSearch that can be
+* manipulated without changing OpenSearch's behavior.
 */
 public RequestOptions getOptions() {
 return options;
@@ -31,8 +31,8 @@ import java.util.List;
 import java.util.Objects;

 /**
-* The portion of an HTTP request to Elasticsearch that can be
-* manipulated without changing Elasticsearch's behavior.
+* The portion of an HTTP request to OpenSearch that can be
+* manipulated without changing OpenSearch's behavior.
 */
 public final class RequestOptions {
 /**
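As a quick illustration of the API these javadocs describe, here is a minimal sketch (not from the commit; host, endpoint and header values are placeholders) of building a `Request`, customizing its `RequestOptions`, and executing it with the low-level REST client.

```java
import org.apache.http.HttpHost;
import org.opensearch.client.Request;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.Response;
import org.opensearch.client.RestClient;

public class RequestOptionsExample {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request request = new Request("GET", "/_cluster/health");
            request.addParameter("pretty", "true");

            // RequestOptions carry the parts of the request that do not change
            // OpenSearch's behavior, e.g. extra headers.
            RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
            options.addHeader("X-Example-Header", "demo");
            request.setOptions(options);

            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}
```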
@@ -135,13 +135,13 @@ public class Response {
 /**
 * Tests if a string matches the RFC 7234 specification for warning headers.
 * This assumes that the warn code is always 299 and the warn agent is always
-* Elasticsearch.
+* OpenSearch.
 *
 * @param s the value of a warning header formatted according to RFC 7234
 * @return {@code true} if the input string matches the specification
 */
 private static boolean matchWarningHeaderPatternByPrefix(final String s) {
-return s.startsWith("299 Elasticsearch-");
+return s.startsWith("299 OpenSearch-");
 }

 /**
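For reference, an RFC 7234 warning header has the shape `warn-code SP warn-agent SP warn-text`; a tiny sketch of the prefix check above against an invented header value (version, hash and message are placeholders):

```java
public class WarningHeaderCheck {
    // Mirrors the prefix test above; warn code 299 and a "OpenSearch-..." agent are assumed.
    static boolean matchWarningHeaderPatternByPrefix(String s) {
        return s.startsWith("299 OpenSearch-");
    }

    public static void main(String[] args) {
        // warn-code SP warn-agent SP quoted warn-text (values invented for the demo)
        String warning = "299 OpenSearch-1.2.2-SNAPSHOT-abc123 \"[deprecated_setting] is deprecated\"";
        System.out.println(matchWarningHeaderPatternByPrefix(warning));               // true
        System.out.println(matchWarningHeaderPatternByPrefix("199 Other-Agent \"x\"")); // false
    }
}
```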
@@ -86,7 +86,7 @@ import static java.nio.charset.StandardCharsets.UTF_8;
 import static java.util.Collections.singletonList;

 /**
-* Client that connects to an Elasticsearch cluster through HTTP.
+* Client that connects to an OpenSearch cluster through HTTP.
 * <p>
 * Must be created using {@link RestClientBuilder}, which allows to set all the different options or just rely on defaults.
 * The hosts that are part of the cluster need to be provided at creation time, but can also be replaced later
@@ -241,7 +241,7 @@ public class RestClient implements Closeable {
 }

 /**
-* Sends a request to the Elasticsearch cluster that the client points to.
+* Sends a request to the OpenSearch cluster that the client points to.
 * Blocks until the request is completed and returns its response or fails
 * by throwing an exception. Selects a host out of the provided ones in a
 * round-robin fashion. Failing hosts are marked dead and retried after a
@@ -260,10 +260,10 @@ public class RestClient implements Closeable {
 * {@link Exception#getCause()}.
 *
 * @param request the request to perform
-* @return the response returned by Elasticsearch
+* @return the response returned by OpenSearch
 * @throws IOException in case of a problem or the connection was aborted
 * @throws ClientProtocolException in case of an http protocol error
-* @throws ResponseException in case Elasticsearch responded with a status code that indicated an error
+* @throws ResponseException in case OpenSearch responded with a status code that indicated an error
 */
 public Response performRequest(Request request) throws IOException {
 InternalRequest internalRequest = new InternalRequest(request);
@@ -335,7 +335,7 @@ public class RestClient implements Closeable {
 }

 /**
-* Sends a request to the Elasticsearch cluster that the client points to.
+* Sends a request to the OpenSearch cluster that the client points to.
 * The request is executed asynchronously and the provided
 * {@link ResponseListener} gets notified upon request completion or
 * failure. Selects a host out of the provided ones in a round-robin
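To complement the synchronous `performRequest` documented above, a minimal sketch (host and endpoint are placeholders) of the asynchronous variant with a `ResponseListener`:

```java
import java.util.concurrent.CountDownLatch;

import org.apache.http.HttpHost;
import org.opensearch.client.Request;
import org.opensearch.client.Response;
import org.opensearch.client.ResponseListener;
import org.opensearch.client.RestClient;

public class AsyncRequestExample {
    public static void main(String[] args) throws Exception {
        RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build();
        CountDownLatch latch = new CountDownLatch(1);

        client.performRequestAsync(new Request("GET", "/"), new ResponseListener() {
            @Override
            public void onSuccess(Response response) {
                System.out.println("status: " + response.getStatusLine());
                latch.countDown();
            }

            @Override
            public void onFailure(Exception exception) {
                exception.printStackTrace();
                latch.countDown();
            }
        });

        latch.await(); // wait for the async callback before closing the client
        client.close();
    }
}
```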
@@ -129,7 +129,7 @@ public final class RestClientBuilder {
 * For example, if this is set to "/my/path", then any client request will become <code>"/my/path/" + endpoint</code>.
 * <p>
 * In essence, every request's {@code endpoint} is prefixed by this {@code pathPrefix}. The path prefix is useful for when
-* Elasticsearch is behind a proxy that provides a base path or a proxy that requires all paths to start with '/';
+* OpenSearch is behind a proxy that provides a base path or a proxy that requires all paths to start with '/';
 * it is not intended for other purposes and it should not be supplied in other scenarios.
 *
 * @throws NullPointerException if {@code pathPrefix} is {@code null}.
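A short sketch of the path-prefix option in use, assuming a reverse proxy that exposes the cluster under `/search` (the prefix and host are illustrative):

```java
import org.apache.http.HttpHost;
import org.opensearch.client.Request;
import org.opensearch.client.RestClient;

public class PathPrefixExample {
    public static void main(String[] args) throws Exception {
        // Every endpoint is prefixed, so "/_cat/indices" becomes "/search/_cat/indices".
        try (RestClient client = RestClient.builder(new HttpHost("proxy.example.com", 443, "https"))
                .setPathPrefix("/search")
                .build()) {
            System.out.println(client.performRequest(new Request("GET", "/_cat/indices")).getStatusLine());
        }
    }
}
```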
@@ -524,7 +524,7 @@ public class RestClientSingleHostTests extends RestClientTestCase {
 */
 private static String formatWarningWithoutDate(String warningBody) {
 final String hash = new String(new byte[40], StandardCharsets.UTF_8).replace('\0', 'e');
-return "299 Elasticsearch-1.2.2-SNAPSHOT-" + hash + " \"" + warningBody + "\"";
+return "299 OpenSearch-1.2.2-SNAPSHOT-" + hash + " \"" + warningBody + "\"";
 }

 private static String formatWarning(String warningBody) {
@@ -51,7 +51,6 @@ import static java.util.Collections.unmodifiableMap;

 /**
 * Class responsible for sniffing the http hosts from opensearch through the nodes info api and returning them back.
-* Compatible with elasticsearch 2.x+.
 */
 public final class OpenSearchNodesSniffer implements NodesSniffer {

@@ -53,7 +53,7 @@ ext.expansions = { Architecture architecture, DockerBase base, boolean local ->
 sourceOpenSearch = """
 RUN curl --retry 8 -S -L \\
 --output /opt/opensearch.tar.gz \\
-https://artifacts-no-kpi.elastic.co/downloads/elasticsearch/$elasticsearch
+<artifacts_link_placeholder>
 """
 }

@@ -43,7 +43,7 @@ RUN set -eux ; \\
 RUN mkdir /usr/share/opensearch
 WORKDIR /usr/share/opensearch

-${source_elasticsearch}
+${source_opensearch}

 RUN tar zxf /opt/opensearch.tar.gz --strip-components=1
 RUN sed -i -e 's/OPENSEARCH_DISTRIBUTION_TYPE=tar/OPENSEARCH_DISTRIBUTION_TYPE=docker/' /usr/share/opensearch/bin/opensearch-env
@@ -278,15 +278,13 @@ apply plugin: 'nebula.ospackage-base'

 // this is package indepdendent configuration
 ospackage {
-maintainer 'Elasticsearch Team <info@elastic.co>'
+maintainer 'OpenSearch Team'
 summary 'Distributed RESTful search engine built for the cloud'
 packageDescription '''
 Reference documentation can be found at
-https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html
-and the 'Elasticsearch: The Definitive Guide' book can be found at
-https://www.elastic.co/guide/en/elasticsearch/guide/current/index.html
+https://github.com/opensearch-project/OpenSearch
 '''.stripIndent().trim()
-url 'https://www.elastic.co/'
+url 'https://github.com/opensearch-project/OpenSearch'

 // signing setup
 if (project.hasProperty('signing.password') && BuildParams.isSnapshotBuild() == false) {
@@ -60,12 +60,6 @@ if [ -f "$DEFAULT" ]; then
 . "$DEFAULT"
 fi

-# ES_USER and ES_GROUP settings were removed
-if [ ! -z "$ES_USER" ] || [ ! -z "$ES_GROUP" ]; then
-echo "ES_USER and ES_GROUP settings are no longer supported. To run as a custom user/group use the archive distribution of Elasticsearch."
-exit 1
-fi
-
 # Define other required variables
 PID_FILE="$PID_DIR/$NAME.pid"
 DAEMON=$OPENSEARCH_HOME/bin/opensearch
@@ -45,12 +45,6 @@ if [ -f "$OPENSEARCH_ENV_FILE" ]; then
 . "$OPENSEARCH_ENV_FILE"
 fi

-# ES_USER and ES_GROUP settings were removed
-if [ ! -z "$ES_USER" ] || [ ! -z "$ES_GROUP" ]; then
-echo "ES_USER and ES_GROUP settings are no longer supported. To run as a custom user/group use the archive distribution of Elasticsearch."
-exit 1
-fi
-
 exec="$OPENSEARCH_HOME/bin/opensearch"
 prog="opensearch"
 pidfile="$PID_DIR/${prog}.pid"
@@ -51,7 +51,7 @@ final class JavaVersionChecker {
 if (JavaVersion.compare(JavaVersion.CURRENT, JavaVersion.JAVA_11) < 0) {
 final String message = String.format(
 Locale.ROOT,
-"future versions of Elasticsearch will require Java 11; your Java version from [%s] does not meet this requirement",
+"future versions of OpenSearch will require Java 11; your Java version from [%s] does not meet this requirement",
 System.getProperty("java.home")
 );
 errPrintln(message);
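On newer JDKs, a runtime version check like the one this launcher performs can also be written with the standard `Runtime.version()` API; a small standalone sketch (the threshold of 11 simply mirrors the message above):

```java
public class JavaVersionCheckSketch {
    public static void main(String[] args) {
        // Runtime.version().feature() returns the major Java version (available since JDK 10).
        int major = Runtime.version().feature();
        if (major < 11) {
            System.err.printf("Java 11 or later is required; running on %d from [%s]%n",
                major, System.getProperty("java.home"));
            System.exit(1);
        }
        System.out.println("Java " + major + " is new enough");
    }
}
```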
@@ -40,7 +40,7 @@ import java.util.stream.Collectors;
 import java.util.stream.Stream;

 /**
-* Tunes Elasticsearch JVM settings based on inspection of provided JVM options.
+* Tunes OpenSearch JVM settings based on inspection of provided JVM options.
 */
 final class JvmErgonomics {

@@ -49,7 +49,7 @@ final class JvmErgonomics {
 }

 /**
-* Chooses additional JVM options for Elasticsearch.
+* Chooses additional JVM options for OpenSearch.
 *
 * @param userDefinedJvmOptions A list of JVM options that have been defined by the user.
 * @return A list of additional JVM options to set.
@@ -61,7 +61,7 @@ final class JvmErgonomics {
 final long maxDirectMemorySize = extractMaxDirectMemorySize(finalJvmOptions);

 if (System.getProperty("os.name").startsWith("Windows") && JavaVersion.majorVersion(JavaVersion.CURRENT) == 8) {
-Launchers.errPrintln("Warning: with JDK 8 on Windows, Elasticsearch may be unable to derive correct");
+Launchers.errPrintln("Warning: with JDK 8 on Windows, OpenSearch may be unable to derive correct");
 Launchers.errPrintln(" ergonomic settings due to a JDK issue (JDK-8074459). Please use a newer");
 Launchers.errPrintln(" version of Java.");
 }
@@ -69,7 +69,7 @@ class ListPluginsCommand extends EnvironmentAwareCommand {
 terminal.errorPrintln(
 "WARNING: plugin ["
 + info.getName()
-+ "] was built for Elasticsearch version "
++ "] was built for OpenSearch version "
 + info.getVersion()
 + " but version "
 + Version.CURRENT
@@ -43,7 +43,7 @@ import java.util.stream.Stream;
 import static org.opensearch.cli.Terminal.Verbosity.VERBOSE;

 /**
-* A command for the plugin CLI to remove a plugin from Elasticsearch.
+* A command for the plugin CLI to remove a plugin from OpenSearch.
 */
 class RemovePluginCommand extends EnvironmentAwareCommand {

@@ -55,7 +55,7 @@ class RemovePluginCommand extends EnvironmentAwareCommand {
 private final OptionSpec<String> arguments;

 RemovePluginCommand() {
-super("removes a plugin from Elasticsearch");
+super("removes a plugin from OpenSearch");
 this.purgeOption = parser.acceptsAll(Arrays.asList("p", "purge"), "Purge plugin configuration files");
 this.arguments = parser.nonOptions("plugin name");
 }
@@ -54,7 +54,7 @@ import java.util.jar.Manifest;
 * <li>Checks that class files are not duplicated across jars.</li>
 * <li>Checks any {@code X-Compile-Target-JDK} value in the jar
 * manifest is compatible with current JRE</li>
-* <li>Checks any {@code X-Compile-Elasticsearch-Version} value in
+* <li>Checks any {@code X-Compile-OpenSearch-Version} value in
 * the jar manifest is compatible with the current ES</li>
 * </ul>
 */
@@ -135,7 +135,7 @@ public abstract class AbstractObjectParser<Value, Context> {
 * </code>
 * </pre>
 *
-* This is because json doesn't enforce ordering. Elasticsearch reads it in
+* This is because json doesn't enforce ordering. OpenSearch reads it in
 * the order sent but tools that generate json are free to put object
 * members in an unordered Map, jumbling them. Thus, if you care about order
 * you can send the object in the second way.
@@ -23,7 +23,7 @@ import java.util.ServiceLoader;

 /**
 * Extension point to customize the error message for unknown fields. We expect
-* Elasticsearch to plug a fancy implementation that uses Lucene's spelling
+* OpenSearch to plug a fancy implementation that uses Lucene's spelling
 * correction infrastructure to suggest corrections.
 */
 public interface ErrorOnUnknown {
@@ -141,7 +141,7 @@ public enum XContentType {

 /**
 * Clients compatible with ES 7.x might start sending media types with versioned media type
-* in a form of application/vnd.elasticsearch+json;compatible-with=7.
+* in a form of application/vnd.opensearch+json;compatible-with=7.
 * This has to be removed in order to be used in 7.x server.
 * The same client connecting using that media type will be able to communicate with ES 8 thanks to compatible API.
 * @param mediaType - a media type used on Content-Type header, might contain versioned media type.
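A hypothetical sketch of the normalization this javadoc describes: stripping a versioned vendor media type down to the plain one before matching it against an `XContentType`. The regex and method name are illustrative, not the project's actual implementation.

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class MediaTypeNormalizer {
    // e.g. "application/vnd.opensearch+json;compatible-with=7" -> "application/json"
    private static final Pattern VERSIONED = Pattern.compile(
        "application/vnd\\.opensearch\\+([^;\\s]+)(?:\\s*;\\s*compatible-with=\\d+)?");

    static String stripVersion(String mediaType) {
        Matcher m = VERSIONED.matcher(mediaType.trim());
        return m.matches() ? "application/" + m.group(1) : mediaType;
    }

    public static void main(String[] args) {
        System.out.println(stripVersion("application/vnd.opensearch+json;compatible-with=7")); // application/json
        System.out.println(stripVersion("application/json"));                                  // unchanged
    }
}
```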
@@ -29,7 +29,7 @@ import static java.util.Collections.singletonMap;
 /**
 * These tests run the Painless scripts used in the context docs against
 * slightly modified data designed around unit tests rather than a fully-
-* running Elasticsearch server.
+* running OpenSearch server.
 */
 public class ContextExampleTests extends ScriptTestCase {

@@ -41,7 +41,7 @@ public class AwsEc2ServiceImplTests extends OpenSearchTestCase {
 assertThat(credentialsProvider, instanceOf(DefaultAWSCredentialsProviderChain.class));
 }

-public void testAWSCredentialsWithElasticsearchAwsSettings() {
+public void testAWSCredentialsWithOpenSearchAwsSettings() {
 final MockSecureSettings secureSettings = new MockSecureSettings();
 secureSettings.setString("discovery.ec2.access_key", "aws_key");
 secureSettings.setString("discovery.ec2.secret_key", "aws_secret");
@@ -51,7 +51,7 @@ public class AwsEc2ServiceImplTests extends OpenSearchTestCase {
 assertThat(credentials.getAWSSecretKey(), is("aws_secret"));
 }

-public void testAWSSessionCredentialsWithElasticsearchAwsSettings() {
+public void testAWSSessionCredentialsWithOpenSearchAwsSettings() {
 final MockSecureSettings secureSettings = new MockSecureSettings();
 secureSettings.setString("discovery.ec2.access_key", "aws_key");
 secureSettings.setString("discovery.ec2.secret_key", "aws_secret");
@@ -110,7 +110,7 @@ public class RetryHttpInitializerWrapperTests extends OpenSearchTestCase {
 .build();

 // TODO (URL) replace w/ opensearch url
-HttpRequest request = client.getRequestFactory().buildRequest("Get", new GenericUrl("http://elasticsearch.com"), null);
+HttpRequest request = client.getRequestFactory().buildRequest("Get", new GenericUrl("https://github.com/opensearch-project/OpenSearch"), null);
 HttpResponse response = request.execute();

 assertThat(mockSleeper.getCount(), equalTo(3));
@@ -143,7 +143,7 @@ public class RetryHttpInitializerWrapperTests extends OpenSearchTestCase {
 .build();

 // TODO (URL) replace w/ opensearch URL
-HttpRequest request1 = client.getRequestFactory().buildRequest("Get", new GenericUrl("http://elasticsearch.com"), null);
+HttpRequest request1 = client.getRequestFactory().buildRequest("Get", new GenericUrl("https://github.com/opensearch-project/OpenSearch"), null);
 try {
 request1.execute();
 fail("Request should fail if wait too long");
@@ -170,7 +170,7 @@ public class RetryHttpInitializerWrapperTests extends OpenSearchTestCase {
 .build();

 // TODO (URL) replace w/ opensearch URL
-HttpRequest request = client.getRequestFactory().buildRequest("Get", new GenericUrl("http://elasticsearch.com"), null);
+HttpRequest request = client.getRequestFactory().buildRequest("Get", new GenericUrl("https://github.com/opensearch-project/OpenSearch"), null);
 HttpResponse response = request.execute();

 assertThat(mockSleeper.getCount(), equalTo(1));
@@ -26,7 +26,7 @@
 "tags": {
 "fingerprint": "xA6QJb-rGtg=",
 "items": [
-"elasticsearch",
+"opensearch",
 "dev"
 ]
 },
@@ -26,7 +26,7 @@
 "tags": {
 "fingerprint": "xA6QJb-rGtg=",
 "items": [
-"elasticsearch",
+"opensearch",
 "dev"
 ]
 },
@@ -47,7 +47,7 @@ import static org.opensearch.repositories.azure.AzureStorageService.MIN_CHUNK_SI
 * <p>
 * Azure file system repository supports the following settings:
 * <dl>
-* <dt>{@code container}</dt><dd>Azure container name. Defaults to elasticsearch-snapshots</dd>
+* <dt>{@code container}</dt><dd>Azure container name. Defaults to opensearch-snapshots</dd>
 * <dt>{@code base_path}</dt><dd>Specifies the path within bucket to repository data. Defaults to root directory.</dd>
 * <dt>{@code chunk_size}</dt><dd>Large file can be divided into chunks. This parameter specifies the chunk size. Defaults to 64mb.</dd>
 * <dt>{@code compress}</dt><dd>If set to true metadata files will be stored compressed. Defaults to false.</dd>
@@ -1121,7 +1121,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
 && getOldClusterVersion().onOrAfter(Version.V_6_1_0) && getOldClusterVersion().before(Version.V_6_5_0)) {
 for (String warning : e.getResponse().getWarnings()) {
 assertThat(warning, containsString(
-"setting was deprecated in Elasticsearch and will be removed in a future release! "
+"setting was deprecated and will be removed in a future release! "
 + "See the breaking changes documentation for the next major version."));
 assertThat(warning, startsWith("[search.remote."));
 }
@@ -2,9 +2,9 @@

 This project contains tests that verify the distributions we build work
 correctly on the operating systems we support. They're intended to cover the
-steps a user would take when installing and configuring an Elasticsearch
+steps a user would take when installing and configuring an OpenSearch
 distribution. They're not intended to have significant coverage of the behavior
-of Elasticsearch's features.
+of OpenSearch's features.

 There are two types of tests in this project. The old tests live in
 `src/test/` and are written in [Bats](https://github.com/sstephenson/bats),
@@ -71,7 +71,7 @@ public class KeystoreManagementTests extends PackagingTestCase {
 public static final String ERROR_INCORRECT_PASSWORD = "Provided keystore password was incorrect";
 public static final String ERROR_CORRUPTED_KEYSTORE = "Keystore has been corrupted or tampered with";
 public static final String ERROR_KEYSTORE_NOT_PASSWORD_PROTECTED = "ERROR: Keystore is not password-protected";
-public static final String ERROR_KEYSTORE_NOT_FOUND = "ERROR: Elasticsearch keystore not found";
+public static final String ERROR_KEYSTORE_NOT_FOUND = "ERROR: OpenSearch keystore not found";

 /** Test initial archive state */
 public void test10InstallArchiveDistribution() throws Exception {
@@ -177,7 +177,7 @@ public class KeystoreManagementTests extends PackagingTestCase {
 }

 /**
-* This test simulates a user starting Elasticsearch on the command line without daemonizing
+* This test simulates a user starting OpenSearch on the command line without daemonizing
 */
 public void test42KeystorePasswordOnTtyRunningInForeground() throws Exception {
 /* Windows issue awaits fix: https://github.com/elastic/elasticsearch/issues/49340 */
@@ -358,7 +358,7 @@ public class KeystoreManagementTests extends PackagingTestCase {

 /**
 * Check that if we provide the wrong password for a mounted and password-protected
-* keystore, Elasticsearch doesn't start.
+* keystore, OpenSearch doesn't start.
 */
 public void test62DockerEnvironmentVariableBadPassword() throws Exception {
 assumeTrue(distribution().isDocker());
@@ -494,7 +494,7 @@ public class KeystoreManagementTests extends PackagingTestCase {
 assertPermissionsAndOwnership(keystore, p660);
 break;
 default:
-throw new IllegalStateException("Unknown Elasticsearch packaging type.");
+throw new IllegalStateException("Unknown OpenSearch packaging type.");
 }
 }
 }
@@ -168,12 +168,12 @@ public class PackageTests extends PackagingTestCase {
 "journalctl _SYSTEMD_UNIT=opensearch.service "
 + "--since \""
 + start
-+ "\" --output cat | grep -v \"future versions of Elasticsearch will require Java 11\" | wc -l"
++ "\" --output cat | grep -v \"future versions of OpenSearch will require Java 11\" | wc -l"
 ).stdout.trim();
 assertThat(journalEntries, equalTo("0"));

 assertPathsExist(installation.pidDir.resolve("opensearch.pid"));
-assertPathsExist(installation.logs.resolve("elasticsearch_server.json"));
+assertPathsExist(installation.logs.resolve("opensearch_server.json"));

 runOpenSearchTests();
 verifyPackageInstallation(installation, distribution(), sh); // check startup script didn't change permissions
@@ -187,7 +187,7 @@ public class PackageTests extends PackagingTestCase {
 remove(distribution());

 // removing must stop the service
-assertThat(sh.run("ps aux").stdout, not(containsString("org.opensearch.bootstrap.Elasticsearch")));
+assertThat(sh.run("ps aux").stdout, not(containsString("org.opensearch.bootstrap.OpenSearch")));

 if (isSystemd()) {

@@ -68,7 +68,7 @@ public class PackageUpgradeTests extends PackagingTestCase {
 // add some docs
 makeRequest(
 Request.Post("http://localhost:9200/library/_doc/1?refresh=true&pretty")
-.bodyString("{ \"title\": \"Elasticsearch - The Definitive Guide\"}", ContentType.APPLICATION_JSON)
+.bodyString("{ \"title\": \"OpenSearch\"}", ContentType.APPLICATION_JSON)
 );
 makeRequest(
 Request.Post("http://localhost:9200/library/_doc/2?refresh=true&pretty")
@@ -101,7 +101,7 @@ public class PackageUpgradeTests extends PackagingTestCase {

 private void assertDocsExist() throws Exception {
 String response1 = makeRequest(Request.Get("http://localhost:9200/library/_doc/1?pretty"));
-assertThat(response1, containsString("Elasticsearch"));
+assertThat(response1, containsString("OpenSearch"));
 String response2 = makeRequest(Request.Get("http://localhost:9200/library/_doc/2?pretty"));
 assertThat(response2, containsString("World"));
 String response3 = makeRequest(Request.Get("http://localhost:9200/library2/_doc/1?pretty"));
@@ -35,7 +35,7 @@ import static org.opensearch.packaging.util.Platforms.isSystemd;

 public class Cleanup {

-private static final List<String> ELASTICSEARCH_FILES_LINUX = Arrays.asList(
+private static final List<String> OPENSEARCH_FILES_LINUX = Arrays.asList(
 "/usr/share/opensearch",
 "/etc/opensearch/opensearch.keystore",
 "/etc/opensearch",
@@ -84,7 +84,7 @@ public class Cleanup {

 // delete files that may still exist
 lsGlob(getRootTempDir(), "opensearch*").forEach(FileUtils::rm);
-final List<String> filesToDelete = Platforms.WINDOWS ? OPENSEARCH_FILES_WINDOWS : ELASTICSEARCH_FILES_LINUX;
+final List<String> filesToDelete = Platforms.WINDOWS ? OPENSEARCH_FILES_WINDOWS : OPENSEARCH_FILES_LINUX;
 // windows needs leniency due to asinine releasing of file locking async from a process exiting
 Consumer<? super Path> rm = Platforms.WINDOWS ? FileUtils::rmWithRetries : FileUtils::rm;
 filesToDelete.stream().map(Paths::get).filter(Files::exists).forEach(rm);
@@ -54,7 +54,7 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;

 /**
-* Utilities for running packaging tests against the Elasticsearch Docker images.
+* Utilities for running packaging tests against the OpenSearch Docker images.
 */
 public class Docker {
 private static final Log logger = LogFactory.getLog(Docker.class);
@@ -90,7 +90,7 @@ public class Docker {
 }

 /**
-* Runs an Elasticsearch Docker container.
+* Runs an OpenSearch Docker container.
 * @param distribution details about the docker image being tested.
 */
 public static Installation runContainer(Distribution distribution) {
@@ -98,7 +98,7 @@ public class Docker {
 }

 /**
-* Runs an Elasticsearch Docker container, with options for overriding the config directory
+* Runs an OpenSearch Docker container, with options for overriding the config directory
 * through a bind mount, and passing additional environment variables.
 *
 * @param distribution details about the docker image being tested.
@@ -110,7 +110,7 @@ public class Docker {
 }

 /**
-* Runs an Elasticsearch Docker container, with options for overriding the config directory
+* Runs an OpenSearch Docker container, with options for overriding the config directory
 * through a bind mount, and passing additional environment variables.
 * @param distribution details about the docker image being tested.
 * @param volumes a map that declares any volume mappings to apply, or null
@@ -133,7 +133,7 @@ public class Docker {
 }

 /**
-* Similar to {@link #runContainer(Distribution, Map, Map)} in that it runs an Elasticsearch Docker
+* Similar to {@link #runContainer(Distribution, Map, Map)} in that it runs an OpenSearch Docker
 * container, expect that the container expecting it to exit e.g. due to configuration problem.
 *
 * @param distribution details about the docker image being tested.
@@ -219,7 +219,7 @@ public class Docker {
 }

 /**
-* Waits for the Elasticsearch process to start executing in the container.
+* Waits for the OpenSearch process to start executing in the container.
 * This is called every time a container is started.
 */
 public static void waitForOpenSearchToStart() {
@@ -235,7 +235,7 @@ public class Docker {

 psOutput = dockerShell.run("ps -ww ax").stdout;

-if (psOutput.contains("org.opensearch.bootstrap.Elasticsearch")) {
+if (psOutput.contains("org.opensearch.bootstrap.OpenSearch")) {
 isOpenSearchRunning = true;
 break;
 }
@@ -247,7 +247,7 @@ public class Docker {
 if (isOpenSearchRunning == false) {
 final Shell.Result dockerLogs = getContainerLogs();
 fail(
-"Elasticsearch container did not start successfully.\n\nps output:\n"
+"OpenSearch container did not start successfully.\n\nps output:\n"
 + psOutput
 + "\n\nStdout:\n"
 + dockerLogs.stdout
@@ -258,7 +258,7 @@ public class Docker {
 }

 /**
-* Waits for the Elasticsearch container to exit.
+* Waits for the OpenSearch container to exit.
 */
 private static void waitForOpenSearchToExit() {
 boolean isOpenSearchRunning = true;
@@ -280,7 +280,7 @@ public class Docker {

 if (isOpenSearchRunning) {
 final Shell.Result dockerLogs = getContainerLogs();
-fail("Elasticsearch container did exit.\n\nStdout:\n" + dockerLogs.stdout + "\n\nStderr:\n" + dockerLogs.stderr);
+fail("OpenSearch container did exit.\n\nStdout:\n" + dockerLogs.stdout + "\n\nStderr:\n" + dockerLogs.stderr);
 }
 }

@@ -545,7 +545,7 @@ public class Docker {
 r.run();
 } catch (Exception e) {
 final Shell.Result logs = getContainerLogs();
-logger.warn("Elasticsearch container failed to start.\n\nStdout:\n" + logs.stdout + "\n\nStderr:\n" + logs.stderr);
+logger.warn("OpenSearch container failed to start.\n\nStdout:\n" + logs.stdout + "\n\nStderr:\n" + logs.stderr);
 throw e;
 }
 }
@@ -223,7 +223,7 @@ public class Packages {
 }

 /**
-* Starts Elasticsearch, without checking that startup is successful.
+* Starts OpenSearch, without checking that startup is successful.
 */
 public static Shell.Result runOpenSearchStartCommand(Shell sh) throws IOException {
 if (isSystemd()) {
@@ -265,7 +265,7 @@ public class Packages {

 /**
 * A small wrapper for retrieving only recent journald logs for the
-* Elasticsearch service. It works by creating a cursor for the logs
+* OpenSearch service. It works by creating a cursor for the logs
 * when instantiated, and advancing that cursor when the {@code clear()}
 * method is called.
 */
@@ -274,7 +274,7 @@ public class Packages {
 private String cursor;

 /**
-* Create a new wrapper for Elasticsearch JournalD logs.
+* Create a new wrapper for OpenSearch JournalD logs.
 * @param sh A shell with appropriate permissions.
 */
 public JournaldWrapper(Shell sh) {
@@ -284,7 +284,7 @@ public class Packages {

 /**
 * "Clears" the journaled messages by retrieving the latest cursor
-* for Elasticsearch logs and storing it in class state.
+* for OpenSearch logs and storing it in class state.
 */
 public void clear() {
 final String script = "sudo journalctl --unit=opensearch.service --lines=0 --show-cursor -o cat | sed -e 's/-- cursor: //'";
@@ -293,7 +293,7 @@ public class Packages {

 /**
 * Retrieves all log messages coming after the stored cursor.
-* @return Recent journald logs for the Elasticsearch service.
+* @return Recent journald logs for the OpenSearch service.
 */
 public Result getLogs() {
 return sh.run("journalctl -u opensearch.service --after-cursor='" + this.cursor + "'");
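The cursor idea described in these javadocs can be sketched standalone. Below is a rough, hypothetical equivalent (not the project's `JournaldWrapper`, which uses its own `Shell` helper) built on `ProcessBuilder` and the same `journalctl` commands shown above.

```java
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.stream.Collectors;

// Remember the latest journald cursor, then only read entries written after it.
public class JournaldCursorSketch {
    private String cursor;

    private static String run(String... cmd) throws IOException, InterruptedException {
        Process p = new ProcessBuilder(cmd).redirectErrorStream(true).start();
        try (BufferedReader r = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
            String out = r.lines().collect(Collectors.joining("\n"));
            p.waitFor();
            return out.trim();
        }
    }

    /** Advance the cursor so later reads skip everything logged so far. */
    public void clear() throws IOException, InterruptedException {
        this.cursor = run("bash", "-c",
            "journalctl --unit=opensearch.service --lines=0 --show-cursor -o cat | sed -e 's/-- cursor: //'");
    }

    /** Return only log lines written after the stored cursor. */
    public String getLogs() throws IOException, InterruptedException {
        return run("journalctl", "-u", "opensearch.service", "--after-cursor=" + cursor);
    }
}
```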
@@ -4,7 +4,7 @@ services:
 wildfly:
 image: jboss/wildfly:18.0.1.Final
 environment:
-JAVA_OPTS: -Delasticsearch.uri=opensearch:9200 -Djboss.http.port=8080 -Djava.net.preferIPv4Stack=true
+JAVA_OPTS: -Dopensearch.uri=opensearch:9200 -Djboss.http.port=8080 -Djava.net.preferIPv4Stack=true
 volumes:
 - ./build/distributions/example-app.war:/opt/jboss/wildfly/standalone/deployments/example-app.war
 ports:
@@ -1,6 +1,6 @@
-# Elasticsearch REST API JSON specification
+# OpenSearch REST API JSON specification

-This repository contains a collection of JSON files which describe the [Elasticsearch](http://elastic.co) HTTP API.
+This repository contains a collection of JSON files which describe the [OpenSearch](https://github.com/opensearch-project/OpenSearch) HTTP API.

 Their purpose is to formalize and standardize the API, to facilitate development of libraries and integrations.

@@ -74,7 +74,7 @@ If an API is stable but it response should be treated as an arbitrary map of key

 ## Backwards compatibility

-The specification follows the same backward compatibility guarantees as Elasticsearch.
+The specification follows the same backward compatibility guarantees as OpenSearch.

 - Within a Major, additions only.
 - If an item has been documented wrong it should be deprecated instead as removing these might break downstream clients.
@@ -5,8 +5,8 @@ Test Suite:
 .Required settings
 =======================================
 Certain tests require specific settings to be applied to the
-Elasticsearch instance in order to pass. You should run
-Elasticsearch as follows:
+OpenSearch instance in order to pass. You should run
+OpenSearch as follows:

 [source,sh]
 ---------------------
@@ -84,7 +84,7 @@ in the `indices.get_settings` API.

 Skipping tests:
 ---------------
-If a test section should only be run on certain versions of Elasticsearch,
+If a test section should only be run on certain versions of OpenSearch,
 then the first entry in the section (after the title) should be called
 `skip`, and should contain the range of versions to be
 skipped, and the reason why the tests are skipped. For instance:
@@ -196,7 +196,7 @@ somevalue with whatever is the response in the same position."

 === `warnings`

-The runner can assert the warnings headers returned by Elasticsearch through the `warning:` assertations
+The runner can assert the warnings headers returned by OpenSearch through the `warning:` assertations
 under `do:` operations.

 === `yaml`
@@ -162,7 +162,7 @@ public class RemoveCorruptedShardDataCommandIT extends OpenSearchIntegTestCase {
 } catch (Exception e) {
 assertThat(e.getMessage(),
 allOf(containsString("failed to lock node's directory"),
-containsString("is Elasticsearch still running?")));
+containsString("is OpenSearch still running?")));
 }

 final Path indexDir = getPathToShardData(indexName, ShardPath.INDEX_FOLDER_NAME);
@@ -62,7 +62,7 @@ public class PrivateSettingsIT extends OpenSearchIntegTestCase {
 .prepareUpdateSettings("test")
 .setSettings(Settings.builder().put("index.private", "private-update"))
 .get());
-final String message = "can not update private setting [index.private]; this setting is managed by Elasticsearch";
+final String message = "can not update private setting [index.private]; this setting is managed by OpenSearch";
 assertThat(e, hasToString(containsString(message)));
 final GetSettingsResponse responseAfterAttemptedUpdate = client().admin().indices().prepareGetSettings("test").get();
 assertNull(responseAfterAttemptedUpdate.getSetting("test", "index.private"));
@@ -32,11 +32,11 @@ import java.util.jar.JarInputStream;
 import java.util.jar.Manifest;

 /**
-* Information about a build of Elasticsearch.
+* Information about a build of OpenSearch.
 */
 public class Build {
 /**
-* The current build of Elasticsearch. Filled with information scanned at
+* The current build of OpenSearch. Filled with information scanned at
 * startup from the jar.
 */
 public static final Build CURRENT;
@@ -106,8 +106,8 @@ public class Build {
 Manifest manifest = jar.getManifest();
 hash = manifest.getMainAttributes().getValue("Change");
 date = manifest.getMainAttributes().getValue("Build-Date");
-isSnapshot = "true".equals(manifest.getMainAttributes().getValue("X-Compile-Elasticsearch-Snapshot"));
-version = manifest.getMainAttributes().getValue("X-Compile-Elasticsearch-Version");
+isSnapshot = "true".equals(manifest.getMainAttributes().getValue("X-Compile-OpenSearch-Snapshot"));
+version = manifest.getMainAttributes().getValue("X-Compile-OpenSearch-Version");
 } catch (IOException e) {
 throw new RuntimeException(e);
 }
@@ -131,15 +131,15 @@ public class Build {
 }
 if (hash == null) {
 throw new IllegalStateException("Error finding the build hash. " +
-"Stopping Elasticsearch now so it doesn't run in subtly broken ways. This is likely a build bug.");
+"Stopping OpenSearch now so it doesn't run in subtly broken ways. This is likely a build bug.");
 }
 if (date == null) {
 throw new IllegalStateException("Error finding the build date. " +
-"Stopping Elasticsearch now so it doesn't run in subtly broken ways. This is likely a build bug.");
+"Stopping OpenSearch now so it doesn't run in subtly broken ways. This is likely a build bug.");
 }
 if (version == null) {
 throw new IllegalStateException("Error finding the build version. " +
-"Stopping Elasticsearch now so it doesn't run in subtly broken ways. This is likely a build bug.");
+"Stopping OpenSearch now so it doesn't run in subtly broken ways. This is likely a build bug.");
 }

 CURRENT = new Build(type, hash, date, isSnapshot, version);
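A small standalone sketch of the manifest scan this class performs: open a jar, read its main attributes, and pull out the build metadata keys. The jar path is a placeholder, and a jar will only carry these values if its build wrote them.

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.jar.JarInputStream;
import java.util.jar.Manifest;

public class ManifestInfoSketch {
    public static void main(String[] args) throws IOException {
        Path jarPath = Path.of("build/distributions/example.jar"); // placeholder path
        try (JarInputStream jar = new JarInputStream(Files.newInputStream(jarPath))) {
            Manifest manifest = jar.getManifest();
            if (manifest == null) {
                System.out.println("no manifest present");
                return;
            }
            // Same attribute names the hunk above switches to.
            System.out.println("hash:     " + manifest.getMainAttributes().getValue("Change"));
            System.out.println("date:     " + manifest.getMainAttributes().getValue("Build-Date"));
            System.out.println("snapshot: " + manifest.getMainAttributes().getValue("X-Compile-OpenSearch-Snapshot"));
            System.out.println("version:  " + manifest.getMainAttributes().getValue("X-Compile-OpenSearch-Version"));
        }
    }
}
```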
@@ -94,7 +94,7 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
 private static final String ROOT_CAUSE = "root_cause";

 private static final Map<Integer, CheckedFunction<StreamInput, ? extends OpenSearchException, IOException>> ID_TO_SUPPLIER;
-private static final Map<Class<? extends OpenSearchException>, OpenSearchExceptionHandle> CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE;
+private static final Map<Class<? extends OpenSearchException>, OpenSearchExceptionHandle> CLASS_TO_OPENSEARCH_EXCEPTION_HANDLE;
 private final Map<String, List<String>> metadata = new HashMap<>();
 private final Map<String, List<String>> headers = new HashMap<>();

@@ -299,7 +299,7 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
 * Returns <code>true</code> iff the given class is a registered for an exception to be read.
 */
 public static boolean isRegistered(Class<? extends Throwable> exception, Version version) {
-OpenSearchExceptionHandle openSearchExceptionHandle = CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE.get(exception);
+OpenSearchExceptionHandle openSearchExceptionHandle = CLASS_TO_OPENSEARCH_EXCEPTION_HANDLE.get(exception);
 if (openSearchExceptionHandle != null) {
 return version.onOrAfter(openSearchExceptionHandle.versionAdded);
 }
@@ -307,14 +307,14 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
 }

 static Set<Class<? extends OpenSearchException>> getRegisteredKeys() { // for testing
-return CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE.keySet();
+return CLASS_TO_OPENSEARCH_EXCEPTION_HANDLE.keySet();
 }

 /**
 * Returns the serialization id the given exception.
 */
 public static int getId(Class<? extends OpenSearchException> exception) {
-return CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE.get(exception).id;
+return CLASS_TO_OPENSEARCH_EXCEPTION_HANDLE.get(exception).id;
 }

 @Override
@@ -742,7 +742,7 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
 org.opensearch.common.util.CancellableThreads.ExecutionCancelledException::new, 2, UNKNOWN_VERSION_ADDED),
 MASTER_NOT_DISCOVERED_EXCEPTION(org.opensearch.discovery.MasterNotDiscoveredException.class,
 org.opensearch.discovery.MasterNotDiscoveredException::new, 3, UNKNOWN_VERSION_ADDED),
-ELASTICSEARCH_SECURITY_EXCEPTION(org.opensearch.OpenSearchSecurityException.class,
+OPENSEARCH_SECURITY_EXCEPTION(org.opensearch.OpenSearchSecurityException.class,
 org.opensearch.OpenSearchSecurityException::new, 4, UNKNOWN_VERSION_ADDED),
 INDEX_SHARD_RESTORE_EXCEPTION(org.opensearch.index.snapshots.IndexShardRestoreException.class,
 org.opensearch.index.snapshots.IndexShardRestoreException::new, 5, UNKNOWN_VERSION_ADDED),
@@ -776,7 +776,7 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
 org.opensearch.ResourceNotFoundException::new, 19, UNKNOWN_VERSION_ADDED),
 ACTION_TRANSPORT_EXCEPTION(org.opensearch.transport.ActionTransportException.class,
 org.opensearch.transport.ActionTransportException::new, 20, UNKNOWN_VERSION_ADDED),
-ELASTICSEARCH_GENERATION_EXCEPTION(org.opensearch.OpenSearchGenerationException.class,
+OPENSEARCH_GENERATION_EXCEPTION(org.opensearch.OpenSearchGenerationException.class,
 org.opensearch.OpenSearchGenerationException::new, 21, UNKNOWN_VERSION_ADDED),
 // 22 was CreateFailedEngineException
 INDEX_SHARD_STARTED_EXCEPTION(org.opensearch.index.shard.IndexShardStartedException.class,
@@ -801,7 +801,7 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
 org.opensearch.indices.IndexPrimaryShardNotAllocatedException::new, 33, UNKNOWN_VERSION_ADDED),
 TRANSPORT_EXCEPTION(org.opensearch.transport.TransportException.class,
 org.opensearch.transport.TransportException::new, 34, UNKNOWN_VERSION_ADDED),
-ELASTICSEARCH_PARSE_EXCEPTION(org.opensearch.OpenSearchParseException.class,
+OPENSEARCH_PARSE_EXCEPTION(org.opensearch.OpenSearchParseException.class,
 org.opensearch.OpenSearchParseException::new, 35, UNKNOWN_VERSION_ADDED),
 SEARCH_EXCEPTION(org.opensearch.search.SearchException.class,
 org.opensearch.search.SearchException::new, 36, UNKNOWN_VERSION_ADDED),
@@ -859,7 +859,7 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
 INDEX_SHARD_NOT_RECOVERING_EXCEPTION(org.opensearch.index.shard.IndexShardNotRecoveringException.class,
 org.opensearch.index.shard.IndexShardNotRecoveringException::new, 66, UNKNOWN_VERSION_ADDED),
 HTTP_EXCEPTION(org.opensearch.http.HttpException.class, org.opensearch.http.HttpException::new, 67, UNKNOWN_VERSION_ADDED),
-ELASTICSEARCH_EXCEPTION(OpenSearchException.class,
+OPENSEARCH_EXCEPTION(OpenSearchException.class,
 OpenSearchException::new, 68, UNKNOWN_VERSION_ADDED),
 SNAPSHOT_MISSING_EXCEPTION(org.opensearch.snapshots.SnapshotMissingException.class,
 org.opensearch.snapshots.SnapshotMissingException::new, 69, UNKNOWN_VERSION_ADDED),
@@ -948,7 +948,7 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
 org.opensearch.cluster.metadata.ProcessClusterEventTimeoutException::new, 116, UNKNOWN_VERSION_ADDED),
 RETRY_ON_PRIMARY_EXCEPTION(ReplicationOperation.RetryOnPrimaryException.class,
 ReplicationOperation.RetryOnPrimaryException::new, 117, UNKNOWN_VERSION_ADDED),
-ELASTICSEARCH_TIMEOUT_EXCEPTION(org.opensearch.OpenSearchTimeoutException.class,
+OPENSEARCH_TIMEOUT_EXCEPTION(org.opensearch.OpenSearchTimeoutException.class,
 org.opensearch.OpenSearchTimeoutException::new, 118, UNKNOWN_VERSION_ADDED),
 QUERY_PHASE_EXECUTION_EXCEPTION(org.opensearch.search.query.QueryPhaseExecutionException.class,
 org.opensearch.search.query.QueryPhaseExecutionException::new, 119, UNKNOWN_VERSION_ADDED),
@@ -1094,7 +1094,7 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
 static {
 ID_TO_SUPPLIER = unmodifiableMap(Arrays
 .stream(OpenSearchExceptionHandle.values()).collect(Collectors.toMap(e -> e.id, e -> e.constructor)));
-CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE = unmodifiableMap(Arrays
+CLASS_TO_OPENSEARCH_EXCEPTION_HANDLE = unmodifiableMap(Arrays
 .stream(OpenSearchExceptionHandle.values()).collect(Collectors.toMap(e -> e.exceptionClass, e -> e)));
 }

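The handle pattern being renamed here pairs each exception class with a constructor reference and a stable wire id, then builds two lookup maps once in a static initializer. Below is a simplified, self-contained sketch of that pattern; the demo exception classes and ids are illustrative only.

```java
import java.util.Arrays;
import java.util.Map;
import java.util.function.Supplier;
import java.util.stream.Collectors;

import static java.util.Collections.unmodifiableMap;

// Simplified sketch of the handle/registry pattern shown in the hunks above.
public class ExceptionRegistrySketch {

    static class DemoSecurityException extends RuntimeException {}

    static class DemoParseException extends RuntimeException {}

    enum Handle {
        OPENSEARCH_SECURITY_EXCEPTION(DemoSecurityException.class, DemoSecurityException::new, 4),
        OPENSEARCH_PARSE_EXCEPTION(DemoParseException.class, DemoParseException::new, 35);

        final Class<? extends RuntimeException> exceptionClass;
        final Supplier<RuntimeException> constructor;
        final int id;

        Handle(Class<? extends RuntimeException> exceptionClass, Supplier<RuntimeException> constructor, int id) {
            this.exceptionClass = exceptionClass;
            this.constructor = constructor;
            this.id = id;
        }
    }

    // Built once, exactly like the static initializer in the last hunk above.
    static final Map<Integer, Supplier<RuntimeException>> ID_TO_SUPPLIER =
        unmodifiableMap(Arrays.stream(Handle.values()).collect(Collectors.toMap(h -> h.id, h -> h.constructor)));
    static final Map<Class<? extends RuntimeException>, Handle> CLASS_TO_EXCEPTION_HANDLE =
        unmodifiableMap(Arrays.stream(Handle.values()).collect(Collectors.toMap(h -> h.exceptionClass, h -> h)));

    public static void main(String[] args) {
        System.out.println(CLASS_TO_EXCEPTION_HANDLE.get(DemoParseException.class).id); // 35
        System.out.println(ID_TO_SUPPLIER.get(4).get().getClass().getSimpleName());     // DemoSecurityException
    }
}
```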
@@ -22,7 +22,7 @@ package org.opensearch;
 import java.security.BasicPermission;

 /**
-* Elasticsearch-specific permission to check before entering
+* OpenSearch-specific permission to check before entering
 * {@code AccessController.doPrivileged()} blocks.
 * <p>
 * We try to avoid these blocks in our code and keep security simple,
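To illustrate the guard pattern this javadoc refers to, here is a minimal, hypothetical sketch: check a `BasicPermission` subclass against the installed `SecurityManager` before entering a `doPrivileged` block. The permission class and the privileged action are placeholders, not the project's `SpecialPermission`, and the `SecurityManager` APIs are deprecated on recent JDKs.

```java
import java.security.AccessController;
import java.security.BasicPermission;
import java.security.PrivilegedAction;

public class PrivilegedBlockSketch {
    static final class DemoSpecialPermission extends BasicPermission {
        DemoSpecialPermission() { super("*"); }
    }

    static String readSystemProperty(String key) {
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            // Fail fast if the caller does not hold the special permission.
            sm.checkPermission(new DemoSpecialPermission());
        }
        return AccessController.doPrivileged((PrivilegedAction<String>) () -> System.getProperty(key));
    }

    public static void main(String[] args) {
        System.out.println(readSystemProperty("java.version"));
    }
}
```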
@@ -244,7 +244,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
 }

 /**
-* Return the {@link Version} of Elasticsearch that has been used to create an index given its settings.
+* Return the {@link Version} of OpenSearch that has been used to create an index given its settings.
 *
 * @throws IllegalStateException if the given index settings doesn't contain a value for the key
 * {@value IndexMetadata#SETTING_VERSION_CREATED}
@@ -26,7 +26,7 @@ import org.opensearch.common.io.stream.Writeable;
 import java.io.IOException;

 /**
-* Contains information about a dangling index, i.e. an index that Elasticsearch has found
+* Contains information about a dangling index, i.e. an index that OpenSearch has found
 * on-disk but is not present in the cluster state.
 */
 public class DanglingIndexInfo implements Writeable {
@@ -30,7 +30,7 @@ import static org.opensearch.action.ValidateActions.addValidationError;

 /**
 * A flush request to flush one or more indices. The flush process of an index basically frees memory from the index
-* by flushing data to the index storage and clearing the internal transaction log. By default, Elasticsearch uses
+* by flushing data to the index storage and clearing the internal transaction log. By default, OpenSearch uses
 * memory heuristics in order to automatically trigger flush operations as required in order to clear memory.
 * <p>
 * Best created with {@link org.opensearch.client.Requests#flushRequest(String...)}.
@ -18,6 +18,6 @@
|
|||
*/
|
||||
|
||||
/**
|
||||
* Actions that Elasticsearch can take either on the data stored on disk or on other nodes.
|
||||
* Actions that OpenSearch can take either on the data stored on disk or on other nodes.
|
||||
*/
|
||||
package org.opensearch.action;
|
||||
|
|
|
@ -39,7 +39,7 @@ public abstract class EnvironmentAwareCommand extends Command {
|
|||
private final OptionSpec<KeyValuePair> settingOption;
|
||||
|
||||
/**
|
||||
* Construct the command with the specified command description. This command will have logging configured without reading Elasticsearch
|
||||
* Construct the command with the specified command description. This command will have logging configured without reading OpenSearch
|
||||
* configuration files.
|
||||
*
|
||||
* @param description the command description
|
||||
|
|
|
@ -21,12 +21,12 @@ package org.opensearch.cli;
|
|||
|
||||
/**
|
||||
* A command that is aware of logging. This class should be preferred over the base {@link Command} class for any CLI tools that depend on
|
||||
* core Elasticsearch as they could directly or indirectly touch classes that touch logging and as such logging needs to be configured.
|
||||
* core OpenSearch as they could directly or indirectly touch classes that touch logging and as such logging needs to be configured.
|
||||
*/
|
||||
public abstract class LoggingAwareCommand extends Command {
|
||||
|
||||
/**
|
||||
* Construct the command with the specified command description. This command will have logging configured without reading Elasticsearch
|
||||
* Construct the command with the specified command description. This command will have logging configured without reading OpenSearch
|
||||
* configuration files.
|
||||
*
|
||||
* @param description the command description
|
||||
|
|
|
@ -21,13 +21,13 @@ package org.opensearch.cli;
|
|||
|
||||
/**
|
||||
* A multi-command that is aware of logging. This class should be preferred over the base {@link MultiCommand} class for any CLI tools that
|
||||
* depend on core Elasticsearch as they could directly or indirectly touch classes that touch logging and as such logging needs to be
|
||||
* depend on core OpenSearch as they could directly or indirectly touch classes that touch logging and as such logging needs to be
|
||||
* configured.
|
||||
*/
|
||||
public abstract class LoggingAwareMultiCommand extends MultiCommand {
|
||||
|
||||
/**
|
||||
* Construct the command with the specified command description. This command will have logging configured without reading Elasticsearch
|
||||
* Construct the command with the specified command description. This command will have logging configured without reading OpenSearch
|
||||
* configuration files.
|
||||
*
|
||||
* @param description the command description
|
||||
|
|
|
@ -89,7 +89,7 @@ import static org.opensearch.common.unit.TimeValue.timeValueSeconds;
|
|||
* started in client mode (only connects, no bind).
|
||||
*
|
||||
* @deprecated {@link TransportClient} is deprecated in favour of the High Level REST client and will
|
||||
* be removed in Elasticsearch 8.0.
|
||||
* be removed in OpenSearch 1.0.0.
|
||||
*/
|
||||
@Deprecated
|
||||
public abstract class TransportClient extends AbstractClient {
|
||||
|
|
|
@ -63,9 +63,9 @@ public abstract class OpenSearchNodeCommand extends EnvironmentAwareCommand {
|
|||
static final String STOP_WARNING_MSG =
|
||||
DELIMITER +
|
||||
"\n" +
|
||||
" WARNING: Elasticsearch MUST be stopped before running this tool." +
|
||||
" WARNING: OpenSearch MUST be stopped before running this tool." +
|
||||
"\n";
|
||||
protected static final String FAILED_TO_OBTAIN_NODE_LOCK_MSG = "failed to lock node's directory, is Elasticsearch still running?";
|
||||
protected static final String FAILED_TO_OBTAIN_NODE_LOCK_MSG = "failed to lock node's directory, is OpenSearch still running?";
|
||||
protected static final String ABORTED_BY_USER_MSG = "aborted by user";
|
||||
final OptionSpec<Integer> nodeOrdinalOption;
|
||||
static final String NO_NODE_FOLDER_FOUND_MSG = "no node folder is found in data folder(s), node has not been started yet?";
|
||||
|
|
|
@ -765,7 +765,7 @@ public class MetadataCreateIndexService {
|
|||
shardLimitValidator.validateShardLimit(indexSettings, currentState);
|
||||
if (indexSettings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) == false) {
|
||||
DEPRECATION_LOGGER.deprecate("soft_deletes_disabled",
|
||||
"Creating indices with soft-deletes disabled is deprecated and will be removed in future Elasticsearch versions. " +
|
||||
"Creating indices with soft-deletes disabled is deprecated and will be removed in future OpenSearch versions. " +
|
||||
"Please do not specify value for setting [index.soft_deletes.enabled] of index [" + request.index() + "].");
|
||||
}
|
||||
validateTranslogRetentionSettings(indexSettings);
|
||||
|
|
|
@ -101,7 +101,7 @@ public class MetadataIndexUpgradeService {
|
|||
}
|
||||
|
||||
/**
|
||||
* Checks if the index was already opened by this version of Elasticsearch and doesn't require any additional checks.
|
||||
* Checks if the index was already opened by this version of OpenSearch and doesn't require any additional checks.
|
||||
*/
|
||||
boolean isUpgraded(IndexMetadata indexMetadata) {
|
||||
return indexMetadata.getUpgradedVersion().onOrAfter(Version.CURRENT);
|
||||
|
|
|
@ -48,7 +48,7 @@ import static java.util.Collections.emptyMap;
|
|||
|
||||
/**
|
||||
* {@link IndexShardRoutingTable} encapsulates all instances of a single shard.
|
||||
* Each Elasticsearch index consists of multiple shards, each shard encapsulates
|
||||
* Each OpenSearch index consists of multiple shards, each shard encapsulates
|
||||
* a disjoint set of the index data and each shard has one or more instances
|
||||
* referred to as replicas of a shard. Given that, this class encapsulates all
|
||||
* replicas (instances) for a single index shard.
|
||||
|
@ -308,7 +308,7 @@ public class IndexShardRoutingTable implements Iterable<ShardRouting> {
|
|||
/**
|
||||
* Adjust the for all other nodes' collected stats. In the original ranking paper there is no need to adjust other nodes' stats because
|
||||
* Cassandra sends occasional requests to all copies of the data, so their stats will be updated during that broadcast phase. In
|
||||
* Elasticsearch, however, we do not have that sort of broadcast-to-all behavior. In order to prevent a node that gets a high score and
|
||||
* OpenSearch, however, we do not have that sort of broadcast-to-all behavior. In order to prevent a node that gets a high score and
|
||||
* then never gets any more requests, we must ensure it eventually returns to a more normal score and can be a candidate for serving
|
||||
* requests.
|
||||
*
|
||||
|
|
|
@ -22,7 +22,7 @@ package org.opensearch.cluster.routing;
|
|||
import org.apache.lucene.util.StringHelper;
|
||||
|
||||
/**
|
||||
* Hash function based on the Murmur3 algorithm, which is the default as of Elasticsearch 2.0.
|
||||
* Hash function based on the Murmur3 algorithm, which is the default for OpenSearch.
|
||||
*/
|
||||
public final class Murmur3HashFunction {
|
||||
|
||||
|
|
|
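Murmur3HashFunction hashes a document's routing value so it can be mapped onto a shard. A rough standalone illustration of how such a hash is typically reduced to a shard number; String.hashCode is only a stand-in for the real Murmur3 hash, and the actual routing formula also accounts for routing partitioning:

    public class RoutingDemo {
        // Stand-in hash; the real class computes a Murmur3 hash over the routing string.
        static int hash(String routing) {
            return routing.hashCode();
        }

        // Reduce the hash to a shard id; floorMod keeps the result non-negative.
        static int shardId(String routing, int numberOfShards) {
            return Math.floorMod(hash(routing), numberOfShards);
        }

        public static void main(String[] args) {
            // The same routing value always lands on the same shard.
            System.out.println(shardId("user-42", 5));
        }
    }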
@@ -58,7 +58,7 @@ public class ShardsLimitAllocationDecider extends AllocationDecider {
private volatile int clusterShardLimit;

/**
* Controls the maximum number of shards per index on a single Elasticsearch
* Controls the maximum number of shards per index on a single OpenSearch
* node. Negative values are interpreted as unlimited.
*/
public static final Setting<Integer> INDEX_TOTAL_SHARDS_PER_NODE_SETTING =

@@ -32,14 +32,14 @@ import java.util.concurrent.ThreadLocalRandom;
/**
* Provides factory methods for producing reproducible sources of
* randomness. Reproducible sources of randomness contribute to
* reproducible tests. When running the Elasticsearch test suite, the
* reproducible tests. When running the OpenSearch test suite, the
* test runner will establish a global random seed accessible via the
* system property "tests.seed". By seeding a random number generator
* with this global seed, we ensure that instances of Random produced
* with this class produce reproducible sources of randomness under
* when running under the Elasticsearch test suite. Alternatively,
* when running under the OpenSearch test suite. Alternatively,
* a reproducible source of randomness can be produced by providing a
* setting a reproducible seed. When running the Elasticsearch server
* setting a reproducible seed. When running the OpenSearch server
* process, non-reproducible sources of randomness are provided (unless
* a setting is provided for a module that exposes a seed setting (e.g.,
* NodeEnvironment#NODE_ID_SEED_SETTING)).

@@ -83,7 +83,7 @@ public final class Randomness {

/**
* Provides a source of randomness that is reproducible when
* running under the Elasticsearch test suite, and otherwise
* running under the OpenSearch test suite, and otherwise
* produces a non-reproducible source of randomness. Reproducible
* sources of randomness are created when the system property
* "tests.seed" is set and the security policy allows reading this
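The Javadoc above describes seeding from the "tests.seed" system property so that test runs are reproducible. A simplified sketch of that idea; the real Randomness class parses the seed differently and guards the property read with the security manager, so treat the hexadecimal parsing here as an assumption:

    import java.util.Random;

    public class ReproducibleRandomDemo {
        // Returns a reproducible Random when -Dtests.seed=<hex> is set, otherwise a non-reproducible one.
        static Random get() {
            String seedProperty = System.getProperty("tests.seed");
            if (seedProperty != null) {
                // Simplified parsing; assumes a single hexadecimal seed value.
                return new Random(Long.parseUnsignedLong(seedProperty, 16));
            }
            return new Random(); // non-reproducible
        }

        public static void main(String[] args) {
            System.out.println(get().nextInt(100));
        }
    }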
@@ -56,7 +56,7 @@ public class CompressorFactory {
return null;
}

/** true if the bytes were compressed with LZF: only used before elasticsearch 2.0 */
/** true if the bytes were compressed with LZF*/
private static boolean isAncient(BytesReference bytes) {
return bytes.length() >= 3 &&
bytes.get(0) == 'Z' &&

@@ -26,7 +26,7 @@ import org.opensearch.geometry.Rectangle;

/**
* Utility class that transforms Elasticsearch geometry objects to the Lucene representation
* Utility class that transforms OpenSearch geometry objects to the Lucene representation
*/
public class GeoShapeUtils {

@@ -34,7 +34,7 @@ import java.nio.file.Path;
import java.util.stream.StreamSupport;

/**
* Elasticsearch utils to work with {@link java.nio.file.Path}
* OpenSearch utils to work with {@link java.nio.file.Path}
*/
public final class FileSystemUtils {

@@ -23,7 +23,7 @@ import java.io.IOException;

/**
* Implementers can be written to a {@linkplain StreamOutput} and read from a {@linkplain StreamInput}. This allows them to be "thrown
* across the wire" using Elasticsearch's internal protocol. If the implementer also implements equals and hashCode then a copy made by
* across the wire" using OpenSearch's internal protocol. If the implementer also implements equals and hashCode then a copy made by
* serializing and deserializing must be equal and have the same hashCode. It isn't required that such a copy be entirely unchanged.
*/
public interface Writeable {
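Writeable is the interface behind that wire protocol: a class writes itself to a StreamOutput and is read back through a constructor that takes a StreamInput. A minimal sketch of an implementation, assuming only the readString/writeString primitives of the stream classes; GreetingInfo is a hypothetical example class, not part of OpenSearch:

    import java.io.IOException;
    import java.util.Objects;

    import org.opensearch.common.io.stream.StreamInput;
    import org.opensearch.common.io.stream.StreamOutput;
    import org.opensearch.common.io.stream.Writeable;

    public class GreetingInfo implements Writeable {
        private final String message;

        public GreetingInfo(String message) {
            this.message = message;
        }

        // Deserialization: the conventional "read from stream" constructor.
        public GreetingInfo(StreamInput in) throws IOException {
            this.message = in.readString();
        }

        // Serialization: write fields in the same order the stream constructor reads them.
        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeString(message);
        }

        // equals/hashCode so a serialize-then-deserialize round trip compares equal, as the Javadoc requires.
        @Override
        public boolean equals(Object o) {
            return o instanceof GreetingInfo && Objects.equals(message, ((GreetingInfo) o).message);
        }

        @Override
        public int hashCode() {
            return Objects.hash(message);
        }
    }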
@@ -18,6 +18,6 @@
*/

/**
* Classes for streaming objects from one Elasticsearch node to another over its binary internode protocol.
* Classes for streaming objects from one OpenSearch node to another over its binary internode protocol.
*/
package org.opensearch.common.io.stream;

@@ -400,11 +400,11 @@ public class Joda {
// check for deprecations, but after it has parsed correctly so invalid values aren't counted as deprecated
if (millis < 0) {
getDeprecationLogger().deprecate("epoch-negative", "Use of negative values" +
" in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch.");
" in epoch time formats is deprecated and will not be supported in the next major version of OpenSearch.");
}
if (scientificNotation.matcher(text).find()) {
getDeprecationLogger().deprecate("epoch-scientific-notation", "Use of scientific notation" +
" in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch.");
" in epoch time formats is deprecated and will not be supported in the next major version of OpenSearch.");
}
DateTime dt = new DateTime(millis, DateTimeZone.UTC);
bucket.saveField(DateTimeFieldType.year(), dt.getYear());

@@ -43,11 +43,11 @@ import java.util.regex.Pattern;
public class HeaderWarning {
/**
* Regular expression to test if a string matches the RFC7234 specification for warning headers. This pattern assumes that the warn code
* is always 299. Further, this pattern assumes that the warn agent represents a version of Elasticsearch including the build hash.
* is always 299. Further, this pattern assumes that the warn agent represents a version of OpenSearch including the build hash.
*/
public static final Pattern WARNING_HEADER_PATTERN = Pattern.compile(
"299 " + // warn code
"Elasticsearch-" + // warn agent
"OpenSearch-" + // warn agent
"\\d+\\.\\d+\\.\\d+(?:-(?:alpha|beta|rc)\\d+)?(?:-SNAPSHOT)?-" + // warn agent
"(?:[a-f0-9]{7}(?:[a-f0-9]{33})?|unknown) " + // warn agent
"\"((?:\t| |!|[\\x23-\\x5B]|[\\x5D-\\x7E]|[\\x80-\\xFF]|\\\\|\\\\\")*)\"( " + // quoted warning value, captured

@@ -72,7 +72,7 @@ public class HeaderWarning {
private static final String WARNING_PREFIX =
String.format(
Locale.ROOT,
"299 Elasticsearch-%s%s-%s",
"299 OpenSearch-%s%s-%s",
Version.CURRENT.toString(),
Build.CURRENT.isSnapshot() ? "-SNAPSHOT" : "",
Build.CURRENT.hash());

@@ -151,7 +151,7 @@ public class HeaderWarning {

/**
* Extracts the warning value from the value of a warning header that is formatted according to RFC 7234. That is, given a string
* {@code 299 Elasticsearch-6.0.0 "warning value"}, the return value of this method would be {@code warning value}.
* {@code 299 OpenSearch-6.0.0 "warning value"}, the return value of this method would be {@code warning value}.
*
* @param s the value of a warning header formatted according to RFC 7234.
* @return the extracted warning value
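The pattern and prefix above describe warning headers of the shape 299 OpenSearch-<version>-<hash> "message". A simplified, self-contained illustration of extracting the quoted warning value; the real WARNING_HEADER_PATTERN is much stricter about the warn agent and the allowed characters, so this looser regex is only an assumption for the demo:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class WarningHeaderDemo {
        // Loose sketch of the RFC 7234 warning format: warn code, warn agent, quoted value.
        private static final Pattern SIMPLE_WARNING = Pattern.compile("299 OpenSearch-\\S+ \"([^\"]*)\"");

        static String extractWarningValue(String header) {
            Matcher m = SIMPLE_WARNING.matcher(header);
            return m.find() ? m.group(1) : null;
        }

        public static void main(String[] args) {
            String header = "299 OpenSearch-1.0.0-SNAPSHOT-unknown \"this request is deprecated\"";
            System.out.println(extractWarningValue(header)); // this request is deprecated
        }
    }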
@@ -23,7 +23,7 @@ import java.util.HashSet;
import java.util.Set;

/**
* Format string for Elasticsearch log messages.
* Format string for OpenSearch log messages.
*/
public class LoggerMessageFormat {
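LoggerMessageFormat renders log and deprecation messages that use {} placeholders, as in the "[{}] setting was deprecated..." message further down in this diff. A toy placeholder substitution, just to illustrate the idea; it is a deliberately simplified sketch, and the real class also handles escaping and array arguments:

    public class PlaceholderDemo {
        // Replace each "{}" with the next argument, left to right.
        static String format(String pattern, Object... args) {
            StringBuilder sb = new StringBuilder();
            int from = 0;
            int argIndex = 0;
            int brace;
            while (argIndex < args.length && (brace = pattern.indexOf("{}", from)) >= 0) {
                sb.append(pattern, from, brace).append(args[argIndex++]);
                from = brace + 2;
            }
            return sb.append(pattern.substring(from)).toString();
        }

        public static void main(String[] args) {
            // Prints: [node.processors] setting was deprecated in OpenSearch
            System.out.println(format("[{}] setting was deprecated in OpenSearch", "node.processors"));
        }
    }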
@@ -51,7 +51,7 @@ import java.util.stream.Stream;
* <li>level - INFO, WARN etc</li>
* <li>component - logger name, most of the times class name</li>
* <li>cluster.name - taken from sys:opensearch.logs.cluster_name system property because it is always set</li>
* <li>node.name - taken from NodeNamePatternConverter, as it can be set in runtime as hostname when not set in elasticsearch.yml</li>
* <li>node.name - taken from NodeNamePatternConverter, as it can be set in runtime as hostname when not set in opensearch.yml</li>
* <li>node_and_cluster_id - in json as node.id and cluster.uuid - taken from NodeAndClusterIdConverter and present
* once clusterStateUpdate is first received</li>
* <li>message - a json escaped message. Multiline messages will be converted to single line with new line explicitly

@@ -570,7 +570,7 @@ public abstract class AbstractScopedSettings {
"can not update internal setting [" + setting.getKey() + "]; this setting is managed via a dedicated API");
} else if (setting.isPrivateIndex()) {
throw new IllegalArgumentException(
"can not update private setting [" + setting.getKey() + "]; this setting is managed by Elasticsearch");
"can not update private setting [" + setting.getKey() + "]; this setting is managed by OpenSearch");
}
}
}

@@ -48,7 +48,7 @@ public abstract class BaseKeyStoreCommand extends KeyStoreAwareCommand {
keyStore = KeyStoreWrapper.load(configFile);
if (keyStore == null) {
if (keyStoreMustExist) {
throw new UserException(ExitCodes.DATA_ERROR, "Elasticsearch keystore not found at [" +
throw new UserException(ExitCodes.DATA_ERROR, "OpenSearch keystore not found at [" +
KeyStoreWrapper.keystorePath(env.configFile()) + "]. Use 'create' command to create one.");
} else if (options.has(forceOption) == false) {
if (terminal.promptYesNo("The opensearch keystore does not exist. Do you want to create it?", false) == false) {

@@ -39,7 +39,7 @@ class ChangeKeyStorePasswordCommand extends BaseKeyStoreCommand {
try (SecureString newPassword = readPassword(terminal, true)) {
final KeyStoreWrapper keyStore = getKeyStore();
keyStore.save(env.configFile(), newPassword.getChars());
terminal.println("Elasticsearch keystore password changed successfully.");
terminal.println("OpenSearch keystore password changed successfully.");
} catch (SecurityException e) {
throw new UserException(ExitCodes.DATA_ERROR, e.getMessage());
}

@@ -44,7 +44,7 @@ public class HasPasswordKeyStoreCommand extends KeyStoreAwareCommand {
// We handle error printing here so we can respect the "--silent" flag
// We have to throw an exception to get a nonzero exit code
if (keyStore == null) {
terminal.errorPrintln(Terminal.Verbosity.NORMAL, "ERROR: Elasticsearch keystore not found");
terminal.errorPrintln(Terminal.Verbosity.NORMAL, "ERROR: OpenSearch keystore not found");
throw new UserException(NO_PASSWORD_EXIT_CODE, null);
}
if (keyStore.hasPassword() == false) {

@@ -73,7 +73,7 @@ import java.util.Set;
import java.util.regex.Pattern;

/**
* A disk based container for sensitive settings in Elasticsearch.
* A disk based container for sensitive settings in OpenSearch.
*
* Loading a keystore has 2 phases. First, call {@link #load(Path)}. Then call
* {@link #decrypt(char[])} with the keystore password, or an empty char array if

@@ -205,7 +205,7 @@ public class KeyStoreWrapper implements SecureSettings {
}

/**
* Loads information about the Elasticsearch keystore from the provided config directory.
* Loads information about the OpenSearch keystore from the provided config directory.
*
* {@link #decrypt(char[])} must be called before reading or writing any entries.
* Returns {@code null} if no keystore exists.

@@ -223,10 +223,10 @@ public class KeyStoreWrapper implements SecureSettings {
try {
formatVersion = CodecUtil.checkHeader(input, KEYSTORE_FILENAME, MIN_FORMAT_VERSION, FORMAT_VERSION);
} catch (IndexFormatTooOldException e) {
throw new IllegalStateException("The Elasticsearch keystore [" + keystoreFile + "] format is too old. " +
throw new IllegalStateException("The OpenSearch keystore [" + keystoreFile + "] format is too old. " +
"You should delete and recreate it in order to upgrade.", e);
} catch (IndexFormatTooNewException e) {
throw new IllegalStateException("The Elasticsearch keystore [" + keystoreFile + "] format is too new. " +
throw new IllegalStateException("The OpenSearch keystore [" + keystoreFile + "] format is too new. " +
"Are you trying to downgrade? You should delete and recreate it in order to downgrade.", e);
}
byte hasPasswordByte = input.readByte();
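The KeyStoreWrapper Javadoc above describes a two-phase flow: load(Path) reads the on-disk keystore, and decrypt(char[]) must be called before any entries can be read. A hedged sketch of a caller following that flow; the config path is hypothetical, the package name and getSettingNames() call are assumptions based on the SecureSettings interface shown in the diff, and error handling is trimmed:

    import java.nio.file.Path;
    import java.nio.file.Paths;

    import org.opensearch.common.settings.KeyStoreWrapper;

    public class KeystoreDemo {
        public static void main(String[] args) throws Exception {
            Path configDir = Paths.get("/etc/opensearch"); // hypothetical config directory
            KeyStoreWrapper keystore = KeyStoreWrapper.load(configDir);
            if (keystore == null) {
                System.out.println("no keystore present; the 'create' command would be needed first");
                return;
            }
            // Phase two: decrypt with the keystore password (empty for an unprotected keystore).
            keystore.decrypt(new char[0]);
            System.out.println("secure settings: " + keystore.getSettingNames());
        }
    }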
@@ -30,7 +30,7 @@ import java.util.Set;
/**
* A secure setting.
*
* This class allows access to settings from the Elasticsearch keystore.
* This class allows access to settings from the OpenSearch keystore.
*/
public abstract class SecureSetting<T> extends Setting<T> {

@@ -87,7 +87,7 @@ public abstract class SecureSetting<T> extends Setting<T> {
if (secureSettings == null || secureSettings.getSettingNames().contains(getKey()) == false) {
if (super.exists(settings)) {
throw new IllegalArgumentException("Setting [" + getKey() + "] is a secure setting" +
" and must be stored inside the Elasticsearch keystore, but was found inside opensearch.yml");
" and must be stored inside the OpenSearch keystore, but was found inside opensearch.yml");
}
return getFallback(settings);
}

@@ -518,7 +518,7 @@ public class Setting<T> implements ToXContentObject {
SecureSettings secureSettings = settings.getSecureSettings();
if (secureSettings != null && secureSettings.getSettingNames().contains(getKey())) {
throw new IllegalArgumentException("Setting [" + getKey() + "] is a non-secure setting" +
" and must be stored inside opensearch.yml, but was found inside the Elasticsearch keystore");
" and must be stored inside opensearch.yml, but was found inside the OpenSearch keystore");
}
return settings.get(getKey(), defaultValue.apply(settings));
}

@@ -530,7 +530,7 @@ public class Setting<T> implements ToXContentObject {
// It would be convenient to show its replacement key, but replacement is often not so simple
final String key = getKey();
Settings.DeprecationLoggerHolder.deprecationLogger
.deprecate(key, "[{}] setting was deprecated in Elasticsearch and will be removed in a future release! "
.deprecate(key, "[{}] setting was deprecated in OpenSearch and will be removed in a future release! "
+ "See the breaking changes documentation for the next major version.", key);
}
}

@@ -107,7 +107,7 @@ public class SettingsModule implements Module {
builder.append(System.lineSeparator());
builder.append(System.lineSeparator());
int count = 0;
for (String word : ("Since elasticsearch 5.x index level settings can NOT be set on the nodes configuration like " +
for (String word : ("Index level settings can NOT be set on the nodes configuration like " +
"the opensearch.yaml, in system properties or command line arguments." +
"In order to upgrade all indices the settings must be updated via the /${index}/_settings API. " +
"Unless all settings are dynamic all indices must be closed in order to apply the upgrade" +

@@ -61,7 +61,7 @@ public class OpenSearchExecutors {
/**
* Setting to manually control the number of allocated processors. This setting is used to adjust thread pool sizes per node. The
* default value is {@link Runtime#availableProcessors()} but should be manually controlled if not all processors on the machine are
* available to Elasticsearch (e.g., because of CPU limits).
* available to OpenSearch (e.g., because of CPU limits).
*/
public static final Setting<Integer> NODE_PROCESSORS_SETTING = new Setting<>(
"node.processors",

@@ -231,9 +231,8 @@ public class GatewayMetaState implements Closeable {
}

/**
* Elasticsearch 2.0 removed several deprecated features and as well as support for Lucene 3.x. This method calls
* {@link MetadataIndexUpgradeService} to makes sure that indices are compatible with the current version. The
* MetadataIndexUpgradeService might also update obsolete settings if needed.
* This method calls {@link MetadataIndexUpgradeService} to makes sure that indices are compatible with the current
* version. The MetadataIndexUpgradeService might also update obsolete settings if needed.
*
* @return input <code>metadata</code> if no upgrade is needed or an upgraded metadata
*/

@@ -121,7 +121,7 @@ public class MetaStateService {
if (globalMetadata != null) {
metadataBuilder = Metadata.builder(globalMetadata);
indexGraveyard = globalMetadata.custom(IndexGraveyard.TYPE);
assert Version.CURRENT.major < 8 : "failed to find manifest file, which is mandatory staring with Elasticsearch version 8.0";
assert Version.CURRENT.major < 8 : "failed to find manifest file, which is mandatory staring with OpenSearch version 1.0.0";
} else {
metadataBuilder = Metadata.builder();
indexGraveyard = IndexGraveyard.builder().build();

@@ -131,7 +131,7 @@ public class MetaStateService {
Tuple<IndexMetadata, Long> indexMetadataAndGeneration =
INDEX_METADATA_FORMAT.loadLatestStateWithGeneration(logger, namedXContentRegistry,
nodeEnv.resolveIndexFolder(indexFolderName));
assert Version.CURRENT.major < 8 : "failed to find manifest file, which is mandatory staring with Elasticsearch version 8.0";
assert Version.CURRENT.major < 8 : "failed to find manifest file, which is mandatory staring with OpenSearch version 1.0.0";
IndexMetadata indexMetadata = indexMetadataAndGeneration.v1();
long generation = indexMetadataAndGeneration.v2();
if (indexMetadata != null) {

@@ -33,7 +33,7 @@ import java.io.IOException;
import java.util.Objects;

/**
* A value class representing the basic required properties of an Elasticsearch index.
* A value class representing the basic required properties of an OpenSearch index.
*/
public class Index implements Writeable, ToXContentObject {

@@ -288,7 +288,7 @@ public final class IndexModule {
* Registers the given {@link Similarity} with the given name.
* The function takes as parameters:<ul>
* <li>settings for this similarity
* <li>version of Elasticsearch when the index was created
* <li>version of OpenSearch when the index was created
* <li>ScriptService, for script-based similarities
* </ul>
*

@@ -80,7 +80,7 @@ public final class IndexingSlowLog implements IndexingOperationListener {
/**
* Reads how much of the source to log. The user can specify any value they
* like and numbers are interpreted the maximum number of characters to log
* and everything else is interpreted as Elasticsearch interprets booleans
* and everything else is interpreted as OpenSearch interprets booleans
* which is then converted to 0 for false and Integer.MAX_VALUE for true.
*/
public static final Setting<Integer> INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG_SETTING =
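The slow-log setting above accepts either a character count or a boolean, where true means "log the whole source" and false means "log nothing". A small standalone helper showing that interpretation; it is illustrative only, since the real parsing is done by the Setting infrastructure rather than a helper like this:

    public class SourceCharsToLogDemo {
        // Interpret a raw setting value the way the Javadoc above describes:
        // numbers are a character limit, booleans map to 0 or Integer.MAX_VALUE.
        static int maxSourceCharsToLog(String rawValue) {
            if ("true".equalsIgnoreCase(rawValue)) {
                return Integer.MAX_VALUE;
            }
            if ("false".equalsIgnoreCase(rawValue)) {
                return 0;
            }
            return Integer.parseInt(rawValue);
        }

        public static void main(String[] args) {
            System.out.println(maxSourceCharsToLog("1000"));  // 1000
            System.out.println(maxSourceCharsToLog("true"));  // 2147483647
            System.out.println(maxSourceCharsToLog("false")); // 0
        }
    }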
@@ -39,8 +39,6 @@ import java.util.List;
/**
* A mapper that indexes the field names of a document under <code>_field_names</code>. This mapper is typically useful in order
* to have fast <code>exists</code> and <code>missing</code> queries/filters.
*
* Added in Elasticsearch 1.3.
*/
public class FieldNamesFieldMapper extends MetadataFieldMapper {

@@ -166,7 +166,7 @@ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder<RandomScore
fieldType = context.getMapperService().fieldType(field);
} else {
deprecationLogger.deprecate("seed_requires_field",
"As of version 7.0 Elasticsearch will require that a [field] parameter is provided when a [seed] is set");
"OpenSearch requires that a [field] parameter is provided when a [seed] is set");
fieldType = context.getMapperService().fieldType(IdFieldMapper.NAME);
}
if (fieldType == null) {

@@ -76,7 +76,7 @@ import static org.opensearch.index.translog.TranslogConfig.EMPTY_TRANSLOG_BUFFER
/**
* A Translog is a per index shard component that records all non-committed index operations in a durable manner.
* In OpenSearch there is one Translog instance per {@link org.opensearch.index.engine.InternalEngine}.
* Additionally, since Elasticsearch 2.0 the engine also records a {@link #TRANSLOG_UUID_KEY} with each commit to ensure a strong
* Additionally, the engine also records a {@link #TRANSLOG_UUID_KEY} with each commit to ensure a strong
* association between the lucene index an the transaction log file. This UUID is used to prevent accidental recovery from a transaction
* log that belongs to a
* different engine.

@@ -78,7 +78,7 @@ public final class InternalAggregations extends Aggregations implements Writeabl
* Constructs a node in the aggregation tree.
* @param pipelineTreeSource must be null inside the tree or after final reduction. Should reference the
* search request otherwise so we can properly serialize the response to
* versions of Elasticsearch that require the pipelines to be serialized.
* versions of OpenSearch that require the pipelines to be serialized.
*/
public InternalAggregations(List<InternalAggregation> aggregations, Supplier<PipelineAggregator.PipelineTree> pipelineTreeSource) {
super(aggregations);

@@ -69,7 +69,7 @@ public class PreConfiguredTokenFilterTests extends OpenSearchTestCase {

public void testCachingWithElasticsearchVersion() throws IOException {
PreConfiguredTokenFilter pctf =
PreConfiguredTokenFilter.openSearchVersion("elasticsearch_version", randomBoolean(),
PreConfiguredTokenFilter.openSearchVersion("opensearch_version", randomBoolean(),
(tokenStream, esVersion) -> new TokenFilter(tokenStream) {
@Override
public boolean incrementToken() {

@@ -83,9 +83,9 @@ public class PreConfiguredTokenFilterTests extends OpenSearchTestCase {
Settings settings1 = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version1)
.build();
TokenFilterFactory tff_v1_1 =
pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "elasticsearch_version", settings1);
pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "opensearch_version", settings1);
TokenFilterFactory tff_v1_2 =
pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "elasticsearch_version", settings1);
pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "opensearch_version", settings1);
assertSame(tff_v1_1, tff_v1_2);

Version version2 = randomValueOtherThan(version1, () -> randomFrom(VersionUtils.allVersions()));

@@ -93,7 +93,7 @@ public class PreConfiguredTokenFilterTests extends OpenSearchTestCase {
.build();

TokenFilterFactory tff_v2 =
pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "elasticsearch_version", settings2);
pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "opensearch_version", settings2);
assertNotSame(tff_v1_1, tff_v2);
}

@@ -65,6 +65,6 @@ public class ScoreFunctionBuilderTests extends OpenSearchTestCase {
Mockito.when(mapperService.fieldType(Mockito.anyString())).thenReturn(ft);
Mockito.when(context.getMapperService()).thenReturn(mapperService);
builder.toFunction(context);
assertWarnings("As of version 7.0 Elasticsearch will require that a [field] parameter is provided when a [seed] is set");
assertWarnings("OpenSearch requires that a [field] parameter is provided when a [seed] is set");
}
}

@@ -253,13 +253,13 @@ public class AnalysisModuleTests extends OpenSearchTestCase {
}

/**
* Tests that plugins can register pre-configured char filters that vary in behavior based on Elasticsearch version, Lucene version,
* Tests that plugins can register pre-configured char filters that vary in behavior based on OpenSearch version, Lucene version,
* and that do not vary based on version at all.
*/
public void testPluginPreConfiguredCharFilters() throws IOException {
boolean noVersionSupportsMultiTerm = randomBoolean();
boolean luceneVersionSupportsMultiTerm = randomBoolean();
boolean elasticsearchVersionSupportsMultiTerm = randomBoolean();
boolean opensearchVersionSupportsMultiTerm = randomBoolean();
AnalysisRegistry registry = new AnalysisModule(TestEnvironment.newEnvironment(emptyNodeSettings),
singletonList(new AnalysisPlugin() {
@Override

@@ -269,7 +269,7 @@ public class AnalysisModuleTests extends OpenSearchTestCase {
tokenStream -> new AppendCharFilter(tokenStream, "no_version")),
PreConfiguredCharFilter.luceneVersion("lucene_version", luceneVersionSupportsMultiTerm,
(tokenStream, luceneVersion) -> new AppendCharFilter(tokenStream, luceneVersion.toString())),
PreConfiguredCharFilter.openSearchVersion("elasticsearch_version", elasticsearchVersionSupportsMultiTerm,
PreConfiguredCharFilter.openSearchVersion("opensearch_version", opensearchVersionSupportsMultiTerm,
(tokenStream, esVersion) -> new AppendCharFilter(tokenStream, esVersion.toString()))
);
}

@@ -288,30 +288,30 @@ public class AnalysisModuleTests extends OpenSearchTestCase {
.put("index.analysis.analyzer.no_version.char_filter", "no_version")
.put("index.analysis.analyzer.lucene_version.tokenizer", "keyword")
.put("index.analysis.analyzer.lucene_version.char_filter", "lucene_version")
.put("index.analysis.analyzer.elasticsearch_version.tokenizer", "keyword")
.put("index.analysis.analyzer.elasticsearch_version.char_filter", "elasticsearch_version")
.put("index.analysis.analyzer.opensearch_version.tokenizer", "keyword")
.put("index.analysis.analyzer.opensearch_version.char_filter", "opensearch_version")
.put(IndexMetadata.SETTING_VERSION_CREATED, version)
.build());
assertTokenStreamContents(analyzers.get("no_version").tokenStream("", "test"), new String[] {"testno_version"});
assertTokenStreamContents(analyzers.get("lucene_version").tokenStream("", "test"), new String[] {"test" + version.luceneVersion});
assertTokenStreamContents(analyzers.get("elasticsearch_version").tokenStream("", "test"), new String[] {"test" + version});
assertTokenStreamContents(analyzers.get("opensearch_version").tokenStream("", "test"), new String[] {"test" + version});

assertEquals("test" + (noVersionSupportsMultiTerm ? "no_version" : ""),
analyzers.get("no_version").normalize("", "test").utf8ToString());
assertEquals("test" + (luceneVersionSupportsMultiTerm ? version.luceneVersion.toString() : ""),
analyzers.get("lucene_version").normalize("", "test").utf8ToString());
assertEquals("test" + (elasticsearchVersionSupportsMultiTerm ? version.toString() : ""),
analyzers.get("elasticsearch_version").normalize("", "test").utf8ToString());
assertEquals("test" + (opensearchVersionSupportsMultiTerm ? version.toString() : ""),
analyzers.get("opensearch_version").normalize("", "test").utf8ToString());
}

/**
* Tests that plugins can register pre-configured token filters that vary in behavior based on Elasticsearch version, Lucene version,
* Tests that plugins can register pre-configured token filters that vary in behavior based on OpenSearch version, Lucene version,
* and that do not vary based on version at all.
*/
public void testPluginPreConfiguredTokenFilters() throws IOException {
boolean noVersionSupportsMultiTerm = randomBoolean();
boolean luceneVersionSupportsMultiTerm = randomBoolean();
boolean elasticsearchVersionSupportsMultiTerm = randomBoolean();
boolean opensearchVersionSupportsMultiTerm = randomBoolean();
AnalysisRegistry registry = new AnalysisModule(TestEnvironment.newEnvironment(emptyNodeSettings),
singletonList(new AnalysisPlugin() {
@Override

@@ -321,7 +321,7 @@ public class AnalysisModuleTests extends OpenSearchTestCase {
tokenStream -> new AppendTokenFilter(tokenStream, "no_version")),
PreConfiguredTokenFilter.luceneVersion("lucene_version", luceneVersionSupportsMultiTerm,
(tokenStream, luceneVersion) -> new AppendTokenFilter(tokenStream, luceneVersion.toString())),
PreConfiguredTokenFilter.openSearchVersion("elasticsearch_version", elasticsearchVersionSupportsMultiTerm,
PreConfiguredTokenFilter.openSearchVersion("opensearch_version", opensearchVersionSupportsMultiTerm,
(tokenStream, esVersion) -> new AppendTokenFilter(tokenStream, esVersion.toString()))
);
}

@@ -333,24 +333,24 @@ public class AnalysisModuleTests extends OpenSearchTestCase {
.put("index.analysis.analyzer.no_version.filter", "no_version")
.put("index.analysis.analyzer.lucene_version.tokenizer", "standard")
.put("index.analysis.analyzer.lucene_version.filter", "lucene_version")
.put("index.analysis.analyzer.elasticsearch_version.tokenizer", "standard")
.put("index.analysis.analyzer.elasticsearch_version.filter", "elasticsearch_version")
.put("index.analysis.analyzer.opensearch_version.tokenizer", "standard")
.put("index.analysis.analyzer.opensearch_version.filter", "opensearch_version")
.put(IndexMetadata.SETTING_VERSION_CREATED, version)
.build());
assertTokenStreamContents(analyzers.get("no_version").tokenStream("", "test"), new String[] {"testno_version"});
assertTokenStreamContents(analyzers.get("lucene_version").tokenStream("", "test"), new String[] {"test" + version.luceneVersion});
assertTokenStreamContents(analyzers.get("elasticsearch_version").tokenStream("", "test"), new String[] {"test" + version});
assertTokenStreamContents(analyzers.get("opensearch_version").tokenStream("", "test"), new String[] {"test" + version});

assertEquals("test" + (noVersionSupportsMultiTerm ? "no_version" : ""),
analyzers.get("no_version").normalize("", "test").utf8ToString());
assertEquals("test" + (luceneVersionSupportsMultiTerm ? version.luceneVersion.toString() : ""),
analyzers.get("lucene_version").normalize("", "test").utf8ToString());
assertEquals("test" + (elasticsearchVersionSupportsMultiTerm ? version.toString() : ""),
analyzers.get("elasticsearch_version").normalize("", "test").utf8ToString());
assertEquals("test" + (opensearchVersionSupportsMultiTerm ? version.toString() : ""),
analyzers.get("opensearch_version").normalize("", "test").utf8ToString());
}

/**
* Tests that plugins can register pre-configured token filters that vary in behavior based on Elasticsearch version, Lucene version,
* Tests that plugins can register pre-configured token filters that vary in behavior based on OpenSearch version, Lucene version,
* and that do not vary based on version at all.
*/
public void testPluginPreConfiguredTokenizers() throws IOException {

@@ -392,7 +392,7 @@ public class AnalysisModuleTests extends OpenSearchTestCase {
PreConfiguredTokenizer.singleton("no_version", () -> new FixedTokenizer("no_version")),
PreConfiguredTokenizer.luceneVersion("lucene_version",
luceneVersion -> new FixedTokenizer(luceneVersion.toString())),
PreConfiguredTokenizer.openSearchVersion("elasticsearch_version",
PreConfiguredTokenizer.openSearchVersion("opensearch_version",
esVersion -> new FixedTokenizer(esVersion.toString()))
);
}

@@ -402,20 +402,20 @@ public class AnalysisModuleTests extends OpenSearchTestCase {
IndexAnalyzers analyzers = getIndexAnalyzers(registry, Settings.builder()
.put("index.analysis.analyzer.no_version.tokenizer", "no_version")
.put("index.analysis.analyzer.lucene_version.tokenizer", "lucene_version")
.put("index.analysis.analyzer.elasticsearch_version.tokenizer", "elasticsearch_version")
.put("index.analysis.analyzer.opensearch_version.tokenizer", "opensearch_version")
.put(IndexMetadata.SETTING_VERSION_CREATED, version)
.build());
assertTokenStreamContents(analyzers.get("no_version").tokenStream("", "test"), new String[]{"no_version"});
assertTokenStreamContents(analyzers.get("lucene_version").tokenStream("", "test"), new String[]{version.luceneVersion.toString()});
assertTokenStreamContents(analyzers.get("elasticsearch_version").tokenStream("", "test"), new String[]{version.toString()});
assertTokenStreamContents(analyzers.get("opensearch_version").tokenStream("", "test"), new String[]{version.toString()});

// These are current broken by https://github.com/elastic/elasticsearch/issues/24752
// assertEquals("test" + (noVersionSupportsMultiTerm ? "no_version" : ""),
// analyzers.get("no_version").normalize("", "test").utf8ToString());
// assertEquals("test" + (luceneVersionSupportsMultiTerm ? version.luceneVersion.toString() : ""),
// analyzers.get("lucene_version").normalize("", "test").utf8ToString());
// assertEquals("test" + (elasticsearchVersionSupportsMultiTerm ? version.toString() : ""),
// analyzers.get("elasticsearch_version").normalize("", "test").utf8ToString());
// assertEquals("test" + (opensearchVersionSupportsMultiTerm ? version.toString() : ""),
// analyzers.get("opensearch_version").normalize("", "test").utf8ToString());
}

public void testRegisterHunspellDictionary() throws Exception {

@@ -83,7 +83,7 @@ public class PluginInfoTests extends OpenSearchTestCase {
assertThat(e.getMessage(), containsString("[version] is missing"));
}

public void testReadFromPropertiesElasticsearchVersionMissing() throws Exception {
public void testReadFromPropertiesOpenSearchVersionMissing() throws Exception {
Path pluginDir = createTempDir().resolve("fake-plugin");
PluginTestUtil.writePluginProperties(pluginDir,
"description", "fake desc",

@@ -119,7 +119,7 @@ public class PluginInfoTests extends OpenSearchTestCase {
" by \".\"'s and may have leading zeros but was 1.7.0_80"));
}

public void testReadFromPropertiesBogusElasticsearchVersion() throws Exception {
public void testReadFromPropertiesBogusOpenSearchVersion() throws Exception {
Path pluginDir = createTempDir().resolve("fake-plugin");
PluginTestUtil.writePluginProperties(pluginDir,
"description", "fake desc",

@@ -615,11 +615,11 @@ public class PluginsServiceTests extends OpenSearchTestCase {
assertEquals("Plugin [myplugin] cannot extend non-extensible plugin [nonextensible]", e.getMessage());
}

public void testIncompatibleElasticsearchVersion() throws Exception {
public void testIncompatibleOpenSearchVersion() throws Exception {
PluginInfo info = new PluginInfo("my_plugin", "desc", "1.0", Version.V_6_0_0,
"1.8", "FakePlugin", Collections.emptyList(), false);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PluginsService.verifyCompatibility(info));
assertThat(e.getMessage(), containsString("was built for Elasticsearch version 6.0.0"));
assertThat(e.getMessage(), containsString("was built for OpenSearch version 6.0.0"));
}

public void testIncompatibleJavaVersion() throws Exception {
Some files were not shown because too many files have changed in this diff.