[Rename] Property and metadata keys with prefix es. (#389)

Rename all property and metadata keys with prefix 'es.' to 'opensearch.'.

Signed-off-by: Rabi Panda <adnapibar@gmail.com>
Author: Rabi Panda, 2021-03-18 13:19:22 -07:00 (committed by Nick Knize)
Parent: 3954c658dc
Commit: 13f6d23e40
86 changed files with 235 additions and 235 deletions
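
For illustration only, and not part of this commit: a minimal sketch of how one of the renamed keys is read after this change. The property name "opensearch.geoip.load_db_on_heap" is taken from the diff below; the wrapper class and main method are hypothetical.

// Hypothetical sketch, not code from this commit: reading a renamed system property.
// Before this change the key was "es.geoip.load_db_on_heap"; afterwards callers must
// use the "opensearch." prefix (see the IngestGeoIpPlugin hunk below).
public final class RenamedPropertyExample {
    public static void main(String[] args) {
        final boolean loadOnHeap =
            Boolean.parseBoolean(System.getProperty("opensearch.geoip.load_db_on_heap", "false"));
        System.out.println("geoip databases on heap: " + loadOnHeap);
    }
}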

View File

@@ -104,7 +104,7 @@ class TestClustersPluginFuncTest extends AbstractGradleFuncTest {
     boolean assertEsStdoutContains(String testCluster, String expectedOutput) {
         assert new File(testProjectDir.root,
-            "build/testclusters/${testCluster}-0/logs/es.stdout.log").text.contains(expectedOutput)
+            "build/testclusters/${testCluster}-0/logs/opensearch.stdout.log").text.contains(expectedOutput)
         true
     }

View File

@@ -124,7 +124,7 @@ class NodeInfo {
             clusterName = project.path.replace(':', '_').substring(1) + '_' + prefix
         }
         baseDir = new File(project.buildDir, "cluster/${prefix} node${nodeNum}")
-        pidFile = new File(baseDir, 'es.pid')
+        pidFile = new File(baseDir, 'opensearch.pid')
         this.nodeVersion = Version.fromString(nodeVersion)
         this.isBwcNode = this.nodeVersion.before(VersionProperties.opensearch)
         homeDir = new File(baseDir, "opensearch-${nodeVersion}")
@@ -187,9 +187,9 @@ class NodeInfo {
         env = [:]
         env.putAll(config.environmentVariables)
         for (Map.Entry<String, String> property : System.properties.entrySet()) {
-            if (property.key.startsWith('tests.es.')) {
+            if (property.key.startsWith('tests.opensearch.')) {
                 args.add("-E")
-                args.add("${property.key.substring('tests.es.'.size())}=${property.value}")
+                args.add("${property.key.substring('tests.opensearch.'.size())}=${property.value}")
             }
         }
         if (Os.isFamily(Os.FAMILY_WINDOWS)) {
@@ -202,7 +202,7 @@ class NodeInfo {
         else {
             env.put('ES_PATH_CONF', pathConf)
         }
-        if (!System.properties.containsKey("tests.es.path.data")) {
+        if (!System.properties.containsKey("tests.opensearch.path.data")) {
             if (Os.isFamily(Os.FAMILY_WINDOWS)) {
                 /*
                  * We have to delay building the string as the path will not exist during configuration which will fail on Windows due to

View File

@@ -162,19 +162,19 @@ public class OpenSearchTestBasePlugin implements Plugin<Project> {
             // TODO: remove setting logging level via system property
             test.systemProperty("tests.logger.level", "WARN");
             System.getProperties().entrySet().forEach(entry -> {
-                if ((entry.getKey().toString().startsWith("tests.") || entry.getKey().toString().startsWith("es."))) {
+                if ((entry.getKey().toString().startsWith("tests.") || entry.getKey().toString().startsWith("opensearch."))) {
                     test.systemProperty(entry.getKey().toString(), entry.getValue());
                 }
             });
             // TODO: remove this once ctx isn't added to update script params in 7.0
-            test.systemProperty("es.scripting.update.ctx_in_params", "false");
+            test.systemProperty("opensearch.scripting.update.ctx_in_params", "false");
             // TODO: remove this property in 8.0
-            test.systemProperty("es.search.rewrite_sort", "true");
+            test.systemProperty("opensearch.search.rewrite_sort", "true");
             // TODO: remove this once cname is prepended to transport.publish_address by default in 8.0
-            test.systemProperty("es.transport.cname_in_publish_address", "true");
+            test.systemProperty("opensearch.transport.cname_in_publish_address", "true");
             // Set netty system properties to the properties we configure in jvm.options
             test.systemProperty("io.netty.noUnsafe", "true");

View File

@@ -191,9 +191,9 @@ public class OpenSearchNode implements TestClusterConfiguration {
         confPathLogs = workingDir.resolve("logs");
         transportPortFile = confPathLogs.resolve("transport.ports");
         httpPortsFile = confPathLogs.resolve("http.ports");
-        esStdoutFile = confPathLogs.resolve("es.stdout.log");
-        esStderrFile = confPathLogs.resolve("es.stderr.log");
-        esStdinFile = workingDir.resolve("es.stdin");
+        esStdoutFile = confPathLogs.resolve("opensearch.stdout.log");
+        esStderrFile = confPathLogs.resolve("opensearch.stderr.log");
+        esStdinFile = workingDir.resolve("opensearch.stdin");
         tmpDir = workingDir.resolve("tmp");
         waitConditions.put("ports files", this::checkPortsFilesExistWithDelay);

View File

@@ -42,7 +42,7 @@ import java.util.stream.Collectors;
 public class RunTask extends DefaultTestClustersTask {
     private static final Logger logger = Logging.getLogger(RunTask.class);
-    public static final String CUSTOM_SETTINGS_PREFIX = "tests.es.";
+    public static final String CUSTOM_SETTINGS_PREFIX = "tests.opensearch.";
     private Boolean debug = false;

View File

@@ -79,7 +79,7 @@ check.dependsOn(asyncIntegTest)
 testClusters.all {
     testDistribution = 'DEFAULT'
-    systemProperty 'es.scripting.update.ctx_in_params', 'false'
+    systemProperty 'opensearch.scripting.update.ctx_in_params', 'false'
     setting 'reindex.remote.whitelist', '[ "[::1]:*", "127.0.0.1:*" ]'
     extraConfigFile 'roles.yml', file('roles.yml')

View File

@@ -126,7 +126,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
             assertEquals("OpenSearch exception [type=version_conflict_engine_exception, reason=[" + docId + "]: " +
                 "version conflict, required seqNo [2], primary term [2]. current document has seqNo [3] and primary term [1]]",
                 exception.getMessage());
-            assertEquals("index", exception.getMetadata("es.index").get(0));
+            assertEquals("index", exception.getMetadata("opensearch.index").get(0));
         }
         {
             // Testing version type
@@ -154,7 +154,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
             assertEquals(RestStatus.CONFLICT, exception.status());
             assertEquals("OpenSearch exception [type=version_conflict_engine_exception, reason=[" +
                 docId + "]: version conflict, current version [12] is higher or equal to the one provided [10]]", exception.getMessage());
-            assertEquals("index", exception.getMetadata("es.index").get(0));
+            assertEquals("index", exception.getMetadata("opensearch.index").get(0));
         }
         {
             // Testing routing
@@ -300,7 +300,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
                 () -> execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync));
             assertEquals(RestStatus.NOT_FOUND, exception.status());
             assertEquals("OpenSearch exception [type=index_not_found_exception, reason=no such index [index]]", exception.getMessage());
-            assertEquals("index", exception.getMetadata("es.index").get(0));
+            assertEquals("index", exception.getMetadata("opensearch.index").get(0));
         }
         IndexRequest index = new IndexRequest("index").id("id");
         String document = "{\"field1\":\"value1\",\"field2\":\"value2\"}";
@@ -314,7 +314,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
             assertEquals(RestStatus.CONFLICT, exception.status());
             assertEquals("OpenSearch exception [type=version_conflict_engine_exception, " + "reason=[id]: " +
                 "version conflict, current version [1] is different than the one provided [2]]", exception.getMessage());
-            assertEquals("index", exception.getMetadata("es.index").get(0));
+            assertEquals("index", exception.getMetadata("opensearch.index").get(0));
         }
         {
             GetRequest getRequest = new GetRequest("index", "id");
@@ -493,7 +493,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
                 () -> execute(getRequest, highLevelClient()::getSource, highLevelClient()::getSourceAsync));
             assertEquals(RestStatus.NOT_FOUND, exception.status());
             assertEquals("OpenSearch exception [type=index_not_found_exception, reason=no such index [index]]", exception.getMessage());
-            assertEquals("index", exception.getMetadata("es.index").get(0));
+            assertEquals("index", exception.getMetadata("opensearch.index").get(0));
         }
         IndexRequest index = new IndexRequest("index").id("id");
         String document = "{\"field1\":\"value1\",\"field2\":\"value2\"}";
@@ -606,7 +606,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
             assertEquals("OpenSearch exception [type=version_conflict_engine_exception, reason=[id]: " +
                 "version conflict, required seqNo [1], primary term [5]. current document has seqNo [2] and primary term [1]]",
                 exception.getMessage());
-            assertEquals("index", exception.getMetadata("es.index").get(0));
+            assertEquals("index", exception.getMetadata("opensearch.index").get(0));
         }
         {
             OpenSearchStatusException exception = expectThrows(OpenSearchStatusException.class, () -> {

View File

@@ -820,7 +820,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
         assertThat(exception.status(), equalTo(RestStatus.NOT_FOUND));
         assertThat(exception.getMessage(),
             equalTo("OpenSearch exception [type=index_not_found_exception, reason=no such index [non_existent_index]]"));
-        assertThat(exception.getMetadata("es.index"), hasItem(nonExistentIndex));
+        assertThat(exception.getMetadata("opensearch.index"), hasItem(nonExistentIndex));
         createIndex(index, Settings.EMPTY);
         IndicesAliasesRequest mixedRequest = new IndicesAliasesRequest();
@@ -831,8 +831,8 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
         assertThat(exception.status(), equalTo(RestStatus.NOT_FOUND));
         assertThat(exception.getMessage(),
             equalTo("OpenSearch exception [type=index_not_found_exception, reason=no such index [non_existent_index]]"));
-        assertThat(exception.getMetadata("es.index"), hasItem(nonExistentIndex));
-        assertThat(exception.getMetadata("es.index"), not(hasItem(index)));
+        assertThat(exception.getMetadata("opensearch.index"), hasItem(nonExistentIndex));
+        assertThat(exception.getMetadata("opensearch.index"), not(hasItem(index)));
         assertThat(aliasExists(index, alias), equalTo(false));
         assertThat(aliasExists(alias), equalTo(false));
@@ -844,8 +844,8 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
         assertThat(exception.status(), equalTo(RestStatus.NOT_FOUND));
         assertThat(exception.getMessage(),
             equalTo("OpenSearch exception [type=index_not_found_exception, reason=no such index [non_existent_index]]"));
-        assertThat(exception.getMetadata("es.index"), hasItem(nonExistentIndex));
-        assertThat(exception.getMetadata("es.index"), not(hasItem(index)));
+        assertThat(exception.getMetadata("opensearch.index"), hasItem(nonExistentIndex));
+        assertThat(exception.getMetadata("opensearch.index"), not(hasItem(index)));
         assertThat(aliasExists(index, alias), equalTo(false));
         assertThat(aliasExists(alias), equalTo(false));
     }

View File

@@ -34,7 +34,7 @@ public class OpenSearchExceptionTests extends AbstractResponseTestCase<org.opens
         IllegalArgumentException iae = new IllegalArgumentException("argument", ies);
         org.opensearch.OpenSearchException exception = new org.opensearch.OpenSearchException("elastic_exception", iae);
         exception.addHeader("key","value");
-        exception.addMetadata("es.meta","data");
+        exception.addMetadata("opensearch.meta","data");
         exception.addSuppressed(new NumberFormatException("3/0"));
         return exception;
     }

View File

@@ -101,6 +101,6 @@ public class AppendProcessorFactoryTests extends OpenSearchTestCase {
         OpenSearchException exception = expectThrows(OpenSearchException.class,
             () -> factory.create(null, processorTag, null, config));
         assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script"));
-        assertThat(exception.getMetadata("es.processor_tag").get(0), equalTo(processorTag));
+        assertThat(exception.getMetadata("opensearch.processor_tag").get(0), equalTo(processorTag));
     }
 }

View File

@@ -58,9 +58,9 @@ public class ConvertProcessorFactoryTests extends OpenSearchTestCase {
             fail("factory create should have failed");
         } catch (OpenSearchParseException e) {
             assertThat(e.getMessage(), Matchers.equalTo("[type] type [" + type + "] not supported, cannot convert field."));
-            assertThat(e.getMetadata("es.processor_type").get(0), equalTo(ConvertProcessor.TYPE));
-            assertThat(e.getMetadata("es.property_name").get(0), equalTo("type"));
-            assertThat(e.getMetadata("es.processor_tag"), nullValue());
+            assertThat(e.getMetadata("opensearch.processor_type").get(0), equalTo(ConvertProcessor.TYPE));
+            assertThat(e.getMetadata("opensearch.property_name").get(0), equalTo("type"));
+            assertThat(e.getMetadata("opensearch.processor_tag"), nullValue());
         }
     }

View File

@@ -67,6 +67,6 @@ public class FailProcessorFactoryTests extends OpenSearchTestCase {
         OpenSearchException exception = expectThrows(OpenSearchException.class, () -> factory.create(null, processorTag,
             null, config));
         assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script"));
-        assertThat(exception.getMetadata("es.processor_tag").get(0), equalTo(processorTag));
+        assertThat(exception.getMetadata("opensearch.processor_tag").get(0), equalTo(processorTag));
     }
 }

View File

@@ -80,6 +80,6 @@ public class RemoveProcessorFactoryTests extends OpenSearchTestCase {
         OpenSearchException exception = expectThrows(OpenSearchException.class,
             () -> factory.create(null, processorTag, null, config));
         assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script"));
-        assertThat(exception.getMetadata("es.processor_tag").get(0), equalTo(processorTag));
+        assertThat(exception.getMetadata("opensearch.processor_tag").get(0), equalTo(processorTag));
     }
 }

View File

@@ -109,7 +109,7 @@ public class SetProcessorFactoryTests extends OpenSearchTestCase {
         OpenSearchException exception = expectThrows(OpenSearchException.class,
             () -> factory.create(null, processorTag, null, config));
         assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script"));
-        assertThat(exception.getMetadata("es.processor_tag").get(0), equalTo(processorTag));
+        assertThat(exception.getMetadata("opensearch.processor_tag").get(0), equalTo(processorTag));
     }
 }

View File

@@ -80,6 +80,6 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
     tasks.named("test").configure {
         // Windows cannot cleanup database files properly unless it loads everything on heap.
         // See https://github.com/maxmind/MaxMind-DB-Reader-java#file-lock-on-windows for more information
-        systemProperty 'es.geoip.load_db_on_heap', 'true'
+        systemProperty 'opensearch.geoip.load_db_on_heap', 'true'
     }
 }

View File

@@ -100,7 +100,7 @@ public class IngestGeoIpPlugin extends Plugin implements IngestPlugin, Closeable
     static Map<String, DatabaseReaderLazyLoader> loadDatabaseReaders(Path geoIpDirectory, Path geoIpConfigDirectory) throws IOException {
         assertDatabaseExistence(geoIpDirectory, true);
         assertDatabaseExistence(geoIpConfigDirectory, false);
-        final boolean loadDatabaseOnHeap = Booleans.parseBoolean(System.getProperty("es.geoip.load_db_on_heap", "false"));
+        final boolean loadDatabaseOnHeap = Booleans.parseBoolean(System.getProperty("opensearch.geoip.load_db_on_heap", "false"));
         final Map<String, DatabaseReaderLazyLoader> databaseReaders = new HashMap<>();
         // load the default databases

View File

@@ -28,9 +28,9 @@ esplugin {
 testClusters.all {
     module ':modules:mapper-extras'
-    systemProperty 'es.scripting.update.ctx_in_params', 'false'
+    systemProperty 'opensearch.scripting.update.ctx_in_params', 'false'
     // TODO: remove this once cname is prepended to transport.publish_address by default in 8.0
-    systemProperty 'es.transport.cname_in_publish_address', 'true'
+    systemProperty 'opensearch.transport.cname_in_publish_address', 'true'
 }
 dependencies {

View File

@@ -63,12 +63,12 @@ public class PainlessExplainError extends Error {
             }
         }
-        headers.put("es.to_string", singletonList(toString));
+        headers.put("opensearch.to_string", singletonList(toString));
         if (painlessClassName != null) {
-            headers.put("es.painless_class", singletonList(painlessClassName));
+            headers.put("opensearch.painless_class", singletonList(painlessClassName));
         }
         if (javaClassName != null) {
-            headers.put("es.java_class", singletonList(javaClassName));
+            headers.put("opensearch.java_class", singletonList(javaClassName));
         }
         return headers;
     }

View File

@@ -45,16 +45,16 @@ public class DebugTests extends ScriptTestCase {
         PainlessExplainError e = expectScriptThrows(PainlessExplainError.class, () -> exec(
             "Debug.explain(params.a)", singletonMap("a", dummy), true));
         assertSame(dummy, e.getObjectToExplain());
-        assertThat(e.getHeaders(painlessLookup), hasEntry("es.to_string", singletonList(dummy.toString())));
-        assertThat(e.getHeaders(painlessLookup), hasEntry("es.java_class", singletonList("java.lang.Object")));
-        assertThat(e.getHeaders(painlessLookup), hasEntry("es.painless_class", singletonList("java.lang.Object")));
+        assertThat(e.getHeaders(painlessLookup), hasEntry("opensearch.to_string", singletonList(dummy.toString())));
+        assertThat(e.getHeaders(painlessLookup), hasEntry("opensearch.java_class", singletonList("java.lang.Object")));
+        assertThat(e.getHeaders(painlessLookup), hasEntry("opensearch.painless_class", singletonList("java.lang.Object")));
         // Null should be ok
         e = expectScriptThrows(PainlessExplainError.class, () -> exec("Debug.explain(null)"));
         assertNull(e.getObjectToExplain());
-        assertThat(e.getHeaders(painlessLookup), hasEntry("es.to_string", singletonList("null")));
-        assertThat(e.getHeaders(painlessLookup), not(hasKey("es.java_class")));
-        assertThat(e.getHeaders(painlessLookup), not(hasKey("es.painless_class")));
+        assertThat(e.getHeaders(painlessLookup), hasEntry("opensearch.to_string", singletonList("null")));
+        assertThat(e.getHeaders(painlessLookup), not(hasKey("opensearch.java_class")));
+        assertThat(e.getHeaders(painlessLookup), not(hasKey("opensearch.painless_class")));
         // You can't catch the explain exception
         e = expectScriptThrows(PainlessExplainError.class, () -> exec(
@@ -72,17 +72,17 @@ public class DebugTests extends ScriptTestCase {
     public void testPainlessExplainErrorSerialization() throws IOException {
         Map<String, Object> params = singletonMap("a", "jumped over the moon");
         ScriptException e = expectThrows(ScriptException.class, () -> exec("Debug.explain(params.a)", params, true));
-        assertEquals(singletonList("jumped over the moon"), e.getMetadata("es.to_string"));
-        assertEquals(singletonList("java.lang.String"), e.getMetadata("es.java_class"));
-        assertEquals(singletonList("java.lang.String"), e.getMetadata("es.painless_class"));
+        assertEquals(singletonList("jumped over the moon"), e.getMetadata("opensearch.to_string"));
+        assertEquals(singletonList("java.lang.String"), e.getMetadata("opensearch.java_class"));
+        assertEquals(singletonList("java.lang.String"), e.getMetadata("opensearch.painless_class"));
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeException(e);
             try (StreamInput in = out.bytes().streamInput()) {
                 OpenSearchException read = (ScriptException) in.readException();
-                assertEquals(singletonList("jumped over the moon"), read.getMetadata("es.to_string"));
-                assertEquals(singletonList("java.lang.String"), read.getMetadata("es.java_class"));
-                assertEquals(singletonList("java.lang.String"), read.getMetadata("es.painless_class"));
+                assertEquals(singletonList("jumped over the moon"), read.getMetadata("opensearch.to_string"));
+                assertEquals(singletonList("java.lang.String"), read.getMetadata("opensearch.java_class"));
+                assertEquals(singletonList("java.lang.String"), read.getMetadata("opensearch.painless_class"));
             }
         }
     }

View File

@@ -48,7 +48,7 @@ test {
      * same JVM randomize processors and will step on each other if we allow them to
      * set the number of available processors as it's set-once in Netty.
      */
-    systemProperty 'es.set.netty.runtime.available.processors', 'false'
+    systemProperty 'opensearch.set.netty.runtime.available.processors', 'false'
 }
 dependencies {

View File

@@ -65,27 +65,27 @@ test {
      * We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each
      * other if we allow them to set the number of available processors as it's set-once in Netty.
      */
-    systemProperty 'es.set.netty.runtime.available.processors', 'false'
+    systemProperty 'opensearch.set.netty.runtime.available.processors', 'false'
 }
 internalClusterTest {
-    systemProperty 'es.set.netty.runtime.available.processors', 'false'
+    systemProperty 'opensearch.set.netty.runtime.available.processors', 'false'
 }
 javaRestTest {
-    systemProperty 'es.set.netty.runtime.available.processors', 'false'
+    systemProperty 'opensearch.set.netty.runtime.available.processors', 'false'
 }
 TaskProvider<Test> pooledTest = tasks.register("pooledTest", Test) {
     include '**/*Tests.class'
-    systemProperty 'es.set.netty.runtime.available.processors', 'false'
-    systemProperty 'es.use_unpooled_allocator', 'false'
+    systemProperty 'opensearch.set.netty.runtime.available.processors', 'false'
+    systemProperty 'opensearch.use_unpooled_allocator', 'false'
 }
 TaskProvider<Test> pooledInternalClusterTest = tasks.register("pooledInternalClusterTest", Test) {
     include '**/*IT.class'
-    systemProperty 'es.set.netty.runtime.available.processors', 'false'
-    systemProperty 'es.use_unpooled_allocator', 'false'
+    systemProperty 'opensearch.set.netty.runtime.available.processors', 'false'
+    systemProperty 'opensearch.use_unpooled_allocator', 'false'
     SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class);
     SourceSet internalTestSourceSet = sourceSets.getByName(InternalClusterTestPlugin.SOURCE_SET_NAME)
     setTestClassesDirs(internalTestSourceSet.getOutput().getClassesDirs())
@@ -93,14 +93,14 @@ TaskProvider<Test> pooledInternalClusterTest = tasks.register("pooledInternalClu
 }
 RestIntegTestTask pooledJavaRestTest = tasks.create("pooledJavaRestTest", RestIntegTestTask) {
-    systemProperty 'es.set.netty.runtime.available.processors', 'false'
+    systemProperty 'opensearch.set.netty.runtime.available.processors', 'false'
     SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class);
     SourceSet javaRestTestSourceSet = sourceSets.getByName(JavaRestTestPlugin.SOURCE_SET_NAME)
     setTestClassesDirs(javaRestTestSourceSet.getOutput().getClassesDirs())
     setClasspath(javaRestTestSourceSet.getRuntimeClasspath())
 }
 testClusters.pooledJavaRestTest {
-    systemProperty 'es.use_unpooled_allocator', 'false'
+    systemProperty 'opensearch.use_unpooled_allocator', 'false'
 }
 check.dependsOn(pooledTest, pooledJavaRestTest, pooledInternalClusterTest)

View File

@@ -39,7 +39,7 @@ import java.util.List;
 @ChannelHandler.Sharable
 class Netty4HttpResponseCreator extends MessageToMessageEncoder<Netty4HttpResponse> {
-    private static final String DO_NOT_SPLIT = "es.unsafe.do_not_split_http_responses";
+    private static final String DO_NOT_SPLIT = "opensearch.unsafe.do_not_split_http_responses";
     private static final boolean DO_NOT_SPLIT_HTTP_RESPONSES;
     private static final int SPLIT_THRESHOLD;

View File

@@ -94,7 +94,7 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport {
      *
      * By default we assume the Ethernet MTU (1500 bytes) but users can override it with a system property.
      */
-    private static final ByteSizeValue MTU = new ByteSizeValue(Long.parseLong(System.getProperty("es.net.mtu", "1500")));
+    private static final ByteSizeValue MTU = new ByteSizeValue(Long.parseLong(System.getProperty("opensearch.net.mtu", "1500")));
     private static final String SETTING_KEY_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = "http.netty.max_composite_buffer_components";

View File

@@ -62,7 +62,7 @@ import static io.netty.channel.internal.ChannelUtils.MAX_BYTES_PER_GATHERING_WRI
 public class CopyBytesSocketChannel extends Netty4NioSocketChannel {
     private static final int MAX_BYTES_PER_WRITE = StrictMath.toIntExact(ByteSizeValue.parseBytesSizeValue(
-        System.getProperty("es.transport.buffer.size", "1m"), "es.transport.buffer.size").getBytes());
+        System.getProperty("opensearch.transport.buffer.size", "1m"), "opensearch.transport.buffer.size").getBytes());
     private static final ThreadLocal<ByteBuffer> ioBuffer = ThreadLocal.withInitial(() -> ByteBuffer.allocateDirect(MAX_BYTES_PER_WRITE));
     private final WriteConfig writeConfig = new WriteConfig();

View File

@@ -44,16 +44,16 @@ public class NettyAllocator {
     private static final ByteBufAllocator ALLOCATOR;
     private static final String DESCRIPTION;
-    private static final String USE_UNPOOLED = "es.use_unpooled_allocator";
-    private static final String USE_NETTY_DEFAULT = "es.unsafe.use_netty_default_allocator";
-    private static final String USE_NETTY_DEFAULT_CHUNK = "es.unsafe.use_netty_default_chunk_and_page_size";
+    private static final String USE_UNPOOLED = "opensearch.use_unpooled_allocator";
+    private static final String USE_NETTY_DEFAULT = "opensearch.unsafe.use_netty_default_allocator";
+    private static final String USE_NETTY_DEFAULT_CHUNK = "opensearch.unsafe.use_netty_default_chunk_and_page_size";
     static {
         if (Booleans.parseBoolean(System.getProperty(USE_NETTY_DEFAULT), false)) {
             ALLOCATOR = ByteBufAllocator.DEFAULT;
             SUGGESTED_MAX_ALLOCATION_SIZE = 1024 * 1024;
             DESCRIPTION = "[name=netty_default, suggested_max_allocation_size=" + new ByteSizeValue(SUGGESTED_MAX_ALLOCATION_SIZE)
-                + ", factors={es.unsafe.use_netty_default_allocator=true}]";
+                + ", factors={opensearch.unsafe.use_netty_default_allocator=true}]";
         } else {
             final long heapSizeInBytes = JvmInfo.jvmInfo().getMem().getHeapMax().getBytes();
             final boolean g1gcEnabled = Boolean.parseBoolean(JvmInfo.jvmInfo().useG1GC());
@@ -73,7 +73,7 @@ public class NettyAllocator {
                 SUGGESTED_MAX_ALLOCATION_SIZE = 1024 * 1024;
             }
             DESCRIPTION = "[name=unpooled, suggested_max_allocation_size=" + new ByteSizeValue(SUGGESTED_MAX_ALLOCATION_SIZE)
-                + ", factors={es.unsafe.use_unpooled_allocator=" + System.getProperty(USE_UNPOOLED)
+                + ", factors={opensearch.unsafe.use_unpooled_allocator=" + System.getProperty(USE_UNPOOLED)
                 + ", g1gc_enabled=" + g1gcEnabled
                 + ", g1gc_region_size=" + g1gcRegionSize
                 + ", heap_size=" + heapSize + "}]";
@@ -108,7 +108,7 @@ public class NettyAllocator {
             SUGGESTED_MAX_ALLOCATION_SIZE = chunkSizeInBytes;
             DESCRIPTION = "[name=opensearch_configured, chunk_size=" + chunkSize
                 + ", suggested_max_allocation_size=" + new ByteSizeValue(SUGGESTED_MAX_ALLOCATION_SIZE)
-                + ", factors={es.unsafe.use_netty_default_chunk_and_page_size=" + useDefaultChunkAndPageSize()
+                + ", factors={opensearch.unsafe.use_netty_default_chunk_and_page_size=" + useDefaultChunkAndPageSize()
                 + ", g1gc_enabled=" + g1gcEnabled
                 + ", g1gc_region_size=" + g1gcRegionSize + "}]";
         }

View File

@@ -48,7 +48,7 @@ public class Netty4Utils {
      */
     public static void setAvailableProcessors(final int availableProcessors) {
         // we set this to false in tests to avoid tests that randomly set processors from stepping on each other
-        final boolean set = Booleans.parseBoolean(System.getProperty("es.set.netty.runtime.available.processors", "true"));
+        final boolean set = Booleans.parseBoolean(System.getProperty("opensearch.set.netty.runtime.available.processors", "true"));
         if (!set) {
             return;
         }

View File

@@ -65,7 +65,7 @@ testClusters.yamlRestTest {
     // use gce fixture for Auth calls instead of http://metadata.google.internal
     environment 'GCE_METADATA_HOST', { "http://${gceFixture.addressAndPort}" }, IGNORE_VALUE
     // allows to configure hidden settings (`cloud.gce.host` and `cloud.gce.root_url`)
-    systemProperty 'es.allow_reroute_gce_settings', 'true'
+    systemProperty 'opensearch.allow_reroute_gce_settings', 'true'
     setting 'discovery.seed_providers', 'gce'
     // use gce fixture for metadata server calls instead of http://metadata.google.internal

View File

@@ -53,7 +53,7 @@ public class GceDiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close
     /** Determines whether settings those reroutes GCE call should be allowed (for testing purposes only). */
     private static final boolean ALLOW_REROUTE_GCE_SETTINGS =
-        Booleans.parseBoolean(System.getProperty("es.allow_reroute_gce_settings", "false"));
+        Booleans.parseBoolean(System.getProperty("opensearch.allow_reroute_gce_settings", "false"));
     public static final String GCE = "gce";
     protected final Settings settings;

View File

@@ -311,7 +311,7 @@ testClusters {
         plugin bundlePlugin.archiveFile
         // force large blob uploads by setting the threshold small, forcing this code path to be tested
-        systemProperty 'es.repository_gcs.large_blob_threshold_byte_size', '256'
+        systemProperty 'opensearch.repository_gcs.large_blob_threshold_byte_size', '256'
     }
 }

View File

@@ -76,7 +76,7 @@ class GoogleCloudStorageBlobStore implements BlobStore {
     public static final int LARGE_BLOB_THRESHOLD_BYTE_SIZE;
     static {
-        final String key = "es.repository_gcs.large_blob_threshold_byte_size";
+        final String key = "opensearch.repository_gcs.large_blob_threshold_byte_size";
         final String largeBlobThresholdByteSizeProperty = System.getProperty(key);
         if (largeBlobThresholdByteSizeProperty == null) {
             LARGE_BLOB_THRESHOLD_BYTE_SIZE = Math.toIntExact(new ByteSizeValue(5, ByteSizeUnit.MB).getBytes());

View File

@@ -79,7 +79,7 @@ bundlePlugin {
 task testRepositoryCreds(type: Test) {
     include '**/RepositoryCredentialsTests.class'
-    systemProperty 'es.allow_insecure_settings', 'true'
+    systemProperty 'opensearch.allow_insecure_settings', 'true'
 }
 check.dependsOn(testRepositoryCreds)

View File

@@ -66,7 +66,7 @@ public class RepositoryCredentialsTests extends OpenSearchSingleNodeTestCase {
     static {
         AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
             // required for client settings overwriting when running in IDE
-            System.setProperty("es.allow_insecure_settings", "true");
+            System.setProperty("opensearch.allow_insecure_settings", "true");
             return null;
         });
     }

View File

@@ -38,7 +38,7 @@ import java.util.List;
 @ChannelHandler.Sharable
 public class NioHttpResponseCreator extends MessageToMessageEncoder<NioHttpResponse> {
-    private static final String DO_NOT_SPLIT = "es.unsafe.do_not_split_http_responses";
+    private static final String DO_NOT_SPLIT = "opensearch.unsafe.do_not_split_http_responses";
     private static final boolean DO_NOT_SPLIT_HTTP_RESPONSES;
     private static final int SPLIT_THRESHOLD;

View File

@@ -31,7 +31,7 @@ import java.util.Collections;
 import java.util.List;
 import static java.util.Collections.emptyList;
-import static org.opensearch.bootstrap.BootstrapChecks.ES_ENFORCE_BOOTSTRAP_CHECKS;
+import static org.opensearch.bootstrap.BootstrapChecks.OPENSEARCH_ENFORCE_BOOTSTRAP_CHECKS;
 import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.hasToString;
@@ -41,7 +41,7 @@ import static org.mockito.Mockito.verifyNoMoreInteractions;
 public class EvilBootstrapChecksTests extends AbstractBootstrapCheckTestCase {
-    private String esEnforceBootstrapChecks = System.getProperty(ES_ENFORCE_BOOTSTRAP_CHECKS);
+    private String esEnforceBootstrapChecks = System.getProperty(OPENSEARCH_ENFORCE_BOOTSTRAP_CHECKS);
     @Override
     @Before
@@ -88,16 +88,16 @@ public class EvilBootstrapChecksTests extends AbstractBootstrapCheckTestCase {
             IllegalArgumentException.class,
             () -> BootstrapChecks.check(emptyContext, enforceLimits, emptyList()));
         final Matcher<String> matcher = containsString(
-            "[es.enforce.bootstrap.checks] must be [true] but was [" + value + "]");
+            "[opensearch.enforce.bootstrap.checks] must be [true] but was [" + value + "]");
         assertThat(e, hasToString(matcher));
     }
-    @SuppressForbidden(reason = "set or clear system property es.enforce.bootstrap.checks")
+    @SuppressForbidden(reason = "set or clear system property opensearch.enforce.bootstrap.checks")
     public void setEsEnforceBootstrapChecks(final String value) {
         if (value == null) {
-            System.clearProperty(ES_ENFORCE_BOOTSTRAP_CHECKS);
+            System.clearProperty(OPENSEARCH_ENFORCE_BOOTSTRAP_CHECKS);
         } else {
-            System.setProperty(ES_ENFORCE_BOOTSTRAP_CHECKS, value);
+            System.setProperty(OPENSEARCH_ENFORCE_BOOTSTRAP_CHECKS, value);
         }
     }

View File

@@ -33,9 +33,9 @@ public class EvilOpenSearchCliTests extends BaseOpenSearchCliTestCase {
     @SuppressForbidden(reason = "manipulates system properties for testing")
     public void testPathHome() throws Exception {
-        final String pathHome = System.getProperty("es.path.home");
+        final String pathHome = System.getProperty("opensearch.path.home");
         final String value = randomAlphaOfLength(16);
-        System.setProperty("es.path.home", value);
+        System.setProperty("opensearch.path.home", value);
         runTest(
             ExitCodes.OK,
@@ -50,7 +50,7 @@ public class EvilOpenSearchCliTests extends BaseOpenSearchCliTestCase {
                 assertThat(settings.keySet(), hasItem("path.logs")); // added by env initialization
             });
-        System.clearProperty("es.path.home");
+        System.clearProperty("opensearch.path.home");
         final String commandLineValue = randomAlphaOfLength(16);
         runTest(
             ExitCodes.OK,
@@ -66,8 +66,8 @@ public class EvilOpenSearchCliTests extends BaseOpenSearchCliTestCase {
             },
             "-Epath.home=" + commandLineValue);
-        if (pathHome != null) System.setProperty("es.path.home", pathHome);
-        else System.clearProperty("es.path.home");
+        if (pathHome != null) System.setProperty("opensearch.path.home", pathHome);
+        else System.clearProperty("opensearch.path.home");
     }
 }

View File

@@ -33,7 +33,7 @@ import static org.hamcrest.Matchers.hasToString;
 public class EvilEnvironmentAwareCommandTests extends OpenSearchTestCase {
     @Rule
-    public TestRule restoreSystemProperties = new TestRuleRestoreSystemProperties("es.path.conf");
+    public TestRule restoreSystemProperties = new TestRuleRestoreSystemProperties("opensearch.path.conf");
     public void testEsPathConfNotSet() throws Exception {
         clearEsPathConf();
@@ -54,12 +54,12 @@ public class EvilEnvironmentAwareCommandTests extends OpenSearchTestCase {
         final TestEnvironmentAwareCommand command = new TestEnvironmentAwareCommand("test");
         final UserException e =
             expectThrows(UserException.class, () -> command.mainWithoutErrorHandling(new String[0], new MockTerminal()));
-        assertThat(e, hasToString(containsString("the system property [es.path.conf] must be set")));
+        assertThat(e, hasToString(containsString("the system property [opensearch.path.conf] must be set")));
     }
-    @SuppressForbidden(reason = "clears system property es.path.conf as part of test setup")
+    @SuppressForbidden(reason = "clears system property opensearch.path.conf as part of test setup")
     private void clearEsPathConf() {
-        System.clearProperty("es.path.conf");
+        System.clearProperty("opensearch.path.conf");
     }
 }

View File

@@ -34,14 +34,14 @@ public class EvilSystemPropertyTests extends OpenSearchTestCase {
         Integer numShards = IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.get(Settings.builder().put("index.number_of_shards", 100).build());
         assertEquals(100, numShards.intValue());
         int limit = randomIntBetween(1, 10);
-        System.setProperty("es.index.max_number_of_shards", Integer.toString(limit));
+        System.setProperty("opensearch.index.max_number_of_shards", Integer.toString(limit));
         try {
             IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
                 IndexMetadata.buildNumberOfShardsSetting()
                     .get(Settings.builder().put("index.number_of_shards", 11).build()));
             assertEquals("Failed to parse value [11] for setting [index.number_of_shards] must be <= " + limit, e.getMessage());
         } finally {
-            System.clearProperty("es.index.max_number_of_shards");
+            System.clearProperty("opensearch.index.max_number_of_shards");
         }
     }
 }

View File

@@ -37,13 +37,13 @@ public class EvilSystemPropertyTests extends OpenSearchTestCase {
         assertWarnings(OperationRouting.IGNORE_AWARENESS_ATTRIBUTES_DEPRECATION_MESSAGE);
         assertThat(routing.getAwarenessAttributes().size(), equalTo(1));
         assertThat(routing.getAwarenessAttributes().get(0), equalTo("test"));
-        System.setProperty("es.search.ignore_awareness_attributes", "true");
+        System.setProperty("opensearch.search.ignore_awareness_attributes", "true");
         try {
             routing = new OperationRouting(indexSettings,
                 new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
             assertTrue(routing.getAwarenessAttributes().isEmpty());
         } finally {
-            System.clearProperty("es.search.ignore_awareness_attributes");
+            System.clearProperty("opensearch.search.ignore_awareness_attributes");
         }
     }

View File

@@ -91,9 +91,9 @@ public class EvilLoggerTests extends OpenSearchTestCase {
         testLogger.debug("This is a debug message");
         testLogger.trace("This is a trace message");
         final String path =
-            System.getProperty("es.logs.base_path") +
+            System.getProperty("opensearch.logs.base_path") +
                 System.getProperty("file.separator") +
-                System.getProperty("es.logs.cluster_name") +
+                System.getProperty("opensearch.logs.cluster_name") +
                 ".log";
         final List<String> events = Files.readAllLines(PathUtils.get(path));
         assertThat(events.size(), equalTo(5));
@@ -164,9 +164,9 @@ public class EvilLoggerTests extends OpenSearchTestCase {
         barrier.await();
         final String deprecationPath =
-            System.getProperty("es.logs.base_path") +
+            System.getProperty("opensearch.logs.base_path") +
                 System.getProperty("file.separator") +
-                System.getProperty("es.logs.cluster_name") +
+                System.getProperty("opensearch.logs.cluster_name") +
                 "_deprecation.log";
         final List<String> deprecationEvents = Files.readAllLines(PathUtils.get(deprecationPath));
         // we appended an integer to each log message, use that for sorting
@@ -200,9 +200,9 @@ public class EvilLoggerTests extends OpenSearchTestCase {
         }
         final String deprecationPath =
-            System.getProperty("es.logs.base_path") +
+            System.getProperty("opensearch.logs.base_path") +
                 System.getProperty("file.separator") +
-                System.getProperty("es.logs.cluster_name") +
+                System.getProperty("opensearch.logs.cluster_name") +
                 "_deprecation.log";
         final List<String> deprecationEvents = Files.readAllLines(PathUtils.get(deprecationPath));
         if (iterations > 0) {
@@ -241,9 +241,9 @@ public class EvilLoggerTests extends OpenSearchTestCase {
         logger.info(new ParameterizedMessage("{}", "test"), e);
         final String path =
-            System.getProperty("es.logs.base_path") +
+            System.getProperty("opensearch.logs.base_path") +
                 System.getProperty("file.separator") +
-                System.getProperty("es.logs.cluster_name") +
+                System.getProperty("opensearch.logs.cluster_name") +
                 ".log";
         final List<String> events = Files.readAllLines(PathUtils.get(path));
@@ -279,10 +279,10 @@ public class EvilLoggerTests extends OpenSearchTestCase {
             .build();
         setupLogging("minimal", settings);
-        assertNotNull(System.getProperty("es.logs.base_path"));
-        assertThat(System.getProperty("es.logs.cluster_name"), equalTo(ClusterName.CLUSTER_NAME_SETTING.get(settings).value()));
-        assertThat(System.getProperty("es.logs.node_name"), equalTo(Node.NODE_NAME_SETTING.get(settings)));
+        assertNotNull(System.getProperty("opensearch.logs.base_path"));
+        assertThat(System.getProperty("opensearch.logs.cluster_name"), equalTo(ClusterName.CLUSTER_NAME_SETTING.get(settings).value()));
+        assertThat(System.getProperty("opensearch.logs.node_name"), equalTo(Node.NODE_NAME_SETTING.get(settings)));
     }
     public void testNoNodeNameInPatternWarning() throws IOException, UserException {
@@ -290,9 +290,9 @@ public class EvilLoggerTests extends OpenSearchTestCase {
         LogConfigurator.setNodeName(nodeName);
         setupLogging("no_node_name");
         final String path =
-            System.getProperty("es.logs.base_path") +
+            System.getProperty("opensearch.logs.base_path") +
                 System.getProperty("file.separator") +
-                System.getProperty("es.logs.cluster_name") + ".log";
+                System.getProperty("opensearch.logs.cluster_name") + ".log";
         final List<String> events = Files.readAllLines(PathUtils.get(path));
         assertThat(events.size(), equalTo(2));
         final String location = "org.opensearch.common.logging.LogConfigurator";

View File

@@ -5,7 +5,7 @@ appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]
 appender.file.type = File
 appender.file.name = file
-appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log
+appender.file.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}.log
 appender.file.layout.type = PatternLayout
 appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n
@@ -21,7 +21,7 @@ logger.test.additivity = false
 appender.deprecation_file.type = File
 appender.deprecation_file.name = deprecation_file
-appender.deprecation_file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log
+appender.deprecation_file.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_deprecation.log
 appender.deprecation_file.layout.type = PatternLayout
 appender.deprecation_file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n
 appender.deprecation_file.filter.rate_limit.type = RateLimitingFilter

View File

@ -5,7 +5,7 @@ appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]
appender.file.type = File appender.file.type = File
appender.file.name = file appender.file.name = file
appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.file.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}.log
appender.file.layout.type = PatternLayout appender.file.layout.type = PatternLayout
appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n
@ -15,7 +15,7 @@ rootLogger.appenderRef.file.ref = file
appender.deprecation_file.type = File appender.deprecation_file.type = File
appender.deprecation_file.name = deprecation_file appender.deprecation_file.name = deprecation_file
appender.deprecation_file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log appender.deprecation_file.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_deprecation.log
appender.deprecation_file.layout.type = PatternLayout appender.deprecation_file.layout.type = PatternLayout
appender.deprecation_file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n appender.deprecation_file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n
appender.deprecation_file.filter.rate_limit.type = RateLimitingFilter appender.deprecation_file.filter.rate_limit.type = RateLimitingFilter

View File

@ -5,7 +5,7 @@ appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]
appender.file.type = File appender.file.type = File
appender.file.name = file appender.file.name = file
appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.file.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}.log
appender.file.layout.type = PatternLayout appender.file.layout.type = PatternLayout
appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n

View File

@ -5,7 +5,7 @@ appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n
appender.file.type = File appender.file.type = File
appender.file.name = file appender.file.name = file
appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.file.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}.log
appender.file.layout.type = PatternLayout appender.file.layout.type = PatternLayout
appender.file.layout.pattern = [%p][%l] %marker%m%n appender.file.layout.pattern = [%p][%l] %marker%m%n
@ -15,7 +15,7 @@ rootLogger.appenderRef.file.ref = file
appender.deprecation_file.type = File appender.deprecation_file.type = File
appender.deprecation_file.name = deprecation_file appender.deprecation_file.name = deprecation_file
appender.deprecation_file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log appender.deprecation_file.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_deprecation.log
appender.deprecation_file.layout.type = PatternLayout appender.deprecation_file.layout.type = PatternLayout
appender.deprecation_file.layout.pattern = [%p][%l] %marker%m%n appender.deprecation_file.layout.pattern = [%p][%l] %marker%m%n
appender.deprecation_file.filter.rate_limit.type = RateLimitingFilter appender.deprecation_file.filter.rate_limit.type = RateLimitingFilter

View File

@ -5,7 +5,7 @@ appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]
appender.file.type = File appender.file.type = File
appender.file.name = file appender.file.name = file
appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.file.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}.log
appender.file.layout.type = PatternLayout appender.file.layout.type = PatternLayout
appender.file.layout.pattern = [%test_thread_info]%marker %m%n appender.file.layout.pattern = [%test_thread_info]%marker %m%n

View File

@ -5,7 +5,7 @@ appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]
appender.file.type = File appender.file.type = File
appender.file.name = file appender.file.name = file
appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.file.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}.log
appender.file.layout.type = PatternLayout appender.file.layout.type = PatternLayout
appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n
@ -15,7 +15,7 @@ rootLogger.appenderRef.file.ref = file
appender.deprecation_file.type = File appender.deprecation_file.type = File
appender.deprecation_file.name = deprecation_file appender.deprecation_file.name = deprecation_file
appender.deprecation_file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log appender.deprecation_file.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_deprecation.log
appender.deprecation_file.layout.type = PatternLayout appender.deprecation_file.layout.type = PatternLayout
appender.deprecation_file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n appender.deprecation_file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n
appender.deprecation_file.filter.rate_limit.type = RateLimitingFilter appender.deprecation_file.filter.rate_limit.type = RateLimitingFilter

View File

@ -7,11 +7,11 @@ logger.action.level = debug
appender.rolling.type = RollingFile appender.rolling.type = RollingFile
appender.rolling.name = rolling appender.rolling.name = rolling
appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_server.log appender.rolling.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_server.log
appender.rolling.layout.type = PatternLayout appender.rolling.layout.type = PatternLayout
appender.rolling.layout.pattern =%notEmpty{%node_name} %notEmpty{%node_and_cluster_id} %notEmpty{${sys:es.logs.cluster_name}} %m%n appender.rolling.layout.pattern =%notEmpty{%node_name} %notEmpty{%node_and_cluster_id} %notEmpty{${sys:opensearch.logs.cluster_name}} %m%n
appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}-%i.log.gz appender.rolling.filePattern = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}-%d{yyyy-MM-dd}-%i.log.gz
appender.rolling.policies.type = Policies appender.rolling.policies.type = Policies
appender.rolling.policies.time.type = TimeBasedTriggeringPolicy appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.rolling.policies.time.interval = 1 appender.rolling.policies.time.interval = 1
@ -21,9 +21,9 @@ appender.rolling.policies.size.size = 128MB
appender.rolling.strategy.type = DefaultRolloverStrategy appender.rolling.strategy.type = DefaultRolloverStrategy
appender.rolling.strategy.fileIndex = nomax appender.rolling.strategy.fileIndex = nomax
appender.rolling.strategy.action.type = Delete appender.rolling.strategy.action.type = Delete
appender.rolling.strategy.action.basepath = ${sys:es.logs.base_path} appender.rolling.strategy.action.basepath = ${sys:opensearch.logs.base_path}
appender.rolling.strategy.action.condition.type = IfFileName appender.rolling.strategy.action.condition.type = IfFileName
appender.rolling.strategy.action.condition.glob = ${sys:es.logs.cluster_name}-* appender.rolling.strategy.action.condition.glob = ${sys:opensearch.logs.cluster_name}-*
appender.rolling.strategy.action.condition.nested_condition.type = IfAccumulatedFileSize appender.rolling.strategy.action.condition.nested_condition.type = IfAccumulatedFileSize
appender.rolling.strategy.action.condition.nested_condition.exceeds = 2GB appender.rolling.strategy.action.condition.nested_condition.exceeds = 2GB
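
The ${sys:...} lookups in this configuration are plain system-property substitutions. A small illustration of how the renamed keys expand into the server-log file name (the property values here are examples, not taken from the commit):

    public final class RollingFileNameExample {
        public static void main(String[] args) {
            // Example values; on a real node LogConfigurator sets these properties.
            System.setProperty("opensearch.logs.base_path", "/var/log/opensearch");
            System.setProperty("opensearch.logs.cluster_name", "my-cluster");

            // Equivalent of ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_server.log
            final String fileName = System.getProperty("opensearch.logs.base_path")
                + System.getProperty("file.separator")
                + System.getProperty("opensearch.logs.cluster_name")
                + "_server.log";
            System.out.println(fileName); // /var/log/opensearch/my-cluster_server.log on Linux
        }
    }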

View File

@ -46,7 +46,7 @@ public class ESJsonLayoutTests extends OpenSearchTestCase {
"\"timestamp\": \"%d{yyyy-MM-dd'T'HH:mm:ss,SSSZZ}\", " + "\"timestamp\": \"%d{yyyy-MM-dd'T'HH:mm:ss,SSSZZ}\", " +
"\"level\": \"%p\", " + "\"level\": \"%p\", " +
"\"component\": \"%c{1.}\", " + "\"component\": \"%c{1.}\", " +
"\"cluster.name\": \"${sys:es.logs.cluster_name}\", " + "\"cluster.name\": \"${sys:opensearch.logs.cluster_name}\", " +
"\"node.name\": \"%node_name\", " + "\"node.name\": \"%node_name\", " +
"\"message\": \"%notEmpty{%enc{%marker}{JSON} }%enc{%.-10000m}{JSON}\"" + "\"message\": \"%notEmpty{%enc{%marker}{JSON} }%enc{%.-10000m}{JSON}\"" +
"%notEmpty{, %node_and_cluster_id }" + "%notEmpty{, %node_and_cluster_id }" +
@ -66,7 +66,7 @@ public class ESJsonLayoutTests extends OpenSearchTestCase {
"\"timestamp\": \"%d{yyyy-MM-dd'T'HH:mm:ss,SSSZZ}\", " + "\"timestamp\": \"%d{yyyy-MM-dd'T'HH:mm:ss,SSSZZ}\", " +
"\"level\": \"%p\", " + "\"level\": \"%p\", " +
"\"component\": \"%c{1.}\", " + "\"component\": \"%c{1.}\", " +
"\"cluster.name\": \"${sys:es.logs.cluster_name}\", " + "\"cluster.name\": \"${sys:opensearch.logs.cluster_name}\", " +
"\"node.name\": \"%node_name\", " + "\"node.name\": \"%node_name\", " +
"\"message\": \"%notEmpty{%enc{%marker}{JSON} }%enc{%.-10000m}{JSON}\"" + "\"message\": \"%notEmpty{%enc{%marker}{JSON} }%enc{%.-10000m}{JSON}\"" +
"%notEmpty{, \"x-opaque-id\": \"%ESMessageField{x-opaque-id}\"}" + "%notEmpty{, \"x-opaque-id\": \"%ESMessageField{x-opaque-id}\"}" +
@ -88,7 +88,7 @@ public class ESJsonLayoutTests extends OpenSearchTestCase {
"\"timestamp\": \"%d{yyyy-MM-dd'T'HH:mm:ss,SSSZZ}\", " + "\"timestamp\": \"%d{yyyy-MM-dd'T'HH:mm:ss,SSSZZ}\", " +
"\"level\": \"%p\", " + "\"level\": \"%p\", " +
"\"component\": \"%c{1.}\", " + "\"component\": \"%c{1.}\", " +
"\"cluster.name\": \"${sys:es.logs.cluster_name}\", " + "\"cluster.name\": \"${sys:opensearch.logs.cluster_name}\", " +
"\"node.name\": \"%node_name\"" + "\"node.name\": \"%node_name\"" +
"%notEmpty{, \"message\": \"%ESMessageField{message}\"}" + "%notEmpty{, \"message\": \"%ESMessageField{message}\"}" +
"%notEmpty{, %node_and_cluster_id }" + "%notEmpty{, %node_and_cluster_id }" +

View File

@ -86,8 +86,8 @@ public class JsonLoggerTests extends OpenSearchTestCase {
testLogger.deprecate("someKey", "deprecated message1"); testLogger.deprecate("someKey", "deprecated message1");
final Path path = PathUtils.get( final Path path = PathUtils.get(
System.getProperty("es.logs.base_path"), System.getProperty("opensearch.logs.base_path"),
System.getProperty("es.logs.cluster_name") + "_deprecated.json" System.getProperty("opensearch.logs.cluster_name") + "_deprecated.json"
); );
try (Stream<Map<String, String>> stream = JsonLogsStream.mapStreamFrom(path)) { try (Stream<Map<String, String>> stream = JsonLogsStream.mapStreamFrom(path)) {
@ -122,8 +122,8 @@ public class JsonLoggerTests extends OpenSearchTestCase {
testLogger.info(new DeprecatedMessage("key", null, "deprecated message3")); testLogger.info(new DeprecatedMessage("key", null, "deprecated message3"));
testLogger.info("deprecated message4"); testLogger.info("deprecated message4");
final Path path = PathUtils.get(System.getProperty("es.logs.base_path"), final Path path = PathUtils.get(System.getProperty("opensearch.logs.base_path"),
System.getProperty("es.logs.cluster_name") + "_deprecated.json"); System.getProperty("opensearch.logs.cluster_name") + "_deprecated.json");
try (Stream<Map<String, String>> stream = JsonLogsStream.mapStreamFrom(path)) { try (Stream<Map<String, String>> stream = JsonLogsStream.mapStreamFrom(path)) {
List<Map<String, String>> jsonLogs = stream List<Map<String, String>> jsonLogs = stream
.collect(Collectors.toList()); .collect(Collectors.toList());
@ -283,8 +283,8 @@ public class JsonLoggerTests extends OpenSearchTestCase {
deprecationLogger.deprecate("key", "message2"); deprecationLogger.deprecate("key", "message2");
assertWarnings("message1", "message2"); assertWarnings("message1", "message2");
final Path path = PathUtils.get(System.getProperty("es.logs.base_path"), final Path path = PathUtils.get(System.getProperty("opensearch.logs.base_path"),
System.getProperty("es.logs.cluster_name") + "_deprecated.json"); System.getProperty("opensearch.logs.cluster_name") + "_deprecated.json");
try (Stream<Map<String, String>> stream = JsonLogsStream.mapStreamFrom(path)) { try (Stream<Map<String, String>> stream = JsonLogsStream.mapStreamFrom(path)) {
List<Map<String, String>> jsonLogs = stream List<Map<String, String>> jsonLogs = stream
.collect(Collectors.toList()); .collect(Collectors.toList());
@ -312,8 +312,8 @@ public class JsonLoggerTests extends OpenSearchTestCase {
assertWarnings("message1", "message2"); assertWarnings("message1", "message2");
final Path path = PathUtils.get( final Path path = PathUtils.get(
System.getProperty("es.logs.base_path"), System.getProperty("opensearch.logs.base_path"),
System.getProperty("es.logs.cluster_name") + "_deprecated.json" System.getProperty("opensearch.logs.cluster_name") + "_deprecated.json"
); );
try (Stream<Map<String, String>> stream = JsonLogsStream.mapStreamFrom(path)) { try (Stream<Map<String, String>> stream = JsonLogsStream.mapStreamFrom(path)) {
List<Map<String, String>> jsonLogs = stream.collect(Collectors.toList()); List<Map<String, String>> jsonLogs = stream.collect(Collectors.toList());
@ -352,7 +352,7 @@ public class JsonLoggerTests extends OpenSearchTestCase {
} }
private Path clusterLogsPath() { private Path clusterLogsPath() {
return PathUtils.get(System.getProperty("es.logs.base_path"), System.getProperty("es.logs.cluster_name") + ".log"); return PathUtils.get(System.getProperty("opensearch.logs.base_path"), System.getProperty("opensearch.logs.cluster_name") + ".log");
} }
private void setupLogging(final String config) throws IOException, UserException { private void setupLogging(final String config) throws IOException, UserException {

View File

@ -5,13 +5,13 @@ appender.console.layout.type_name = console
appender.file.type = File appender.file.type = File
appender.file.name = file appender.file.name = file
appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.file.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}.log
appender.file.layout.type = ESJsonLayout appender.file.layout.type = ESJsonLayout
appender.file.layout.type_name = file appender.file.layout.type_name = file
appender.deprecated.type = File appender.deprecated.type = File
appender.deprecated.name = deprecated appender.deprecated.name = deprecated
appender.deprecated.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecated.json appender.deprecated.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_deprecated.json
appender.deprecated.layout.type = ESJsonLayout appender.deprecated.layout.type = ESJsonLayout
appender.deprecated.layout.type_name = deprecation appender.deprecated.layout.type_name = deprecation
appender.deprecated.layout.esmessagefields = x-opaque-id appender.deprecated.layout.esmessagefields = x-opaque-id
@ -26,7 +26,7 @@ appender.deprecatedconsole.filter.rate_limit.type = RateLimitingFilter
appender.index_search_slowlog_rolling.type = File appender.index_search_slowlog_rolling.type = File
appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling
appender.index_search_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs\ appender.index_search_slowlog_rolling.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs\
.cluster_name}_index_search_slowlog.json .cluster_name}_index_search_slowlog.json
appender.index_search_slowlog_rolling.layout.type = ESJsonLayout appender.index_search_slowlog_rolling.layout.type = ESJsonLayout
appender.index_search_slowlog_rolling.layout.type_name = index_search_slowlog appender.index_search_slowlog_rolling.layout.type_name = index_search_slowlog

View File

@ -3,5 +3,5 @@ appender.console.name = console
appender.console.layout.type = PatternLayout appender.console.layout.type = PatternLayout
appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%test_thread_info]%marker %m%n appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%test_thread_info]%marker %m%n
rootLogger.level = ${sys:tests.es.logger.level:-info} rootLogger.level = ${sys:tests.opensearch.logger.level:-info}
rootLogger.appenderRef.console.ref = console rootLogger.appenderRef.console.ref = console

View File

@ -32,5 +32,5 @@ integTest {
* We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each * We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each
* other if we allow them to set the number of available processors as it's set-once in Netty. * other if we allow them to set the number of available processors as it's set-once in Netty.
*/ */
systemProperty 'es.set.netty.runtime.available.processors', 'false' systemProperty 'opensearch.set.netty.runtime.available.processors', 'false'
} }

View File

@ -35,7 +35,7 @@ import java.util.Map;
* first, to be efficient at finding the most recent documents too. * first, to be efficient at finding the most recent documents too.
*/ */
public class ShuffleForcedMergePolicy extends FilterMergePolicy { public class ShuffleForcedMergePolicy extends FilterMergePolicy {
private static final String SHUFFLE_MERGE_KEY = "es.shuffle_merge"; private static final String SHUFFLE_MERGE_KEY = "opensearch.shuffle_merge";
public ShuffleForcedMergePolicy(MergePolicy in) { public ShuffleForcedMergePolicy(MergePolicy in) {
super(in); super(in);

View File

@ -93,7 +93,7 @@ public class Build {
final String version; final String version;
// these are parsed at startup, and we require that we are able to recognize the values passed in by the startup scripts // these are parsed at startup, and we require that we are able to recognize the values passed in by the startup scripts
type = Type.fromDisplayName(System.getProperty("es.distribution.type", "unknown"), true); type = Type.fromDisplayName(System.getProperty("opensearch.distribution.type", "unknown"), true);
final String opensearchPrefix = "opensearch-" + Version.CURRENT; final String opensearchPrefix = "opensearch-" + Version.CURRENT;
final URL url = getOpenSearchCodeSourceLocation(); final URL url = getOpenSearchCodeSourceLocation();
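
For context, this is roughly how a launcher-provided distribution type can be read and defaulted. The enum and validation below are an illustrative sketch, not the Build.Type implementation, and assume the startup scripts pass -Dopensearch.distribution.type=<type>:

    import java.util.Locale;

    public final class DistributionTypeExample {
        enum Type { DEB, DOCKER, RPM, TAR, ZIP, UNKNOWN }

        static Type fromDisplayName(String displayName) {
            try {
                return Type.valueOf(displayName.toUpperCase(Locale.ROOT));
            } catch (IllegalArgumentException e) {
                throw new IllegalArgumentException("unexpected distribution type [" + displayName + "]");
            }
        }

        public static void main(String[] args) {
            final Type type = fromDisplayName(System.getProperty("opensearch.distribution.type", "unknown"));
            System.out.println(type); // UNKNOWN unless the property is set
        }
    }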

View File

@ -78,11 +78,11 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
public static final String REST_EXCEPTION_SKIP_STACK_TRACE = "rest.exception.stacktrace.skip"; public static final String REST_EXCEPTION_SKIP_STACK_TRACE = "rest.exception.stacktrace.skip";
public static final boolean REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT = true; public static final boolean REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT = true;
private static final boolean REST_EXCEPTION_SKIP_CAUSE_DEFAULT = false; private static final boolean REST_EXCEPTION_SKIP_CAUSE_DEFAULT = false;
private static final String INDEX_METADATA_KEY = "es.index"; private static final String INDEX_METADATA_KEY = "opensearch.index";
private static final String INDEX_METADATA_KEY_UUID = "es.index_uuid"; private static final String INDEX_METADATA_KEY_UUID = "opensearch.index_uuid";
private static final String SHARD_METADATA_KEY = "es.shard"; private static final String SHARD_METADATA_KEY = "opensearch.shard";
private static final String RESOURCE_METADATA_TYPE_KEY = "es.resource.type"; private static final String RESOURCE_METADATA_TYPE_KEY = "opensearch.resource.type";
private static final String RESOURCE_METADATA_ID_KEY = "es.resource.id"; private static final String RESOURCE_METADATA_ID_KEY = "opensearch.resource.id";
private static final String TYPE = "type"; private static final String TYPE = "type";
private static final String REASON = "reason"; private static final String REASON = "reason";
@ -153,9 +153,9 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
* If the provided key is already present, the corresponding metadata will be replaced * If the provided key is already present, the corresponding metadata will be replaced
*/ */
public void addMetadata(String key, List<String> values) { public void addMetadata(String key, List<String> values) {
//we need to enforce this otherwise bw comp doesn't work properly, as "es." was the previous criteria to split headers in two sets //we need to enforce this otherwise bw comp doesn't work properly, as "opensearch." was the previous criteria to split headers in two sets
if (key.startsWith("es.") == false) { if (key.startsWith("opensearch.") == false) {
throw new IllegalArgumentException("exception metadata must start with [es.], found [" + key + "] instead"); throw new IllegalArgumentException("exception metadata must start with [opensearch.], found [" + key + "] instead");
} }
this.metadata.put(key, values); this.metadata.put(key, values);
} }
@ -184,9 +184,9 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
* This method will replace existing header if a header with the same key already exists * This method will replace existing header if a header with the same key already exists
*/ */
public void addHeader(String key, List<String> value) { public void addHeader(String key, List<String> value) {
//we need to enforce this otherwise bw comp doesn't work properly, as "es." was the previous criteria to split headers in two sets //we need to enforce this otherwise bw comp doesn't work properly, as "opensearch." was the previous criteria to split headers in two sets
if (key.startsWith("es.")) { if (key.startsWith("opensearch.")) {
throw new IllegalArgumentException("exception headers must not start with [es.], found [" + key + "] instead"); throw new IllegalArgumentException("exception headers must not start with [opensearch.], found [" + key + "] instead");
} }
this.headers.put(key, value); this.headers.put(key, value);
} }
@ -333,7 +333,7 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
builder.field(REASON, message); builder.field(REASON, message);
for (Map.Entry<String, List<String>> entry : metadata.entrySet()) { for (Map.Entry<String, List<String>> entry : metadata.entrySet()) {
headerToXContent(builder, entry.getKey().substring("es.".length()), entry.getValue()); headerToXContent(builder, entry.getKey().substring("opensearch.".length()), entry.getValue());
} }
if (throwable instanceof OpenSearchException) { if (throwable instanceof OpenSearchException) {
@ -497,11 +497,11 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
for (Map.Entry<String, List<String>> entry : metadata.entrySet()) { for (Map.Entry<String, List<String>> entry : metadata.entrySet()) {
//subclasses can print out additional metadata through the metadataToXContent method. Simple key-value pairs will be //subclasses can print out additional metadata through the metadataToXContent method. Simple key-value pairs will be
//parsed back and become part of this metadata set, while objects and arrays are not supported when parsing back. //parsed back and become part of this metadata set, while objects and arrays are not supported when parsing back.
//Those key-value pairs become part of the metadata set and inherit the "es." prefix as that is currently required //Those key-value pairs become part of the metadata set and inherit the "opensearch." prefix as that is currently required
//by addMetadata. The prefix will get stripped out when printing metadata out so it will be effectively invisible. //by addMetadata. The prefix will get stripped out when printing metadata out so it will be effectively invisible.
//TODO move subclasses that print out simple metadata to using addMetadata directly and support also numbers and booleans. //TODO move subclasses that print out simple metadata to using addMetadata directly and support also numbers and booleans.
//TODO rename metadataToXContent and have only SearchPhaseExecutionException use it, which prints out complex objects //TODO rename metadataToXContent and have only SearchPhaseExecutionException use it, which prints out complex objects
e.addMetadata("es." + entry.getKey(), entry.getValue()); e.addMetadata("opensearch." + entry.getKey(), entry.getValue());
} }
for (Map.Entry<String, List<String>> header : headers.entrySet()) { for (Map.Entry<String, List<String>> header : headers.entrySet()) {
e.addHeader(header.getKey(), header.getValue()); e.addHeader(header.getKey(), header.getValue());
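
The two guards above encode one invariant: metadata keys must carry the renamed prefix, header keys must not. A standalone sketch of that rule (the class below is illustrative, not the OpenSearchException implementation):

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public final class PrefixRuleExample {
        private final Map<String, List<String>> metadata = new HashMap<>();
        private final Map<String, List<String>> headers = new HashMap<>();

        void addMetadata(String key, String... values) {
            if (key.startsWith("opensearch.") == false) {
                throw new IllegalArgumentException(
                    "exception metadata must start with [opensearch.], found [" + key + "] instead");
            }
            metadata.put(key, List.of(values));
        }

        void addHeader(String key, String... values) {
            if (key.startsWith("opensearch.")) {
                throw new IllegalArgumentException(
                    "exception headers must not start with [opensearch.], found [" + key + "] instead");
            }
            headers.put(key, List.of(values));
        }

        public static void main(String[] args) {
            PrefixRuleExample e = new PrefixRuleExample();
            e.addMetadata("opensearch.index", "my-index"); // accepted
            e.addHeader("x-opaque-id", "abc123");          // accepted
            // e.addMetadata("index", "my-index");         // would throw
        }
    }

Note that when the exception is rendered, the prefix is stripped again (the headerToXContent call earlier in this file's hunks), so clients see e.g. "index" rather than "opensearch.index".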

View File

@ -50,9 +50,9 @@ public class TransportClusterStateAction extends TransportMasterNodeReadAction<C
private final Logger logger = LogManager.getLogger(getClass()); private final Logger logger = LogManager.getLogger(getClass());
static { static {
final String property = System.getProperty("es.cluster_state.size"); final String property = System.getProperty("opensearch.cluster_state.size");
if (property != null) { if (property != null) {
throw new IllegalArgumentException("es.cluster_state.size is no longer respected but was [" + property + "]"); throw new IllegalArgumentException("opensearch.cluster_state.size is no longer respected but was [" + property + "]");
} }
} }

View File

@ -57,7 +57,7 @@ import static org.opensearch.discovery.SettingsBasedSeedHostsProvider.DISCOVERY_
/** /**
* We enforce bootstrap checks once a node has the transport protocol bound to a non-loopback interface or if the system property {@code * We enforce bootstrap checks once a node has the transport protocol bound to a non-loopback interface or if the system property {@code
* es.enforce.bootstrap.checks} is set to {@code true}. In this case we assume the node is running in production and all bootstrap checks must * opensearch.enforce.bootstrap.checks} is set to {@code true}. In this case we assume the node is running in production and all bootstrap checks must
* pass. * pass.
*/ */
final class BootstrapChecks { final class BootstrapChecks {
@ -65,11 +65,11 @@ final class BootstrapChecks {
private BootstrapChecks() { private BootstrapChecks() {
} }
static final String ES_ENFORCE_BOOTSTRAP_CHECKS = "es.enforce.bootstrap.checks"; static final String OPENSEARCH_ENFORCE_BOOTSTRAP_CHECKS = "opensearch.enforce.bootstrap.checks";
/** /**
* Executes the bootstrap checks if the node has the transport protocol bound to a non-loopback interface. If the system property * Executes the bootstrap checks if the node has the transport protocol bound to a non-loopback interface. If the system property
* {@code es.enforce.bootstrap.checks} is set to {@code true} then the bootstrap checks will be enforced regardless of whether or not * {@code opensearch.enforce.bootstrap.checks} is set to {@code true} then the bootstrap checks will be enforced regardless of whether or not
* the transport protocol is bound to a non-loopback interface. * the transport protocol is bound to a non-loopback interface.
* *
* @param context the current node bootstrap context * @param context the current node bootstrap context
@ -87,7 +87,7 @@ final class BootstrapChecks {
/** /**
* Executes the provided checks and fails the node if {@code enforceLimits} is {@code true}, otherwise logs warnings. If the system * Executes the provided checks and fails the node if {@code enforceLimits} is {@code true}, otherwise logs warnings. If the system
* property {@code es.enforce.bootstrap.checks} is set to {@code true} then the bootstrap checks will be enforced regardless of whether * property {@code opensearch.enforce.bootstrap.checks} is set to {@code true} then the bootstrap checks will be enforced regardless of whether
* or not the transport protocol is bound to a non-loopback interface. * or not the transport protocol is bound to a non-loopback interface.
* *
* @param context the current node bootstrap context * @param context the current node bootstrap context
@ -103,7 +103,7 @@ final class BootstrapChecks {
/** /**
* Executes the provided checks and fails the node if {@code enforceLimits} is {@code true}, otherwise logs warnings. If the system * Executes the provided checks and fails the node if {@code enforceLimits} is {@code true}, otherwise logs warnings. If the system
* property {@code es.enforce.bootstrap.checks} is set to {@code true} then the bootstrap checks will be enforced regardless of whether * property {@code opensearch.enforce.bootstrap.checks} is set to {@code true} then the bootstrap checks will be enforced regardless of whether
* or not the transport protocol is bound to a non-loopback interface. * or not the transport protocol is bound to a non-loopback interface.
* *
* @param context the current node bootstrap context * @param context the current node bootstrap context
@ -119,7 +119,7 @@ final class BootstrapChecks {
final List<String> errors = new ArrayList<>(); final List<String> errors = new ArrayList<>();
final List<String> ignoredErrors = new ArrayList<>(); final List<String> ignoredErrors = new ArrayList<>();
final String esEnforceBootstrapChecks = System.getProperty(ES_ENFORCE_BOOTSTRAP_CHECKS); final String esEnforceBootstrapChecks = System.getProperty(OPENSEARCH_ENFORCE_BOOTSTRAP_CHECKS);
final boolean enforceBootstrapChecks; final boolean enforceBootstrapChecks;
if (esEnforceBootstrapChecks == null) { if (esEnforceBootstrapChecks == null) {
enforceBootstrapChecks = false; enforceBootstrapChecks = false;
@ -130,7 +130,7 @@ final class BootstrapChecks {
String.format( String.format(
Locale.ROOT, Locale.ROOT,
"[%s] must be [true] but was [%s]", "[%s] must be [true] but was [%s]",
ES_ENFORCE_BOOTSTRAP_CHECKS, OPENSEARCH_ENFORCE_BOOTSTRAP_CHECKS,
esEnforceBootstrapChecks); esEnforceBootstrapChecks);
throw new IllegalArgumentException(message); throw new IllegalArgumentException(message);
} }
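
The enforcement flag above is deliberately strict: unset falls back to the non-loopback-binding heuristic, "true" forces the checks, and anything else is rejected. A minimal sketch of just that property handling (class and method names are hypothetical):

    public final class EnforceBootstrapChecksExample {
        static boolean parseEnforce(String value) {
            if (value == null) {
                return false; // defer to the "bound to a non-loopback interface" heuristic
            }
            if ("true".equals(value)) {
                return true;  // always enforce the bootstrap checks
            }
            throw new IllegalArgumentException(
                "[opensearch.enforce.bootstrap.checks] must be [true] but was [" + value + "]");
        }

        public static void main(String[] args) {
            System.out.println(parseEnforce(System.getProperty("opensearch.enforce.bootstrap.checks")));
        }
    }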

View File

@ -111,13 +111,13 @@ final class StartupException extends RuntimeException {
// if its a guice exception, the whole thing really will not be in the log, its megabytes. // if its a guice exception, the whole thing really will not be in the log, its megabytes.
// refer to the hack in bootstrap, where we don't log it // refer to the hack in bootstrap, where we don't log it
if (originalCause instanceof CreationException == false) { if (originalCause instanceof CreationException == false) {
final String basePath = System.getProperty("es.logs.base_path"); final String basePath = System.getProperty("opensearch.logs.base_path");
// It's possible to fail before logging has been configured, in which case there's no point // It's possible to fail before logging has been configured, in which case there's no point
// suggesting that the user look in the log file. // suggesting that the user look in the log file.
if (basePath != null) { if (basePath != null) {
final String logPath = System.getProperty("es.logs.base_path") final String logPath = System.getProperty("opensearch.logs.base_path")
+ System.getProperty("file.separator") + System.getProperty("file.separator")
+ System.getProperty("es.logs.cluster_name") + System.getProperty("opensearch.logs.cluster_name")
+ ".log"; + ".log";
consumer.accept("For complete error details, refer to the log at " + logPath); consumer.accept("For complete error details, refer to the log at " + logPath);

View File

@ -24,18 +24,18 @@ import org.opensearch.common.logging.LogConfigurator;
import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.Settings;
/** /**
* Holder class for method to configure logging without Elasticsearch configuration files for use in CLI tools that will not read such * Holder class for method to configure logging without OpenSearch configuration files for use in CLI tools that will not read such
* files. * files.
*/ */
public final class CommandLoggingConfigurator { public final class CommandLoggingConfigurator {
/** /**
* Configures logging without Elasticsearch configuration files based on the system property "es.logger.level" only. As such, any * Configures logging without OpenSearch configuration files based on the system property "opensearch.logger.level" only. As such, any
* logging will be written to the console. * logging will be written to the console.
*/ */
public static void configureLoggingWithoutConfig() { public static void configureLoggingWithoutConfig() {
// initialize default for es.logger.level because we will not read the log4j2.properties // initialize default for es.logger.level because we will not read the log4j2.properties
final String loggerLevel = System.getProperty("es.logger.level", Level.INFO.name()); final String loggerLevel = System.getProperty("opensearch.logger.level", Level.INFO.name());
final Settings settings = Settings.builder().put("logger.level", loggerLevel).build(); final Settings settings = Settings.builder().put("logger.level", loggerLevel).build();
LogConfigurator.configureWithoutConfig(settings); LogConfigurator.configureWithoutConfig(settings);
} }
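
A minimal sketch of the same defaulting behaviour for a CLI tool (standalone here; the real code defaults to Level.INFO.name() from the Log4j API):

    public final class CliLoggerLevelExample {
        public static void main(String[] args) {
            // Without a log4j2.properties file, fall back to the renamed property, defaulting to INFO.
            final String loggerLevel = System.getProperty("opensearch.logger.level", "INFO");
            System.out.println("logger.level=" + loggerLevel);
        }
    }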

View File

@ -79,9 +79,9 @@ public abstract class EnvironmentAwareCommand extends Command {
settings.put(kvp.key, kvp.value); settings.put(kvp.key, kvp.value);
} }
putSystemPropertyIfSettingIsMissing(settings, "path.data", "es.path.data"); putSystemPropertyIfSettingIsMissing(settings, "path.data", "opensearch.path.data");
putSystemPropertyIfSettingIsMissing(settings, "path.home", "es.path.home"); putSystemPropertyIfSettingIsMissing(settings, "path.home", "opensearch.path.home");
putSystemPropertyIfSettingIsMissing(settings, "path.logs", "es.path.logs"); putSystemPropertyIfSettingIsMissing(settings, "path.logs", "opensearch.path.logs");
execute(terminal, options, createEnv(settings)); execute(terminal, options, createEnv(settings));
} }
@ -93,9 +93,9 @@ public abstract class EnvironmentAwareCommand extends Command {
/** Create an {@link Environment} for the command to use. Overrideable for tests. */ /** Create an {@link Environment} for the command to use. Overrideable for tests. */
protected final Environment createEnv(final Settings baseSettings, final Map<String, String> settings) throws UserException { protected final Environment createEnv(final Settings baseSettings, final Map<String, String> settings) throws UserException {
final String esPathConf = System.getProperty("es.path.conf"); final String esPathConf = System.getProperty("opensearch.path.conf");
if (esPathConf == null) { if (esPathConf == null) {
throw new UserException(ExitCodes.CONFIG, "the system property [es.path.conf] must be set"); throw new UserException(ExitCodes.CONFIG, "the system property [opensearch.path.conf] must be set");
} }
return InternalSettingsPreparer.prepareEnvironment(baseSettings, settings, return InternalSettingsPreparer.prepareEnvironment(baseSettings, settings,
getConfigPath(esPathConf), getConfigPath(esPathConf),
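
The three calls above copy launcher-provided system properties into the settings only when the corresponding setting is missing. A simplified, map-based sketch of that helper (the real command works against its own settings map and may reject conflicting values; here the explicit setting simply wins):

    import java.util.HashMap;
    import java.util.Map;

    public final class PathPropertyExample {
        // Copies a system property into the settings unless the setting is already present.
        static void putSystemPropertyIfSettingIsMissing(Map<String, String> settings, String setting, String property) {
            final String value = System.getProperty(property);
            if (value != null) {
                settings.putIfAbsent(setting, value);
            }
        }

        public static void main(String[] args) {
            final Map<String, String> settings = new HashMap<>();
            // e.g. the launcher passed -Dopensearch.path.data=/data/opensearch
            putSystemPropertyIfSettingIsMissing(settings, "path.data", "opensearch.path.data");
            putSystemPropertyIfSettingIsMissing(settings, "path.home", "opensearch.path.home");
            putSystemPropertyIfSettingIsMissing(settings, "path.logs", "opensearch.path.logs");
            System.out.println(settings);
        }
    }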

View File

@ -138,9 +138,9 @@ public class IndexMetadata implements Diffable<IndexMetadata>, ToXContentFragmen
* if a cluster should allow to create more than 1024 shards per index. NOTE: this does not limit the number of shards * if a cluster should allow to create more than 1024 shards per index. NOTE: this does not limit the number of shards
* per cluster. this also prevents creating stuff like a new index with millions of shards by accident which essentially * per cluster. this also prevents creating stuff like a new index with millions of shards by accident which essentially
* kills the entire cluster with OOM on the spot.*/ * kills the entire cluster with OOM on the spot.*/
final int maxNumShards = Integer.parseInt(System.getProperty("es.index.max_number_of_shards", "1024")); final int maxNumShards = Integer.parseInt(System.getProperty("opensearch.index.max_number_of_shards", "1024"));
if (maxNumShards < 1) { if (maxNumShards < 1) {
throw new IllegalArgumentException("es.index.max_number_of_shards must be > 0"); throw new IllegalArgumentException("opensearch.index.max_number_of_shards must be > 0");
} }
return Setting.intSetting(SETTING_NUMBER_OF_SHARDS, 1, 1, maxNumShards, Property.IndexScope, Property.Final); return Setting.intSetting(SETTING_NUMBER_OF_SHARDS, 1, 1, maxNumShards, Property.IndexScope, Property.Final);
} }
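
A minimal sketch of the bounded parse above, isolated from the settings machinery (class and method names are hypothetical):

    public final class MaxShardsExample {
        static int maxNumberOfShards() {
            final int maxNumShards = Integer.parseInt(
                System.getProperty("opensearch.index.max_number_of_shards", "1024"));
            if (maxNumShards < 1) {
                throw new IllegalArgumentException("opensearch.index.max_number_of_shards must be > 0");
            }
            return maxNumShards;
        }

        public static void main(String[] args) {
            // 1024 unless overridden with -Dopensearch.index.max_number_of_shards=<n>
            System.out.println(maxNumberOfShards());
        }
    }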

View File

@ -65,7 +65,7 @@ import java.util.stream.StreamSupport;
public class IndexNameExpressionResolver { public class IndexNameExpressionResolver {
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(IndexNameExpressionResolver.class); private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(IndexNameExpressionResolver.class);
public static final String EXCLUDED_DATA_STREAMS_KEY = "es.excluded_ds"; public static final String EXCLUDED_DATA_STREAMS_KEY = "opensearch.excluded_ds";
public static final String SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY = "_system_index_access_allowed"; public static final String SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY = "_system_index_access_allowed";
public static final Version SYSTEM_INDEX_ENFORCEMENT_VERSION = Version.V_7_10_0; public static final Version SYSTEM_INDEX_ENFORCEMENT_VERSION = Version.V_7_10_0;

View File

@ -52,7 +52,7 @@ public class OperationRouting {
Setting.Property.Dynamic, Setting.Property.NodeScope); Setting.Property.Dynamic, Setting.Property.NodeScope);
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(OperationRouting.class); private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(OperationRouting.class);
private static final String IGNORE_AWARENESS_ATTRIBUTES_PROPERTY = "es.search.ignore_awareness_attributes"; private static final String IGNORE_AWARENESS_ATTRIBUTES_PROPERTY = "opensearch.search.ignore_awareness_attributes";
static final String IGNORE_AWARENESS_ATTRIBUTES_DEPRECATION_MESSAGE = static final String IGNORE_AWARENESS_ATTRIBUTES_DEPRECATION_MESSAGE =
"searches will not be routed based on awareness attributes starting in version 8.0.0; " + "searches will not be routed based on awareness attributes starting in version 8.0.0; " +
"to opt into this behaviour now please set the system property [" + IGNORE_AWARENESS_ATTRIBUTES_PROPERTY + "] to [true]"; "to opt into this behaviour now please set the system property [" + IGNORE_AWARENESS_ATTRIBUTES_PROPERTY + "] to [true]";

View File

@ -75,7 +75,7 @@ public class DiskThresholdSettings {
private volatile Double freeDiskThresholdFloodStage; private volatile Double freeDiskThresholdFloodStage;
private volatile ByteSizeValue freeBytesThresholdFloodStage; private volatile ByteSizeValue freeBytesThresholdFloodStage;
private static final boolean autoReleaseIndexEnabled; private static final boolean autoReleaseIndexEnabled;
public static final String AUTO_RELEASE_INDEX_ENABLED_KEY = "es.disk.auto_release_flood_stage_block"; public static final String AUTO_RELEASE_INDEX_ENABLED_KEY = "opensearch.disk.auto_release_flood_stage_block";
static { static {
final String property = System.getProperty(AUTO_RELEASE_INDEX_ENABLED_KEY); final String property = System.getProperty(AUTO_RELEASE_INDEX_ENABLED_KEY);

View File

@ -28,7 +28,7 @@ import java.util.Arrays;
public class DiskIoBufferPool { public class DiskIoBufferPool {
public static final int BUFFER_SIZE = StrictMath.toIntExact(ByteSizeValue.parseBytesSizeValue( public static final int BUFFER_SIZE = StrictMath.toIntExact(ByteSizeValue.parseBytesSizeValue(
System.getProperty("es.disk_io.direct.buffer.size", "64KB"), "es.disk_io.direct.buffer.size").getBytes()); System.getProperty("opensearch.disk_io.direct.buffer.size", "64KB"), "opensearch.disk_io.direct.buffer.size").getBytes());
public static final int HEAP_BUFFER_SIZE = 8 * 1024; public static final int HEAP_BUFFER_SIZE = 8 * 1024;
private static final ThreadLocal<ByteBuffer> ioBufferPool = ThreadLocal.withInitial(() -> { private static final ThreadLocal<ByteBuffer> ioBufferPool = ThreadLocal.withInitial(() -> {
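
The default above is a human-readable size string. A hedged sketch of turning such a value into a direct buffer size; the toy parser handles only B/KB/MB suffixes and merely stands in for OpenSearch's ByteSizeValue parsing:

    import java.nio.ByteBuffer;
    import java.util.Locale;

    public final class DirectBufferSizeExample {
        // Toy size parser for illustration only.
        static int parseBytes(String value) {
            final String v = value.trim().toUpperCase(Locale.ROOT);
            if (v.endsWith("KB")) return Integer.parseInt(v.substring(0, v.length() - 2).trim()) * 1024;
            if (v.endsWith("MB")) return Integer.parseInt(v.substring(0, v.length() - 2).trim()) * 1024 * 1024;
            if (v.endsWith("B"))  return Integer.parseInt(v.substring(0, v.length() - 1).trim());
            return Integer.parseInt(v);
        }

        public static void main(String[] args) {
            final int size = parseBytes(System.getProperty("opensearch.disk_io.direct.buffer.size", "64KB"));
            final ByteBuffer buffer = ByteBuffer.allocateDirect(size);
            System.out.println("direct IO buffer capacity: " + buffer.capacity()); // 65536 by default
        }
    }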

View File

@ -50,8 +50,8 @@ import java.util.stream.Stream;
* <li>timestamp - ISO8601 with additional timezone ID</li> * <li>timestamp - ISO8601 with additional timezone ID</li>
* <li>level - INFO, WARN etc</li> * <li>level - INFO, WARN etc</li>
* <li>component - logger name, most of the times class name</li> * <li>component - logger name, most of the times class name</li>
* <li>cluster.name - taken from sys:es.logs.cluster_name system property because it is always set</li> * <li>cluster.name - taken from sys:opensearch.logs.cluster_name system property because it is always set</li>
* <li>node.name - taken from NodeNamePatternConverter, as it can be set in runtime as hostname when not set in elasticsearch.yml</li> * <li>node.name - taken from NodeNamePatternConverter, as it can be set in runtime as hostname when not set in opensearch.yml</li>
* <li>node_and_cluster_id - in json as node.id and cluster.uuid - taken from NodeAndClusterIdConverter and present * <li>node_and_cluster_id - in json as node.id and cluster.uuid - taken from NodeAndClusterIdConverter and present
* once clusterStateUpdate is first received</li> * once clusterStateUpdate is first received</li>
* <li>message - a json escaped message. Multiline messages will be converted to single line with new line explicitly * <li>message - a json escaped message. Multiline messages will be converted to single line with new line explicitly
@ -91,7 +91,7 @@ public class ESJsonLayout extends AbstractStringLayout {
map.put("timestamp", inQuotes("%d{yyyy-MM-dd'T'HH:mm:ss,SSSZZ}")); map.put("timestamp", inQuotes("%d{yyyy-MM-dd'T'HH:mm:ss,SSSZZ}"));
map.put("level", inQuotes("%p")); map.put("level", inQuotes("%p"));
map.put("component", inQuotes("%c{1.}")); map.put("component", inQuotes("%c{1.}"));
map.put("cluster.name", inQuotes("${sys:es.logs.cluster_name}")); map.put("cluster.name", inQuotes("${sys:opensearch.logs.cluster_name}"));
map.put("node.name", inQuotes("%node_name")); map.put("node.name", inQuotes("%node_name"));
map.put("message", inQuotes("%notEmpty{%enc{%marker}{JSON} }%enc{%.-10000m}{JSON}")); map.put("message", inQuotes("%notEmpty{%enc{%marker}{JSON} }%enc{%.-10000m}{JSON}"));

View File

@ -282,13 +282,13 @@ public class LogConfigurator {
* properties here: * properties here:
* <ul> * <ul>
* <li> * <li>
* {@code es.logs.base_path} the base path containing the log files * {@code opensearch.logs.base_path} the base path containing the log files
* </li> * </li>
* <li> * <li>
* {@code es.logs.cluster_name} the cluster name, used as the prefix of log filenames in the default configuration * {@code opensearch.logs.cluster_name} the cluster name, used as the prefix of log filenames in the default configuration
* </li> * </li>
* <li> * <li>
* {@code es.logs.node_name} the node name, can be used as part of log filenames * {@code opensearch.logs.node_name} the node name, can be used as part of log filenames
* </li> * </li>
* </ul> * </ul>
* *
@ -297,9 +297,9 @@ public class LogConfigurator {
*/ */
@SuppressForbidden(reason = "sets system property for logging configuration") @SuppressForbidden(reason = "sets system property for logging configuration")
private static void setLogConfigurationSystemProperty(final Path logsPath, final Settings settings) { private static void setLogConfigurationSystemProperty(final Path logsPath, final Settings settings) {
System.setProperty("es.logs.base_path", logsPath.toString()); System.setProperty("opensearch.logs.base_path", logsPath.toString());
System.setProperty("es.logs.cluster_name", ClusterName.CLUSTER_NAME_SETTING.get(settings).value()); System.setProperty("opensearch.logs.cluster_name", ClusterName.CLUSTER_NAME_SETTING.get(settings).value());
System.setProperty("es.logs.node_name", Node.NODE_NAME_SETTING.get(settings)); System.setProperty("opensearch.logs.node_name", Node.NODE_NAME_SETTING.get(settings));
} }
} }

View File

@ -35,7 +35,7 @@ import java.util.Set;
public abstract class SecureSetting<T> extends Setting<T> { public abstract class SecureSetting<T> extends Setting<T> {
/** Determines whether legacy settings with sensitive values should be allowed. */ /** Determines whether legacy settings with sensitive values should be allowed. */
private static final boolean ALLOW_INSECURE_SETTINGS = Booleans.parseBoolean(System.getProperty("es.allow_insecure_settings", "false")); private static final boolean ALLOW_INSECURE_SETTINGS = Booleans.parseBoolean(System.getProperty("opensearch.allow_insecure_settings", "false"));
private static final Set<Property> ALLOWED_PROPERTIES = EnumSet.of(Property.Deprecated, Property.Consistent); private static final Set<Property> ALLOWED_PROPERTIES = EnumSet.of(Property.Deprecated, Property.Consistent);

View File

@ -36,7 +36,7 @@ public class HttpInfo implements ReportingService.Info {
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(HttpInfo.class); private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(HttpInfo.class);
/** Deprecated property, just here for deprecation logging in 7.x. */ /** Deprecated property, just here for deprecation logging in 7.x. */
private static final boolean CNAME_IN_PUBLISH_HOST = System.getProperty("es.http.cname_in_publish_address") != null; private static final boolean CNAME_IN_PUBLISH_HOST = System.getProperty("opensearch.http.cname_in_publish_address") != null;
private final BoundTransportAddress address; private final BoundTransportAddress address;
private final long maxContentLength; private final long maxContentLength;
@ -74,7 +74,7 @@ public class HttpInfo implements ReportingService.Info {
if (CNAME_IN_PUBLISH_HOST) { if (CNAME_IN_PUBLISH_HOST) {
deprecationLogger.deprecate( deprecationLogger.deprecate(
"cname_in_publish_address", "cname_in_publish_address",
"es.http.cname_in_publish_address system property is deprecated and no longer affects http.publish_address " + "opensearch.http.cname_in_publish_address system property is deprecated and no longer affects http.publish_address " +
"formatting. Remove this property to get rid of this deprecation warning." "formatting. Remove this property to get rid of this deprecation warning."
); );
} }

View File

@ -160,7 +160,7 @@ public final class EngineConfig {
// Add an escape hatch in case this change proves problematic - it used // Add an escape hatch in case this change proves problematic - it used
// to be a fixed amount of RAM: 256 MB. // to be a fixed amount of RAM: 256 MB.
// TODO: Remove this escape hatch in 8.x // TODO: Remove this escape hatch in 8.x
final String escapeHatchProperty = "es.index.memory.max_index_buffer_size"; final String escapeHatchProperty = "opensearch.index.memory.max_index_buffer_size";
String maxBufferSize = System.getProperty(escapeHatchProperty); String maxBufferSize = System.getProperty(escapeHatchProperty);
if (maxBufferSize != null) { if (maxBufferSize != null) {
indexingBufferSize = MemorySizeValue.parseBytesSizeValueOrHeapRatio(maxBufferSize, escapeHatchProperty); indexingBufferSize = MemorySizeValue.parseBytesSizeValueOrHeapRatio(maxBufferSize, escapeHatchProperty);

View File

@ -2312,7 +2312,7 @@ public class InternalEngine extends Engine {
new SoftDeletesRetentionMergePolicy(Lucene.SOFT_DELETES_FIELD, softDeletesPolicy::getRetentionQuery, new SoftDeletesRetentionMergePolicy(Lucene.SOFT_DELETES_FIELD, softDeletesPolicy::getRetentionQuery,
new PrunePostingsMergePolicy(mergePolicy, IdFieldMapper.NAME))); new PrunePostingsMergePolicy(mergePolicy, IdFieldMapper.NAME)));
} }
boolean shuffleForcedMerge = Booleans.parseBoolean(System.getProperty("es.shuffle_forced_merge", Boolean.TRUE.toString())); boolean shuffleForcedMerge = Booleans.parseBoolean(System.getProperty("opensearch.shuffle_forced_merge", Boolean.TRUE.toString()));
if (shuffleForcedMerge) { if (shuffleForcedMerge) {
// We wrap the merge policy for all indices even though it is mostly useful for time-based indices // We wrap the merge policy for all indices even though it is mostly useful for time-based indices
// but there should be no overhead for other type of indices so it's simpler than adding a setting // but there should be no overhead for other type of indices so it's simpler than adding a setting

View File

@ -34,16 +34,16 @@ import org.apache.lucene.search.similarities.Similarity;
public final class NonNegativeScoresSimilarity extends Similarity { public final class NonNegativeScoresSimilarity extends Similarity {
// Escape hatch // Escape hatch
private static final String ES_ENFORCE_POSITIVE_SCORES = "es.enforce.positive.scores"; private static final String OPENSEARCH_ENFORCE_POSITIVE_SCORES = "opensearch.enforce.positive.scores";
private static final boolean ENFORCE_POSITIVE_SCORES; private static final boolean ENFORCE_POSITIVE_SCORES;
static { static {
String enforcePositiveScores = System.getProperty(ES_ENFORCE_POSITIVE_SCORES); String enforcePositiveScores = System.getProperty(OPENSEARCH_ENFORCE_POSITIVE_SCORES);
if (enforcePositiveScores == null) { if (enforcePositiveScores == null) {
ENFORCE_POSITIVE_SCORES = true; ENFORCE_POSITIVE_SCORES = true;
} else if ("false".equals(enforcePositiveScores)) { } else if ("false".equals(enforcePositiveScores)) {
ENFORCE_POSITIVE_SCORES = false; ENFORCE_POSITIVE_SCORES = false;
} else { } else {
throw new IllegalArgumentException(ES_ENFORCE_POSITIVE_SCORES + " may only be unset or set to [false], but got [" + throw new IllegalArgumentException(OPENSEARCH_ENFORCE_POSITIVE_SCORES + " may only be unset or set to [false], but got [" +
enforcePositiveScores + "]"); enforcePositiveScores + "]");
} }
} }
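
The escape hatch above accepts only an unset property or the literal value false. A standalone sketch of that check (names are hypothetical):

    public final class EnforcePositiveScoresExample {
        static boolean enforcePositiveScores() {
            final String value = System.getProperty("opensearch.enforce.positive.scores");
            if (value == null) {
                return true;      // default: enforce non-negative scores
            } else if ("false".equals(value)) {
                return false;     // explicit opt-out
            }
            throw new IllegalArgumentException(
                "opensearch.enforce.positive.scores may only be unset or set to [false], but got [" + value + "]");
        }

        public static void main(String[] args) {
            System.out.println(enforcePositiveScores());
        }
    }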

View File

@ -362,9 +362,9 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
JvmInfo jvmInfo = JvmInfo.jvmInfo(); JvmInfo jvmInfo = JvmInfo.jvmInfo();
if (trackRealMemoryUsage && jvmInfo.useG1GC().equals("true") if (trackRealMemoryUsage && jvmInfo.useG1GC().equals("true")
// messing with GC is "dangerous" so we apply an escape hatch. Not intended to be used. // messing with GC is "dangerous" so we apply an escape hatch. Not intended to be used.
&& Booleans.parseBoolean(System.getProperty("es.real_memory_circuit_breaker.g1_over_limit_strategy.enabled"), true)) { && Booleans.parseBoolean(System.getProperty("opensearch.real_memory_circuit_breaker.g1_over_limit_strategy.enabled"), true)) {
TimeValue lockTimeout = TimeValue.timeValueMillis( TimeValue lockTimeout = TimeValue.timeValueMillis(
Integer.parseInt(System.getProperty("es.real_memory_circuit_breaker.g1_over_limit_strategy.lock_timeout_ms", "500")) Integer.parseInt(System.getProperty("opensearch.real_memory_circuit_breaker.g1_over_limit_strategy.lock_timeout_ms", "500"))
); );
// hardcode interval, do not want any tuning of it outside code changes. // hardcode interval, do not want any tuning of it outside code changes.
return new G1OverLimitStrategy(jvmInfo, HierarchyCircuitBreakerService::realMemoryUsage, createYoungGcCountSupplier(), return new G1OverLimitStrategy(jvmInfo, HierarchyCircuitBreakerService::realMemoryUsage, createYoungGcCountSupplier(),

View File

@ -381,13 +381,13 @@ public final class ConfigurationUtils {
private static void addMetadataToException(OpenSearchException exception, String processorType, private static void addMetadataToException(OpenSearchException exception, String processorType,
String processorTag, String propertyName) { String processorTag, String propertyName) {
if (processorType != null) { if (processorType != null) {
exception.addMetadata("es.processor_type", processorType); exception.addMetadata("opensearch.processor_type", processorType);
} }
if (processorTag != null) { if (processorTag != null) {
exception.addMetadata("es.processor_tag", processorTag); exception.addMetadata("opensearch.processor_tag", processorTag);
} }
if (propertyName != null) { if (propertyName != null) {
exception.addMetadata("es.property_name", propertyName); exception.addMetadata("opensearch.property_name", propertyName);
} }
} }
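
A sketch of how the renamed processor keys above end up as exception metadata, using a plain map in place of OpenSearchException (illustrative only):

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public final class ProcessorMetadataExample {
        // Mirrors addMetadataToException above: attach whatever processor context is known.
        static void addProcessorMetadata(Map<String, List<String>> metadata,
                                         String processorType, String processorTag, String propertyName) {
            if (processorType != null) {
                metadata.put("opensearch.processor_type", List.of(processorType));
            }
            if (processorTag != null) {
                metadata.put("opensearch.processor_tag", List.of(processorTag));
            }
            if (propertyName != null) {
                metadata.put("opensearch.property_name", List.of(propertyName));
            }
        }

        public static void main(String[] args) {
            final Map<String, List<String>> metadata = new HashMap<>();
            addProcessorMetadata(metadata, "set", "my_set_processor", "field");
            System.out.println(metadata);
        }
    }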

View File

@ -158,7 +158,7 @@ public class JvmInfo implements ReportingService.Info {
} }
final boolean bundledJdk = Booleans.parseBoolean(System.getProperty("es.bundled_jdk", Boolean.FALSE.toString())); final boolean bundledJdk = Booleans.parseBoolean(System.getProperty("opensearch.bundled_jdk", Boolean.FALSE.toString()));
final Boolean usingBundledJdk = bundledJdk ? usingBundledJdk() : null; final Boolean usingBundledJdk = bundledJdk ? usingBundledJdk() : null;
INSTANCE = new JvmInfo(JvmPid.getPid(), System.getProperty("java.version"), runtimeMXBean.getVmName(), runtimeMXBean.getVmVersion(), INSTANCE = new JvmInfo(JvmPid.getPid(), System.getProperty("java.version"), runtimeMXBean.getVmName(), runtimeMXBean.getVmVersion(),

View File

@ -239,7 +239,7 @@ public class OsProbe {
// this property is to support a hack to work around an issue with Docker containers mounting the cgroups hierarchy inconsistently with // this property is to support a hack to work around an issue with Docker containers mounting the cgroups hierarchy inconsistently with
// respect to /proc/self/cgroup; for Docker containers this should be set to "/" // respect to /proc/self/cgroup; for Docker containers this should be set to "/"
private static final String CONTROL_GROUPS_HIERARCHY_OVERRIDE = System.getProperty("es.cgroups.hierarchy.override"); private static final String CONTROL_GROUPS_HIERARCHY_OVERRIDE = System.getProperty("opensearch.cgroups.hierarchy.override");
/** /**
* A map of the control groups to which the OpenSearch process belongs. Note that this is a map because the control groups can vary * A map of the control groups to which the OpenSearch process belongs. Note that this is a map because the control groups can vary

View File

@ -34,7 +34,7 @@ public class RestUtils {
/** /**
* Sets whether we decode a '+' in an url as a space or not. * Sets whether we decode a '+' in an url as a space or not.
*/ */
private static final boolean DECODE_PLUS_AS_SPACE = Booleans.parseBoolean(System.getProperty("es.rest.url_plus_as_space", "false")); private static final boolean DECODE_PLUS_AS_SPACE = Booleans.parseBoolean(System.getProperty("opensearch.rest.url_plus_as_space", "false"));
public static final PathTrie.Decoder REST_DECODER = new PathTrie.Decoder() { public static final PathTrie.Decoder REST_DECODER = new PathTrie.Decoder() {
@Override @Override

View File

@ -95,7 +95,7 @@ import static org.opensearch.search.query.TopDocsCollectorContext.shortcutTotalH
public class QueryPhase { public class QueryPhase {
private static final Logger LOGGER = LogManager.getLogger(QueryPhase.class); private static final Logger LOGGER = LogManager.getLogger(QueryPhase.class);
// TODO: remove this property // TODO: remove this property
public static final boolean SYS_PROP_REWRITE_SORT = Booleans.parseBoolean(System.getProperty("es.search.rewrite_sort", "true")); public static final boolean SYS_PROP_REWRITE_SORT = Booleans.parseBoolean(System.getProperty("opensearch.search.rewrite_sort", "true"));
private final AggregationPhase aggregationPhase; private final AggregationPhase aggregationPhase;
private final SuggestPhase suggestPhase; private final SuggestPhase suggestPhase;

View File

@ -41,7 +41,7 @@ public class TransportInfo implements ReportingService.Info {
/** Whether to add hostname to publish host field when serializing. */ /** Whether to add hostname to publish host field when serializing. */
private static final boolean CNAME_IN_PUBLISH_ADDRESS = private static final boolean CNAME_IN_PUBLISH_ADDRESS =
parseBoolean(System.getProperty("es.transport.cname_in_publish_address"), false); parseBoolean(System.getProperty("opensearch.transport.cname_in_publish_address"), false);
private final BoundTransportAddress address; private final BoundTransportAddress address;
private Map<String, BoundTransportAddress> profileAddresses; private Map<String, BoundTransportAddress> profileAddresses;

View File

@ -124,7 +124,7 @@ public class SearchPhaseExecutionExceptionTests extends OpenSearchTestCase {
assertNotNull(parsedException); assertNotNull(parsedException);
assertThat(parsedException.getHeaderKeys(), hasSize(0)); assertThat(parsedException.getHeaderKeys(), hasSize(0));
assertThat(parsedException.getMetadataKeys(), hasSize(1)); assertThat(parsedException.getMetadataKeys(), hasSize(1));
assertThat(parsedException.getMetadata("es.phase"), hasItem(phase)); assertThat(parsedException.getMetadata("opensearch.phase"), hasItem(phase));
// SearchPhaseExecutionException has no cause field // SearchPhaseExecutionException has no cause field
assertNull(parsedException.getCause()); assertNull(parsedException.getCause());
} }
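Callers inspecting such failures now look the phase up under the opensearch. metadata prefix. A hedged sketch of the read side, assuming OpenSearchException exposes the addMetadata/getMetadata pair these tests use and can be constructed from a plain message:

import java.util.List;
import org.opensearch.OpenSearchException;

public final class ExceptionMetadataSketch {
    public static void main(String[] args) {
        OpenSearchException e = new OpenSearchException("search phase failed");
        // Metadata keys carry the "opensearch." prefix after this change.
        e.addMetadata("opensearch.phase", "query");
        List<String> phase = e.getMetadata("opensearch.phase");
        System.out.println("phase metadata: " + phase); // [query]
    }
}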

View File

@ -138,10 +138,10 @@ public class ConfigurationUtilsTests extends OpenSearchTestCase {
OpenSearchParseException e = expectThrows(OpenSearchParseException.class, OpenSearchParseException e = expectThrows(OpenSearchParseException.class,
() -> ConfigurationUtils.readProcessorConfigs(config, scriptService, registry)); () -> ConfigurationUtils.readProcessorConfigs(config, scriptService, registry));
assertThat(e.getMessage(), equalTo("No processor type exists with name [unknown_processor]")); assertThat(e.getMessage(), equalTo("No processor type exists with name [unknown_processor]"));
assertThat(e.getMetadata("es.processor_tag"), equalTo(Collections.singletonList("my_unknown"))); assertThat(e.getMetadata("opensearch.processor_tag"), equalTo(Collections.singletonList("my_unknown")));
assertThat(e.getMetadata("es.processor_type"), equalTo(Collections.singletonList("unknown_processor"))); assertThat(e.getMetadata("opensearch.processor_type"), equalTo(Collections.singletonList("unknown_processor")));
assertThat(e.getMetadata("es.property_name"), is(nullValue())); assertThat(e.getMetadata("opensearch.property_name"), is(nullValue()));
assertThat(e.getMetadata("es.processor_description"), is(nullValue())); assertThat(e.getMetadata("opensearch.processor_description"), is(nullValue()));
List<Map<String, Object>> config2 = new ArrayList<>(); List<Map<String, Object>> config2 = new ArrayList<>();
unknownTaggedConfig = new HashMap<>(); unknownTaggedConfig = new HashMap<>();
@ -155,17 +155,17 @@ public class ConfigurationUtilsTests extends OpenSearchTestCase {
() -> ConfigurationUtils.readProcessorConfigs(config2, scriptService, registry) () -> ConfigurationUtils.readProcessorConfigs(config2, scriptService, registry)
); );
assertThat(e.getMessage(), equalTo("No processor type exists with name [unknown_processor]")); assertThat(e.getMessage(), equalTo("No processor type exists with name [unknown_processor]"));
assertThat(e.getMetadata("es.processor_tag"), equalTo(Collections.singletonList("my_unknown"))); assertThat(e.getMetadata("opensearch.processor_tag"), equalTo(Collections.singletonList("my_unknown")));
assertThat(e.getMetadata("es.processor_type"), equalTo(Collections.singletonList("unknown_processor"))); assertThat(e.getMetadata("opensearch.processor_type"), equalTo(Collections.singletonList("unknown_processor")));
assertThat(e.getMetadata("es.property_name"), is(nullValue())); assertThat(e.getMetadata("opensearch.property_name"), is(nullValue()));
assertThat(e.getSuppressed().length, equalTo(1)); assertThat(e.getSuppressed().length, equalTo(1));
assertThat(e.getSuppressed()[0], instanceOf(OpenSearchParseException.class)); assertThat(e.getSuppressed()[0], instanceOf(OpenSearchParseException.class));
OpenSearchParseException e2 = (OpenSearchParseException) e.getSuppressed()[0]; OpenSearchParseException e2 = (OpenSearchParseException) e.getSuppressed()[0];
assertThat(e2.getMessage(), equalTo("No processor type exists with name [second_unknown_processor]")); assertThat(e2.getMessage(), equalTo("No processor type exists with name [second_unknown_processor]"));
assertThat(e2.getMetadata("es.processor_tag"), equalTo(Collections.singletonList("my_second_unknown"))); assertThat(e2.getMetadata("opensearch.processor_tag"), equalTo(Collections.singletonList("my_second_unknown")));
assertThat(e2.getMetadata("es.processor_type"), equalTo(Collections.singletonList("second_unknown_processor"))); assertThat(e2.getMetadata("opensearch.processor_type"), equalTo(Collections.singletonList("second_unknown_processor")));
assertThat(e2.getMetadata("es.property_name"), is(nullValue())); assertThat(e2.getMetadata("opensearch.property_name"), is(nullValue()));
} }
public void testReadProcessorNullDescription() throws Exception { public void testReadProcessorNullDescription() throws Exception {

View File

@ -628,8 +628,8 @@ public class IngestServiceTests extends OpenSearchTestCase {
OpenSearchParseException e = OpenSearchParseException e =
expectThrows(OpenSearchParseException.class, () -> ingestService.validatePipeline(ingestInfos, putRequest)); expectThrows(OpenSearchParseException.class, () -> ingestService.validatePipeline(ingestInfos, putRequest));
assertEquals("Processor type [remove] is not installed on node [" + node2 + "]", e.getMessage()); assertEquals("Processor type [remove] is not installed on node [" + node2 + "]", e.getMessage());
assertEquals("remove", e.getMetadata("es.processor_type").get(0)); assertEquals("remove", e.getMetadata("opensearch.processor_type").get(0));
assertEquals("tag2", e.getMetadata("es.processor_tag").get(0)); assertEquals("tag2", e.getMetadata("opensearch.processor_tag").get(0));
ingestInfos.put(node2, new IngestInfo(Arrays.asList(new ProcessorInfo("set"), new ProcessorInfo("remove")))); ingestInfos.put(node2, new IngestInfo(Arrays.asList(new ProcessorInfo("set"), new ProcessorInfo("remove"))));
ingestService.validatePipeline(ingestInfos, putRequest); ingestService.validatePipeline(ingestInfos, putRequest);

View File

@ -282,8 +282,8 @@ public class BytesRestResponseTests extends OpenSearchTestCase {
expected.addHeader("foo", "bar", "baz"); expected.addHeader("foo", "bar", "baz");
} }
if (randomBoolean()) { if (randomBoolean()) {
originalException.addMetadata("es.metadata_0", "0"); originalException.addMetadata("opensearch.metadata_0", "0");
expected.addMetadata("es.metadata_0", "0"); expected.addMetadata("opensearch.metadata_0", "0");
} }
if (randomBoolean()) { if (randomBoolean()) {
String resourceType = randomAlphaOfLength(5); String resourceType = randomAlphaOfLength(5);
@ -336,7 +336,7 @@ public class BytesRestResponseTests extends OpenSearchTestCase {
super(""); super("");
this.addHeader("n1", "v11", "v12"); this.addHeader("n1", "v11", "v12");
this.addHeader("n2", "v21", "v22"); this.addHeader("n2", "v21", "v22");
this.addMetadata("es.test", "value1", "value2"); this.addMetadata("opensearch.test", "value1", "value2");
} }
} }
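Test or plugin code that subclasses the exception attaches its own keys the same way, now under the new prefix. A sketch modelled on the helper above (class name and key are illustrative):

import org.opensearch.OpenSearchException;

public class WithMetadataException extends OpenSearchException {
    public WithMetadataException(String msg) {
        super(msg);
        this.addHeader("n1", "v11", "v12");
        // Metadata keys are expected to start with "opensearch." after this rename.
        this.addMetadata("opensearch.test", "value1", "value2");
    }
}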

View File

@ -120,7 +120,7 @@ public class ListTasksResponseTests extends AbstractXContentTestCase<ListTasksRe
for (int i = 0; i < nodeFailures.size(); i++) { for (int i = 0; i < nodeFailures.size(); i++) {
OpenSearchException newException = nodeFailures.get(i); OpenSearchException newException = nodeFailures.get(i);
OpenSearchException expectedException = expectedFailures.get(i); OpenSearchException expectedException = expectedFailures.get(i);
assertThat(newException.getMetadata("es.node_id").get(0), equalTo(((FailedNodeException)expectedException).nodeId())); assertThat(newException.getMetadata("opensearch.node_id").get(0), equalTo(((FailedNodeException)expectedException).nodeId()));
assertThat(newException.getMessage(), equalTo("Elasticsearch exception [type=failed_node_exception, reason=error message]")); assertThat(newException.getMessage(), equalTo("Elasticsearch exception [type=failed_node_exception, reason=error message]"));
assertThat(newException.getCause(), instanceOf(OpenSearchException.class)); assertThat(newException.getCause(), instanceOf(OpenSearchException.class));
OpenSearchException cause = (OpenSearchException) newException.getCause(); OpenSearchException cause = (OpenSearchException) newException.getCause();

View File

@ -378,11 +378,11 @@ public final class InternalTestCluster extends TestCluster {
builder.put(Environment.PATH_REPO_SETTING.getKey(), baseDir.resolve("repos")); builder.put(Environment.PATH_REPO_SETTING.getKey(), baseDir.resolve("repos"));
builder.put(TransportSettings.PORT.getKey(), 0); builder.put(TransportSettings.PORT.getKey(), 0);
builder.put("http.port", 0); builder.put("http.port", 0);
if (Strings.hasLength(System.getProperty("tests.es.logger.level"))) { if (Strings.hasLength(System.getProperty("tests.opensearch.logger.level"))) {
builder.put("logger.level", System.getProperty("tests.es.logger.level")); builder.put("logger.level", System.getProperty("tests.opensearch.logger.level"));
} }
if (Strings.hasLength(System.getProperty("es.logger.prefix"))) { if (Strings.hasLength(System.getProperty("opensearch.logger.prefix"))) {
builder.put("logger.prefix", System.getProperty("es.logger.prefix")); builder.put("logger.prefix", System.getProperty("opensearch.logger.prefix"));
} }
// Default the watermarks to absurdly low to prevent the tests // Default the watermarks to absurdly low to prevent the tests
// from failing on nodes without enough disk space // from failing on nodes without enough disk space
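With the rename, the test-cluster log level and logger prefix are taken from tests.opensearch.logger.level and opensearch.logger.prefix (for example -Dtests.opensearch.logger.level=DEBUG on the command line, assuming tests.* properties are still forwarded to the test JVM). A small sketch of the renamed lookups feeding a plain map instead of a Settings builder:

import java.util.LinkedHashMap;
import java.util.Map;

public final class TestLoggerSettingsSketch {
    // Mirrors the two renamed logger-related lookups above, minus the Settings.Builder plumbing.
    static Map<String, String> loggerSettings() {
        Map<String, String> settings = new LinkedHashMap<>();
        String level = System.getProperty("tests.opensearch.logger.level");
        if (level != null && !level.isEmpty()) {
            settings.put("logger.level", level);
        }
        String prefix = System.getProperty("opensearch.logger.prefix");
        if (prefix != null && !prefix.isEmpty()) {
            settings.put("logger.prefix", prefix);
        }
        return settings;
    }

    public static void main(String[] args) {
        System.out.println(loggerSettings());
    }
}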

View File

@ -36,7 +36,7 @@ import static org.hamcrest.MatcherAssert.assertThat;
*/ */
public class MockLogAppender extends AbstractAppender { public class MockLogAppender extends AbstractAppender {
private static final String COMMON_PREFIX = System.getProperty("es.logger.prefix", "org.elasticsearch."); private static final String COMMON_PREFIX = System.getProperty("opensearch.logger.prefix", "org.elasticsearch.");
private List<LoggingExpectation> expectations; private List<LoggingExpectation> expectations;

View File

@ -158,7 +158,7 @@ public class ReproduceInfoPrinter extends RunListener {
} }
private ReproduceErrorMessageBuilder appendESProperties() { private ReproduceErrorMessageBuilder appendESProperties() {
appendProperties("tests.es.logger.level"); appendProperties("tests.opensearch.logger.level");
if (inVerifyPhase()) { if (inVerifyPhase()) {
// these properties only make sense for integration tests // these properties only make sense for integration tests
appendProperties(OpenSearchIntegTestCase.TESTS_ENABLE_MOCK_MODULES); appendProperties(OpenSearchIntegTestCase.TESTS_ENABLE_MOCK_MODULES);