Perform more renaming to OpenSearch. (#470)
This commit performs additional renaming to OpenSearch that was missed in the earlier commits.

Signed-off-by: Rabi Panda <adnapibar@gmail.com>
Parent: b255d11025
Commit: 70cad29af3
@@ -20,10 +20,7 @@
 package org.opensearch.gradle
 
 import org.opensearch.gradle.fixtures.AbstractGradleFuncTest
 import org.gradle.testkit.runner.GradleRunner
-import spock.lang.IgnoreIf
-import spock.lang.Requires
-import spock.util.environment.OperatingSystem
 
 import static org.opensearch.gradle.fixtures.DistributionDownloadFixture.withMockedDistributionDownload
 
@@ -70,8 +67,8 @@ class TestClustersPluginFuncTest extends AbstractGradleFuncTest {
 
         then:
         result.output.contains("opensearch-keystore script executed!")
-        assertEsStdoutContains("myCluster", "Starting OpenSearch process")
-        assertEsStdoutContains("myCluster", "Stopping node")
+        assertOpenSearchStdoutContains("myCluster", "Starting OpenSearch process")
+        assertOpenSearchStdoutContains("myCluster", "Stopping node")
         assertNoCustomDistro('myCluster')
     }
 
@@ -97,12 +94,12 @@ class TestClustersPluginFuncTest extends AbstractGradleFuncTest {
 
         then:
         result.output.contains("opensearch-keystore script executed!")
-        assertEsStdoutContains("myCluster", "Starting OpenSearch process")
-        assertEsStdoutContains("myCluster", "Stopping node")
+        assertOpenSearchStdoutContains("myCluster", "Starting OpenSearch process")
+        assertOpenSearchStdoutContains("myCluster", "Stopping node")
         assertCustomDistro('myCluster')
     }
 
-    boolean assertEsStdoutContains(String testCluster, String expectedOutput) {
+    boolean assertOpenSearchStdoutContains(String testCluster, String expectedOutput) {
         assert new File(testProjectDir.root,
             "build/testclusters/${testCluster}-0/logs/opensearch.stdout.log").text.contains(expectedOutput)
         true

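The renamed test helper simply reads the node's stdout log under build/testclusters and checks for a line. A minimal Java sketch of the same check; the class and method names here are illustrative, not part of the build-tools API (the real helper is the Groovy method in the hunk above):

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

// Minimal sketch of the renamed assertion: read the per-node stdout log that the
// test clusters plugin writes (opensearch.stdout.log) and check it contains a line.
// The directory layout mirrors the path used in the Groovy test above.
final class OpenSearchStdoutAssert {

    static boolean opensearchStdoutContains(Path projectDir, String cluster, String expected) throws IOException {
        Path log = projectDir.resolve("build/testclusters/" + cluster + "-0/logs/opensearch.stdout.log");
        return Files.readString(log).contains(expected);
    }

    public static void main(String[] args) throws IOException {
        Path projectDir = Path.of(args[0]);
        // e.g. after a test run: was the node actually started and stopped?
        System.out.println(opensearchStdoutContains(projectDir, "myCluster", "Starting OpenSearch process"));
        System.out.println(opensearchStdoutContains(projectDir, "myCluster", "Stopping node"));
    }
}
```
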
@@ -98,7 +98,7 @@ class NodeInfo {
     String executable
 
     /** Path to the opensearch start script */
-    private Object esScript
+    private Object opensearchScript
 
     /** script to run when running in the background */
     private File wrapperScript
@@ -154,11 +154,11 @@ class NodeInfo {
             * We have to delay building the string as the path will not exist during configuration which will fail on Windows due to
             * getting the short name requiring the path to already exist.
             */
-            esScript = "${-> binPath().resolve('opensearch.bat').toString()}"
+            opensearchScript = "${-> binPath().resolve('opensearch.bat').toString()}"
         } else {
             executable = 'bash'
             wrapperScript = new File(cwd, "run")
-            esScript = binPath().resolve('opensearch')
+            opensearchScript = binPath().resolve('opensearch')
         }
         if (config.daemonize) {
             if (Os.isFamily(Os.FAMILY_WINDOWS)) {
@@ -171,7 +171,7 @@ class NodeInfo {
                 args.add("${wrapperScript}")
             }
         } else {
-            args.add("${esScript}")
+            args.add("${opensearchScript}")
         }
 
 
@@ -270,7 +270,7 @@ class NodeInfo {
             argsPasser = '%*'
             exitMarker = "\r\n if \"%errorlevel%\" neq \"0\" ( type nul >> run.failed )"
         }
-        wrapperScript.setText("\"${esScript}\" ${argsPasser} > run.log 2>&1 ${exitMarker}", 'UTF-8')
+        wrapperScript.setText("\"${opensearchScript}\" ${argsPasser} > run.log 2>&1 ${exitMarker}", 'UTF-8')
     }
 
     /** Returns an address and port suitable for a uri to connect to this node over http */

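NodeInfo now stores the start script as opensearchScript and resolves it per platform: opensearch.bat on Windows, the plain opensearch shell script elsewhere. A small standalone sketch of that resolution, using a plain os.name check instead of Ant's Os helper used in the plugin; the class name and the "distro/bin" path are illustrative only:

```java
import java.nio.file.Path;

// Sketch of the platform-dependent script resolution done in NodeInfo: pick the
// Windows batch file or the POSIX shell script from the distribution's bin directory.
final class StartScriptResolver {

    static Path opensearchScript(Path binPath) {
        boolean windows = System.getProperty("os.name").toLowerCase().contains("win");
        return binPath.resolve(windows ? "opensearch.bat" : "opensearch");
    }

    public static void main(String[] args) {
        Path bin = Path.of("distro", "bin");        // hypothetical install location
        System.out.println(opensearchScript(bin));  // distro/bin/opensearch or distro/bin/opensearch.bat
    }
}
```
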
@@ -49,10 +49,10 @@ public class ForbiddenApisPrecommitPlugin extends PrecommitPlugin {
         resourcesTask.configure(t -> {
             t.setOutputDir(resourcesDir.toFile());
             t.copy("forbidden/jdk-signatures.txt");
-            t.copy("forbidden/es-all-signatures.txt");
-            t.copy("forbidden/es-test-signatures.txt");
+            t.copy("forbidden/opensearch-all-signatures.txt");
+            t.copy("forbidden/opensearch-test-signatures.txt");
             t.copy("forbidden/http-signatures.txt");
-            t.copy("forbidden/es-server-signatures.txt");
+            t.copy("forbidden/opensearch-server-signatures.txt");
         });
         project.getTasks().withType(CheckForbiddenApis.class).configureEach(t -> {
             t.dependsOn(resourcesTask);
@@ -79,7 +79,10 @@ public class ForbiddenApisPrecommitPlugin extends PrecommitPlugin {
             }
             t.setBundledSignatures(Set.of("jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out"));
             t.setSignaturesFiles(
-                project.files(resourcesDir.resolve("forbidden/jdk-signatures.txt"), resourcesDir.resolve("forbidden/es-all-signatures.txt"))
+                project.files(
+                    resourcesDir.resolve("forbidden/jdk-signatures.txt"),
+                    resourcesDir.resolve("forbidden/opensearch-all-signatures.txt")
+                )
             );
             t.setSuppressAnnotations(Set.of("**.SuppressForbidden"));
             if (t.getName().endsWith("Test")) {
@@ -87,14 +90,14 @@ public class ForbiddenApisPrecommitPlugin extends PrecommitPlugin {
                     t.getSignaturesFiles()
                         .plus(
                             project.files(
-                                resourcesDir.resolve("forbidden/es-test-signatures.txt"),
+                                resourcesDir.resolve("forbidden/opensearch-test-signatures.txt"),
                                 resourcesDir.resolve("forbidden/http-signatures.txt")
                             )
                         )
                 );
             } else {
                 t.setSignaturesFiles(
-                    t.getSignaturesFiles().plus(project.files(resourcesDir.resolve("forbidden/es-server-signatures.txt")))
+                    t.getSignaturesFiles().plus(project.files(resourcesDir.resolve("forbidden/opensearch-server-signatures.txt")))
                 );
             }
             ExtraPropertiesExtension ext = t.getExtensions().getExtraProperties();

@@ -150,15 +150,15 @@ public class OpenSearchNode implements TestClusterConfiguration {
     private final Path confPathLogs;
     private final Path transportPortFile;
     private final Path httpPortsFile;
-    private final Path esStdoutFile;
-    private final Path esStderrFile;
-    private final Path esStdinFile;
+    private final Path opensearchStdoutFile;
+    private final Path opensearchStderrFile;
+    private final Path opensearchStdinFile;
     private final Path tmpDir;
 
     private int currentDistro = 0;
     private TestDistribution testDistribution;
     private List<OpenSearchDistribution> distributions = new ArrayList<>();
-    private volatile Process esProcess;
+    private volatile Process opensearchProcess;
     private Function<String, String> nameCustomization = Function.identity();
     private boolean isWorkingDirConfigured = false;
     private String httpPort = "0";
@@ -191,9 +191,9 @@ public class OpenSearchNode implements TestClusterConfiguration {
         confPathLogs = workingDir.resolve("logs");
         transportPortFile = confPathLogs.resolve("transport.ports");
         httpPortsFile = confPathLogs.resolve("http.ports");
-        esStdoutFile = confPathLogs.resolve("opensearch.stdout.log");
-        esStderrFile = confPathLogs.resolve("opensearch.stderr.log");
-        esStdinFile = workingDir.resolve("opensearch.stdin");
+        opensearchStdoutFile = confPathLogs.resolve("opensearch.stdout.log");
+        opensearchStderrFile = confPathLogs.resolve("opensearch.stderr.log");
+        opensearchStdinFile = workingDir.resolve("opensearch.stdin");
         tmpDir = workingDir.resolve("tmp");
         waitConditions.put("ports files", this::checkPortsFilesExistWithDelay);
 
@@ -438,7 +438,7 @@ public class OpenSearchNode implements TestClusterConfiguration {
      * @return stream of log lines
      */
     public Stream<String> logLines() throws IOException {
-        return Files.lines(esStdoutFile, StandardCharsets.UTF_8);
+        return Files.lines(opensearchStdoutFile, StandardCharsets.UTF_8);
     }
 
     @Override
@@ -540,11 +540,11 @@ public class OpenSearchNode implements TestClusterConfiguration {
 
     private void logToProcessStdout(String message) {
         try {
-            if (Files.exists(esStdoutFile.getParent()) == false) {
-                Files.createDirectories(esStdoutFile.getParent());
+            if (Files.exists(opensearchStdoutFile.getParent()) == false) {
+                Files.createDirectories(opensearchStdoutFile.getParent());
             }
             Files.write(
-                esStdoutFile,
+                opensearchStdoutFile,
                 ("[" + Instant.now().toString() + "] [BUILD] " + message + "\n").getBytes(StandardCharsets.UTF_8),
                 StandardOpenOption.CREATE,
                 StandardOpenOption.APPEND
@@ -784,24 +784,24 @@ public class OpenSearchNode implements TestClusterConfiguration {
         environment.putAll(getESEnvironment());
 
         // don't buffer all in memory, make sure we don't block on the default pipes
-        processBuilder.redirectError(ProcessBuilder.Redirect.appendTo(esStderrFile.toFile()));
-        processBuilder.redirectOutput(ProcessBuilder.Redirect.appendTo(esStdoutFile.toFile()));
+        processBuilder.redirectError(ProcessBuilder.Redirect.appendTo(opensearchStderrFile.toFile()));
+        processBuilder.redirectOutput(ProcessBuilder.Redirect.appendTo(opensearchStdoutFile.toFile()));
 
         if (keystorePassword != null && keystorePassword.length() > 0) {
             try {
-                Files.write(esStdinFile, (keystorePassword + "\n").getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE);
-                processBuilder.redirectInput(esStdinFile.toFile());
+                Files.write(opensearchStdinFile, (keystorePassword + "\n").getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE);
+                processBuilder.redirectInput(opensearchStdinFile.toFile());
             } catch (IOException e) {
                 throw new TestClustersException("Failed to set the keystore password for " + this, e);
             }
         }
         LOGGER.info("Running `{}` in `{}` for {} env: {}", command, workingDir, this, environment);
         try {
-            esProcess = processBuilder.start();
+            opensearchProcess = processBuilder.start();
         } catch (IOException e) {
             throw new TestClustersException("Failed to start ES process for " + this, e);
         }
-        reaper.registerPid(toString(), esProcess.pid());
+        reaper.registerPid(toString(), opensearchProcess.pid());
     }
 
     @Internal
@@ -860,21 +860,21 @@ public class OpenSearchNode implements TestClusterConfiguration {
         } catch (IOException e) {
             throw new UncheckedIOException(e);
         }
-        if (esProcess == null && tailLogs) {
+        if (opensearchProcess == null && tailLogs) {
             // This is a special case. If start() throws an exception the plugin will still call stop
             // Another exception here would eat the orriginal.
             return;
         }
         LOGGER.info("Stopping `{}`, tailLogs: {}", this, tailLogs);
-        requireNonNull(esProcess, "Can't stop `" + this + "` as it was not started or already stopped.");
+        requireNonNull(opensearchProcess, "Can't stop `" + this + "` as it was not started or already stopped.");
         // Test clusters are not reused, don't spend time on a graceful shutdown
-        stopHandle(esProcess.toHandle(), true);
+        stopHandle(opensearchProcess.toHandle(), true);
         reaper.unregister(toString());
         if (tailLogs) {
-            logFileContents("Standard output of node", esStdoutFile);
-            logFileContents("Standard error of node", esStderrFile);
+            logFileContents("Standard output of node", opensearchStdoutFile);
+            logFileContents("Standard error of node", opensearchStderrFile);
         }
-        esProcess = null;
+        opensearchProcess = null;
         // Clean up the ports file in case this is started again.
         try {
             if (Files.exists(httpPortsFile)) {
@@ -1348,8 +1348,8 @@ public class OpenSearchNode implements TestClusterConfiguration {
     @Override
     @Internal
     public boolean isProcessAlive() {
-        requireNonNull(esProcess, "Can't wait for `" + this + "` as it's not started. Does the task have `useCluster` ?");
-        return esProcess.isAlive();
+        requireNonNull(opensearchProcess, "Can't wait for `" + this + "` as it's not started. Does the task have `useCluster` ?");
+        return opensearchProcess.isAlive();
     }
 
     void waitForAllConditions() {
@@ -1414,13 +1414,13 @@ public class OpenSearchNode implements TestClusterConfiguration {
     }
 
     @Internal
-    Path getEsStdoutFile() {
-        return esStdoutFile;
+    Path getOpensearchStdoutFile() {
+        return opensearchStdoutFile;
     }
 
     @Internal
-    Path getEsStderrFile() {
-        return esStderrFile;
+    Path getOpensearchStderrFile() {
+        return opensearchStderrFile;
    }
 
     private static class FileEntry implements Named {

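Most of the OpenSearchNode hunks only rename fields (esStdoutFile → opensearchStdoutFile, esProcess → opensearchProcess) around one JDK pattern: start the node with ProcessBuilder, append stdout/stderr to log files, and feed the keystore password through a redirected stdin file. A self-contained sketch of that pattern with a harmless placeholder command; all paths and the command are assumptions for illustration, not the plugin's actual launch code:

```java
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

// Sketch of the process-launch pattern used by OpenSearchNode: redirect output and
// error to append-mode log files and, when a keystore password is set, write it to a
// stdin file that the child process reads.
public class ProcessRedirectSketch {

    public static void main(String[] args) throws IOException, InterruptedException {
        Path workingDir = Files.createTempDirectory("node");
        Path stdoutFile = workingDir.resolve("opensearch.stdout.log");
        Path stderrFile = workingDir.resolve("opensearch.stderr.log");
        Path stdinFile = workingDir.resolve("opensearch.stdin");

        ProcessBuilder pb = new ProcessBuilder("cat");   // POSIX-only placeholder for the start script
        pb.directory(workingDir.toFile());
        // don't buffer in memory; append to files like the test clusters plugin does
        pb.redirectOutput(ProcessBuilder.Redirect.appendTo(stdoutFile.toFile()));
        pb.redirectError(ProcessBuilder.Redirect.appendTo(stderrFile.toFile()));

        String keystorePassword = "secret";              // illustrative value
        Files.write(stdinFile, (keystorePassword + "\n").getBytes(StandardCharsets.UTF_8),
            StandardOpenOption.CREATE);
        pb.redirectInput(stdinFile.toFile());

        Process opensearchProcess = pb.start();
        opensearchProcess.waitFor();
        System.out.println(Files.readString(stdoutFile)); // the password echoed back by `cat`
    }
}
```
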
@@ -150,7 +150,7 @@ public class RunTask extends DefaultTestClustersTask {
         try {
             for (OpenSearchCluster cluster : getClusters()) {
                 for (OpenSearchNode node : cluster.getNodes()) {
-                    BufferedReader reader = Files.newBufferedReader(node.getEsStdoutFile());
+                    BufferedReader reader = Files.newBufferedReader(node.getOpensearchStdoutFile());
                     toRead.add(reader);
                     aliveChecks.add(node::isProcessAlive);
                 }

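RunTask now obtains the stdout log via getOpensearchStdoutFile() and follows it while the node is alive. A minimal sketch of that follow-the-log loop over a plain file, independent of the Gradle task; the poll interval and the alive check in main are placeholders:

```java
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.function.BooleanSupplier;

// Sketch of the tailing loop in RunTask: keep a BufferedReader open on the node's
// stdout log and print new lines until the alive check reports the process has exited.
final class LogTailSketch {

    static void tail(Path stdoutLog, BooleanSupplier processAlive) throws IOException, InterruptedException {
        try (BufferedReader reader = Files.newBufferedReader(stdoutLog)) {
            while (processAlive.getAsBoolean()) {
                String line = reader.readLine();
                if (line == null) {
                    Thread.sleep(100);        // nothing new yet; poll again
                } else {
                    System.out.println(line);
                }
            }
        }
    }

    public static void main(String[] args) throws Exception {
        // hypothetical usage: tail for ~2 seconds, pretending the node is alive that long
        long deadline = System.currentTimeMillis() + 2_000;
        tail(Path.of(args[0]), () -> System.currentTimeMillis() < deadline);
    }
}
```
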
@@ -307,11 +307,11 @@ public class SearchDocumentationIT extends OpenSearchRestHighLevelClientTestCase
         {
             BulkRequest request = new BulkRequest();
             request.add(new IndexRequest("posts").id("1")
-                .source(XContentType.JSON, "company", "Elastic", "age", 20));
+                .source(XContentType.JSON, "company", "OpenSearch", "age", 20));
             request.add(new IndexRequest("posts").id("2")
-                .source(XContentType.JSON, "company", "Elastic", "age", 30));
+                .source(XContentType.JSON, "company", "OpenSearch", "age", 30));
             request.add(new IndexRequest("posts").id("3")
-                .source(XContentType.JSON, "company", "Elastic", "age", 40));
+                .source(XContentType.JSON, "company", "OpenSearch", "age", 40));
             request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
             BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT);
             assertSame(RestStatus.OK, bulkResponse.status());
@@ -334,7 +334,7 @@ public class SearchDocumentationIT extends OpenSearchRestHighLevelClientTestCase
         // tag::search-request-aggregations-get
         Aggregations aggregations = searchResponse.getAggregations();
         Terms byCompanyAggregation = aggregations.get("by_company"); // <1>
-        Bucket elasticBucket = byCompanyAggregation.getBucketByKey("Elastic"); // <2>
+        Bucket elasticBucket = byCompanyAggregation.getBucketByKey("OpenSearch"); // <2>
         Avg averageAge = elasticBucket.getAggregations().get("average_age"); // <3>
         double avg = averageAge.getValue();
         // end::search-request-aggregations-get
@@ -368,7 +368,7 @@ public class SearchDocumentationIT extends OpenSearchRestHighLevelClientTestCase
         for (Aggregation agg : aggregations) {
             String type = agg.getType();
             if (type.equals(TermsAggregationBuilder.NAME)) {
-                Bucket elasticBucket = ((Terms) agg).getBucketByKey("Elastic");
+                Bucket elasticBucket = ((Terms) agg).getBucketByKey("OpenSearch");
                 long numberOfDocs = elasticBucket.getDocCount();
             }
         }
@@ -1404,7 +1404,7 @@ public class SearchDocumentationIT extends OpenSearchRestHighLevelClientTestCase
             .source(XContentType.JSON, "title", "Doubling Down on Open?", "user",
                 Collections.singletonList("foobar"), "innerObject", Collections.singletonMap("key", "value")));
         bulkRequest.add(new IndexRequest("blog").id("2")
-            .source(XContentType.JSON, "title", "Swiftype Joins Forces with Elastic", "user",
+            .source(XContentType.JSON, "title", "XYZ Joins Forces with OpenSearch", "user",
                 Arrays.asList("foobar", "matt"), "innerObject", Collections.singletonMap("key", "value")));
         bulkRequest.add(new IndexRequest("blog").id("3")
             .source(XContentType.JSON, "title", "On Net Neutrality", "user",

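The SearchDocumentationIT hunks change the sample data and bucket key from "Elastic" to "OpenSearch". A condensed sketch of the same scenario end to end: bulk-index three "posts" documents and read back the "OpenSearch" terms bucket with its average-age sub-aggregation. The aggregation wiring (terms on company.keyword plus an avg sub-aggregation) and the org.opensearch package names are assumptions about the snipped parts of the test, based on the post-rename 1.x high-level REST client layout:

```java
import java.io.IOException;

import org.opensearch.action.bulk.BulkRequest;
import org.opensearch.action.bulk.BulkResponse;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.action.support.WriteRequest;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.search.aggregations.AggregationBuilders;
import org.opensearch.search.aggregations.bucket.terms.Terms;
import org.opensearch.search.aggregations.metrics.Avg;
import org.opensearch.search.builder.SearchSourceBuilder;

// Sketch: bulk-index documents whose company field is "OpenSearch", then aggregate
// by company and read the "OpenSearch" bucket, mirroring the documentation test above.
public class CompanyAggregationSketch {

    static double averageAge(RestHighLevelClient client) throws IOException {
        BulkRequest bulk = new BulkRequest();
        bulk.add(new IndexRequest("posts").id("1").source(XContentType.JSON, "company", "OpenSearch", "age", 20));
        bulk.add(new IndexRequest("posts").id("2").source(XContentType.JSON, "company", "OpenSearch", "age", 30));
        bulk.add(new IndexRequest("posts").id("3").source(XContentType.JSON, "company", "OpenSearch", "age", 40));
        bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        BulkResponse bulkResponse = client.bulk(bulk, RequestOptions.DEFAULT);
        if (bulkResponse.hasFailures()) {
            throw new IllegalStateException(bulkResponse.buildFailureMessage());
        }

        SearchSourceBuilder source = new SearchSourceBuilder().size(0)
            .aggregation(AggregationBuilders.terms("by_company").field("company.keyword")
                .subAggregation(AggregationBuilders.avg("average_age").field("age")));
        SearchResponse response = client.search(new SearchRequest("posts").source(source), RequestOptions.DEFAULT);

        Terms byCompany = response.getAggregations().get("by_company");
        Terms.Bucket openSearchBucket = byCompany.getBucketByKey("OpenSearch");
        Avg averageAge = openSearchBucket.getAggregations().get("average_age");
        return averageAge.getValue();   // expected: 30.0 for the three documents above
    }
}
```
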
@@ -37,7 +37,7 @@ dependencies {
 forbiddenApisTest {
     // we don't use the core test-framework, no lucene classes present so we don't want the es-test-signatures to
     // be pulled in
-    replaceSignatureFiles 'jdk-signatures', 'es-all-signatures'
+    replaceSignatureFiles 'jdk-signatures', 'opensearch-all-signatures'
 }
 
 testingConventions {

@@ -177,7 +177,7 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder {
                 BytesRef joinName = docValues.lookupOrd(ord);
                 return joinName.utf8ToString();
             } catch (IOException e) {
-                throw ExceptionsHelper.convertToElastic(e);
+                throw ExceptionsHelper.convertToOpenSearchException(e);
             }
         }
 

@@ -59,7 +59,7 @@ public final class ExceptionsHelper {
         return new OpenSearchException(e);
     }
 
-    public static OpenSearchException convertToElastic(Exception e) {
+    public static OpenSearchException convertToOpenSearchException(Exception e) {
         if (e instanceof OpenSearchException) {
             return (OpenSearchException) e;
         }

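convertToElastic becomes convertToOpenSearchException here and at every call site in the hunks that follow; the helper passes an OpenSearchException through unchanged and wraps anything else. A standalone sketch of that wrap-or-cast pattern, with a plain RuntimeException subclass standing in for OpenSearchException (the stand-in class is not part of the server codebase):

```java
// Standalone sketch of the pattern behind convertToOpenSearchException: pass an
// existing wrapper through unchanged, wrap anything else exactly once.
public final class ExceptionConversionSketch {

    static class WrappedException extends RuntimeException {   // stand-in for OpenSearchException
        WrappedException(Throwable cause) {
            super(cause);
        }
    }

    static WrappedException convert(Exception e) {
        if (e instanceof WrappedException) {
            return (WrappedException) e;   // already the right type, don't double-wrap
        }
        return new WrappedException(e);    // wrap checked or foreign exceptions
    }

    public static void main(String[] args) {
        try {
            throw new java.io.IOException("disk full");
        } catch (Exception e) {
            WrappedException converted = convert(e);
            System.out.println(converted.getCause().getMessage());   // prints "disk full"
        }
    }
}
```
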
@@ -282,7 +282,7 @@ public class MetadataUpdateSettingsService {
                     indicesService.verifyIndexMetadata(updatedMetadata, updatedMetadata);
                 }
             } catch (IOException ex) {
-                throw ExceptionsHelper.convertToElastic(ex);
+                throw ExceptionsHelper.convertToOpenSearchException(ex);
             }
             return updatedState;
         }

@@ -194,7 +194,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent
         try {
             return getAndLoadIfNotPresent(query, context);
         } catch (ExecutionException e) {
-            throw ExceptionsHelper.convertToElastic(e);
+            throw ExceptionsHelper.convertToOpenSearchException(e);
         }
     }
 

@@ -320,7 +320,7 @@ public final class ConfigurationUtils {
 
     public static OpenSearchException newConfigurationException(String processorType, String processorTag,
                                                                 String propertyName, Exception cause) {
-        OpenSearchException exception = ExceptionsHelper.convertToElastic(cause);
+        OpenSearchException exception = ExceptionsHelper.convertToOpenSearchException(cause);
         addMetadataToException(exception, processorType, processorTag, propertyName);
         return exception;
     }

@@ -66,7 +66,7 @@ public class RestController implements HttpServerTransport.Dispatcher {
 
     private static final Logger logger = LogManager.getLogger(RestController.class);
     private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestController.class);
-    private static final String ELASTIC_PRODUCT_ORIGIN_HTTP_HEADER = "X-elastic-product-origin";
+    private static final String OPENSEARCH_PRODUCT_ORIGIN_HTTP_HEADER = "X-opensearch-product-origin";
 
     private static final BytesReference FAVICON_RESPONSE;
 
@@ -248,8 +248,8 @@ public class RestController implements HttpServerTransport.Dispatcher {
         if (handler.allowsUnsafeBuffers() == false) {
             request.ensureSafeBuffers();
         }
-        if (handler.allowSystemIndexAccessByDefault() == false && request.header(ELASTIC_PRODUCT_ORIGIN_HTTP_HEADER) == null) {
-            // The ELASTIC_PRODUCT_ORIGIN_HTTP_HEADER indicates that the request is coming from an Elastic product with a plan
+        if (handler.allowSystemIndexAccessByDefault() == false && request.header(OPENSEARCH_PRODUCT_ORIGIN_HTTP_HEADER) == null) {
+            // The OPENSEARCH_PRODUCT_ORIGIN_HTTP_HEADER indicates that the request is coming from an OpenSearch product with a plan
             // to move away from direct access to system indices, and thus deprecation warnings should not be emitted.
             // This header is intended for internal use only.
             client.threadPool().getThreadContext().putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString());

@@ -73,7 +73,7 @@ public final class ScoreScriptUtils {
             int hash = StringHelper.murmurhash3_x86_32(new BytesRef(seedValue), saltedSeed);
             return (hash & 0x00FFFFFF) / (float)(1 << 24); // only use the lower 24 bits to construct a float from 0.0-1.0
         } catch (Exception e) {
-            throw ExceptionsHelper.convertToElastic(e);
+            throw ExceptionsHelper.convertToOpenSearchException(e);
         }
     }
 }

@@ -100,7 +100,7 @@ public class LeafDocLookup implements Map<String, ScriptDocValues<?>> {
         try {
             scriptValues.setNextDocId(docId);
         } catch (IOException e) {
-            throw ExceptionsHelper.convertToElastic(e);
+            throw ExceptionsHelper.convertToOpenSearchException(e);
         }
         return scriptValues;
     }

@@ -67,7 +67,7 @@ public class PreConfiguredTokenFilterTests extends OpenSearchTestCase {
         assertSame(tff_v1_1, tff_v2);
     }
 
-    public void testCachingWithElasticsearchVersion() throws IOException {
+    public void testCachingWithOpenSearchVersion() throws IOException {
         PreConfiguredTokenFilter pctf =
             PreConfiguredTokenFilter.openSearchVersion("opensearch_version", randomBoolean(),
                 (tokenStream, esVersion) -> new TokenFilter(tokenStream) {

@@ -65,7 +65,7 @@ public class SyncedFlushUtil {
             }
         });
         if (listenerHolder.get().error != null) {
-            throw ExceptionsHelper.convertToElastic(listenerHolder.get().error);
+            throw ExceptionsHelper.convertToOpenSearchException(listenerHolder.get().error);
         }
         return listenerHolder.get().result;
     }
@@ -103,7 +103,7 @@ public class SyncedFlushUtil {
             Thread.currentThread().interrupt();
         }
         if (listener.error != null) {
-            throw ExceptionsHelper.convertToElastic(listener.error);
+            throw ExceptionsHelper.convertToOpenSearchException(listener.error);
         }
         return listener.result;
     }

@@ -298,7 +298,7 @@ public class ContextIndexSearcherTests extends OpenSearchTestCase {
                 try {
                     return new DocumentSubsetReader(reader, bitsetFilterCache, roleQuery);
                 } catch (Exception e) {
-                    throw ExceptionsHelper.convertToElastic(e);
+                    throw ExceptionsHelper.convertToOpenSearchException(e);
                 }
             }
         });

@@ -43,7 +43,7 @@ compileTestJava.options.compilerArgs << '-Xlint:-rawtypes'
 
 // the main files are actually test files, so use the appropriate forbidden api sigs
 tasks.named('forbiddenApisMain').configure {
-    replaceSignatureFiles 'jdk-signatures', 'es-all-signatures', 'es-test-signatures'
+    replaceSignatureFiles 'jdk-signatures', 'opensearch-all-signatures', 'opensearch-test-signatures'
 }
 
 // TODO: should we have licenses for our test deps?