[Rename] client/sniffer (#243)

Refactor client/sniffer module as part of the Elasticsearch to OpenSearch renaming effort.

Signed-off-by: Rabi Panda <adnapibar@gmail.com>
This commit is contained in:
Rabi Panda 2021-03-09 14:14:50 -08:00 committed by Nick Knize
parent 1203aa7302
commit 223ce87447
22 changed files with 86 additions and 86 deletions

View File

@ -16,14 +16,14 @@
* specific language governing permissions and limitations
* under the License.
*/
apply plugin: 'elasticsearch.build'
apply plugin: 'elasticsearch.publish'
apply plugin: 'opensearch.build'
apply plugin: 'opensearch.publish'
targetCompatibility = JavaVersion.VERSION_1_8
sourceCompatibility = JavaVersion.VERSION_1_8
group = 'org.elasticsearch.client'
archivesBaseName = 'elasticsearch-rest-client-sniffer'
group = 'org.opensearch.client'
archivesBaseName = 'opensearch-rest-client-sniffer'
dependencies {
api project(":client:rest")
@ -36,8 +36,8 @@ dependencies {
testImplementation project(":client:test")
testImplementation "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testImplementation "junit:junit:${versions.junit}"
testImplementation "org.elasticsearch:securemock:${versions.securemock}"
testImplementation "org.elasticsearch:mocksocket:${versions.mocksocket}"
testImplementation "org.opensearch:securemock:${versions.securemock}"
testImplementation "org.opensearch:mocksocket:${versions.mocksocket}"
}
tasks.named('forbiddenApisMain').configure {
@ -59,14 +59,14 @@ tasks.named("dependencyLicenses").configure {
}
// JarHell is part of es server, which we don't want to pull in
// TODO: Not anymore. Now in :libs:elasticsearch-core
// TODO: Not anymore. Now in :libs:opensearch-core
jarHell.enabled = false
testingConventions {
naming.clear()
naming {
Tests {
baseClass 'org.elasticsearch.client.RestClientTestCase'
baseClass 'org.opensearch.client.RestClientTestCase'
}
}
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.client.sniff;
package org.opensearch.client.sniff;
import org.elasticsearch.client.Node;
@ -29,7 +29,7 @@ import java.util.List;
*/
public interface NodesSniffer {
/**
* Returns the sniffed Elasticsearch nodes.
* Returns the sniffed OpenSearch nodes.
*/
List<Node> sniff() throws IOException;
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.client.sniff;
package org.opensearch.client.sniff;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
@ -50,12 +50,12 @@ import static java.util.Collections.unmodifiableList;
import static java.util.Collections.unmodifiableMap;
/**
* Class responsible for sniffing the http hosts from elasticsearch through the nodes info api and returning them back.
* Class responsible for sniffing the http hosts from opensearch through the nodes info api and returning them back.
* Compatible with elasticsearch 2.x+.
*/
public final class ElasticsearchNodesSniffer implements NodesSniffer {
public final class OpenSearchNodesSniffer implements NodesSniffer {
private static final Log logger = LogFactory.getLog(ElasticsearchNodesSniffer.class);
private static final Log logger = LogFactory.getLog(OpenSearchNodesSniffer.class);
public static final long DEFAULT_SNIFF_REQUEST_TIMEOUT = TimeUnit.SECONDS.toMillis(1);
@ -65,29 +65,29 @@ public final class ElasticsearchNodesSniffer implements NodesSniffer {
private final JsonFactory jsonFactory = new JsonFactory();
/**
* Creates a new instance of the Elasticsearch sniffer. It will use the provided {@link RestClient} to fetch the hosts,
* Creates a new instance of the OpenSearch sniffer. It will use the provided {@link RestClient} to fetch the hosts,
* through the nodes info api, the default sniff request timeout value {@link #DEFAULT_SNIFF_REQUEST_TIMEOUT} and http
* as the scheme for all the hosts.
* @param restClient client used to fetch the hosts from elasticsearch through nodes info api. Usually the same instance
* @param restClient client used to fetch the hosts from opensearch through nodes info api. Usually the same instance
* that is also provided to {@link Sniffer#builder(RestClient)}, so that the hosts are set to the same
* client that was used to fetch them.
*/
public ElasticsearchNodesSniffer(RestClient restClient) {
this(restClient, DEFAULT_SNIFF_REQUEST_TIMEOUT, ElasticsearchNodesSniffer.Scheme.HTTP);
public OpenSearchNodesSniffer(RestClient restClient) {
this(restClient, DEFAULT_SNIFF_REQUEST_TIMEOUT, OpenSearchNodesSniffer.Scheme.HTTP);
}
/**
* Creates a new instance of the Elasticsearch sniffer. It will use the provided {@link RestClient} to fetch the hosts
* Creates a new instance of the OpenSearch sniffer. It will use the provided {@link RestClient} to fetch the hosts
* through the nodes info api, the provided sniff request timeout value and scheme.
* @param restClient client used to fetch the hosts from elasticsearch through nodes info api. Usually the same instance
* @param restClient client used to fetch the hosts from opensearch through nodes info api. Usually the same instance
* that is also provided to {@link Sniffer#builder(RestClient)}, so that the hosts are set to the same
* client that was used to sniff them.
* @param sniffRequestTimeoutMillis the sniff request timeout (in milliseconds) to be passed in as a query string parameter
* to elasticsearch. Allows to halt the request without any failure, as only the nodes
* to opensearch. Allows to halt the request without any failure, as only the nodes
* that have responded within this timeout will be returned.
* @param scheme the scheme to associate sniffed nodes with (as it is not returned by elasticsearch)
* @param scheme the scheme to associate sniffed nodes with (as it is not returned by opensearch)
*/
public ElasticsearchNodesSniffer(RestClient restClient, long sniffRequestTimeoutMillis, Scheme scheme) {
public OpenSearchNodesSniffer(RestClient restClient, long sniffRequestTimeoutMillis, Scheme scheme) {
this.restClient = Objects.requireNonNull(restClient, "restClient cannot be null");
if (sniffRequestTimeoutMillis < 0) {
throw new IllegalArgumentException("sniffRequestTimeoutMillis must be greater than 0");
@ -98,7 +98,7 @@ public final class ElasticsearchNodesSniffer implements NodesSniffer {
}
/**
* Calls the elasticsearch nodes info api, parses the response and returns all the found http hosts
* Calls the opensearch nodes info api, parses the response and returns all the found http hosts
*/
@Override
public List<Node> sniff() throws IOException {
@ -138,7 +138,7 @@ public final class ElasticsearchNodesSniffer implements NodesSniffer {
HttpHost publishedHost = null;
/*
* We sniff the bound hosts so we can look up the node based on any
* address on which it is listening. This is useful in Elasticsearch's
* address on which it is listening. This is useful in OpenSearch's
* test framework where we sometimes publish ipv6 addresses but the
* tests contact the node on ipv4.
*/

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.client.sniff;
package org.opensearch.client.sniff;
import org.elasticsearch.client.Node;
import org.elasticsearch.client.RestClient;

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.client.sniff;
package org.opensearch.client.sniff;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -42,7 +42,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
/**
* Class responsible for sniffing nodes from some source (default is elasticsearch itself) and setting them to a provided instance of
* Class responsible for sniffing nodes from some source (default is opensearch itself) and setting them to a provided instance of
* {@link RestClient}. Must be created via {@link SnifferBuilder}, which allows to set all of the different options or rely on defaults.
* A background task fetches the nodes through the {@link NodesSniffer} and sets them to the {@link RestClient} instance.
* It is possible to perform sniffing on failure by creating a {@link SniffOnFailureListener} and providing it as an argument to

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.client.sniff;
package org.opensearch.client.sniff;
import org.elasticsearch.client.RestClient;
@ -37,7 +37,7 @@ public final class SnifferBuilder {
private NodesSniffer nodesSniffer;
/**
* Creates a new builder instance by providing the {@link RestClient} that will be used to communicate with elasticsearch
* Creates a new builder instance by providing the {@link RestClient} that will be used to communicate with opensearch
*/
SnifferBuilder(RestClient restClient) {
Objects.requireNonNull(restClient, "restClient cannot be null");
@ -69,8 +69,8 @@ public final class SnifferBuilder {
}
/**
* Sets the {@link NodesSniffer} to be used to read hosts. A default instance of {@link ElasticsearchNodesSniffer}
* is created when not provided. This method can be used to change the configuration of the {@link ElasticsearchNodesSniffer},
* Sets the {@link NodesSniffer} to be used to read hosts. A default instance of {@link OpenSearchNodesSniffer}
* is created when not provided. This method can be used to change the configuration of the {@link OpenSearchNodesSniffer},
 * or to provide a different implementation (e.g. in case hosts need to be taken from a different source).
*/
public SnifferBuilder setNodesSniffer(NodesSniffer nodesSniffer) {
@ -84,7 +84,7 @@ public final class SnifferBuilder {
*/
public Sniffer build() {
if (nodesSniffer == null) {
this.nodesSniffer = new ElasticsearchNodesSniffer(restClient);
this.nodesSniffer = new OpenSearchNodesSniffer(restClient);
}
return new Sniffer(restClient, nodesSniffer, sniffIntervalMillis, sniffAfterFailureDelayMillis);
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.client.sniff;
package org.opensearch.client.sniff;
import org.apache.http.HttpHost;
import org.elasticsearch.client.Node;

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.client.sniff;
package org.opensearch.client.sniff;
import com.fasterxml.jackson.core.JsonFactory;
import org.apache.http.HttpEntity;
@ -27,7 +27,7 @@ import org.apache.http.entity.InputStreamEntity;
import org.elasticsearch.client.Node;
import org.elasticsearch.client.Node.Roles;
import org.elasticsearch.client.RestClientTestCase;
import org.elasticsearch.client.sniff.ElasticsearchNodesSniffer.Scheme;
import org.opensearch.client.sniff.OpenSearchNodesSniffer.Scheme;
import java.io.IOException;
import java.io.InputStream;
@ -46,9 +46,9 @@ import static org.junit.Assert.assertThat;
/**
* Test parsing the response from the {@code /_nodes/http} API from fixed
* versions of Elasticsearch.
* versions of OpenSearch.
*/
public class ElasticsearchNodesSnifferParseTests extends RestClientTestCase {
public class OpenSearchNodesSnifferParseTests extends RestClientTestCase {
private void checkFile(String file, Node... expected) throws IOException {
InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream(file);
@ -57,7 +57,7 @@ public class ElasticsearchNodesSnifferParseTests extends RestClientTestCase {
}
try {
HttpEntity entity = new InputStreamEntity(in, ContentType.APPLICATION_JSON);
List<Node> nodes = ElasticsearchNodesSniffer.readHosts(entity, Scheme.HTTP, new JsonFactory());
List<Node> nodes = OpenSearchNodesSniffer.readHosts(entity, Scheme.HTTP, new JsonFactory());
/*
* Use these assertions because the error messages are nicer
* than hasItems and we know the results are in order because
@ -124,7 +124,7 @@ public class ElasticsearchNodesSnifferParseTests extends RestClientTestCase {
InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("es6_nodes_publication_address_format.json");
HttpEntity entity = new InputStreamEntity(in, ContentType.APPLICATION_JSON);
List<Node> nodes = ElasticsearchNodesSniffer.readHosts(entity, Scheme.HTTP, new JsonFactory());
List<Node> nodes = OpenSearchNodesSniffer.readHosts(entity, Scheme.HTTP, new JsonFactory());
assertEquals("127.0.0.1", nodes.get(0).getHost().getHostName());
assertEquals(9200, nodes.get(0).getHost().getPort());
@ -135,7 +135,7 @@ public class ElasticsearchNodesSnifferParseTests extends RestClientTestCase {
InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("es7_nodes_publication_address_format.json");
HttpEntity entity = new InputStreamEntity(in, ContentType.APPLICATION_JSON);
List<Node> nodes = ElasticsearchNodesSniffer.readHosts(entity, Scheme.HTTP, new JsonFactory());
List<Node> nodes = OpenSearchNodesSniffer.readHosts(entity, Scheme.HTTP, new JsonFactory());
assertEquals("elastic.test", nodes.get(0).getHost().getHostName());
assertEquals(9200, nodes.get(0).getHost().getPort());

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.client.sniff;
package org.opensearch.client.sniff;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
@ -61,17 +61,17 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
public class ElasticsearchNodesSnifferTests extends RestClientTestCase {
public class OpenSearchNodesSnifferTests extends RestClientTestCase {
private int sniffRequestTimeout;
private ElasticsearchNodesSniffer.Scheme scheme;
private OpenSearchNodesSniffer.Scheme scheme;
private SniffResponse sniffResponse;
private HttpServer httpServer;
@Before
public void startHttpServer() throws IOException {
this.sniffRequestTimeout = RandomNumbers.randomIntBetween(getRandom(), 1000, 10000);
this.scheme = RandomPicks.randomFrom(getRandom(), ElasticsearchNodesSniffer.Scheme.values());
this.scheme = RandomPicks.randomFrom(getRandom(), OpenSearchNodesSniffer.Scheme.values());
if (rarely()) {
this.sniffResponse = SniffResponse.buildFailure();
} else {
@ -88,7 +88,7 @@ public class ElasticsearchNodesSnifferTests extends RestClientTestCase {
public void testConstructorValidation() throws IOException {
try {
new ElasticsearchNodesSniffer(null, 1, ElasticsearchNodesSniffer.Scheme.HTTP);
new OpenSearchNodesSniffer(null, 1, OpenSearchNodesSniffer.Scheme.HTTP);
fail("should have failed");
} catch(NullPointerException e) {
assertEquals("restClient cannot be null", e.getMessage());
@ -96,14 +96,14 @@ public class ElasticsearchNodesSnifferTests extends RestClientTestCase {
HttpHost httpHost = new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort());
try (RestClient restClient = RestClient.builder(httpHost).build()) {
try {
new ElasticsearchNodesSniffer(restClient, 1, null);
new OpenSearchNodesSniffer(restClient, 1, null);
fail("should have failed");
} catch (NullPointerException e) {
assertEquals(e.getMessage(), "scheme cannot be null");
}
try {
new ElasticsearchNodesSniffer(restClient, RandomNumbers.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0),
ElasticsearchNodesSniffer.Scheme.HTTP);
new OpenSearchNodesSniffer(restClient, RandomNumbers.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0),
OpenSearchNodesSniffer.Scheme.HTTP);
fail("should have failed");
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "sniffRequestTimeoutMillis must be greater than 0");
@ -114,7 +114,7 @@ public class ElasticsearchNodesSnifferTests extends RestClientTestCase {
public void testSniffNodes() throws IOException {
HttpHost httpHost = new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort());
try (RestClient restClient = RestClient.builder(httpHost).build()) {
ElasticsearchNodesSniffer sniffer = new ElasticsearchNodesSniffer(restClient, sniffRequestTimeout, scheme);
OpenSearchNodesSniffer sniffer = new OpenSearchNodesSniffer(restClient, sniffRequestTimeout, scheme);
try {
List<Node> sniffedNodes = sniffer.sniff();
if (sniffResponse.isFailure) {
@ -171,7 +171,7 @@ public class ElasticsearchNodesSnifferTests extends RestClientTestCase {
}
}
private static SniffResponse buildSniffResponse(ElasticsearchNodesSniffer.Scheme scheme) throws IOException {
private static SniffResponse buildSniffResponse(OpenSearchNodesSniffer.Scheme scheme) throws IOException {
int numNodes = RandomNumbers.randomIntBetween(getRandom(), 1, 5);
List<Node> nodes = new ArrayList<>(numNodes);
JsonFactory jsonFactory = new JsonFactory();
@ -179,7 +179,7 @@ public class ElasticsearchNodesSnifferTests extends RestClientTestCase {
JsonGenerator generator = jsonFactory.createGenerator(writer);
generator.writeStartObject();
if (getRandom().nextBoolean()) {
generator.writeStringField("cluster_name", "elasticsearch");
generator.writeStringField("cluster_name", "opensearch");
}
if (getRandom().nextBoolean()) {
generator.writeObjectFieldStart("bogus_object");

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.client.sniff;
package org.opensearch.client.sniff;
import org.apache.http.HttpHost;
import org.elasticsearch.client.Node;

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.client.sniff;
package org.opensearch.client.sniff;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import org.apache.http.HttpHost;

View File

@ -17,14 +17,14 @@
* under the License.
*/
package org.elasticsearch.client.sniff;
package org.opensearch.client.sniff;
import org.apache.http.HttpHost;
import org.elasticsearch.client.Node;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientTestCase;
import org.elasticsearch.client.sniff.Sniffer.DefaultScheduler;
import org.elasticsearch.client.sniff.Sniffer.Scheduler;
import org.opensearch.client.sniff.Sniffer.DefaultScheduler;
import org.opensearch.client.sniff.Sniffer.Scheduler;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

View File

@ -17,15 +17,15 @@
* under the License.
*/
package org.elasticsearch.client.sniff.documentation;
package org.opensearch.client.sniff.documentation;
import org.apache.http.HttpHost;
import org.elasticsearch.client.Node;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.sniff.ElasticsearchNodesSniffer;
import org.elasticsearch.client.sniff.NodesSniffer;
import org.elasticsearch.client.sniff.SniffOnFailureListener;
import org.elasticsearch.client.sniff.Sniffer;
import org.opensearch.client.sniff.OpenSearchNodesSniffer;
import org.opensearch.client.sniff.NodesSniffer;
import org.opensearch.client.sniff.SniffOnFailureListener;
import org.opensearch.client.sniff.Sniffer;
import java.io.IOException;
import java.util.List;
@ -94,10 +94,10 @@ public class SnifferDocumentation {
RestClient restClient = RestClient.builder(
new HttpHost("localhost", 9200, "http"))
.build();
NodesSniffer nodesSniffer = new ElasticsearchNodesSniffer(
NodesSniffer nodesSniffer = new OpenSearchNodesSniffer(
restClient,
ElasticsearchNodesSniffer.DEFAULT_SNIFF_REQUEST_TIMEOUT,
ElasticsearchNodesSniffer.Scheme.HTTPS);
OpenSearchNodesSniffer.DEFAULT_SNIFF_REQUEST_TIMEOUT,
OpenSearchNodesSniffer.Scheme.HTTPS);
Sniffer sniffer = Sniffer.builder(restClient)
.setNodesSniffer(nodesSniffer).build();
//end::sniffer-https
@ -107,10 +107,10 @@ public class SnifferDocumentation {
RestClient restClient = RestClient.builder(
new HttpHost("localhost", 9200, "http"))
.build();
NodesSniffer nodesSniffer = new ElasticsearchNodesSniffer(
NodesSniffer nodesSniffer = new OpenSearchNodesSniffer(
restClient,
TimeUnit.SECONDS.toMillis(5),
ElasticsearchNodesSniffer.Scheme.HTTP);
OpenSearchNodesSniffer.Scheme.HTTP);
Sniffer sniffer = Sniffer.builder(restClient)
.setNodesSniffer(nodesSniffer).build();
//end::sniff-request-timeout

View File

@ -1,5 +1,5 @@
{
"cluster_name": "elasticsearch",
"cluster_name": "opensearch",
"nodes": {
"qr-SOrELSaGW8SlU8nflBw": {
"name": "m1",

View File

@ -4,7 +4,7 @@
"successful": 8,
"failed": 0
},
"cluster_name": "elasticsearch",
"cluster_name": "opensearch",
"nodes": {
"0S4r3NurTYSFSb8R9SxwWA": {
"name": "m1",

View File

@ -4,7 +4,7 @@
"successful": 8,
"failed": 0
},
"cluster_name": "elasticsearch",
"cluster_name": "opensearch",
"nodes": {
"ikXK_skVTfWkhONhldnbkw": {
"name": "m1",

View File

@ -4,7 +4,7 @@
"successful": 8,
"failed": 0
},
"cluster_name": "elasticsearch",
"cluster_name": "opensearch",
"nodes": {
"ikXK_skVTfWkhONhldnbkw": {
"name": "m1",

View File

@ -2,14 +2,14 @@
# Recreates the v_nodes_http.json files in this directory. This is
# meant to be an "every once in a while" thing that we do only when
# we want to add a new version of Elasticsearch or configure the
# we want to add a new version of OpenSearch or configure the
# nodes differently. That is why we don't do this in gradle. It also
# allows us to play fast and loose with error handling. If something
# goes wrong you have to manually clean up which is good because it
# leaves around the kinds of things that we need to debug the failure.
# I built this file so the next time I have to regenerate these
# v_nodes_http.json files I won't have to reconfigure Elasticsearch
# v_nodes_http.json files I won't have to reconfigure OpenSearch
# from scratch. While I was at it I took the time to make sure that
# when we do rebuild the files they don't jump around too much. That
# way the diffs are smaller.
@ -40,11 +40,11 @@ function do_version() {
mkdir -p ${version}
pushd ${version} >> /dev/null
tar xf ../elasticsearch-${version}.tar.gz
tar xf ../opensearch-${version}.tar.gz
local http_port=9200
for node in ${nodes}; do
mkdir ${node}
cp -r elasticsearch-${version}/* ${node}
cp -r opensearch-${version}/* ${node}
local master=$([[ "$node" =~ ^m.* ]] && echo true || echo false)
local data=$([[ "$node" =~ ^d.* ]] && echo true || echo false)
# m2 is always master and data for these test just so we have a node like that
@ -52,7 +52,7 @@ function do_version() {
local attr=$([ ${version} == '2.0.0' ] && echo '' || echo '.attr')
local transport_port=$((http_port+100))
cat >> ${node}/config/elasticsearch.yml << __ES_YML
cat >> ${node}/config/opensearch.yml << __ES_YML
node.name: ${node}
node.master: ${master}
node.data: ${data}
@ -70,7 +70,7 @@ __ES_YML
fi
echo "starting ${version}/${node}..."
${node}/bin/elasticsearch -d -p ${node}/pidfile
${node}/bin/opensearch -d -p ${node}/pidfile
((http_port++))
done

View File

@ -4,7 +4,7 @@
"successful": 8,
"failed": 0
},
"cluster_name": "elasticsearch",
"cluster_name": "opensearch",
"nodes": {
"ikXK_skVTfWkhONhldnbkw": {
"name": "m1",

View File

@ -4,7 +4,7 @@
"successful": 8,
"failed": 0
},
"cluster_name": "elasticsearch",
"cluster_name": "opensearch",
"nodes": {
"ikXK_skVTfWkhONhldnbkw": {
"name": "m1",

View File

@ -1,6 +1,6 @@
`*_node_http.json` contains files created by spinning up toy clusters with a
few nodes in different configurations locally at various versions. They are
for testing `ElasticsearchNodesSniffer` against different versions of
Elasticsearch.
for testing `OpenSearchNodesSniffer` against different versions of
OpenSearch.
See create_test_nodes_info.bash for how to create these.

View File

@ -31,7 +31,7 @@ import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.WarningsHandler;
import org.elasticsearch.client.sniff.ElasticsearchNodesSniffer;
import org.opensearch.client.sniff.OpenSearchNodesSniffer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.PathUtils;
@ -431,10 +431,10 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
* {@link RestClientBuilder} for a client with that metadata.
*/
protected final RestClientBuilder getClientBuilderWithSniffedHosts() throws IOException {
ElasticsearchNodesSniffer.Scheme scheme =
ElasticsearchNodesSniffer.Scheme.valueOf(getProtocol().toUpperCase(Locale.ROOT));
ElasticsearchNodesSniffer sniffer = new ElasticsearchNodesSniffer(
adminClient(), ElasticsearchNodesSniffer.DEFAULT_SNIFF_REQUEST_TIMEOUT, scheme);
OpenSearchNodesSniffer.Scheme scheme =
OpenSearchNodesSniffer.Scheme.valueOf(getProtocol().toUpperCase(Locale.ROOT));
OpenSearchNodesSniffer sniffer = new OpenSearchNodesSniffer(
adminClient(), OpenSearchNodesSniffer.DEFAULT_SNIFF_REQUEST_TIMEOUT, scheme);
RestClientBuilder builder = RestClient.builder(sniffer.sniff().toArray(new Node[0]));
configureClient(builder, restClientSettings());
return builder;