Merge pull request #2976 from bratwurzt/issue_2975

An empty or null username and password in ElasticsearchHibernatePropertiesBuilder throws an exception, and configuring multiple Elasticsearch hosts is not possible.
Tadgh 2021-09-10 10:55:05 -04:00 committed by GitHub
commit 3e55f47a03
12 changed files with 91 additions and 78 deletions
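In short, the commit replaces the single host/port pair with a comma-separated `hosts` string and makes credentials optional. A minimal sketch of the updated factory call (the `es-node*` host names below are illustrative placeholders, not part of the commit):

```java
// Sketch only: host names are placeholders; the protocol is a separate argument
// and must not be embedded in the hosts string.
RestHighLevelClient client = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(
    "http",                        // protocol
    "es-node1:9200,es-node2:9200", // one or more host:port pairs, comma-separated
    null,                          // username may be null or blank ...
    null);                         // ... in which case no credentials provider is configured
```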

View File

@ -31,12 +31,11 @@ In addition, the Elasticsearch client service, `ElasticsearchSvcImpl` will need
```java
@Bean()
public ElasticsearchSvcImpl elasticsearchSvc() {
- String elasticsearchHost = "localhost";
- String elasticsearchUserId = "elastic";
+ String elasticsearchHost = "localhost:9200";
+ String elasticsearchUsername = "elastic";
String elasticsearchPassword = "changeme";
- int elasticsearchPort = 9301;
- return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUserId, elasticsearchPassword);
+ return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchUsername, elasticsearchPassword);
}
```
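Because the port is now part of the host string, the same bean can point at several nodes. The following variant is an editorial sketch with placeholder host names, not part of the commit:

```java
@Bean()
public ElasticsearchSvcImpl elasticsearchSvc() {
    // Hypothetical multi-node cluster; substitute real host:port pairs.
    String elasticsearchHosts = "es-node1:9200,es-node2:9200";
    // Username and password may be null when the cluster requires no authentication.
    return new ElasticsearchSvcImpl(elasticsearchHosts, null, null);
}
```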

View File

@ -1087,7 +1087,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
ourLog.trace("Performing count");
ISearchBuilder sb = newSearchBuilder();
Iterator<Long> countIterator = sb.createCountQuery(myParams, mySearch.getUuid(), myRequest, myRequestPartitionId);
Long count = countIterator.hasNext() ? countIterator.next() : 0;
Long count = countIterator.hasNext() ? countIterator.next() : 0L;
ourLog.trace("Got count {}", count);
TransactionTemplate txTemplate = new TransactionTemplate(myManagedTxManager);

View File

@ -37,6 +37,7 @@ import org.hibernate.search.backend.elasticsearch.cfg.ElasticsearchIndexSettings
import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
import org.slf4j.Logger;
+ import javax.annotation.Nullable;
import java.io.IOException;
import java.util.Arrays;
import java.util.Properties;
@ -49,13 +50,13 @@ import static org.slf4j.LoggerFactory.getLogger;
* FHIR JPA server. This class also injects a starter template into the ES cluster.
*/
public class ElasticsearchHibernatePropertiesBuilder {
- private static final Logger ourLog = getLogger(ElasticsearchHibernatePropertiesBuilder.class);
+ private static final Logger ourLog = getLogger(ElasticsearchHibernatePropertiesBuilder.class);
- private IndexStatus myRequiredIndexStatus = IndexStatus.YELLOW.YELLOW;
+ private IndexStatus myRequiredIndexStatus = IndexStatus.YELLOW;
private SchemaManagementStrategyName myIndexSchemaManagementStrategy = SchemaManagementStrategyName.CREATE;
- private String myRestUrl;
+ private String myHosts;
private String myUsername;
private String myPassword;
private long myIndexManagementWaitTimeoutMillis = 10000L;
@ -77,11 +78,8 @@ public class ElasticsearchHibernatePropertiesBuilder {
// the below properties are used for ElasticSearch integration
theProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "elasticsearch");
theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.ANALYSIS_CONFIGURER), HapiElasticsearchAnalysisConfigurer.class.getName());
- theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.HOSTS), myRestUrl);
+ theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.HOSTS), myHosts);
theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.PROTOCOL), myProtocol);
if (StringUtils.isNotBlank(myUsername)) {
@ -102,8 +100,7 @@ public class ElasticsearchHibernatePropertiesBuilder {
//This tells elasticsearch to use our custom index naming strategy.
theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LAYOUT_STRATEGY), IndexNamePrefixLayoutStrategy.class.getName());
- injectStartupTemplate(myProtocol, myRestUrl, myUsername, myPassword);
+ injectStartupTemplate(myProtocol, myHosts, myUsername, myPassword);
}
public ElasticsearchHibernatePropertiesBuilder setRequiredIndexStatus(IndexStatus theRequiredIndexStatus) {
@ -111,11 +108,8 @@ public class ElasticsearchHibernatePropertiesBuilder {
return this;
}
- public ElasticsearchHibernatePropertiesBuilder setRestUrl(String theRestUrl) {
- if (theRestUrl.contains("://")) {
- throw new ConfigurationException("Elasticsearch URL cannot include a protocol, that is a separate property. Remove http:// or https:// from this URL.");
- }
- myRestUrl = theRestUrl;
+ public ElasticsearchHibernatePropertiesBuilder setHosts(String hosts) {
+ myHosts = hosts;
return this;
}
@ -150,18 +144,13 @@ public class ElasticsearchHibernatePropertiesBuilder {
* TODO GGG HS: In HS6.1, we should have a native way of performing index settings manipulation at bootstrap time, so this should
* eventually be removed in favour of whatever solution they come up with.
*/
- void injectStartupTemplate(String theProtocol, String theHostAndPort, String theUsername, String thePassword) {
+ void injectStartupTemplate(String theProtocol, String theHosts, @Nullable String theUsername, @Nullable String thePassword) {
PutIndexTemplateRequest ngramTemplate = new PutIndexTemplateRequest("ngram-template")
.patterns(Arrays.asList("*resourcetable-*", "*termconcept-*"))
.settings(Settings.builder().put("index.max_ngram_diff", 50));
- int colonIndex = theHostAndPort.indexOf(":");
- String host = theHostAndPort.substring(0, colonIndex);
- Integer port = Integer.valueOf(theHostAndPort.substring(colonIndex + 1));
- String qualifiedHost = theProtocol + "://" + host;
try {
- RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(qualifiedHost, port, theUsername, thePassword);
+ RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(theProtocol, theHosts, theUsername, thePassword);
ourLog.info("Adding starter template for large ngram diffs");
AcknowledgedResponse acknowledgedResponse = elasticsearchHighLevelRestClient.indices().putTemplate(ngramTemplate, RequestOptions.DEFAULT);
assert acknowledgedResponse.isAcknowledged();
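Putting the builder changes together, a typical wiring now passes a comma-separated host list and optional credentials. A minimal usage sketch under those assumptions (placeholder values, mirroring the test configuration further down; note that `apply()` also tries to push the ngram starter template to the cluster):

```java
// Sketch only: host values and timeout are illustrative.
Properties hibernateProperties = new Properties();
new ElasticsearchHibernatePropertiesBuilder()
    .setProtocol("http")
    .setHosts("localhost:9200,localhost:9201") // host:port pairs, no protocol prefix
    .setUsername(null)                         // blank/null credentials are simply skipped
    .setPassword(null)
    .setRequiredIndexStatus(IndexStatus.YELLOW)
    .setIndexSchemaManagementStrategy(SchemaManagementStrategyName.CREATE)
    .setIndexManagementWaitTimeoutMillis(10000)
    .apply(hibernateProperties);
```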

View File

@ -20,6 +20,8 @@ package ca.uhn.fhir.jpa.search.lastn;
* #L%
*/
+ import ca.uhn.fhir.context.ConfigurationException;
+ import org.apache.commons.lang3.StringUtils;
import org.apache.http.Header;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
@ -27,45 +29,51 @@ import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.message.BasicHeader;
+ import org.elasticsearch.client.Node;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;
+ import javax.annotation.Nullable;
+ import java.util.Arrays;
+ import java.util.List;
+ import java.util.stream.Collectors;
public class ElasticsearchRestClientFactory {
- private static String determineScheme(String theHostname) {
- int schemeIdx = theHostname.indexOf("://");
- if (schemeIdx > 0) {
- return theHostname.substring(0, schemeIdx);
- } else {
- return "http";
+ static public RestHighLevelClient createElasticsearchHighLevelRestClient(
+ String protocol, String hosts, @Nullable String theUsername, @Nullable String thePassword) {
+ if (hosts.contains("://")) {
+ throw new ConfigurationException("Elasticsearch URLs cannot include a protocol, that is a separate property. Remove http:// or https:// from this URL.");
}
}
- private static String stripHostOfScheme(String theHostname) {
- int schemeIdx = theHostname.indexOf("://");
- if (schemeIdx > 0) {
- return theHostname.substring(schemeIdx + 3);
- } else {
- return theHostname;
+ String[] hostArray = hosts.split(",");
+ List<Node> clientNodes = Arrays.stream(hostArray)
+ .map(String::trim)
+ .filter(s -> s.contains(":"))
+ .map(h -> {
+ int colonIndex = h.indexOf(":");
+ String host = h.substring(0, colonIndex);
+ int port = Integer.parseInt(h.substring(colonIndex + 1));
+ return new Node(new HttpHost(host, port, protocol));
+ })
+ .collect(Collectors.toList());
+ if (hostArray.length != clientNodes.size()) {
+ throw new ConfigurationException("Elasticsearch URLs have to contain ':' as a host:port separator. Example: localhost:9200,localhost:9201,localhost:9202");
}
+ RestClientBuilder clientBuilder = RestClient.builder(clientNodes.toArray(new Node[0]));
+ if (StringUtils.isNotBlank(theUsername) && StringUtils.isNotBlank(thePassword)) {
+ final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
+ credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(theUsername, thePassword));
+ clientBuilder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder
+ .setDefaultCredentialsProvider(credentialsProvider));
}
+ Header[] defaultHeaders = new Header[]{new BasicHeader("Content-Type", "application/json")};
+ clientBuilder.setDefaultHeaders(defaultHeaders);
+ return new RestHighLevelClient(clientBuilder);
}
- static public RestHighLevelClient createElasticsearchHighLevelRestClient(String theHostname, int thePort, String theUsername, String thePassword) {
- final CredentialsProvider credentialsProvider =
- new BasicCredentialsProvider();
- credentialsProvider.setCredentials(AuthScope.ANY,
- new UsernamePasswordCredentials(theUsername, thePassword));
- RestClientBuilder clientBuilder = RestClient.builder(
- new HttpHost(stripHostOfScheme(theHostname), thePort, determineScheme(theHostname)))
- .setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder
- .setDefaultCredentialsProvider(credentialsProvider));
- Header[] defaultHeaders = new Header[]{new BasicHeader("Content-Type", "application/json")};
- clientBuilder.setDefaultHeaders(defaultHeaders);
- return new RestHighLevelClient(clientBuilder);
}
}
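The rewritten factory enforces two rules on the `hosts` argument: no protocol prefix, and every entry must be a `host:port` pair. A short illustration of the accepted form and both rejected forms (host names are placeholders):

```java
// Accepted: comma-separated host:port pairs; credentials are only applied when non-blank.
RestHighLevelClient client = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(
    "https", "es1.example.com:9200,es2.example.com:9200", "elastic", "changeme");

// Rejected (ConfigurationException): protocol embedded in the hosts string.
// ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient("http", "http://localhost:9200", null, null);

// Rejected (ConfigurationException): an entry without a ':' host:port separator.
// ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient("http", "localhost_9200", null, null);
```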

View File

@ -68,11 +68,11 @@ import org.elasticsearch.search.aggregations.bucket.terms.ParsedTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.ParsedTopHits;
- import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
+ import javax.annotation.Nullable;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
@ -125,13 +125,13 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
private PartitionSettings myPartitionSettings;
//This constructor used to inject a dummy partitionsettings in test.
- public ElasticsearchSvcImpl(PartitionSettings thePartitionSetings, String theHostname, int thePort, String theUsername, String thePassword) {
- this(theHostname, thePort, theUsername, thePassword);
+ public ElasticsearchSvcImpl(PartitionSettings thePartitionSetings, String theHostname, @Nullable String theUsername, @Nullable String thePassword) {
+ this(theHostname, theUsername, thePassword);
this.myPartitionSettings = thePartitionSetings;
}
- public ElasticsearchSvcImpl(String theHostname, int thePort, String theUsername, String thePassword) {
- myRestHighLevelClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(theHostname, thePort, theUsername, thePassword);
+ public ElasticsearchSvcImpl(String theHostname, @Nullable String theUsername, @Nullable String thePassword) {
+ myRestHighLevelClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient("http", theHostname, theUsername, thePassword);
try {
createObservationIndexIfMissing();

View File

@ -128,7 +128,7 @@ public class ElasticsearchWithPrefixConfig {
.settings(Settings.builder().put("index.max_ngram_diff", 50));
try {
- RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient("http://" + host, httpPort, "", "");
+ RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient("http", host + ":" + httpPort, "", "");
AcknowledgedResponse acknowledgedResponse = elasticsearchHighLevelRestClient.indices().putTemplate(ngramTemplate, RequestOptions.DEFAULT);
assert acknowledgedResponse.isAcknowledged();
} catch (IOException theE) {

View File

@ -41,7 +41,7 @@ public class TestR4ConfigWithElasticSearch extends TestR4Config {
.setIndexSchemaManagementStrategy(SchemaManagementStrategyName.CREATE)
.setIndexManagementWaitTimeoutMillis(10000)
.setRequiredIndexStatus(IndexStatus.YELLOW)
- .setRestUrl(host+ ":" + httpPort)
+ .setHosts(host + ":" + httpPort)
.setProtocol("http")
.setUsername("")
.setPassword("")

View File

@ -21,7 +21,7 @@ public class TestR4ConfigWithElasticsearchClient extends TestR4ConfigWithElastic
public ElasticsearchSvcImpl myElasticsearchSvc() {
int elasticsearchPort = elasticContainer().getMappedPort(9200);
String host = elasticContainer().getHost();
- return new ElasticsearchSvcImpl(host, elasticsearchPort, "", "");
+ return new ElasticsearchSvcImpl(host + ":" + elasticsearchPort, null, null);
}
@PreDestroy

View File

@ -33,7 +33,7 @@ public class ElasticsearchPrefixTest {
public void test() throws IOException {
//Given
RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(
- "http://" + elasticsearchContainer.getHost(), elasticsearchContainer.getMappedPort(9200), "", "");
+ "http", elasticsearchContainer.getHost() + ":" + elasticsearchContainer.getMappedPort(9200), "", "");
//When
RestClient lowLevelClient = elasticsearchHighLevelRestClient.getLowLevelClient();

View File

@ -23,17 +23,11 @@ class ElasticsearchHibernatePropertiesBuilderTest {
ElasticsearchHibernatePropertiesBuilder myPropertiesBuilder = spy(ElasticsearchHibernatePropertiesBuilder.class);
- @BeforeEach
- public void prepMocks() {
- //ensures we don't try to reach out to a real ES server on apply.
- doNothing().when(myPropertiesBuilder).injectStartupTemplate(any(), any(), any(), any());
- }
@Test
- public void testRestUrlCannotContainProtocol() {
+ public void testHostsCannotContainProtocol() {
String host = "localhost:9200";
String protocolHost = "https://" + host;
- String failureMessage = "Elasticsearch URL cannot include a protocol, that is a separate property. Remove http:// or https:// from this URL.";
+ String failureMessage = "Elasticsearch URLs cannot include a protocol, that is a separate property. Remove http:// or https:// from this URL.";
myPropertiesBuilder
.setProtocol("https")
@ -42,19 +36,42 @@ class ElasticsearchHibernatePropertiesBuilderTest {
//SUT
try {
- myPropertiesBuilder.setRestUrl(protocolHost);
+ myPropertiesBuilder.setHosts(protocolHost)
+ .apply(new Properties());
fail();
} catch (ConfigurationException e ) {
assertThat(e.getMessage(), is(equalTo(failureMessage)));
}
+ doNothing().when(myPropertiesBuilder).injectStartupTemplate(any(), any(), any(), any());
Properties properties = new Properties();
myPropertiesBuilder
- .setRestUrl(host)
+ .setHosts(host)
.apply(properties);
assertThat(properties.getProperty(BackendSettings.backendKey(ElasticsearchBackendSettings.HOSTS)), is(equalTo(host)));
}
+ @Test
+ public void testHostsValueValidation() {
+ String host = "localhost_9200,localhost:9201,localhost:9202";
+ String failureMessage = "Elasticsearch URLs have to contain ':' as a host:port separator. Example: localhost:9200,localhost:9201,localhost:9202";
+ myPropertiesBuilder
+ .setProtocol("https")
+ .setHosts(host)
+ .setUsername("whatever")
+ .setPassword("whatever");
+ //SUT
+ try {
+ myPropertiesBuilder
+ .apply(new Properties());
+ fail();
+ } catch (ConfigurationException e ) {
+ assertThat(e.getMessage(), is(equalTo(failureMessage)));
+ }
+ }
}

View File

@ -68,7 +68,7 @@ public class LastNElasticsearchSvcMultipleObservationsIT {
public void before() throws IOException {
PartitionSettings partitionSettings = new PartitionSettings();
partitionSettings.setPartitioningEnabled(false);
- elasticsearchSvc = new ElasticsearchSvcImpl(partitionSettings, elasticsearchContainer.getHost(), elasticsearchContainer.getMappedPort(9200), "", "");
+ elasticsearchSvc = new ElasticsearchSvcImpl(partitionSettings, elasticsearchContainer.getHost() + ":" + elasticsearchContainer.getMappedPort(9200), null, null);
if (!indexLoaded) {
createMultiplePatientsAndObservations();

View File

@ -97,7 +97,7 @@ public class LastNElasticsearchSvcSingleObservationIT {
public void before() {
PartitionSettings partitionSettings = new PartitionSettings();
partitionSettings.setPartitioningEnabled(false);
- elasticsearchSvc = new ElasticsearchSvcImpl(partitionSettings, elasticsearchContainer.getHost(), elasticsearchContainer.getMappedPort(9200), "", "");
+ elasticsearchSvc = new ElasticsearchSvcImpl(partitionSettings, elasticsearchContainer.getHost() + ":" + elasticsearchContainer.getMappedPort(9200), "", "");
}
@AfterEach