Merge branch 'master' into 2975-attribution

Tadgh 2021-09-10 13:05:38 -04:00 committed by GitHub
commit 86ab77446e
21 changed files with 250 additions and 119 deletions

View File

@@ -282,7 +282,7 @@ public abstract class BaseApp {
 	}
 
 	private Optional<BaseCommand> parseCommand(String[] theArgs) {
-		Optional<BaseCommand> commandOpt = getNextCommand(theArgs);
+		Optional<BaseCommand> commandOpt = getNextCommand(theArgs, 0);
 		if (! commandOpt.isPresent()) {
 			String message = "Unrecognized command: " + ansi().bold().fg(Ansi.Color.RED) + theArgs[0] + ansi().boldOff().fg(Ansi.Color.WHITE);
@@ -294,8 +294,8 @@ public abstract class BaseApp {
 		return commandOpt;
 	}
 
-	private Optional<BaseCommand> getNextCommand(String[] theArgs) {
-		return ourCommands.stream().filter(cmd -> cmd.getCommandName().equals(theArgs[0])).findFirst();
+	private Optional<BaseCommand> getNextCommand(String[] theArgs, int thePosition) {
+		return ourCommands.stream().filter(cmd -> cmd.getCommandName().equals(theArgs[thePosition])).findFirst();
 	}
 
 	private void processHelp(String[] theArgs) {
@@ -303,7 +303,7 @@ public abstract class BaseApp {
 			logUsage();
 			return;
 		}
-		Optional<BaseCommand> commandOpt = getNextCommand(theArgs);
+		Optional<BaseCommand> commandOpt = getNextCommand(theArgs, 1);
 		if (! commandOpt.isPresent()) {
 			String message = "Unknown command: " + theArgs[1];
 			System.err.println(message);
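
For context on the fix above: in `smileutil help create-package`, the command name sits at index 1 of the argument array, while index 0 is the literal word "help". A minimal standalone sketch (using a stand-in command list, not HAPI's actual registry) of why the new position parameter matters:

import java.util.Arrays;
import java.util.List;
import java.util.Optional;

public class NextCommandSketch {
	// Stand-in for the ourCommands registry; the names are illustrative only.
	private static final List<String> COMMANDS = Arrays.asList("create-package", "upload-terminology");

	static Optional<String> getNextCommand(String[] theArgs, int thePosition) {
		return COMMANDS.stream().filter(cmd -> cmd.equals(theArgs[thePosition])).findFirst();
	}

	public static void main(String[] args) {
		String[] helpArgs = {"help", "create-package"};
		// Old behavior always matched against theArgs[0] ("help"), which is not
		// a registered command, hence the spurious "Unknown command".
		System.out.println(getNextCommand(helpArgs, 0)); // Optional.empty
		// New behavior: processHelp() passes position 1, the actual command name.
		System.out.println(getNextCommand(helpArgs, 1)); // Optional[create-package]
	}
}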

View File

@@ -0,0 +1,31 @@
+package ca.uhn.fhir.cli;
+
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+public class BaseAppTest {
+
+	private final PrintStream standardOut = System.out;
+	private final ByteArrayOutputStream outputStreamCaptor = new ByteArrayOutputStream();
+
+	@BeforeEach
+	public void setUp() {
+		System.setOut(new PrintStream(outputStreamCaptor));
+	}
+
+	@AfterEach
+	public void tearDown() {
+		System.setOut(standardOut);
+	}
+
+	@Test
+	public void testHelpOption() {
+		App.main(new String[]{"help", "create-package"});
+		assertThat(outputStreamCaptor.toString().trim(), containsString("Usage"));
+	}
+}

View File

@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 2973
+title: "Previously, the CLI command `smileutil help {command}` returned `Unknown command` instead of the usage for `command`. This has been corrected."

View File

@@ -0,0 +1,3 @@
+---
+type: fix
+title: "Fixed a bug where a batch bundle creating two entries in parallel with identical tags would fail."

View File

@@ -31,12 +31,11 @@ In addition, the Elasticsearch client service, `ElasticsearchSvcImpl` will need
 ```java
 @Bean()
 public ElasticsearchSvcImpl elasticsearchSvc() {
-	String elasticsearchHost = "localhost";
-	String elasticsearchUserId = "elastic";
+	String elasticsearchHost = "localhost:9200";
+	String elasticsearchUsername = "elastic";
 	String elasticsearchPassword = "changeme";
-	int elasticsearchPort = 9301;
-	return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUserId, elasticsearchPassword);
+	return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchUsername, elasticsearchPassword);
 }
 ```
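
A note on the new host string: the client factory introduced later in this commit splits it on commas, so a multi-node cluster can be configured the same way. A sketch under that assumption (the hostnames are hypothetical):

@Bean()
public ElasticsearchSvcImpl elasticsearchSvc() {
	// Comma-separated host:port pairs, with no protocol prefix;
	// "es-node1"/"es-node2" are placeholder hostnames for illustration.
	String elasticsearchHosts = "es-node1:9200,es-node2:9200";
	return new ElasticsearchSvcImpl(elasticsearchHosts, "elastic", "changeme");
}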

View File

@@ -366,8 +366,8 @@ public abstract class BaseTransactionProcessor {
 		IBase nextRequestEntry = null;
 		for (int i=0; i<requestEntriesSize; i++ ) {
 			nextRequestEntry = requestEntries.get(i);
-			BundleTask bundleTask = new BundleTask(completionLatch, theRequestDetails, responseMap, i, nextRequestEntry, theNestedMode);
-			getTaskExecutor().execute(bundleTask);
+			RetriableBundleTask retriableBundleTask = new RetriableBundleTask(completionLatch, theRequestDetails, responseMap, i, nextRequestEntry, theNestedMode);
+			getTaskExecutor().execute(retriableBundleTask);
 		}
 
 		// waiting for all tasks to be completed
@@ -1683,7 +1683,7 @@ public abstract class BaseTransactionProcessor {
 		return theStatusCode + " " + defaultString(Constants.HTTP_STATUS_NAMES.get(theStatusCode));
 	}
 
-	public class BundleTask implements Runnable {
+	public class RetriableBundleTask implements Runnable {
 
 		private final CountDownLatch myCompletedLatch;
 		private final RequestDetails myRequestDetails;
@@ -1691,20 +1691,19 @@ public abstract class BaseTransactionProcessor {
 		private final Map<Integer, Object> myResponseMap;
 		private final int myResponseOrder;
 		private final boolean myNestedMode;
+		private BaseServerResponseException myLastSeenException;
 
-		protected BundleTask(CountDownLatch theCompletedLatch, RequestDetails theRequestDetails, Map<Integer, Object> theResponseMap, int theResponseOrder, IBase theNextReqEntry, boolean theNestedMode) {
+		protected RetriableBundleTask(CountDownLatch theCompletedLatch, RequestDetails theRequestDetails, Map<Integer, Object> theResponseMap, int theResponseOrder, IBase theNextReqEntry, boolean theNestedMode) {
 			this.myCompletedLatch = theCompletedLatch;
 			this.myRequestDetails = theRequestDetails;
 			this.myNextReqEntry = theNextReqEntry;
 			this.myResponseMap = theResponseMap;
 			this.myResponseOrder = theResponseOrder;
 			this.myNestedMode = theNestedMode;
+			this.myLastSeenException = null;
 		}
 
-		@Override
-		public void run() {
-			BaseServerResponseExceptionHolder caughtEx = new BaseServerResponseExceptionHolder();
-			try {
+		private void processBatchEntry() {
 			IBaseBundle subRequestBundle = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode());
 			myVersionAdapter.addEntry(subRequestBundle, myNextReqEntry);
@@ -1720,22 +1719,45 @@ public abstract class BaseTransactionProcessor {
 				IBase nextResponseBundleFirstEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0);
 				myResponseMap.put(myResponseOrder, nextResponseBundleFirstEntry);
 			}
-			} catch (BaseServerResponseException e) {
-				caughtEx.setException(e);
-			} catch (Throwable t) {
-				ourLog.error("Failure during BATCH sub transaction processing", t);
-				caughtEx.setException(new InternalErrorException(t));
-			}
+		}
+
+		private boolean processBatchEntryWithRetry() {
+			int maxAttempts = 3;
+			for (int attempt = 1;; attempt++) {
+				try {
+					processBatchEntry();
+					return true;
+				} catch (BaseServerResponseException e) {
+					//If we catch a known and structured exception from HAPI, just fail.
+					myLastSeenException = e;
+					return false;
+				} catch (Throwable t) {
+					myLastSeenException = new InternalErrorException(t);
+					//If we have caught a non-tag-storage failure we are unfamiliar with, or we have exceeded max attempts, exit.
+					if (!DaoFailureUtil.isTagStorageFailure(t) || attempt >= maxAttempts) {
+						ourLog.error("Failure during BATCH sub transaction processing", t);
+						return false;
+					}
+				}
+			}
+		}
 
-			if (caughtEx.getException() != null) {
-				// add exception to the response map
-				myResponseMap.put(myResponseOrder, caughtEx);
+		@Override
+		public void run() {
+			boolean success = processBatchEntryWithRetry();
+			if (!success) {
+				populateResponseMapWithLastSeenException();
 			}
 
 			// checking for the parallelism
-			ourLog.debug("processing bacth for {} is completed", myVersionAdapter.getEntryRequestUrl(myNextReqEntry));
+			ourLog.debug("processing batch for {} is completed", myVersionAdapter.getEntryRequestUrl(myNextReqEntry));
 			myCompletedLatch.countDown();
 		}
+
+		private void populateResponseMapWithLastSeenException() {
+			BaseServerResponseExceptionHolder caughtEx = new BaseServerResponseExceptionHolder();
+			caughtEx.setException(myLastSeenException);
+			myResponseMap.put(myResponseOrder, caughtEx);
+		}
 	}
 }
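
The retry policy above, in isolation: an entry is retried only while the failure looks like a tag-storage constraint collision, and only up to three attempts; a structured HAPI exception, or any other failure, fails immediately. A simplified standalone sketch with the HAPI types replaced by stand-ins:

import java.util.function.Predicate;

public class RetrySketch {
	// theEntry stands in for processBatchEntry(); it throws on failure.
	static boolean runWithRetry(Runnable theEntry, Predicate<RuntimeException> theIsRetryable) {
		int maxAttempts = 3;
		for (int attempt = 1; ; attempt++) {
			try {
				theEntry.run();
				return true;
			} catch (RuntimeException e) {
				// Fail fast unless the failure is retryable and attempts remain.
				if (!theIsRetryable.test(e) || attempt >= maxAttempts) {
					return false;
				}
			}
		}
	}
}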

View File

@@ -0,0 +1,18 @@
+package ca.uhn.fhir.jpa.dao;
+
+import org.apache.commons.lang3.StringUtils;
+
+/**
+ * Utility class to help identify classes of failure.
+ */
+public class DaoFailureUtil {
+
+	public static boolean isTagStorageFailure(Throwable t) {
+		if (StringUtils.isBlank(t.getMessage())) {
+			return false;
+		} else {
+			String msg = t.getMessage().toLowerCase();
+			return msg.contains("hfj_tag_def") || msg.contains("hfj_res_tag");
+		}
+	}
+}
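
How the predicate behaves on a few sample inputs (the message texts are hypothetical; the check is a case-insensitive substring match on the two tag table names):

// Assumes ca.uhn.fhir.jpa.dao.DaoFailureUtil, shown above, is on the classpath.
public class DaoFailureUtilSketch {
	public static void main(String[] args) {
		System.out.println(DaoFailureUtil.isTagStorageFailure(
			new RuntimeException("violation of unique constraint HFJ_TAG_DEF"))); // true (case-insensitive)
		System.out.println(DaoFailureUtil.isTagStorageFailure(
			new RuntimeException("connection refused"))); // false
		System.out.println(DaoFailureUtil.isTagStorageFailure(
			new RuntimeException())); // false (blank message)
	}
}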

View File

@@ -25,6 +25,7 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
 import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.jpa.api.model.ResourceVersionConflictResolutionStrategy;
 import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
+import ca.uhn.fhir.jpa.dao.DaoFailureUtil;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
@@ -93,10 +94,9 @@ public class HapiTransactionService {
 			 * known to the system already, they'll both try to create a row in HFJ_TAG_DEF,
 			 * which is the tag definition table. In that case, a constraint error will be
 			 * thrown by one of the client threads, so we auto-retry in order to avoid
-			 * annopying spurious failures for the client.
+			 * annoying spurious failures for the client.
 			 */
-			if (e.getMessage().contains("HFJ_TAG_DEF") || e.getMessage().contains("hfj_tag_def") ||
-				e.getMessage().contains("HFJ_RES_TAG") || e.getMessage().contains("hfj_res_tag")) {
+			if (DaoFailureUtil.isTagStorageFailure(e)) {
 				maxRetries = 3;
 			}

View File

@@ -1087,7 +1087,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
 				ourLog.trace("Performing count");
 				ISearchBuilder sb = newSearchBuilder();
 				Iterator<Long> countIterator = sb.createCountQuery(myParams, mySearch.getUuid(), myRequest, myRequestPartitionId);
-				Long count = countIterator.hasNext() ? countIterator.next() : 0;
+				Long count = countIterator.hasNext() ? countIterator.next() : 0L;
 				ourLog.trace("Got count {}", count);
 
 				TransactionTemplate txTemplate = new TransactionTemplate(myManagedTxManager);

View File

@@ -37,6 +37,7 @@ import org.hibernate.search.backend.elasticsearch.cfg.ElasticsearchIndexSettings
 import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
 import org.slf4j.Logger;
 
+import javax.annotation.Nullable;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Properties;
@@ -52,10 +53,10 @@ public class ElasticsearchHibernatePropertiesBuilder {
 	private static final Logger ourLog = getLogger(ElasticsearchHibernatePropertiesBuilder.class);
 
-	private IndexStatus myRequiredIndexStatus = IndexStatus.YELLOW.YELLOW;
+	private IndexStatus myRequiredIndexStatus = IndexStatus.YELLOW;
 	private SchemaManagementStrategyName myIndexSchemaManagementStrategy = SchemaManagementStrategyName.CREATE;
-	private String myRestUrl;
+	private String myHosts;
 	private String myUsername;
 	private String myPassword;
 	private long myIndexManagementWaitTimeoutMillis = 10000L;
@@ -77,11 +78,8 @@
 		// the below properties are used for ElasticSearch integration
 		theProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "elasticsearch");
 		theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.ANALYSIS_CONFIGURER), HapiElasticsearchAnalysisConfigurer.class.getName());
-		theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.HOSTS), myRestUrl);
+		theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.HOSTS), myHosts);
 		theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.PROTOCOL), myProtocol);
 
 		if (StringUtils.isNotBlank(myUsername)) {
@@ -102,8 +100,7 @@
 		//This tells elasticsearch to use our custom index naming strategy.
 		theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LAYOUT_STRATEGY), IndexNamePrefixLayoutStrategy.class.getName());
 
-		injectStartupTemplate(myProtocol, myRestUrl, myUsername, myPassword);
+		injectStartupTemplate(myProtocol, myHosts, myUsername, myPassword);
 	}
 
 	public ElasticsearchHibernatePropertiesBuilder setRequiredIndexStatus(IndexStatus theRequiredIndexStatus) {
@@ -111,11 +108,8 @@
 		return this;
 	}
 
-	public ElasticsearchHibernatePropertiesBuilder setRestUrl(String theRestUrl) {
-		if (theRestUrl.contains("://")) {
-			throw new ConfigurationException("Elasticsearch URL cannot include a protocol, that is a separate property. Remove http:// or https:// from this URL.");
-		}
-		myRestUrl = theRestUrl;
+	public ElasticsearchHibernatePropertiesBuilder setHosts(String hosts) {
+		myHosts = hosts;
 		return this;
 	}
 
@@ -150,18 +144,13 @@
 	 * TODO GGG HS: In HS6.1, we should have a native way of performing index settings manipulation at bootstrap time, so this should
 	 * eventually be removed in favour of whatever solution they come up with.
 	 */
-	void injectStartupTemplate(String theProtocol, String theHostAndPort, String theUsername, String thePassword) {
+	void injectStartupTemplate(String theProtocol, String theHosts, @Nullable String theUsername, @Nullable String thePassword) {
 		PutIndexTemplateRequest ngramTemplate = new PutIndexTemplateRequest("ngram-template")
 			.patterns(Arrays.asList("*resourcetable-*", "*termconcept-*"))
 			.settings(Settings.builder().put("index.max_ngram_diff", 50));
 
-		int colonIndex = theHostAndPort.indexOf(":");
-		String host = theHostAndPort.substring(0, colonIndex);
-		Integer port = Integer.valueOf(theHostAndPort.substring(colonIndex + 1));
-		String qualifiedHost = theProtocol + "://" + host;
-
 		try {
-			RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(qualifiedHost, port, theUsername, thePassword);
+			RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(theProtocol, theHosts, theUsername, thePassword);
 			ourLog.info("Adding starter template for large ngram diffs");
 			AcknowledgedResponse acknowledgedResponse = elasticsearchHighLevelRestClient.indices().putTemplate(ngramTemplate, RequestOptions.DEFAULT);
 			assert acknowledgedResponse.isAcknowledged();
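
Putting the builder changes together: the protocol is now passed separately from a comma-separable host list. A usage sketch mirroring the test configuration further down (the values are illustrative; note that apply() will also attempt to contact the cluster to install the ngram template, which the unit test stubs out):

import java.util.Properties;

// Assumes the builder class above is on the classpath.
public class PropertiesBuilderSketch {
	public static void main(String[] args) {
		Properties properties = new Properties();
		new ElasticsearchHibernatePropertiesBuilder()
			.setProtocol("http")
			.setHosts("localhost:9200") // comma-separate host:port pairs for multiple nodes
			.setUsername("")
			.setPassword("")
			.apply(properties);
	}
}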

View File

@@ -20,6 +20,8 @@ package ca.uhn.fhir.jpa.search.lastn;
  * #L%
  */
 
+import ca.uhn.fhir.context.ConfigurationException;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.http.Header;
 import org.apache.http.HttpHost;
 import org.apache.http.auth.AuthScope;
@@ -27,41 +29,47 @@ import org.apache.http.auth.UsernamePasswordCredentials;
 import org.apache.http.client.CredentialsProvider;
 import org.apache.http.impl.client.BasicCredentialsProvider;
 import org.apache.http.message.BasicHeader;
+import org.elasticsearch.client.Node;
 import org.elasticsearch.client.RestClient;
 import org.elasticsearch.client.RestClientBuilder;
 import org.elasticsearch.client.RestHighLevelClient;
 
+import javax.annotation.Nullable;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
 
 public class ElasticsearchRestClientFactory {
 
-	private static String determineScheme(String theHostname) {
-		int schemeIdx = theHostname.indexOf("://");
-		if (schemeIdx > 0) {
-			return theHostname.substring(0, schemeIdx);
-		} else {
-			return "http";
-		}
-	}
-
-	private static String stripHostOfScheme(String theHostname) {
-		int schemeIdx = theHostname.indexOf("://");
-		if (schemeIdx > 0) {
-			return theHostname.substring(schemeIdx + 3);
-		} else {
-			return theHostname;
-		}
-	}
-
-	static public RestHighLevelClient createElasticsearchHighLevelRestClient(String theHostname, int thePort, String theUsername, String thePassword) {
-		final CredentialsProvider credentialsProvider =
-			new BasicCredentialsProvider();
-		credentialsProvider.setCredentials(AuthScope.ANY,
-			new UsernamePasswordCredentials(theUsername, thePassword));
-		RestClientBuilder clientBuilder = RestClient.builder(
-			new HttpHost(stripHostOfScheme(theHostname), thePort, determineScheme(theHostname)))
-			.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder
-				.setDefaultCredentialsProvider(credentialsProvider));
+	static public RestHighLevelClient createElasticsearchHighLevelRestClient(
+		String protocol, String hosts, @Nullable String theUsername, @Nullable String thePassword) {
+
+		if (hosts.contains("://")) {
+			throw new ConfigurationException("Elasticsearch URLs cannot include a protocol, that is a separate property. Remove http:// or https:// from this URL.");
+		}
+		String[] hostArray = hosts.split(",");
+		List<Node> clientNodes = Arrays.stream(hostArray)
+			.map(String::trim)
+			.filter(s -> s.contains(":"))
+			.map(h -> {
+				int colonIndex = h.indexOf(":");
+				String host = h.substring(0, colonIndex);
+				int port = Integer.parseInt(h.substring(colonIndex + 1));
+				return new Node(new HttpHost(host, port, protocol));
+			})
+			.collect(Collectors.toList());
+		if (hostArray.length != clientNodes.size()) {
+			throw new ConfigurationException("Elasticsearch URLs have to contain ':' as a host:port separator. Example: localhost:9200,localhost:9201,localhost:9202");
+		}
+
+		RestClientBuilder clientBuilder = RestClient.builder(clientNodes.toArray(new Node[0]));
+		if (StringUtils.isNotBlank(theUsername) && StringUtils.isNotBlank(thePassword)) {
+			final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
+			credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(theUsername, thePassword));
+			clientBuilder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder
+				.setDefaultCredentialsProvider(credentialsProvider));
+		}
 
 		Header[] defaultHeaders = new Header[]{new BasicHeader("Content-Type", "application/json")};
 		clientBuilder.setDefaultHeaders(defaultHeaders);
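
A call-site sketch for the new factory signature (the host values are illustrative; blank or null credentials simply skip the credentials provider):

// Connects over HTTP to two hypothetical local nodes, anonymously.
RestHighLevelClient client = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(
	"http", "localhost:9200,localhost:9201", null, null);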

View File

@@ -68,11 +68,11 @@ import org.elasticsearch.search.aggregations.bucket.terms.ParsedTerms;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.ParsedTopHits;
-import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.sort.SortOrder;
 import org.springframework.beans.factory.annotation.Autowired;
 
+import javax.annotation.Nullable;
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
@@ -125,13 +125,13 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
 	private PartitionSettings myPartitionSettings;
 
 	//This constructor used to inject a dummy partitionsettings in test.
-	public ElasticsearchSvcImpl(PartitionSettings thePartitionSetings, String theHostname, int thePort, String theUsername, String thePassword) {
-		this(theHostname, thePort, theUsername, thePassword);
+	public ElasticsearchSvcImpl(PartitionSettings thePartitionSetings, String theHostname, @Nullable String theUsername, @Nullable String thePassword) {
+		this(theHostname, theUsername, thePassword);
 		this.myPartitionSettings = thePartitionSetings;
 	}
 
-	public ElasticsearchSvcImpl(String theHostname, int thePort, String theUsername, String thePassword) {
-		myRestHighLevelClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(theHostname, thePort, theUsername, thePassword);
+	public ElasticsearchSvcImpl(String theHostname, @Nullable String theUsername, @Nullable String thePassword) {
+		myRestHighLevelClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient("http", theHostname, theUsername, thePassword);
 
 		try {
 			createObservationIndexIfMissing();

View File

@@ -128,7 +128,7 @@ public class ElasticsearchWithPrefixConfig {
 			.settings(Settings.builder().put("index.max_ngram_diff", 50));
 		try {
-			RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient("http://" + host, httpPort, "", "");
+			RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient("http", host + ":" + httpPort, "", "");
 			AcknowledgedResponse acknowledgedResponse = elasticsearchHighLevelRestClient.indices().putTemplate(ngramTemplate, RequestOptions.DEFAULT);
 			assert acknowledgedResponse.isAcknowledged();
 		} catch (IOException theE) {

View File

@@ -41,7 +41,7 @@ public class TestR4ConfigWithElasticSearch extends TestR4Config {
 			.setIndexSchemaManagementStrategy(SchemaManagementStrategyName.CREATE)
 			.setIndexManagementWaitTimeoutMillis(10000)
 			.setRequiredIndexStatus(IndexStatus.YELLOW)
-			.setRestUrl(host+ ":" + httpPort)
+			.setHosts(host + ":" + httpPort)
 			.setProtocol("http")
 			.setUsername("")
 			.setPassword("")

View File

@@ -21,7 +21,7 @@ public class TestR4ConfigWithElasticsearchClient extends TestR4ConfigWithElastic
 	public ElasticsearchSvcImpl myElasticsearchSvc() {
 		int elasticsearchPort = elasticContainer().getMappedPort(9200);
 		String host = elasticContainer().getHost();
-		return new ElasticsearchSvcImpl(host, elasticsearchPort, "", "");
+		return new ElasticsearchSvcImpl(host + ":" + elasticsearchPort, null, null);
 	}
 
 	@PreDestroy

View File

@@ -33,7 +33,7 @@ public class ElasticsearchPrefixTest {
 	public void test() throws IOException {
 		//Given
 		RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(
-			"http://" + elasticsearchContainer.getHost(), elasticsearchContainer.getMappedPort(9200), "", "");
+			"http", elasticsearchContainer.getHost() + ":" + elasticsearchContainer.getMappedPort(9200), "", "");
 
 		//When
 		RestClient lowLevelClient = elasticsearchHighLevelRestClient.getLowLevelClient();

View File

@@ -10,6 +10,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.model.entity.ResourceTag;
 import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
+import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
 import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;

View File

@@ -250,6 +250,45 @@ public class ResourceProviderR4BundleTest extends BaseResourceProviderR4Test {
 	}
 
+	@Test
+	public void testTagCacheWorksWithBatchMode() {
+		Bundle input = new Bundle();
+		input.setType(BundleType.BATCH);
+
+		Patient p = new Patient();
+		p.setId("100");
+		p.setGender(AdministrativeGender.MALE);
+		p.addIdentifier().setSystem("urn:foo").setValue("A");
+		p.addName().setFamily("Smith");
+		p.getMeta().addTag().setSystem("mysystem").setCode("mycode");
+		input.addEntry().setResource(p).getRequest().setMethod(HTTPVerb.POST);
+
+		Patient p2 = new Patient();
+		p2.setId("200");
+		p2.setGender(AdministrativeGender.MALE);
+		p2.addIdentifier().setSystem("urn:foo").setValue("A");
+		p2.addName().setFamily("Smith");
+		p2.getMeta().addTag().setSystem("mysystem").setCode("mycode");
+		input.addEntry().setResource(p2).getRequest().setMethod(HTTPVerb.POST);
+
+		Patient p3 = new Patient();
+		p3.setId("pat-300");
+		p3.setGender(AdministrativeGender.MALE);
+		p3.addIdentifier().setSystem("urn:foo").setValue("A");
+		p3.addName().setFamily("Smith");
+		p3.getMeta().addTag().setSystem("mysystem").setCode("mycode");
+		input.addEntry().setResource(p3).getRequest().setMethod(HTTPVerb.PUT).setUrl("Patient/pat-300");
+
+		Bundle output = myClient.transaction().withBundle(input).execute();
+
+		output.getEntry().stream()
+			.map(BundleEntryComponent::getResponse)
+			.map(Bundle.BundleEntryResponseComponent::getStatus)
+			.forEach(statusCode -> {
+				assertEquals("201 Created", statusCode);
+			});
+	}
+
 	private List<String> createPatients(int count) {
 		List<String> ids = new ArrayList<String>();
 		for (int i = 0; i < count; i++) {

View File

@@ -23,17 +23,11 @@ class ElasticsearchHibernatePropertiesBuilderTest {
 
 	ElasticsearchHibernatePropertiesBuilder myPropertiesBuilder = spy(ElasticsearchHibernatePropertiesBuilder.class);
 
-	@BeforeEach
-	public void prepMocks() {
-		//ensures we don't try to reach out to a real ES server on apply.
-		doNothing().when(myPropertiesBuilder).injectStartupTemplate(any(), any(), any(), any());
-	}
-
 	@Test
-	public void testRestUrlCannotContainProtocol() {
+	public void testHostsCannotContainProtocol() {
 		String host = "localhost:9200";
 		String protocolHost = "https://" + host;
-		String failureMessage = "Elasticsearch URL cannot include a protocol, that is a separate property. Remove http:// or https:// from this URL.";
+		String failureMessage = "Elasticsearch URLs cannot include a protocol, that is a separate property. Remove http:// or https:// from this URL.";
 
 		myPropertiesBuilder
 			.setProtocol("https")
@@ -42,19 +36,42 @@
 
 		//SUT
 		try {
-			myPropertiesBuilder.setRestUrl(protocolHost);
+			myPropertiesBuilder.setHosts(protocolHost)
+				.apply(new Properties());
 			fail();
 		} catch (ConfigurationException e) {
 			assertThat(e.getMessage(), is(equalTo(failureMessage)));
 		}
 
+		doNothing().when(myPropertiesBuilder).injectStartupTemplate(any(), any(), any(), any());
 		Properties properties = new Properties();
 		myPropertiesBuilder
-			.setRestUrl(host)
+			.setHosts(host)
 			.apply(properties);
 
 		assertThat(properties.getProperty(BackendSettings.backendKey(ElasticsearchBackendSettings.HOSTS)), is(equalTo(host)));
 	}
 
+	@Test
+	public void testHostsValueValidation() {
+		String host = "localhost_9200,localhost:9201,localhost:9202";
+		String failureMessage = "Elasticsearch URLs have to contain ':' as a host:port separator. Example: localhost:9200,localhost:9201,localhost:9202";
+
+		myPropertiesBuilder
+			.setProtocol("https")
+			.setHosts(host)
+			.setUsername("whatever")
+			.setPassword("whatever");
+
+		//SUT
+		try {
+			myPropertiesBuilder
+				.apply(new Properties());
+			fail();
+		} catch (ConfigurationException e) {
+			assertThat(e.getMessage(), is(equalTo(failureMessage)));
+		}
+	}
 }

View File

@@ -68,7 +68,7 @@ public class LastNElasticsearchSvcMultipleObservationsIT {
 	public void before() throws IOException {
 		PartitionSettings partitionSettings = new PartitionSettings();
 		partitionSettings.setPartitioningEnabled(false);
-		elasticsearchSvc = new ElasticsearchSvcImpl(partitionSettings, elasticsearchContainer.getHost(), elasticsearchContainer.getMappedPort(9200), "", "");
+		elasticsearchSvc = new ElasticsearchSvcImpl(partitionSettings, elasticsearchContainer.getHost() + ":" + elasticsearchContainer.getMappedPort(9200), null, null);
 
 		if (!indexLoaded) {
 			createMultiplePatientsAndObservations();

View File

@@ -97,7 +97,7 @@ public class LastNElasticsearchSvcSingleObservationIT {
 	public void before() {
 		PartitionSettings partitionSettings = new PartitionSettings();
 		partitionSettings.setPartitioningEnabled(false);
-		elasticsearchSvc = new ElasticsearchSvcImpl(partitionSettings, elasticsearchContainer.getHost(), elasticsearchContainer.getMappedPort(9200), "", "");
+		elasticsearchSvc = new ElasticsearchSvcImpl(partitionSettings, elasticsearchContainer.getHost() + ":" + elasticsearchContainer.getMappedPort(9200), "", "");
 	}
 
 	@AfterEach