Merge branch 'master' into 2849_add_new_mdm_param

Jaison B 2021-09-17 10:18:24 -06:00
commit 8bb9aca8df
274 changed files with 53127 additions and 2151 deletions

View File

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -225,6 +225,14 @@ public class FhirContext {
 	}

+	/**
+	 * @since 5.6.0
+	 */
+	public static FhirContext forDstu2Cached() {
+		return forCached(FhirVersionEnum.DSTU2);
+	}
+
 	/**
 	 * @since 5.5.0
 	 */
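A minimal usage sketch of the new cached factory method (the `Patient` content is illustrative; it assumes `hapi-fhir-structures-dstu2` is on the classpath). Unlike `FhirContext.forDstu2()`, which performs an expensive model scan on every call, the cached variant returns one shared instance per FHIR version:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.dstu2.resource.Patient;

public class CachedContextExample {
	public static void main(String[] args) {
		// Both calls return the same FhirContext instance, avoiding the
		// costly classpath scan that building a fresh context performs.
		FhirContext ctx = FhirContext.forDstu2Cached();
		FhirContext same = FhirContext.forDstu2Cached();
		assert ctx == same;

		Patient patient = new Patient();
		patient.addName().addFamily("Simpson").addGiven("Homer");
		System.out.println(ctx.newJsonParser().encodeResourceToString(patient));
	}
}
```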

View File

@@ -73,7 +73,6 @@ import static org.apache.commons.lang3.StringUtils.isBlank;
 class ModelScanner {

 	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ModelScanner.class);
 	private Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> myClassToElementDefinitions = new HashMap<>();
 	private FhirContext myContext;
 	private Map<String, RuntimeResourceDefinition> myIdToResourceDefinition = new HashMap<>();
@@ -90,6 +89,7 @@ class ModelScanner {
 					 @Nonnull Collection<Class<? extends IBase>> theResourceTypes) throws ConfigurationException {
 		myContext = theContext;
 		myVersion = theVersion;
 		Set<Class<? extends IBase>> toScan = new HashSet<>(theResourceTypes);
 		init(theExistingDefinitions, toScan);
 	}
@@ -405,8 +405,8 @@ class ModelScanner {
 		List<RuntimeSearchParam.Component> components = null;
 		if (paramType == RestSearchParameterTypeEnum.COMPOSITE) {
 			components = new ArrayList<>();
-			for (String next : searchParam.compositeOf()) {
-				String ref = "http://hl7.org/fhir/SearchParameter/" + theResourceDef.getName().toLowerCase() + "-" + next;
+			for (String name : searchParam.compositeOf()) {
+				String ref = toCanonicalSearchParameterUri(theResourceDef, name);
 				components.add(new RuntimeSearchParam.Component(null, ref));
 			}
 		}
@@ -414,7 +414,8 @@ class ModelScanner {
 		Collection<String> base = Collections.singletonList(theResourceDef.getName());
 		String url = null;
 		if (theResourceDef.isStandardType()) {
-			url = "http://hl7.org/fhir/SearchParameter/" + theResourceDef.getName().toLowerCase() + "-" + searchParam.name();
+			String name = searchParam.name();
+			url = toCanonicalSearchParameterUri(theResourceDef, name);
 		}
 		RuntimeSearchParam param = new RuntimeSearchParam(null, url, searchParam.name(), searchParam.description(), searchParam.path(), paramType, providesMembershipInCompartments, toTargetList(searchParam.target()), RuntimeSearchParamStatusEnum.ACTIVE, null, components, base);
 		theResourceDef.addSearchParam(param);
@@ -424,6 +425,10 @@ class ModelScanner {
 	}

+	private String toCanonicalSearchParameterUri(RuntimeResourceDefinition theResourceDef, String theName) {
+		return "http://hl7.org/fhir/SearchParameter/" + theResourceDef.getName() + "-" + theName;
+	}
+
 	private Set<String> toTargetList(Class<? extends IBaseResource>[] theTarget) {
 		HashSet<String> retVal = new HashSet<>();
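The net effect of the new `toCanonicalSearchParameterUri()` helper is that the resource name is no longer lowercased: for the standard `name` parameter on `Patient`, for example, the exported canonical URL is now `http://hl7.org/fhir/SearchParameter/Patient-name` rather than `http://hl7.org/fhir/SearchParameter/patient-name`, matching the URLs published in the FHIR specification (see the changelog entry for issue 2790 below).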

View File

@@ -233,18 +233,7 @@ public class RuntimeSearchParam {
 	}

 	public List<String> getPathsSplit() {
-		String path = getPath();
-		if (path.indexOf('|') == -1) {
-			return Collections.singletonList(path);
-		}
-
-		List<String> retVal = new ArrayList<>();
-		StringTokenizer tok = new StringTokenizer(path, "|");
-		while (tok.hasMoreElements()) {
-			String nextPath = tok.nextToken().trim();
-			retVal.add(nextPath.trim());
-		}
-		return retVal;
+		return getPathsSplitForResourceType(null);
 	}

 	/**
@@ -266,6 +255,41 @@ public class RuntimeSearchParam {
 		return myPhoneticEncoder.encode(theString);
 	}

+	public List<String> getPathsSplitForResourceType(@Nullable String theResourceName) {
+		String path = getPath();
+		if (path.indexOf('|') == -1) {
+			if (theResourceName != null && !pathMatchesResourceType(theResourceName, path)) {
+				return Collections.emptyList();
+			}
+			return Collections.singletonList(path);
+		}
+
+		List<String> retVal = new ArrayList<>();
+		StringTokenizer tok = new StringTokenizer(path, "|");
+		while (tok.hasMoreElements()) {
+			String nextPath = tok.nextToken().trim();
+			if (theResourceName != null && !pathMatchesResourceType(theResourceName, nextPath)) {
+				continue;
+			}
+			retVal.add(nextPath);
+		}
+		return retVal;
+	}
+
+	private boolean pathMatchesResourceType(String theResourceName, String thePath) {
+		if (thePath.startsWith(theResourceName + ".")) {
+			return true;
+		}
+		if (thePath.startsWith("Resource.") || thePath.startsWith("DomainResource.")) {
+			return true;
+		}
+		if (Character.isLowerCase(thePath.charAt(0))) {
+			return true;
+		}
+		return false;
+	}
+
 	public enum RuntimeSearchParamStatusEnum {
 		ACTIVE,
 		DRAFT,
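To illustrate the filtering the new method performs, consider a hypothetical search parameter whose path spans two resource types (values are illustrative only):

```java
// Suppose getPath() returns the combined path:
//   "Patient.name | Practitioner.name"
//
// getPathsSplit()                              -> [Patient.name, Practitioner.name]
// getPathsSplitForResourceType("Patient")      -> [Patient.name]
// getPathsSplitForResourceType("Organization") -> []   (neither path matches)
//
// Paths rooted at Resource/DomainResource, or starting with a lowercase
// element (no resource qualifier), are kept for every resource type.
```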

View File

@@ -961,6 +961,7 @@ public class FhirTerser {
 			for (BaseRuntimeChildDefinition nextChild : childDef.getChildrenAndExtension()) {
 				List<?> values = nextChild.getAccessor().getValues(theElement);
 				if (values != null) {
 					for (Object nextValueObject : values) {
 						IBase nextValue;

View File

@ -28,13 +28,6 @@ import ca.uhn.fhir.rest.gclient.TokenClientParam;
*/ */
public interface IAnyResource extends IBaseResource { public interface IAnyResource extends IBaseResource {
/**
* Search parameter constant for <b>_language</b>
*/
@SearchParamDefinition(name="_language", path="", description="The language of the resource", type="string" )
String SP_RES_LANGUAGE = "_language";
/** /**
* Search parameter constant for <b>_id</b> * Search parameter constant for <b>_id</b>
*/ */

View File

@@ -3,14 +3,14 @@
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>ca.uhn.hapi.fhir</groupId>
 	<artifactId>hapi-fhir-bom</artifactId>
-	<version>5.6.0-PRE3-SNAPSHOT</version>
+	<version>5.6.0-PRE5-SNAPSHOT</version>
 	<packaging>pom</packaging>
 	<name>HAPI FHIR BOM</name>

 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -282,7 +282,7 @@ public abstract class BaseApp {
 	}

 	private Optional<BaseCommand> parseCommand(String[] theArgs) {
-		Optional<BaseCommand> commandOpt = getNextCommand(theArgs);
+		Optional<BaseCommand> commandOpt = getNextCommand(theArgs, 0);
 		if (! commandOpt.isPresent()) {
 			String message = "Unrecognized command: " + ansi().bold().fg(Ansi.Color.RED) + theArgs[0] + ansi().boldOff().fg(Ansi.Color.WHITE);
@@ -294,8 +294,8 @@ public abstract class BaseApp {
 		return commandOpt;
 	}

-	private Optional<BaseCommand> getNextCommand(String[] theArgs) {
-		return ourCommands.stream().filter(cmd -> cmd.getCommandName().equals(theArgs[0])).findFirst();
+	private Optional<BaseCommand> getNextCommand(String[] theArgs, int thePosition) {
+		return ourCommands.stream().filter(cmd -> cmd.getCommandName().equals(theArgs[thePosition])).findFirst();
 	}

 	private void processHelp(String[] theArgs) {
@@ -303,7 +303,7 @@ public abstract class BaseApp {
 			logUsage();
 			return;
 		}
-		Optional<BaseCommand> commandOpt = getNextCommand(theArgs);
+		Optional<BaseCommand> commandOpt = getNextCommand(theArgs, 1);
 		if (! commandOpt.isPresent()) {
 			String message = "Unknown command: " + theArgs[1];
 			System.err.println(message);

View File

@@ -0,0 +1,31 @@
package ca.uhn.fhir.cli;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;

public class BaseAppTest {

	private final PrintStream standardOut = System.out;
	private final ByteArrayOutputStream outputStreamCaptor = new ByteArrayOutputStream();

	@BeforeEach
	public void setUp() {
		System.setOut(new PrintStream(outputStreamCaptor));
	}

	@AfterEach
	public void tearDown() {
		System.setOut(standardOut);
	}

	@Test
	public void testHelpOption() {
		App.main(new String[]{"help", "create-package"});
		assertThat(outputStreamCaptor.toString().trim(), containsString("Usage"));
	}
}

View File

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-cli</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

View File

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../../hapi-deployable-pom</relativePath>
 	</parent>

View File

@@ -30,6 +30,7 @@ import org.apache.commons.lang3.time.DateUtils;
 import org.springframework.beans.factory.annotation.Autowire;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
@@ -65,8 +66,8 @@ public class FhirServerConfig extends BaseJavaConfigDstu2 {
 	@Override
 	@Bean
-	public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
-		LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
+	public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
+		LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
 		retVal.setPersistenceUnitName("HAPI_PU");
 		retVal.setDataSource(myDataSource);
 		retVal.setJpaProperties(myJpaProperties);

View File

@@ -30,6 +30,7 @@ import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
 import org.springframework.beans.factory.annotation.Autowire;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
@@ -62,8 +63,8 @@ public class FhirServerConfigDstu3 extends BaseJavaConfigDstu3 {
 	@Override
 	@Bean
-	public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
-		LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
+	public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
+		LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
 		retVal.setPersistenceUnitName("HAPI_PU");
 		retVal.setDataSource(myDataSource);
 		retVal.setJpaProperties(myJpaProperties);

View File

@@ -28,6 +28,7 @@ import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
 import org.springframework.beans.factory.annotation.Autowire;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
@@ -60,8 +61,8 @@ public class FhirServerConfigR4 extends BaseJavaConfigR4 {
 	@Override
 	@Bean
-	public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
-		LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
+	public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
+		LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
 		retVal.setPersistenceUnitName("HAPI_PU");
 		retVal.setDataSource(myDataSource);
 		retVal.setJpaProperties(myJpaProperties);

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

View File

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -0,0 +1,5 @@
---
type: perf
issue: 2457
title: "A regression in HAPI FHIR 5.3.0 resulted in concurrent searches being executed in a sequential
(and not parallel) fashion in some circumstances."

View File

@@ -0,0 +1,5 @@
---
type: fix
issue: 2790
title: "The SearchParameter canonical URLs exported by the JPA server have been adjusted to match the URLs
specified in the FHIR specification."

View File

@@ -0,0 +1,7 @@
---
type: change
issue: 2790
title: "Support for the `_language` search parameter has been dropped from the JPA server. This search parameter
was specified in FHIR DSTU1 but was dropped in later versions. It is rarely used in practice and imposes
an indexing cost, so it has now been removed. A custom search parameter may be used in order to achieve
the same functionality if needed."
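For anyone who still needs the old behaviour, a rough sketch of an equivalent custom search parameter (R4 shown; the URL, name, and server base are assumptions, and the parameter cannot be named `_language`, since custom parameter codes may not start with an underscore):

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.SearchParameter;

public class LanguageSearchParamExample {
	public static void main(String[] args) {
		SearchParameter sp = new SearchParameter();
		sp.setUrl("http://example.org/SearchParameter/resource-language"); // assumed URL
		sp.setName("language");
		sp.setCode("language"); // searches then look like: Patient?language=fr
		sp.setDescription("The language of the resource content");
		sp.addBase("Resource");
		sp.setType(Enumerations.SearchParamType.TOKEN);
		sp.setExpression("Resource.language");
		sp.setStatus(Enumerations.PublicationStatus.ACTIVE);

		IGenericClient client = FhirContext.forR4Cached()
			.newRestfulGenericClient("http://localhost:8080/fhir"); // assumed base URL
		client.create().resource(sp).execute();
	}
}
```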

View File

@@ -0,0 +1,5 @@
---
type: fix
issue: 2901
jira: SMILE-3004
title: "Processing transactions with AutoversionAtPaths set now creates those resources (if AutoCreatePlaceholders is set) and uses the latest version as expected."

View File

@@ -0,0 +1,6 @@
---
type: fix
issue: 2958
jira: SMILE-643
title: "Fixed an issue where processing queries like Procedure?patient= before a cache search would cause the parameter key to be removed.
Additionally, ensured that requests like Procedure?patient= now return HTTP 400 Bad Request instead of HTTP 500 Internal Server Error."

View File

@@ -0,0 +1,5 @@
---
type: fix
issue: 2962
jira: SMILE-720
title: "Added a new DaoConfig setting called `setElasticSearchIndexPrefix(String prefix)` which will cause Hibernate Search to prefix all of its indexes with the provided value."

View File

@@ -0,0 +1,7 @@
---
type: fix
issue: 2967
jira: SMILE-2899
title: "Previously, the system would only traverse references to discrete resources while performing a chained search.
This fix adds support for traversing references to contained resources as well, with the limitation that the reference
to the contained resource must be the last reference in the chain."
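For example, if an Observation stores its subject as a *contained* Patient, a chained search such as `Observation?subject.name=Smith` can now match it; a longer chain that continues through the contained resource (e.g. `Observation?subject.organization.name=...`) remains unsupported, since the contained resource must be the final link in the chain.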

View File

@@ -0,0 +1,4 @@
---
type: fix
issue: 2973
title: "The CLI command `smileutil help {command}` returned `Unknown command` instead of the usage for the given command. This has been corrected."

View File

@@ -0,0 +1,5 @@
---
type: add
issue: 2975
title: "Two improvements have been made to the connection to Elasticsearch. First, null username and password values are now permitted. Second, multiple hosts are now permitted via the `setHosts()` method on the ElasticHibernatePropertiesBuilder, allowing you to
connect to multiple Elasticsearch clusters at once. Thanks to Dušan Marković for the contribution!"
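A rough sketch of the multi-host configuration. Apart from `setHosts()`, which the entry names explicitly, the builder method names below are assumptions rather than verified API:

```java
ElasticHibernatePropertiesBuilder builder = new ElasticHibernatePropertiesBuilder();
// Multiple hosts may now be supplied as a comma-separated list.
builder.setHosts("es-east.example.com:9200,es-west.example.com:9200");
// Null credentials are now permitted for unsecured clusters
// (setUsername/setPassword are assumed method names).
builder.setUsername(null);
builder.setPassword(null);
```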

View File

@@ -0,0 +1,3 @@
---
type: fix
title: "Fixed a bug where creating two identical tags in parallel batch entries would fail."

View File

@@ -0,0 +1,5 @@
---
type: change
jira: SMILE-2927
title: "During transactions, any resources that were PUT or POSTed with a conditional URL now receive extra validation. There is now a final
storage step which ensures that the stored resource actually matches the conditional URL."
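For example, a transaction entry performing `PUT Patient?identifier=http://acme.org|12345` will now be rejected if the Patient resource in the entry body does not actually carry the identifier `http://acme.org|12345`.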

View File

@@ -0,0 +1,6 @@
---
type: change
issue: 2991
title: "This PR eliminates the search coordinator threadpool and executes searches synchronously on the HTTP client
thread. Using a separate pool was intended to improve server scalability, but it ultimately created
false bottlenecks and reduced the utility of monitoring infrastructure, so it has been eliminated."

View File

@@ -31,12 +31,11 @@ In addition, the Elasticsearch client service, `ElasticsearchSvcImpl` will need
 ```java
 @Bean()
 public ElasticsearchSvcImpl elasticsearchSvc() {
-	String elasticsearchHost = "localhost";
-	String elasticsearchUserId = "elastic";
+	String elasticsearchHost = "localhost:9200";
+	String elasticsearchUsername = "elastic";
 	String elasticsearchPassword = "changeme";
-	int elasticsearchPort = 9301;
-	return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUserId, elasticsearchPassword);
+	return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchUsername, elasticsearchPassword);
 }
 ```

View File

@@ -302,6 +302,14 @@ If the server has been configured with a [Resource Server ID Strategy](/apidocs/
 			Contains the specific version (starting with 1) of the resource that this row corresponds to.
 		</td>
 	</tr>
+	<tr>
+		<td>RESOURCE_TYPE</td>
+		<td></td>
+		<td>String</td>
+		<td>
+			Contains the string specifying the type of the resource (Patient, Observation, etc).
+		</td>
+	</tr>
 </tbody>
 </table>
@@ -476,7 +484,7 @@ The following columns are common to **all HFJ_SPIDX_xxx tables**.
 	<tr>
 		<td>RES_ID</td>
 		<td>FK to <a href="#HFJ_RESOURCE">HFJ_RESOURCE</a></td>
-		<td>String</td>
+		<td>Long</td>
 		<td></td>
 		<td>
 			Contains the PID of the resource being indexed.

View File

@@ -11,7 +11,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -258,6 +258,11 @@ public class DaoConfig {
 	private boolean myAccountForDateIndexNulls;
 	private boolean myTriggerSubscriptionsForNonVersioningChanges;

+	/**
+	 * @since 5.6.0
+	 */
+	private String myElasticSearchIndexPrefix;
+
 	/**
 	 * @since 5.6.0
 	 */
@@ -269,6 +274,7 @@ public class DaoConfig {
 	private Integer myBundleBatchPoolSize = DEFAULT_BUNDLE_BATCH_POOL_SIZE;
 	private Integer myBundleBatchMaxPoolSize = DEFAULT_BUNDLE_BATCH_MAX_POOL_SIZE;

 	/**
 	 * Constructor
 	 */
@@ -2643,6 +2649,28 @@ public class DaoConfig {
 		return retval;
 	}

+	/**
+	 * Gets the prefix applied to any indexes created when interacting with Elasticsearch. This applies to fulltext
+	 * search indexes and terminology expansion indexes.
+	 *
+	 * @since 5.6.0
+	 */
+	public String getElasticSearchIndexPrefix() {
+		return myElasticSearchIndexPrefix;
+	}
+
+	/**
+	 * Sets a prefix for any indexes created when interacting with Elasticsearch. This will apply to fulltext search
+	 * indexes and terminology expansion indexes.
+	 *
+	 * @since 5.6.0
+	 */
+	public void setElasticSearchIndexPrefix(String thePrefix) {
+		myElasticSearchIndexPrefix = thePrefix;
+	}
+
 	public enum StoreMetaSourceInformationEnum {
 		NONE(false, false),
 		SOURCE_URI(true, false),

View File

@@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.api.dao;
 import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
 import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
-import ca.uhn.fhir.rest.annotation.Offset;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import org.hl7.fhir.instance.model.api.IBaseBundle;

View File

@@ -52,13 +52,11 @@ public class LazyDaoMethodOutcome extends DaoMethodOutcome {
 	private void tryToRunSupplier() {
 		if (myEntitySupplier != null) {
-
 			EntityAndResource entityAndResource = myEntitySupplier.get();
 			setEntity(entityAndResource.getEntity());
 			setResource(entityAndResource.getResource());
 			setId(entityAndResource.getResource().getIdElement());
-
 			myEntitySupplierUseCallback.run();
 		}
 	}

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE3-SNAPSHOT</version>
+		<version>5.6.0-PRE5-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -54,4 +54,8 @@ public class PartitionedUrl implements IModelJson {
 	public void setRequestPartitionId(RequestPartitionId theRequestPartitionId) {
 		myRequestPartitionId = theRequestPartitionId;
 	}
+
+	public boolean isPartitioned() {
+		return myRequestPartitionId != null && !myRequestPartitionId.isDefaultPartition();
+	}
 }

View File

@@ -1,5 +1,25 @@
 package ca.uhn.fhir.jpa.batch.mdm;

+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
 import ca.uhn.fhir.mdm.api.IMdmBatchJobSubmitterFactory;
 import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
 import org.springframework.beans.factory.annotation.Autowired;

View File

@@ -69,7 +69,7 @@ public class PidToIBaseResourceProcessor implements ItemProcessor<List<ResourcePersistentId>, List<IBaseResource>> {
 		List<IBaseResource> outgoing = new ArrayList<>();
 		sb.loadResourcesByPid(theResourcePersistentId, Collections.emptyList(), outgoing, false, null);
-		ourLog.trace("Loaded resources: {}", outgoing.stream().map(t->t.getIdElement().getValue()).collect(Collectors.joining(", ")));
+		ourLog.trace("Loaded resources: {}", outgoing.stream().filter(t -> t != null).map(t -> t.getIdElement().getValue()).collect(Collectors.joining(", ")));

 		return outgoing;

View File

@@ -1,5 +1,25 @@
 package ca.uhn.fhir.jpa.batch.reader;

+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;

View File

@@ -24,18 +24,23 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
+import ca.uhn.fhir.jpa.dao.index.IdHelperService;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.jpa.util.QueryChunker;
 import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
+import org.hl7.fhir.instance.model.api.IIdType;
 import org.slf4j.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;

 import javax.annotation.Nonnull;
 import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Optional;
 import java.util.stream.Collectors;

 import static org.slf4j.LoggerFactory.getLogger;
@@ -52,17 +57,19 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc {
 	DaoRegistry myDaoRegistry;
 	@Autowired
 	IResourceTableDao myResourceTableDao;
+	@Autowired
+	IdHelperService myIdHelperService;

 	@Override
 	@Nonnull
-	public ResourceVersionMap getVersionMap(String theResourceName, SearchParameterMap theSearchParamMap) {
+	public ResourceVersionMap getVersionMap(RequestPartitionId theRequestPartitionId, String theResourceName, SearchParameterMap theSearchParamMap) {
 		IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(theResourceName);
 		if (ourLog.isDebugEnabled()) {
 			ourLog.debug("About to retrieve version map for resource type: {}", theResourceName);
 		}

-		List<Long> matchingIds = dao.searchForIds(theSearchParamMap, new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.allPartitions())).stream()
+		List<Long> matchingIds = dao.searchForIds(theSearchParamMap, new SystemRequestDetails().setRequestPartitionId(theRequestPartitionId)).stream()
 			.map(ResourcePersistentId::getIdAsLong)
 			.collect(Collectors.toList());
@@ -74,4 +81,95 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc {
		return ResourceVersionMap.fromResourceTableEntities(allById);
	}
	/**
	 * Retrieves the latest versions for any resource ids that are found.
	 * If an id is not found, it will not be contained in the returned map.
	 * The keys are the same values that were passed in, to allow the
	 * consumer to look up a value using the id it already has.
	 *
	 * This method should not throw, so it can safely be consumed in
	 * transactions.
	 *
	 * @param theRequestPartitionId - request partition id
	 * @param theIds - list of IIdTypes for resources of interest.
	 * @return a map of the provided ids to their persistent ids and latest versions
	 */
	@Override
public ResourcePersistentIdMap getLatestVersionIdsForResourceIds(RequestPartitionId theRequestPartitionId, List<IIdType> theIds) {
ResourcePersistentIdMap idToPID = new ResourcePersistentIdMap();
HashMap<String, List<IIdType>> resourceTypeToIds = new HashMap<>();
for (IIdType id : theIds) {
String resourceType = id.getResourceType();
if (!resourceTypeToIds.containsKey(resourceType)) {
resourceTypeToIds.put(resourceType, new ArrayList<>());
}
resourceTypeToIds.get(resourceType).add(id);
}
for (String resourceType : resourceTypeToIds.keySet()) {
ResourcePersistentIdMap idAndPID = getIdsOfExistingResources(theRequestPartitionId,
resourceTypeToIds.get(resourceType));
idToPID.putAll(idAndPID);
}
return idToPID;
}
	/**
	 * Helper method to determine if some resources exist in the DB (without throwing).
	 * Returns a map that contains the IIdType for every resource found.
	 * If an id is not found, it won't be included in the map.
	 *
	 * @param theIds - list of IIdType ids (for the same resource type)
	 * @return a map of the ids that were found to their persistent ids
	 */
private ResourcePersistentIdMap getIdsOfExistingResources(RequestPartitionId thePartitionId,
Collection<IIdType> theIds) {
// these are the found Ids that were in the db
ResourcePersistentIdMap retval = new ResourcePersistentIdMap();
if (theIds == null || theIds.isEmpty()) {
return retval;
}
List<ResourcePersistentId> resourcePersistentIds = myIdHelperService.resolveResourcePersistentIdsWithCache(thePartitionId,
theIds.stream().collect(Collectors.toList()));
// we'll use this map to fetch pids that require versions
HashMap<Long, ResourcePersistentId> pidsToVersionToResourcePid = new HashMap<>();
// fill in our map
for (ResourcePersistentId pid : resourcePersistentIds) {
if (pid.getVersion() == null) {
pidsToVersionToResourcePid.put(pid.getIdAsLong(), pid);
}
Optional<IIdType> idOp = theIds.stream()
.filter(i -> i.getIdPart().equals(pid.getAssociatedResourceId().getIdPart()))
.findFirst();
// this should always be present
// since it was passed in.
// but land of optionals...
idOp.ifPresent(id -> {
retval.put(id, pid);
});
}
// set any versions we don't already have
if (!pidsToVersionToResourcePid.isEmpty()) {
Collection<Object[]> resourceEntries = myResourceTableDao
.getResourceVersionsForPid(new ArrayList<>(pidsToVersionToResourcePid.keySet()));
for (Object[] record : resourceEntries) {
// order matters!
Long retPid = (Long) record[0];
String resType = (String) record[1];
Long version = (Long) record[2];
pidsToVersionToResourcePid.get(retPid).setVersion(version);
}
}
return retval;
}
}

View File

@@ -122,6 +122,7 @@ import ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl;
 import ca.uhn.fhir.jpa.search.cache.DatabaseSearchResultCacheSvcImpl;
 import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
 import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
+import ca.uhn.fhir.jpa.search.elastic.IndexNamePrefixLayoutStrategy;
 import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
 import ca.uhn.fhir.jpa.search.reindex.ResourceReindexer;
 import ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl;
@@ -155,6 +156,7 @@ import org.hl7.fhir.utilities.npm.FilesystemPackageCacheManager;
 import org.springframework.batch.core.configuration.annotation.BatchConfigurer;
 import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
 import org.springframework.context.ApplicationContext;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
@@ -288,8 +290,8 @@ public abstract class BaseConfig {
 	 * bean, but it provides a partially completed entity manager
 	 * factory with HAPI FHIR customizations
 	 */
-	protected LocalContainerEntityManagerFactoryBean entityManagerFactory() {
-		LocalContainerEntityManagerFactoryBean retVal = new HapiFhirLocalContainerEntityManagerFactoryBean();
+	protected LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory myConfigurableListableBeanFactory) {
+		LocalContainerEntityManagerFactoryBean retVal = new HapiFhirLocalContainerEntityManagerFactoryBean(myConfigurableListableBeanFactory);
 		configureEntityManagerFactory(retVal, fhirContext());
 		return retVal;
 	}
@@ -378,17 +380,6 @@ public abstract class BaseConfig {
 		return new TermConceptMappingSvcImpl();
 	}

-	@Bean
-	public ThreadPoolTaskExecutor searchCoordinatorThreadFactory() {
-		final ThreadPoolTaskExecutor threadPoolTaskExecutor = new ThreadPoolTaskExecutor();
-		threadPoolTaskExecutor.setThreadNamePrefix("search_coord_");
-		threadPoolTaskExecutor.setCorePoolSize(searchCoordCorePoolSize);
-		threadPoolTaskExecutor.setMaxPoolSize(searchCoordMaxPoolSize);
-		threadPoolTaskExecutor.setQueueCapacity(searchCoordQueueCapacity);
-		threadPoolTaskExecutor.initialize();
-		return threadPoolTaskExecutor;
-	}
-
 	@Bean
 	public TaskScheduler taskScheduler() {
 		ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler();
@@ -849,8 +840,8 @@ public abstract class BaseConfig {
 	}

 	@Bean
-	public ISearchCoordinatorSvc searchCoordinatorSvc(ThreadPoolTaskExecutor searchCoordinatorThreadFactory) {
-		return new SearchCoordinatorSvcImpl(searchCoordinatorThreadFactory);
+	public ISearchCoordinatorSvc searchCoordinatorSvc() {
+		return new SearchCoordinatorSvcImpl();
 	}

 	@Bean
@@ -919,6 +910,11 @@ public abstract class BaseConfig {
 		return new PredicateBuilderFactory(theApplicationContext);
 	}

+	@Bean
+	public IndexNamePrefixLayoutStrategy indexLayoutStrategy() {
+		return new IndexNamePrefixLayoutStrategy();
+	}
+
 	@Bean
 	public JpaResourceLoader jpaResourceLoader() {
 		return new JpaResourceLoader();

View File

@@ -23,6 +23,9 @@ package ca.uhn.fhir.jpa.config;
 import org.hibernate.cfg.AvailableSettings;
 import org.hibernate.query.criteria.LiteralHandlingMode;
 import org.hibernate.resource.jdbc.spi.PhysicalConnectionHandlingMode;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
+import org.springframework.orm.hibernate5.SpringBeanContainer;
 import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;

 import java.util.Map;
@@ -32,6 +35,14 @@ import java.util.Map;
 * that sets some sensible default property values
 */
 public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContainerEntityManagerFactoryBean {

+	// https://stackoverflow.com/questions/57902388/how-to-inject-spring-beans-into-the-hibernate-envers-revisionlistener
+	ConfigurableListableBeanFactory myConfigurableListableBeanFactory;
+
+	public HapiFhirLocalContainerEntityManagerFactoryBean(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
+		myConfigurableListableBeanFactory = theConfigurableListableBeanFactory;
+	}
+
 	@Override
 	public Map<String, Object> getJpaPropertyMap() {
 		Map<String, Object> retVal = super.getJpaPropertyMap();
@@ -63,6 +74,11 @@ public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContainerEntityManagerFactoryBean {
 		if (!retVal.containsKey(AvailableSettings.BATCH_VERSIONED_DATA)) {
 			retVal.put(AvailableSettings.BATCH_VERSIONED_DATA, "true");
 		}
+
+		// Why is this here, you ask? LocalContainerEntityManagerFactoryBean actually clobbers the setting hibernate needs
+		// in order to be able to resolve beans, so we add it back in manually here
+		if (!retVal.containsKey(AvailableSettings.BEAN_CONTAINER)) {
+			retVal.put(AvailableSettings.BEAN_CONTAINER, new SpringBeanContainer(myConfigurableListableBeanFactory));
+		}
 		return retVal;
 	}

View File

@@ -1207,7 +1207,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStorageDao
 		if (thePerformIndexing || ((ResourceTable) theEntity).getVersion() == 1) {

 			newParams = new ResourceIndexedSearchParams();
-
 			mySearchParamWithInlineReferencesExtractor.populateFromResource(newParams, theTransactionDetails, entity, theResource, existingParams, theRequest, thePerformIndexing);

 			changed = populateResourceIntoEntity(theTransactionDetails, theRequest, theResource, entity, true);
@@ -1229,12 +1228,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStorageDao
 			}

 			entity.setUpdated(theTransactionDetails.getTransactionDate());
-			if (theResource instanceof IResource) {
-				entity.setLanguage(((IResource) theResource).getLanguage().getValue());
-			} else {
-				entity.setLanguage(((IAnyResource) theResource).getLanguageElement().getValue());
-			}
-
 			newParams.populateResourceTableSearchParamsPresentFlags(entity);
 			entity.setIndexStatus(INDEX_STATUS_INDEXED);
 		}

View File

@@ -136,9 +136,12 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
 import java.util.UUID;

View File

@@ -7,6 +7,7 @@ import ca.uhn.fhir.jpa.util.ResourceCountCache;
 import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
 import ca.uhn.fhir.util.StopWatch;
 import com.google.common.annotations.VisibleForTesting;

View File

@@ -25,19 +25,18 @@ import ca.uhn.fhir.context.RuntimeSearchParam;
 import ca.uhn.fhir.interceptor.api.HookParams;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
 import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
 import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
+import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
+import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
 import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
 import ca.uhn.fhir.jpa.model.entity.ModelConfig;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
-import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
-import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
-import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
 import ca.uhn.fhir.model.api.IQueryParameterAnd;
 import ca.uhn.fhir.rest.api.QualifiedParamList;
 import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
@@ -45,12 +44,16 @@ import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.SimplePreResourceAccessDetails;
 import ca.uhn.fhir.rest.api.server.SimplePreResourceShowDetails;
+import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
 import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
 import ca.uhn.fhir.rest.param.QualifierDetails;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
+import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
 import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
+import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
 import ca.uhn.fhir.util.BundleUtil;
 import ca.uhn.fhir.util.FhirTerser;
 import ca.uhn.fhir.util.OperationOutcomeUtil;
@@ -91,6 +94,10 @@ public abstract class BaseStorageDao {
 	protected DaoRegistry myDaoRegistry;
 	@Autowired
 	protected ModelConfig myModelConfig;
+	@Autowired
+	protected IResourceVersionSvc myResourceVersionSvc;
+	@Autowired
+	protected DaoConfig myDaoConfig;

 	@VisibleForTesting
 	public void setSearchParamRegistry(ISearchParamRegistry theSearchParamRegistry) {
@@ -204,10 +211,33 @@ public abstract class BaseStorageDao {
 		for (IBaseReference nextReference : referencesToVersion) {
 			IIdType referenceElement = nextReference.getReferenceElement();
 			if (!referenceElement.hasBaseUrl()) {
-				String resourceType = referenceElement.getResourceType();
-				IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(resourceType);
-				String targetVersionId = dao.getCurrentVersionId(referenceElement);
-				String newTargetReference = referenceElement.withVersion(targetVersionId).getValue();
+
+				ResourcePersistentIdMap resourceVersionMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(),
+					Collections.singletonList(referenceElement)
+				);
+
+				// 3 cases:
+				// 1) there exists a resource in the db with some version (use this version)
+				// 2) no resource exists, but we will create one (eventually). The version is 1
+				// 3) no resource exists, and none will be made -> throw
+				Long version;
+				if (resourceVersionMap.containsKey(referenceElement)) {
+					// the resource exists... the latest version
+					// will be the value in the ResourcePersistentId
+					version = resourceVersionMap.getResourcePersistentId(referenceElement).getVersion();
+				} else if (myDaoConfig.isAutoCreatePlaceholderReferenceTargets()) {
+					// if the map doesn't contain the object
+					// but AutoCreatePlaceholders is on,
+					// then the version will be 1 (the first version)
+					version = 1L;
+				} else {
+					// resource not found
+					// and no AutoCreatePlaceholders set...
+					// we throw
+					throw new ResourceNotFoundException(referenceElement);
+				}
+				String newTargetReference = referenceElement.withVersion(version.toString()).getValue();
 				nextReference.setReference(newTargetReference);
 			}
 		}

View File

@@ -25,6 +25,7 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.interceptor.api.HookParams;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
 import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.interceptor.model.TransactionWriteOperationsDetails;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
@@ -34,6 +35,9 @@ import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
 import ca.uhn.fhir.jpa.api.model.DeleteConflict;
 import ca.uhn.fhir.jpa.api.model.DeleteConflictList;
 import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
+import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
+import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
+import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
 import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
 import ca.uhn.fhir.jpa.delete.DeleteConflictService;
 import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
@@ -41,7 +45,9 @@ import ca.uhn.fhir.jpa.model.entity.ModelConfig;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
 import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
+import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
 import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher;
+import ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher;
 import ca.uhn.fhir.model.api.IResource;
 import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
 import ca.uhn.fhir.parser.DataFormatException;
@@ -61,6 +67,7 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
 import ca.uhn.fhir.rest.server.exceptions.NotModifiedException;
 import ca.uhn.fhir.rest.server.exceptions.PayloadTooLargeException;
+import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
 import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
 import ca.uhn.fhir.rest.server.method.BaseMethodBinding;
 import ca.uhn.fhir.rest.server.method.BaseResourceReturningMethodBinding;
@@ -68,15 +75,16 @@ import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
 import ca.uhn.fhir.rest.server.servlet.ServletSubRequestDetails;
 import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
 import ca.uhn.fhir.rest.server.util.ServletRequestUtil;
+import ca.uhn.fhir.util.AsyncUtil;
 import ca.uhn.fhir.util.ElementUtil;
 import ca.uhn.fhir.util.FhirTerser;
 import ca.uhn.fhir.util.ResourceReferenceInfo;
 import ca.uhn.fhir.util.StopWatch;
-import ca.uhn.fhir.util.AsyncUtil;
 import ca.uhn.fhir.util.UrlUtil;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.Validate;
 import org.hl7.fhir.dstu3.model.Bundle;
 import org.hl7.fhir.exceptions.FHIRException;
@@ -90,7 +98,6 @@ import org.hl7.fhir.instance.model.api.IBaseReference;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.Task;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
@ -117,11 +124,11 @@ import java.util.Map;
import java.util.Optional; import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.TreeSet; import java.util.TreeSet;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static ca.uhn.fhir.util.StringUtil.toUtf8String; import static ca.uhn.fhir.util.StringUtil.toUtf8String;
import static org.apache.commons.lang3.StringUtils.defaultString; import static org.apache.commons.lang3.StringUtils.defaultString;
@ -154,9 +161,14 @@ public abstract class BaseTransactionProcessor {
private ModelConfig myModelConfig; private ModelConfig myModelConfig;
@Autowired @Autowired
private InMemoryResourceMatcher myInMemoryResourceMatcher; private InMemoryResourceMatcher myInMemoryResourceMatcher;
@Autowired
private SearchParamMatcher mySearchParamMatcher;
private TaskExecutor myExecutor ; private TaskExecutor myExecutor ;
@Autowired
private IResourceVersionSvc myResourceVersionSvc;
@VisibleForTesting @VisibleForTesting
public void setDaoConfig(DaoConfig theDaoConfig) { public void setDaoConfig(DaoConfig theDaoConfig) {
myDaoConfig = theDaoConfig; myDaoConfig = theDaoConfig;
@ -252,8 +264,10 @@ public abstract class BaseTransactionProcessor {
myVersionAdapter.populateEntryWithOperationOutcome(caughtEx, nextEntry); myVersionAdapter.populateEntryWithOperationOutcome(caughtEx, nextEntry);
} }
private void handleTransactionCreateOrUpdateOutcome(Map<IIdType, IIdType> idSubstitutions, Map<IIdType, DaoMethodOutcome> idToPersistedOutcome, IIdType nextResourceId, DaoMethodOutcome outcome, private void handleTransactionCreateOrUpdateOutcome(Map<IIdType, IIdType> idSubstitutions, Map<IIdType, DaoMethodOutcome> idToPersistedOutcome,
IBase newEntry, String theResourceType, IBaseResource theRes, RequestDetails theRequestDetails) { IIdType nextResourceId, DaoMethodOutcome outcome,
IBase newEntry, String theResourceType,
IBaseResource theRes, RequestDetails theRequestDetails) {
IIdType newId = outcome.getId().toUnqualified(); IIdType newId = outcome.getId().toUnqualified();
IIdType resourceId = isPlaceholder(nextResourceId) ? nextResourceId : nextResourceId.toUnqualifiedVersionless(); IIdType resourceId = isPlaceholder(nextResourceId) ? nextResourceId : nextResourceId.toUnqualifiedVersionless();
if (newId.equals(resourceId) == false) { if (newId.equals(resourceId) == false) {
@ -267,7 +281,9 @@ public abstract class BaseTransactionProcessor {
idSubstitutions.put(id, newId); idSubstitutions.put(id, newId);
} }
} }
idToPersistedOutcome.put(newId, outcome);
populateIdToPersistedOutcomeMap(idToPersistedOutcome, newId, outcome);
if (outcome.getCreated()) { if (outcome.getCreated()) {
myVersionAdapter.setResponseStatus(newEntry, toStatusString(Constants.STATUS_HTTP_201_CREATED)); myVersionAdapter.setResponseStatus(newEntry, toStatusString(Constants.STATUS_HTTP_201_CREATED));
} else { } else {
@ -291,6 +307,21 @@ public abstract class BaseTransactionProcessor {
} }
/**
 * Populates an entry in idToPersistedOutcome.
 * Stores whatever outcome is sent, unless the key already exists, in which case the existing instance is only
 * replaced if the incoming one is non-lazy. This makes later evaluation easier, as we _know_ we need access to these.
 */
private void populateIdToPersistedOutcomeMap(Map<IIdType, DaoMethodOutcome> idToPersistedOutcome, IIdType newId, DaoMethodOutcome outcome) {
//Prefer real method outcomes over lazy ones.
if (idToPersistedOutcome.containsKey(newId)) {
if (!(outcome instanceof LazyDaoMethodOutcome)) {
idToPersistedOutcome.put(newId, outcome);
}
} else {
idToPersistedOutcome.put(newId, outcome);
}
}
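
// A small sketch of the "prefer real outcomes over lazy ones" rule above. Outcome,
// RealOutcome and LazyOutcome are hypothetical stand-ins for DaoMethodOutcome and
// LazyDaoMethodOutcome; they are not HAPI types.
import java.util.HashMap;
import java.util.Map;

class OutcomeMapSketch {
	interface Outcome {}
	static final class RealOutcome implements Outcome {}
	static final class LazyOutcome implements Outcome {}

	static void put(Map<String, Outcome> map, String id, Outcome outcome) {
		// Only overwrite an existing entry when the incoming outcome is non-lazy.
		if (!map.containsKey(id) || !(outcome instanceof LazyOutcome)) {
			map.put(id, outcome);
		}
	}

	public static void main(String[] args) {
		Map<String, Outcome> map = new HashMap<>();
		put(map, "Patient/1", new LazyOutcome());
		put(map, "Patient/1", new RealOutcome()); // replaces the lazy entry
		put(map, "Patient/1", new LazyOutcome()); // ignored; a real outcome is already present
		System.out.println(map.get("Patient/1").getClass().getSimpleName()); // RealOutcome
	}
}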
private Date getLastModified(IBaseResource theRes) { private Date getLastModified(IBaseResource theRes) {
return theRes.getMeta().getLastUpdated(); return theRes.getMeta().getLastUpdated();
} }
@ -359,8 +390,8 @@ public abstract class BaseTransactionProcessor {
IBase nextRequestEntry = null; IBase nextRequestEntry = null;
for (int i=0; i<requestEntriesSize; i++ ) { for (int i=0; i<requestEntriesSize; i++ ) {
nextRequestEntry = requestEntries.get(i); nextRequestEntry = requestEntries.get(i);
BundleTask bundleTask = new BundleTask(completionLatch, theRequestDetails, responseMap, i, nextRequestEntry, theNestedMode); RetriableBundleTask retriableBundleTask = new RetriableBundleTask(completionLatch, theRequestDetails, responseMap, i, nextRequestEntry, theNestedMode);
getTaskExecutor().execute(bundleTask); getTaskExecutor().execute(retriableBundleTask);
} }
// waiting for all tasks to be completed // waiting for all tasks to be completed
@ -394,7 +425,8 @@ public abstract class BaseTransactionProcessor {
myHapiTransactionService = theHapiTransactionService; myHapiTransactionService = theHapiTransactionService;
} }
private IBaseBundle processTransaction(final RequestDetails theRequestDetails, final IBaseBundle theRequest, final String theActionName, boolean theNestedMode) { private IBaseBundle processTransaction(final RequestDetails theRequestDetails, final IBaseBundle theRequest,
final String theActionName, boolean theNestedMode) {
validateDependencies(); validateDependencies();
String transactionType = myVersionAdapter.getBundleType(theRequest); String transactionType = myVersionAdapter.getBundleType(theRequest);
@ -412,7 +444,8 @@ public abstract class BaseTransactionProcessor {
throw new InvalidRequestException("Unable to process transaction where incoming Bundle.type = " + transactionType); throw new InvalidRequestException("Unable to process transaction where incoming Bundle.type = " + transactionType);
} }
int numberOfEntries = myVersionAdapter.getEntries(theRequest).size(); List<IBase> requestEntries = myVersionAdapter.getEntries(theRequest);
int numberOfEntries = requestEntries.size();
if (myDaoConfig.getMaximumTransactionBundleSize() != null && numberOfEntries > myDaoConfig.getMaximumTransactionBundleSize()) { if (myDaoConfig.getMaximumTransactionBundleSize() != null && numberOfEntries > myDaoConfig.getMaximumTransactionBundleSize()) {
throw new PayloadTooLargeException("Transaction Bundle Too large. Transaction bundle contains " + throw new PayloadTooLargeException("Transaction Bundle Too large. Transaction bundle contains " +
@ -425,8 +458,6 @@ public abstract class BaseTransactionProcessor {
final TransactionDetails transactionDetails = new TransactionDetails(); final TransactionDetails transactionDetails = new TransactionDetails();
final StopWatch transactionStopWatch = new StopWatch(); final StopWatch transactionStopWatch = new StopWatch();
List<IBase> requestEntries = myVersionAdapter.getEntries(theRequest);
// Do all entries have a verb? // Do all entries have a verb?
for (int i = 0; i < numberOfEntries; i++) { for (int i = 0; i < numberOfEntries; i++) {
IBase nextReqEntry = requestEntries.get(i); IBase nextReqEntry = requestEntries.get(i);
@ -450,10 +481,11 @@ public abstract class BaseTransactionProcessor {
List<IBase> getEntries = new ArrayList<>(); List<IBase> getEntries = new ArrayList<>();
final IdentityHashMap<IBase, Integer> originalRequestOrder = new IdentityHashMap<>(); final IdentityHashMap<IBase, Integer> originalRequestOrder = new IdentityHashMap<>();
for (int i = 0; i < requestEntries.size(); i++) { for (int i = 0; i < requestEntries.size(); i++) {
originalRequestOrder.put(requestEntries.get(i), i); IBase requestEntry = requestEntries.get(i);
originalRequestOrder.put(requestEntry, i);
myVersionAdapter.addEntry(response); myVersionAdapter.addEntry(response);
if (myVersionAdapter.getEntryRequestVerb(myContext, requestEntries.get(i)).equals("GET")) { if (myVersionAdapter.getEntryRequestVerb(myContext, requestEntry).equals("GET")) {
getEntries.add(requestEntries.get(i)); getEntries.add(requestEntry);
} }
} }
@ -472,16 +504,43 @@ public abstract class BaseTransactionProcessor {
} }
entries.sort(new TransactionSorter(placeholderIds)); entries.sort(new TransactionSorter(placeholderIds));
doTransactionWriteOperations(theRequestDetails, theActionName, transactionDetails, transactionStopWatch, response, originalRequestOrder, entries); // perform all writes
prepareThenExecuteTransactionWriteOperations(theRequestDetails, theActionName,
transactionDetails, transactionStopWatch,
response, originalRequestOrder, entries);
// perform all gets
// (we do these last so that the gets happen on the final state of the DB;
// see above note)
doTransactionReadOperations(theRequestDetails, response,
getEntries, originalRequestOrder,
transactionStopWatch, theNestedMode);
// Interceptor broadcast: JPA_PERFTRACE_INFO
if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequestDetails)) {
String taskDurations = transactionStopWatch.formatTaskDurations();
StorageProcessingMessage message = new StorageProcessingMessage();
message.setMessage("Transaction timing:\n" + taskDurations);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(StorageProcessingMessage.class, message);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_INFO, params);
}
return response;
}
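
// A toy sketch of the ordering above: all write entries are applied first, then the
// GET entries are evaluated, so reads observe the final state produced by the bundle.
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class WriteThenReadSketch {
	public static void main(String[] args) {
		Map<String, String> store = new HashMap<>();
		List<String[]> entries = List.of(
			new String[]{"GET", "Patient/1"},
			new String[]{"PUT", "Patient/1"});

		// pass 1: perform all writes
		for (String[] entry : entries) {
			if ("PUT".equals(entry[0])) {
				store.put(entry[1], "stored");
			}
		}
		// pass 2: perform all gets against the final state
		for (String[] entry : entries) {
			if ("GET".equals(entry[0])) {
				System.out.println(entry[1] + " -> " + store.get(entry[1])); // Patient/1 -> stored
			}
		}
	}
}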
private void doTransactionReadOperations(final RequestDetails theRequestDetails, IBaseBundle theResponse,
List<IBase> theGetEntries, IdentityHashMap<IBase, Integer> theOriginalRequestOrder,
StopWatch theTransactionStopWatch, boolean theNestedMode) {
if (theGetEntries.size() > 0) {
theTransactionStopWatch.startTask("Process " + theGetEntries.size() + " GET entries");
/* /*
* Loop through the request and process any entries of type GET * Loop through the request and process any entries of type GET
*/ */
if (getEntries.size() > 0) { for (IBase nextReqEntry : theGetEntries) {
transactionStopWatch.startTask("Process " + getEntries.size() + " GET entries");
}
for (IBase nextReqEntry : getEntries) {
if (theNestedMode) { if (theNestedMode) {
throw new InvalidRequestException("Can not invoke read operation on nested transaction"); throw new InvalidRequestException("Can not invoke read operation on nested transaction");
} }
@ -491,8 +550,8 @@ public abstract class BaseTransactionProcessor {
} }
ServletRequestDetails srd = (ServletRequestDetails) theRequestDetails; ServletRequestDetails srd = (ServletRequestDetails) theRequestDetails;
Integer originalOrder = originalRequestOrder.get(nextReqEntry); Integer originalOrder = theOriginalRequestOrder.get(nextReqEntry);
IBase nextRespEntry = (IBase) myVersionAdapter.getEntries(response).get(originalOrder); IBase nextRespEntry = (IBase) myVersionAdapter.getEntries(theResponse).get(originalOrder);
ArrayListMultimap<String, String> paramValues = ArrayListMultimap.create(); ArrayListMultimap<String, String> paramValues = ArrayListMultimap.create();
@ -519,7 +578,6 @@ public abstract class BaseTransactionProcessor {
Validate.isTrue(method instanceof BaseResourceReturningMethodBinding, "Unable to handle GET {}", url); Validate.isTrue(method instanceof BaseResourceReturningMethodBinding, "Unable to handle GET {}", url);
try { try {
BaseResourceReturningMethodBinding methodBinding = (BaseResourceReturningMethodBinding) method; BaseResourceReturningMethodBinding methodBinding = (BaseResourceReturningMethodBinding) method;
requestDetails.setRestOperationType(methodBinding.getRestOperationType()); requestDetails.setRestOperationType(methodBinding.getRestOperationType());
@ -536,23 +594,9 @@ public abstract class BaseTransactionProcessor {
myVersionAdapter.setResponseStatus(nextRespEntry, toStatusString(e.getStatusCode())); myVersionAdapter.setResponseStatus(nextRespEntry, toStatusString(e.getStatusCode()));
populateEntryWithOperationOutcome(e, nextRespEntry); populateEntryWithOperationOutcome(e, nextRespEntry);
} }
} }
transactionStopWatch.endCurrentTask(); theTransactionStopWatch.endCurrentTask();
// Interceptor broadcast: JPA_PERFTRACE_INFO
if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequestDetails)) {
String taskDurations = transactionStopWatch.formatTaskDurations();
StorageProcessingMessage message = new StorageProcessingMessage();
message.setMessage("Transaction timing:\n" + taskDurations);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(StorageProcessingMessage.class, message);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_INFO, params);
} }
return response;
} }
/** /**
@ -564,13 +608,69 @@ public abstract class BaseTransactionProcessor {
* heavy load with lots of concurrent transactions using all available * heavy load with lots of concurrent transactions using all available
* database connections. * database connections.
*/ */
private void doTransactionWriteOperations(RequestDetails theRequestDetails, String theActionName, TransactionDetails theTransactionDetails, StopWatch theTransactionStopWatch, IBaseBundle theResponse, IdentityHashMap<IBase, Integer> theOriginalRequestOrder, List<IBase> theEntries) { private void prepareThenExecuteTransactionWriteOperations(RequestDetails theRequestDetails, String theActionName,
TransactionWriteOperationsDetails writeOperationsDetails = null; TransactionDetails theTransactionDetails, StopWatch theTransactionStopWatch,
if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_PRE, myInterceptorBroadcaster, theRequestDetails) || IBaseBundle theResponse, IdentityHashMap<IBase, Integer> theOriginalRequestOrder,
CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_POST, myInterceptorBroadcaster, theRequestDetails)) { List<IBase> theEntries) {
TransactionWriteOperationsDetails writeOperationsDetails = null;
if (haveWriteOperationsHooks(theRequestDetails)) {
writeOperationsDetails = buildWriteOperationsDetails(theEntries);
callWriteOperationsHook(Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_PRE, theRequestDetails, theTransactionDetails, writeOperationsDetails);
}
TransactionCallback<Map<IBase, IIdType>> txCallback = status -> {
final Set<IIdType> allIds = new LinkedHashSet<>();
final Map<IIdType, IIdType> idSubstitutions = new HashMap<>();
final Map<IIdType, DaoMethodOutcome> idToPersistedOutcome = new HashMap<>();
Map<IBase, IIdType> retVal = doTransactionWriteOperations(theRequestDetails, theActionName,
theTransactionDetails, allIds,
idSubstitutions, idToPersistedOutcome,
theResponse, theOriginalRequestOrder,
theEntries, theTransactionStopWatch);
theTransactionStopWatch.startTask("Commit writes to database");
return retVal;
};
Map<IBase, IIdType> entriesToProcess;
try {
entriesToProcess = myHapiTransactionService.execute(theRequestDetails, theTransactionDetails, txCallback);
} finally {
if (haveWriteOperationsHooks(theRequestDetails)) {
callWriteOperationsHook(Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_POST, theRequestDetails, theTransactionDetails, writeOperationsDetails);
}
}
theTransactionStopWatch.endCurrentTask();
for (Map.Entry<IBase, IIdType> nextEntry : entriesToProcess.entrySet()) {
String responseLocation = nextEntry.getValue().toUnqualified().getValue();
String responseEtag = nextEntry.getValue().getVersionIdPart();
myVersionAdapter.setResponseLocation(nextEntry.getKey(), responseLocation);
myVersionAdapter.setResponseETag(nextEntry.getKey(), responseEtag);
}
}
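
// A sketch of the hook-wrapping shape above: the PRE hook fires before the transactional
// work and the POST hook fires in a finally block, so it runs even if the work throws.
// Runnable/Supplier stand in for the interceptor broadcaster and transaction service.
import java.util.function.Supplier;

class HookWrappingSketch {
	static <T> T executeWithHooks(boolean haveHooks, Runnable preHook, Runnable postHook, Supplier<T> work) {
		if (haveHooks) {
			preHook.run();
		}
		try {
			return work.get();
		} finally {
			if (haveHooks) {
				postHook.run();
			}
		}
	}

	public static void main(String[] args) {
		String result = executeWithHooks(true,
			() -> System.out.println("WRITE_OPERATIONS_PRE"),
			() -> System.out.println("WRITE_OPERATIONS_POST"),
			() -> "committed");
		System.out.println(result);
	}
}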
private boolean haveWriteOperationsHooks(RequestDetails theRequestDetails) {
return CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_PRE, myInterceptorBroadcaster, theRequestDetails) ||
CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_POST, myInterceptorBroadcaster, theRequestDetails);
}
private void callWriteOperationsHook(Pointcut thePointcut, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, TransactionWriteOperationsDetails theWriteOperationsDetails) {
HookParams params = new HookParams()
.add(TransactionDetails.class, theTransactionDetails)
.add(TransactionWriteOperationsDetails.class, theWriteOperationsDetails);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, thePointcut, params);
}
private TransactionWriteOperationsDetails buildWriteOperationsDetails(List<IBase> theEntries) {
TransactionWriteOperationsDetails writeOperationsDetails;
List<String> updateRequestUrls = new ArrayList<>(); List<String> updateRequestUrls = new ArrayList<>();
List<String> conditionalCreateRequestUrls = new ArrayList<>(); List<String> conditionalCreateRequestUrls = new ArrayList<>();
// Extract the update and conditional-create request URLs from each entry
for (IBase nextEntry : theEntries) { for (IBase nextEntry : theEntries) {
String method = myVersionAdapter.getEntryRequestVerb(myContext, nextEntry); String method = myVersionAdapter.getEntryRequestVerb(myContext, nextEntry);
if ("PUT".equals(method)) { if ("PUT".equals(method)) {
@ -589,43 +689,7 @@ public abstract class BaseTransactionProcessor {
writeOperationsDetails = new TransactionWriteOperationsDetails(); writeOperationsDetails = new TransactionWriteOperationsDetails();
writeOperationsDetails.setUpdateRequestUrls(updateRequestUrls); writeOperationsDetails.setUpdateRequestUrls(updateRequestUrls);
writeOperationsDetails.setConditionalCreateRequestUrls(conditionalCreateRequestUrls); writeOperationsDetails.setConditionalCreateRequestUrls(conditionalCreateRequestUrls);
HookParams params = new HookParams() return writeOperationsDetails;
.add(TransactionDetails.class, theTransactionDetails)
.add(TransactionWriteOperationsDetails.class, writeOperationsDetails);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_PRE, params);
}
TransactionCallback<Map<IBase, IIdType>> txCallback = status -> {
final Set<IIdType> allIds = new LinkedHashSet<>();
final Map<IIdType, IIdType> idSubstitutions = new HashMap<>();
final Map<IIdType, DaoMethodOutcome> idToPersistedOutcome = new HashMap<>();
Map<IBase, IIdType> retVal = doTransactionWriteOperations(theRequestDetails, theActionName, theTransactionDetails, allIds, idSubstitutions, idToPersistedOutcome, theResponse, theOriginalRequestOrder, theEntries, theTransactionStopWatch);
theTransactionStopWatch.startTask("Commit writes to database");
return retVal;
};
Map<IBase, IIdType> entriesToProcess;
try {
entriesToProcess = myHapiTransactionService.execute(theRequestDetails, theTransactionDetails, txCallback);
} finally {
if (writeOperationsDetails != null) {
HookParams params = new HookParams()
.add(TransactionDetails.class, theTransactionDetails)
.add(TransactionWriteOperationsDetails.class, writeOperationsDetails);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_POST, params);
}
}
theTransactionStopWatch.endCurrentTask();
for (Map.Entry<IBase, IIdType> nextEntry : entriesToProcess.entrySet()) {
String responseLocation = nextEntry.getValue().toUnqualified().getValue();
String responseEtag = nextEntry.getValue().getVersionIdPart();
myVersionAdapter.setResponseLocation(nextEntry.getKey(), responseLocation);
myVersionAdapter.setResponseETag(nextEntry.getKey(), responseEtag);
}
} }
private boolean isValidVerb(String theVerb) { private boolean isValidVerb(String theVerb) {
@ -664,34 +728,19 @@ public abstract class BaseTransactionProcessor {
myModelConfig = theModelConfig; myModelConfig = theModelConfig;
} }
protected Map<IBase, IIdType> doTransactionWriteOperations(final RequestDetails theRequest, String theActionName, TransactionDetails theTransactionDetails, Set<IIdType> theAllIds, /**
Map<IIdType, IIdType> theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome, IBaseBundle theResponse, IdentityHashMap<IBase, Integer> theOriginalRequestOrder, List<IBase> theEntries, StopWatch theTransactionStopWatch) { * Searches for duplicate conditional creates and consolidates them.
*
theTransactionDetails.beginAcceptingDeferredInterceptorBroadcasts( * @param theEntries
Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED,
Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED,
Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED
);
try {
Set<String> deletedResources = new HashSet<>();
DeleteConflictList deleteConflicts = new DeleteConflictList();
Map<IBase, IIdType> entriesToProcess = new IdentityHashMap<>();
Set<IIdType> nonUpdatedEntities = new HashSet<>();
Set<IBasePersistedResource> updatedEntities = new HashSet<>();
List<IBaseResource> updatedResources = new ArrayList<>();
Map<String, Class<? extends IBaseResource>> conditionalRequestUrls = new HashMap<>();
/*
* Look for duplicate conditional creates and consolidate them
*/ */
private void consolidateDuplicateConditionals(List<IBase> theEntries) {
final HashMap<String, String> keyToUuid = new HashMap<>(); final HashMap<String, String> keyToUuid = new HashMap<>();
for (int index = 0, originalIndex = 0; index < theEntries.size(); index++, originalIndex++) { for (int index = 0, originalIndex = 0; index < theEntries.size(); index++, originalIndex++) {
IBase nextReqEntry = theEntries.get(index); IBase nextReqEntry = theEntries.get(index);
IBaseResource resource = myVersionAdapter.getResource(nextReqEntry); IBaseResource resource = myVersionAdapter.getResource(nextReqEntry);
if (resource != null) { if (resource != null) {
String verb = myVersionAdapter.getEntryRequestVerb(myContext, nextReqEntry); String verb = myVersionAdapter.getEntryRequestVerb(myContext, nextReqEntry);
String entryUrl = myVersionAdapter.getFullUrl(nextReqEntry); String entryFullUrl = myVersionAdapter.getFullUrl(nextReqEntry);
String requestUrl = myVersionAdapter.getEntryRequestUrl(nextReqEntry); String requestUrl = myVersionAdapter.getEntryRequestUrl(nextReqEntry);
String ifNoneExist = myVersionAdapter.getEntryRequestIfNoneExist(nextReqEntry); String ifNoneExist = myVersionAdapter.getEntryRequestIfNoneExist(nextReqEntry);
String key = verb + "|" + requestUrl + "|" + ifNoneExist; String key = verb + "|" + requestUrl + "|" + ifNoneExist;
@ -699,7 +748,7 @@ public abstract class BaseTransactionProcessor {
// Conditional UPDATE // Conditional UPDATE
boolean consolidateEntry = false; boolean consolidateEntry = false;
if ("PUT".equals(verb)) { if ("PUT".equals(verb)) {
if (isNotBlank(entryUrl) && isNotBlank(requestUrl)) { if (isNotBlank(entryFullUrl) && isNotBlank(requestUrl)) {
int questionMarkIndex = requestUrl.indexOf('?'); int questionMarkIndex = requestUrl.indexOf('?');
if (questionMarkIndex >= 0 && requestUrl.length() > (questionMarkIndex + 1)) { if (questionMarkIndex >= 0 && requestUrl.length() > (questionMarkIndex + 1)) {
consolidateEntry = true; consolidateEntry = true;
@ -709,8 +758,8 @@ public abstract class BaseTransactionProcessor {
// Conditional CREATE // Conditional CREATE
if ("POST".equals(verb)) { if ("POST".equals(verb)) {
if (isNotBlank(entryUrl) && isNotBlank(requestUrl) && isNotBlank(ifNoneExist)) { if (isNotBlank(entryFullUrl) && isNotBlank(requestUrl) && isNotBlank(ifNoneExist)) {
if (!entryUrl.equals(requestUrl)) { if (!entryFullUrl.equals(requestUrl)) {
consolidateEntry = true; consolidateEntry = true;
} }
} }
@ -718,12 +767,24 @@ public abstract class BaseTransactionProcessor {
if (consolidateEntry) { if (consolidateEntry) {
if (!keyToUuid.containsKey(key)) { if (!keyToUuid.containsKey(key)) {
keyToUuid.put(key, entryUrl); keyToUuid.put(key, entryFullUrl);
} else { } else {
ourLog.info("Discarding transaction bundle entry {} as it contained a duplicate conditional {}", originalIndex, verb); ourLog.info("Discarding transaction bundle entry {} as it contained a duplicate conditional {}", originalIndex, verb);
theEntries.remove(index); theEntries.remove(index);
index--; index--;
String existingUuid = keyToUuid.get(key); String existingUuid = keyToUuid.get(key);
replaceReferencesInEntriesWithConsolidatedUUID(theEntries, entryFullUrl, existingUuid);
}
}
}
}
}
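
// A simplified sketch of the consolidation above: entries sharing the same
// verb|requestUrl|ifNoneExist key collapse down to the first one seen, and the
// discarded entry's fullUrl is remembered so references to it can be rewritten.
// Entry is a hypothetical stand-in for a bundle entry; the real code also checks
// that an entry actually is a conditional create/update before consolidating.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class ConsolidationSketch {
	record Entry(String verb, String fullUrl, String requestUrl, String ifNoneExist) {}

	static Map<String, String> consolidate(List<Entry> entries) {
		Map<String, String> keyToFullUrl = new HashMap<>();
		Map<String, String> discardedToKept = new HashMap<>();
		entries.removeIf(entry -> {
			String key = entry.verb() + "|" + entry.requestUrl() + "|" + entry.ifNoneExist();
			String kept = keyToFullUrl.putIfAbsent(key, entry.fullUrl());
			if (kept != null) {
				// duplicate conditional: drop it and remember the surviving fullUrl
				discardedToKept.put(entry.fullUrl(), kept);
				return true;
			}
			return false;
		});
		return discardedToKept;
	}

	public static void main(String[] args) {
		List<Entry> entries = new ArrayList<>(List.of(
			new Entry("POST", "urn:uuid:a", "Practitioner", "identifier=123"),
			new Entry("POST", "urn:uuid:b", "Practitioner", "identifier=123")));
		Map<String, String> remapped = consolidate(entries);
		System.out.println(entries.size() + " " + remapped); // 1 {urn:uuid:b=urn:uuid:a}
	}
}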
/**
* Iterates over all entries; any references that match the fullUrl of the entry that was consolidated out
* are replaced with the new consolidated UUID.
*/
private void replaceReferencesInEntriesWithConsolidatedUUID(List<IBase> theEntries, String theEntryFullUrl, String existingUuid) {
for (IBase nextEntry : theEntries) { for (IBase nextEntry : theEntries) {
IBaseResource nextResource = myVersionAdapter.getResource(nextEntry); IBaseResource nextResource = myVersionAdapter.getResource(nextEntry);
for (IBaseReference nextReference : myContext.newTerser().getAllPopulatedChildElementsOfType(nextResource, IBaseReference.class)) { for (IBaseReference nextReference : myContext.newTerser().getAllPopulatedChildElementsOfType(nextResource, IBaseReference.class)) {
@ -733,36 +794,29 @@ public abstract class BaseTransactionProcessor {
if (isBlank(nextReferenceId) && nextReference.getResource() != null) { if (isBlank(nextReferenceId) && nextReference.getResource() != null) {
nextReferenceId = nextReference.getResource().getIdElement().getValue(); nextReferenceId = nextReference.getResource().getIdElement().getValue();
} }
if (entryUrl.equals(nextReferenceId)) { if (theEntryFullUrl.equals(nextReferenceId)) {
nextReference.setReference(existingUuid); nextReference.setReference(existingUuid);
nextReference.setResource(null); nextReference.setResource(null);
} }
} }
} }
} }
}
}
}
/**
/* * Retrieves the next resource id (IIdType) from the base resource and next request entry.
* Loop through the request and process any entries of type * @param theBaseResource - base resource
* PUT, POST or DELETE * @param theNextReqEntry - next request entry
* @param theAllIds - set of all IIdType values
* @return the resolved resource id, or null if the entry carries no resource
*/ */
for (int i = 0; i < theEntries.size(); i++) { private IIdType getNextResourceIdFromBaseResource(IBaseResource theBaseResource,
IBase theNextReqEntry,
if (i % 250 == 0) { Set<IIdType> theAllIds) {
ourLog.debug("Processed {} non-GET entries out of {} in transaction", i, theEntries.size());
}
IBase nextReqEntry = theEntries.get(i);
IBaseResource res = myVersionAdapter.getResource(nextReqEntry);
IIdType nextResourceId = null; IIdType nextResourceId = null;
if (res != null) { if (theBaseResource != null) {
nextResourceId = theBaseResource.getIdElement();
nextResourceId = res.getIdElement(); String fullUrl = myVersionAdapter.getFullUrl(theNextReqEntry);
String fullUrl = myVersionAdapter.getFullUrl(nextReqEntry);
if (isNotBlank(fullUrl)) { if (isNotBlank(fullUrl)) {
IIdType fullUrlIdType = newIdType(fullUrl); IIdType fullUrlIdType = newIdType(fullUrl);
if (isPlaceholder(fullUrlIdType)) { if (isPlaceholder(fullUrlIdType)) {
@ -777,8 +831,8 @@ public abstract class BaseTransactionProcessor {
} }
if (nextResourceId.hasIdPart() && !nextResourceId.hasResourceType() && !isPlaceholder(nextResourceId)) { if (nextResourceId.hasIdPart() && !nextResourceId.hasResourceType() && !isPlaceholder(nextResourceId)) {
nextResourceId = newIdType(toResourceName(res.getClass()), nextResourceId.getIdPart()); nextResourceId = newIdType(toResourceName(theBaseResource.getClass()), nextResourceId.getIdPart());
res.setId(nextResourceId); theBaseResource.setId(nextResourceId);
} }
/* /*
@ -797,6 +851,52 @@ public abstract class BaseTransactionProcessor {
} }
return nextResourceId;
}
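
// A sketch of the id-qualification step above: an id with an id part but no resource
// type, and which is not a urn: placeholder, gets qualified with the resource type.
class IdQualificationSketch {
	static String qualify(String id, String resourceType) {
		boolean isPlaceholder = id.startsWith("urn:");
		boolean hasResourceType = id.contains("/");
		if (!isPlaceholder && !hasResourceType) {
			return resourceType + "/" + id;
		}
		return id;
	}

	public static void main(String[] args) {
		System.out.println(qualify("123", "Patient")); // Patient/123
		System.out.println(qualify("urn:uuid:abc", "Patient")); // unchanged
		System.out.println(qualify("Patient/123", "Patient")); // unchanged
	}
}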
/**
 * Performs the transaction write operations (the PUT, POST, DELETE and PATCH entries),
 * after the pre-hooks have been called.
 */
protected Map<IBase, IIdType> doTransactionWriteOperations(final RequestDetails theRequest, String theActionName,
TransactionDetails theTransactionDetails, Set<IIdType> theAllIds,
Map<IIdType, IIdType> theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome,
IBaseBundle theResponse, IdentityHashMap<IBase, Integer> theOriginalRequestOrder,
List<IBase> theEntries, StopWatch theTransactionStopWatch) {
// During a transaction, we don't execute hooks; instead, we execute them all post-transaction.
theTransactionDetails.beginAcceptingDeferredInterceptorBroadcasts(
Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED,
Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED,
Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED
);
try {
Set<String> deletedResources = new HashSet<>();
DeleteConflictList deleteConflicts = new DeleteConflictList();
Map<IBase, IIdType> entriesToProcess = new IdentityHashMap<>();
Set<IIdType> nonUpdatedEntities = new HashSet<>();
Set<IBasePersistedResource> updatedEntities = new HashSet<>();
Map<String, IIdType> conditionalUrlToIdMap = new HashMap<>();
List<IBaseResource> updatedResources = new ArrayList<>();
Map<String, Class<? extends IBaseResource>> conditionalRequestUrls = new HashMap<>();
/*
* Look for duplicate conditional creates and consolidate them
*/
consolidateDuplicateConditionals(theEntries);
/*
* Loop through the request and process any entries of type
* PUT, POST or DELETE
*/
for (int i = 0; i < theEntries.size(); i++) {
if (i % 250 == 0) {
ourLog.debug("Processed {} non-GET entries out of {} in transaction", i, theEntries.size());
}
IBase nextReqEntry = theEntries.get(i);
IBaseResource res = myVersionAdapter.getResource(nextReqEntry);
IIdType nextResourceId = getNextResourceIdFromBaseResource(res, nextReqEntry, theAllIds);
String verb = myVersionAdapter.getEntryRequestVerb(myContext, nextReqEntry); String verb = myVersionAdapter.getEntryRequestVerb(myContext, nextReqEntry);
String resourceType = res != null ? myContext.getResourceType(res) : null; String resourceType = res != null ? myContext.getResourceType(res) : null;
Integer order = theOriginalRequestOrder.get(nextReqEntry); Integer order = theOriginalRequestOrder.get(nextReqEntry);
@ -815,6 +915,7 @@ public abstract class BaseTransactionProcessor {
String matchUrl = myVersionAdapter.getEntryRequestIfNoneExist(nextReqEntry); String matchUrl = myVersionAdapter.getEntryRequestIfNoneExist(nextReqEntry);
matchUrl = performIdSubstitutionsInMatchUrl(theIdSubstitutions, matchUrl); matchUrl = performIdSubstitutionsInMatchUrl(theIdSubstitutions, matchUrl);
outcome = resourceDao.create(res, matchUrl, false, theTransactionDetails, theRequest); outcome = resourceDao.create(res, matchUrl, false, theTransactionDetails, theRequest);
setConditionalUrlToBeValidatedLater(conditionalUrlToIdMap, matchUrl, outcome.getId());
res.setId(outcome.getId()); res.setId(outcome.getId());
if (nextResourceId != null) { if (nextResourceId != null) {
handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId, outcome, nextRespEntry, resourceType, res, theRequest); handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId, outcome, nextRespEntry, resourceType, res, theRequest);
@ -849,6 +950,7 @@ public abstract class BaseTransactionProcessor {
String matchUrl = parts.getResourceType() + '?' + parts.getParams(); String matchUrl = parts.getResourceType() + '?' + parts.getParams();
matchUrl = performIdSubstitutionsInMatchUrl(theIdSubstitutions, matchUrl); matchUrl = performIdSubstitutionsInMatchUrl(theIdSubstitutions, matchUrl);
DeleteMethodOutcome deleteOutcome = dao.deleteByUrl(matchUrl, deleteConflicts, theRequest); DeleteMethodOutcome deleteOutcome = dao.deleteByUrl(matchUrl, deleteConflicts, theRequest);
setConditionalUrlToBeValidatedLater(conditionalUrlToIdMap, matchUrl, deleteOutcome.getId());
List<ResourceTable> allDeleted = deleteOutcome.getDeletedEntities(); List<ResourceTable> allDeleted = deleteOutcome.getDeletedEntities();
for (ResourceTable deleted : allDeleted) { for (ResourceTable deleted : allDeleted) {
deletedResources.add(deleted.getIdDt().toUnqualifiedVersionless().getValueAsString()); deletedResources.add(deleted.getIdDt().toUnqualifiedVersionless().getValueAsString());
@ -891,6 +993,7 @@ public abstract class BaseTransactionProcessor {
} }
matchUrl = performIdSubstitutionsInMatchUrl(theIdSubstitutions, matchUrl); matchUrl = performIdSubstitutionsInMatchUrl(theIdSubstitutions, matchUrl);
outcome = resourceDao.update(res, matchUrl, false, false, theRequest, theTransactionDetails); outcome = resourceDao.update(res, matchUrl, false, false, theRequest, theTransactionDetails);
setConditionalUrlToBeValidatedLater(conditionalUrlToIdMap, matchUrl, outcome.getId());
if (Boolean.TRUE.equals(outcome.getCreated())) { if (Boolean.TRUE.equals(outcome.getCreated())) {
conditionalRequestUrls.put(matchUrl, res.getClass()); conditionalRequestUrls.put(matchUrl, res.getClass());
} }
@ -904,7 +1007,8 @@ public abstract class BaseTransactionProcessor {
} }
} }
handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId, outcome, nextRespEntry, resourceType, res, theRequest); handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId,
outcome, nextRespEntry, resourceType, res, theRequest);
entriesToProcess.put(nextRespEntry, outcome.getId()); entriesToProcess.put(nextRespEntry, outcome.getId());
break; break;
} }
@ -948,6 +1052,7 @@ public abstract class BaseTransactionProcessor {
IFhirResourceDao<? extends IBaseResource> dao = toDao(parts, verb, url); IFhirResourceDao<? extends IBaseResource> dao = toDao(parts, verb, url);
IIdType patchId = myContext.getVersion().newIdType().setValue(parts.getResourceId()); IIdType patchId = myContext.getVersion().newIdType().setValue(parts.getResourceId());
DaoMethodOutcome outcome = dao.patch(patchId, matchUrl, patchType, patchBody, patchBodyParameters, theRequest); DaoMethodOutcome outcome = dao.patch(patchId, matchUrl, patchType, patchBody, patchBodyParameters, theRequest);
setConditionalUrlToBeValidatedLater(conditionalUrlToIdMap, matchUrl, outcome.getId());
updatedEntities.add(outcome.getEntity()); updatedEntities.add(outcome.getEntity());
if (outcome.getResource() != null) { if (outcome.getResource() != null) {
updatedResources.add(outcome.getResource()); updatedResources.add(outcome.getResource());
@ -971,52 +1076,24 @@ public abstract class BaseTransactionProcessor {
* was also deleted as a part of this transaction, which is why we check this now at the * was also deleted as a part of this transaction, which is why we check this now at the
* end. * end.
*/ */
for (Iterator<DeleteConflict> iter = deleteConflicts.iterator(); iter.hasNext(); ) { checkForDeleteConflicts(deleteConflicts, deletedResources, updatedResources);
DeleteConflict nextDeleteConflict = iter.next();
/* theIdToPersistedOutcome.entrySet().forEach(idAndOutcome -> {
* If we have a conflict, it means we can't delete Resource/A because theTransactionDetails.addResolvedResourceId(idAndOutcome.getKey(), idAndOutcome.getValue().getPersistentId());
* Resource/B has a reference to it. We'll ignore that conflict though });
* if it turns out we're also deleting Resource/B in this transaction.
*/
if (deletedResources.contains(nextDeleteConflict.getSourceId().toUnqualifiedVersionless().getValue())) {
iter.remove();
continue;
}
/*
* And then, this is kind of a last ditch check. It's also ok to delete
* Resource/A if Resource/B isn't being deleted, but it is being UPDATED
* in this transaction, and the updated version of it has no references
* to Resource/A any more.
*/
String sourceId = nextDeleteConflict.getSourceId().toUnqualifiedVersionless().getValue();
String targetId = nextDeleteConflict.getTargetId().toUnqualifiedVersionless().getValue();
Optional<IBaseResource> updatedSource = updatedResources
.stream()
.filter(t -> sourceId.equals(t.getIdElement().toUnqualifiedVersionless().getValue()))
.findFirst();
if (updatedSource.isPresent()) {
List<ResourceReferenceInfo> referencesInSource = myContext.newTerser().getAllResourceReferences(updatedSource.get());
boolean sourceStillReferencesTarget = referencesInSource
.stream()
.anyMatch(t -> targetId.equals(t.getResourceReference().getReferenceElement().toUnqualifiedVersionless().getValue()));
if (!sourceStillReferencesTarget) {
iter.remove();
}
}
}
DeleteConflictService.validateDeleteConflictsEmptyOrThrowException(myContext, deleteConflicts);
theIdToPersistedOutcome.entrySet().forEach(t -> theTransactionDetails.addResolvedResourceId(t.getKey(), t.getValue().getPersistentId()));
/* /*
* Perform ID substitutions and then index each resource we have saved * Perform ID substitutions and then index each resource we have saved
*/ */
resolveReferencesThenSaveAndIndexResources(theRequest, theTransactionDetails, theIdSubstitutions, theIdToPersistedOutcome, theTransactionStopWatch, entriesToProcess, nonUpdatedEntities, updatedEntities); resolveReferencesThenSaveAndIndexResources(theRequest, theTransactionDetails,
theIdSubstitutions, theIdToPersistedOutcome,
theTransactionStopWatch, entriesToProcess,
nonUpdatedEntities, updatedEntities);
theTransactionStopWatch.endCurrentTask(); theTransactionStopWatch.endCurrentTask();
// flush writes to db
theTransactionStopWatch.startTask("Flush writes to database"); theTransactionStopWatch.startTask("Flush writes to database");
flushSession(theIdToPersistedOutcome); flushSession(theIdToPersistedOutcome);
@ -1032,6 +1109,15 @@ public abstract class BaseTransactionProcessor {
if (!myDaoConfig.isMassIngestionMode()) { if (!myDaoConfig.isMassIngestionMode()) {
validateNoDuplicates(theRequest, theActionName, conditionalRequestUrls, theIdToPersistedOutcome.values()); validateNoDuplicates(theRequest, theActionName, conditionalRequestUrls, theIdToPersistedOutcome.values());
} }
theTransactionStopWatch.endCurrentTask();
if (conditionalUrlToIdMap.size() > 0) {
theTransactionStopWatch.startTask("Check that all conditionally created/updated entities actually match their conditionals.");
}
if (!myDaoConfig.isMassIngestionMode()) {
validateAllInsertsMatchTheirConditionalUrls(theIdToPersistedOutcome, conditionalUrlToIdMap, theRequest);
}
theTransactionStopWatch.endCurrentTask(); theTransactionStopWatch.endCurrentTask();
for (IIdType next : theAllIds) { for (IIdType next : theAllIds) {
@ -1070,6 +1156,86 @@ public abstract class BaseTransactionProcessor {
} }
} }
private void setConditionalUrlToBeValidatedLater(Map<String, IIdType> theConditionalUrlToIdMap, String theMatchUrl, IIdType theId) {
if (!StringUtils.isBlank(theMatchUrl)) {
theConditionalUrlToIdMap.put(theMatchUrl, theId);
}
}
/**
* After transaction processing and resolution of indexes and references, we want to validate that the resources that were stored _actually_
* match the conditional URLs that they were brought in on.
* @param theIdToPersistedOutcome - map of resource id to its persisted outcome
* @param conditionalUrlToIdMap - map of conditional URL to the id of the resource it created or updated
*/
private void validateAllInsertsMatchTheirConditionalUrls(Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome, Map<String, IIdType> conditionalUrlToIdMap, RequestDetails theRequest) {
conditionalUrlToIdMap.entrySet().stream()
.filter(entry -> entry.getKey() != null)
.forEach(entry -> {
String matchUrl = entry.getKey();
IIdType value = entry.getValue();
DaoMethodOutcome daoMethodOutcome = theIdToPersistedOutcome.get(value);
if (daoMethodOutcome != null && !daoMethodOutcome.isNop() && daoMethodOutcome.getResource() != null) {
InMemoryMatchResult match = mySearchParamMatcher.match(matchUrl, daoMethodOutcome.getResource(), theRequest);
if (ourLog.isDebugEnabled()) {
ourLog.debug("Checking conditional URL [{}] against resource with ID [{}]: Supported?:[{}], Matched?:[{}]", matchUrl, value, match.supported(), match.matched());
}
if (match.supported()) {
if (!match.matched()) {
throw new PreconditionFailedException("Invalid conditional URL \"" + matchUrl + "\". The given resource is not matched by this URL.");
}
}
}
});
}
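
// A sketch of the post-processing check above: each stored resource is re-matched
// against the conditional URL it arrived on, and a mismatch fails the transaction.
// The BiPredicate stands in for SearchParamMatcher/InMemoryMatchResult.
import java.util.Map;
import java.util.function.BiPredicate;

class ConditionalUrlValidationSketch {
	static void validate(Map<String, String> conditionalUrlToResource, BiPredicate<String, String> matcher) {
		conditionalUrlToResource.forEach((matchUrl, resource) -> {
			if (!matcher.test(matchUrl, resource)) {
				throw new IllegalStateException(
					"Invalid conditional URL \"" + matchUrl + "\". The given resource is not matched by this URL.");
			}
		});
	}

	public static void main(String[] args) {
		// toy matcher: the resource "matches" when it contains the query value
		BiPredicate<String, String> matcher = (url, resource) -> resource.contains(url.substring(url.indexOf('=') + 1));
		validate(Map.of("Patient?name=smith", "Patient{name=smith}"), matcher); // passes silently
	}
}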
/**
* Checks for any delete conflicts.
* @param theDeleteConflicts - set of delete conflicts
* @param theDeletedResources - set of deleted resources
* @param theUpdatedResources - list of updated resources
*/
private void checkForDeleteConflicts(DeleteConflictList theDeleteConflicts,
Set<String> theDeletedResources,
List<IBaseResource> theUpdatedResources) {
for (Iterator<DeleteConflict> iter = theDeleteConflicts.iterator(); iter.hasNext(); ) {
DeleteConflict nextDeleteConflict = iter.next();
/*
* If we have a conflict, it means we can't delete Resource/A because
* Resource/B has a reference to it. We'll ignore that conflict though
* if it turns out we're also deleting Resource/B in this transaction.
*/
if (theDeletedResources.contains(nextDeleteConflict.getSourceId().toUnqualifiedVersionless().getValue())) {
iter.remove();
continue;
}
/*
* And then, this is kind of a last ditch check. It's also ok to delete
* Resource/A if Resource/B isn't being deleted, but it is being UPDATED
* in this transaction, and the updated version of it has no references
* to Resource/A any more.
*/
String sourceId = nextDeleteConflict.getSourceId().toUnqualifiedVersionless().getValue();
String targetId = nextDeleteConflict.getTargetId().toUnqualifiedVersionless().getValue();
Optional<IBaseResource> updatedSource = theUpdatedResources
.stream()
.filter(t -> sourceId.equals(t.getIdElement().toUnqualifiedVersionless().getValue()))
.findFirst();
if (updatedSource.isPresent()) {
List<ResourceReferenceInfo> referencesInSource = myContext.newTerser().getAllResourceReferences(updatedSource.get());
boolean sourceStillReferencesTarget = referencesInSource
.stream()
.anyMatch(t -> targetId.equals(t.getResourceReference().getReferenceElement().toUnqualifiedVersionless().getValue()));
if (!sourceStillReferencesTarget) {
iter.remove();
}
}
}
DeleteConflictService.validateDeleteConflictsEmptyOrThrowException(myContext, theDeleteConflicts);
}
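
// A sketch of the conflict pruning above: a conflict (source still references target)
// is dropped when the source is itself deleted in the transaction, or when the
// updated version of the source no longer references the target. Conflict is a
// hypothetical stand-in for DeleteConflict.
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

class DeleteConflictPruningSketch {
	record Conflict(String sourceId, String targetId) {}

	static void prune(List<Conflict> conflicts, Set<String> deletedIds, Map<String, Set<String>> updatedSourceRefs) {
		conflicts.removeIf(conflict -> {
			if (deletedIds.contains(conflict.sourceId())) {
				return true; // the referencing resource is deleted too, so no conflict
			}
			Set<String> refs = updatedSourceRefs.get(conflict.sourceId());
			// the source was updated in this transaction and no longer references the target
			return refs != null && !refs.contains(conflict.targetId());
		});
	}

	public static void main(String[] args) {
		List<Conflict> conflicts = new ArrayList<>(List.of(
			new Conflict("Observation/1", "Patient/1"),
			new Conflict("Observation/2", "Patient/1")));
		prune(conflicts, Set.of("Observation/1"), Map.of("Observation/2", Set.of("Patient/9")));
		System.out.println(conflicts); // [] - both conflicts were pruned
	}
}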
/** /**
* This method replaces any placeholder references in the * This method replaces any placeholder references in the
* source transaction Bundle with their actual targets, then stores the resource contents and indexes * source transaction Bundle with their actual targets, then stores the resource contents and indexes
@ -1092,7 +1258,10 @@ public abstract class BaseTransactionProcessor {
* pass because it's too complex to try and insert the auto-versioned references and still * pass because it's too complex to try and insert the auto-versioned references and still
* account for NOPs, so we block NOPs in that pass. * account for NOPs, so we block NOPs in that pass.
*/ */
private void resolveReferencesThenSaveAndIndexResources(RequestDetails theRequest, TransactionDetails theTransactionDetails, Map<IIdType, IIdType> theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome, StopWatch theTransactionStopWatch, Map<IBase, IIdType> entriesToProcess, Set<IIdType> nonUpdatedEntities, Set<IBasePersistedResource> updatedEntities) { private void resolveReferencesThenSaveAndIndexResources(RequestDetails theRequest, TransactionDetails theTransactionDetails,
Map<IIdType, IIdType> theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome,
StopWatch theTransactionStopWatch, Map<IBase, IIdType> entriesToProcess,
Set<IIdType> nonUpdatedEntities, Set<IBasePersistedResource> updatedEntities) {
FhirTerser terser = myContext.newTerser(); FhirTerser terser = myContext.newTerser();
theTransactionStopWatch.startTask("Index " + theIdToPersistedOutcome.size() + " resources"); theTransactionStopWatch.startTask("Index " + theIdToPersistedOutcome.size() + " resources");
IdentityHashMap<DaoMethodOutcome, Set<IBaseReference>> deferredIndexesForAutoVersioning = null; IdentityHashMap<DaoMethodOutcome, Set<IBaseReference>> deferredIndexesForAutoVersioning = null;
@ -1114,8 +1283,15 @@ public abstract class BaseTransactionProcessor {
Set<IBaseReference> referencesToAutoVersion = BaseStorageDao.extractReferencesToAutoVersion(myContext, myModelConfig, nextResource); Set<IBaseReference> referencesToAutoVersion = BaseStorageDao.extractReferencesToAutoVersion(myContext, myModelConfig, nextResource);
if (referencesToAutoVersion.isEmpty()) { if (referencesToAutoVersion.isEmpty()) {
resolveReferencesThenSaveAndIndexResource(theRequest, theTransactionDetails, theIdSubstitutions, theIdToPersistedOutcome, entriesToProcess, nonUpdatedEntities, updatedEntities, terser, nextOutcome, nextResource, referencesToAutoVersion); // no references to autoversion - we can do the resolve and save now
resolveReferencesThenSaveAndIndexResource(theRequest, theTransactionDetails,
theIdSubstitutions, theIdToPersistedOutcome,
entriesToProcess, nonUpdatedEntities,
updatedEntities, terser,
nextOutcome, nextResource,
referencesToAutoVersion); // empty at this point, as checked above
} else { } else {
// we have autoversioned things to defer until later
if (deferredIndexesForAutoVersioning == null) { if (deferredIndexesForAutoVersioning == null) {
deferredIndexesForAutoVersioning = new IdentityHashMap<>(); deferredIndexesForAutoVersioning = new IdentityHashMap<>();
} }
@ -1129,12 +1305,24 @@ public abstract class BaseTransactionProcessor {
DaoMethodOutcome nextOutcome = nextEntry.getKey(); DaoMethodOutcome nextOutcome = nextEntry.getKey();
Set<IBaseReference> referencesToAutoVersion = nextEntry.getValue(); Set<IBaseReference> referencesToAutoVersion = nextEntry.getValue();
IBaseResource nextResource = nextOutcome.getResource(); IBaseResource nextResource = nextOutcome.getResource();
resolveReferencesThenSaveAndIndexResource(theRequest, theTransactionDetails, theIdSubstitutions, theIdToPersistedOutcome, entriesToProcess, nonUpdatedEntities, updatedEntities, terser, nextOutcome, nextResource, referencesToAutoVersion);
resolveReferencesThenSaveAndIndexResource(theRequest, theTransactionDetails,
theIdSubstitutions, theIdToPersistedOutcome,
entriesToProcess, nonUpdatedEntities,
updatedEntities, terser,
nextOutcome, nextResource,
referencesToAutoVersion);
} }
} }
} }
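
// A sketch of the two-phase pass above: resources with no references needing
// auto-versioning are saved and indexed immediately; the rest are deferred to a
// second pass, once the versions they depend on have been assigned.
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;

class DeferredProcessingSketch {
	static List<String> processingOrder(List<String> resources, Predicate<String> needsAutoVersioning) {
		List<String> order = new ArrayList<>();
		List<String> deferred = new ArrayList<>();
		for (String resource : resources) {
			if (needsAutoVersioning.test(resource)) {
				deferred.add(resource); // defer until the first pass has run
			} else {
				order.add(resource); // save and index immediately
			}
		}
		order.addAll(deferred); // second pass: the deferred resources
		return order;
	}

	public static void main(String[] args) {
		System.out.println(processingOrder(
			List.of("Observation/1", "Patient/1", "Observation/2"),
			resource -> resource.startsWith("Observation")));
		// [Patient/1, Observation/1, Observation/2]
	}
}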
private void resolveReferencesThenSaveAndIndexResource(RequestDetails theRequest, TransactionDetails theTransactionDetails, Map<IIdType, IIdType> theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome, Map<IBase, IIdType> entriesToProcess, Set<IIdType> nonUpdatedEntities, Set<IBasePersistedResource> updatedEntities, FhirTerser terser, DaoMethodOutcome nextOutcome, IBaseResource nextResource, Set<IBaseReference> theReferencesToAutoVersion) { private void resolveReferencesThenSaveAndIndexResource(RequestDetails theRequest, TransactionDetails theTransactionDetails,
Map<IIdType, IIdType> theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome,
Map<IBase, IIdType> entriesToProcess, Set<IIdType> nonUpdatedEntities,
Set<IBasePersistedResource> updatedEntities, FhirTerser terser,
DaoMethodOutcome nextOutcome, IBaseResource nextResource,
Set<IBaseReference> theReferencesToAutoVersion) {
// References // References
List<ResourceReferenceInfo> allRefs = terser.getAllResourceReferences(nextResource); List<ResourceReferenceInfo> allRefs = terser.getAllResourceReferences(nextResource);
for (ResourceReferenceInfo nextRef : allRefs) { for (ResourceReferenceInfo nextRef : allRefs) {
@ -1175,9 +1363,34 @@ public abstract class BaseTransactionProcessor {
} else if (nextId.getValue().startsWith("urn:")) { } else if (nextId.getValue().startsWith("urn:")) {
throw new InvalidRequestException("Unable to satisfy placeholder ID " + nextId.getValue() + " found in element named '" + nextRef.getName() + "' within resource of type: " + nextResource.getIdElement().getResourceType()); throw new InvalidRequestException("Unable to satisfy placeholder ID " + nextId.getValue() + " found in element named '" + nextRef.getName() + "' within resource of type: " + nextResource.getIdElement().getResourceType());
} else { } else {
// get a map of
// existing ids -> PID (for resources that exist in the DB)
// should this be allPartitions?
ResourcePersistentIdMap resourceVersionMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(),
theReferencesToAutoVersion.stream()
.map(IBaseReference::getReferenceElement).collect(Collectors.toList()));
for (IBaseReference baseRef : theReferencesToAutoVersion) {
IIdType id = baseRef.getReferenceElement();
if (!resourceVersionMap.containsKey(id)
&& myDaoConfig.isAutoCreatePlaceholderReferenceTargets()) {
// not in the db, but autocreateplaceholders is true
// so the version we'll set is "1" (since it will be
// created later)
String newRef = id.withVersion("1").getValue();
id.setValue(newRef);
} else {
// we will add the looked up info to the transaction
// for later
theTransactionDetails.addResolvedResourceId(id,
resourceVersionMap.getResourcePersistentId(id));
}
}
if (theReferencesToAutoVersion.contains(resourceReference)) { if (theReferencesToAutoVersion.contains(resourceReference)) {
DaoMethodOutcome outcome = theIdToPersistedOutcome.get(nextId); DaoMethodOutcome outcome = theIdToPersistedOutcome.get(nextId);
if (!outcome.isNop() && !Boolean.TRUE.equals(outcome.getCreated())) {
if (outcome != null && !outcome.isNop() && !Boolean.TRUE.equals(outcome.getCreated())) {
addRollbackReferenceRestore(theTransactionDetails, resourceReference); addRollbackReferenceRestore(theTransactionDetails, resourceReference);
resourceReference.setReference(nextId.getValue()); resourceReference.setReference(nextId.getValue());
resourceReference.setResource(null); resourceReference.setResource(null);
@ -1266,7 +1479,7 @@ public abstract class BaseTransactionProcessor {
thePersistedOutcomes thePersistedOutcomes
.stream() .stream()
.filter(t -> !t.isNop()) .filter(t -> !t.isNop())
.filter(t -> t.getEntity() instanceof ResourceTable) .filter(t -> t.getEntity() instanceof ResourceTable)//N.B. GGG: This validation never occurs for mongo, as nothing is a ResourceTable.
.filter(t -> t.getEntity().getDeleted() == null) .filter(t -> t.getEntity().getDeleted() == null)
.filter(t -> t.getResource() != null) .filter(t -> t.getResource() != null)
.forEach(t -> resourceToIndexedParams.put(t.getResource(), new ResourceIndexedSearchParams((ResourceTable) t.getEntity()))); .forEach(t -> resourceToIndexedParams.put(t.getResource(), new ResourceIndexedSearchParams((ResourceTable) t.getEntity())));
@ -1565,30 +1778,29 @@ public abstract class BaseTransactionProcessor {
return theStatusCode + " " + defaultString(Constants.HTTP_STATUS_NAMES.get(theStatusCode)); return theStatusCode + " " + defaultString(Constants.HTTP_STATUS_NAMES.get(theStatusCode));
} }
public class BundleTask implements Runnable { public class RetriableBundleTask implements Runnable {
private CountDownLatch myCompletedLatch; private final CountDownLatch myCompletedLatch;
private RequestDetails myRequestDetails; private final RequestDetails myRequestDetails;
private IBase myNextReqEntry; private final IBase myNextReqEntry;
private Map<Integer, Object> myResponseMap; private final Map<Integer, Object> myResponseMap;
private int myResponseOrder; private final int myResponseOrder;
private boolean myNestedMode; private final boolean myNestedMode;
private BaseServerResponseException myLastSeenException;
protected BundleTask(CountDownLatch theCompletedLatch, RequestDetails theRequestDetails, Map<Integer, Object> theResponseMap, int theResponseOrder, IBase theNextReqEntry, boolean theNestedMode) { protected RetriableBundleTask(CountDownLatch theCompletedLatch, RequestDetails theRequestDetails, Map<Integer, Object> theResponseMap, int theResponseOrder, IBase theNextReqEntry, boolean theNestedMode) {
this.myCompletedLatch = theCompletedLatch; this.myCompletedLatch = theCompletedLatch;
this.myRequestDetails = theRequestDetails; this.myRequestDetails = theRequestDetails;
this.myNextReqEntry = theNextReqEntry; this.myNextReqEntry = theNextReqEntry;
this.myResponseMap = theResponseMap; this.myResponseMap = theResponseMap;
this.myResponseOrder = theResponseOrder; this.myResponseOrder = theResponseOrder;
this.myNestedMode = theNestedMode; this.myNestedMode = theNestedMode;
this.myLastSeenException = null;
} }
@Override private void processBatchEntry() {
public void run() {
BaseServerResponseExceptionHolder caughtEx = new BaseServerResponseExceptionHolder();
try {
IBaseBundle subRequestBundle = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode()); IBaseBundle subRequestBundle = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode());
myVersionAdapter.addEntry(subRequestBundle, (IBase) myNextReqEntry); myVersionAdapter.addEntry(subRequestBundle, myNextReqEntry);
IBaseBundle nextResponseBundle = processTransactionAsSubRequest(myRequestDetails, subRequestBundle, "Batch sub-request", myNestedMode); IBaseBundle nextResponseBundle = processTransactionAsSubRequest(myRequestDetails, subRequestBundle, "Batch sub-request", myNestedMode);
@ -1602,22 +1814,45 @@ public abstract class BaseTransactionProcessor {
IBase nextResponseBundleFirstEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0); IBase nextResponseBundleFirstEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0);
myResponseMap.put(myResponseOrder, nextResponseBundleFirstEntry); myResponseMap.put(myResponseOrder, nextResponseBundleFirstEntry);
} }
}
private boolean processBatchEntryWithRetry() {
int maxAttempts = 3;
for (int attempt = 1;; attempt++) {
try {
processBatchEntry();
return true;
} catch (BaseServerResponseException e) { } catch (BaseServerResponseException e) {
caughtEx.setException(e); //If we catch a known and structured exception from HAPI, just fail.
myLastSeenException = e;
return false;
} catch (Throwable t) { } catch (Throwable t) {
myLastSeenException = new InternalErrorException(t);
//If we have caught a non-tag-storage failure we are unfamiliar with, or we have exceeded max attempts, exit.
if (!DaoFailureUtil.isTagStorageFailure(t) || attempt >= maxAttempts) {
ourLog.error("Failure during BATCH sub transaction processing", t); ourLog.error("Failure during BATCH sub transaction processing", t);
caughtEx.setException(new InternalErrorException(t)); return false;
}
}
}
} }
if (caughtEx.getException() != null) { @Override
// add exception to the response map public void run() {
myResponseMap.put(myResponseOrder, caughtEx); boolean success = processBatchEntryWithRetry();
if (!success) {
populateResponseMapWithLastSeenException();
} }
// log completion of each entry; useful when verifying parallel execution // log completion of each entry; useful when verifying parallel execution
ourLog.debug("processing bacth for {} is completed", myVersionAdapter.getEntryRequestUrl((IBase)myNextReqEntry)); ourLog.debug("processing batch for {} is completed", myVersionAdapter.getEntryRequestUrl(myNextReqEntry));
myCompletedLatch.countDown(); myCompletedLatch.countDown();
} }
private void populateResponseMapWithLastSeenException() {
BaseServerResponseExceptionHolder caughtEx = new BaseServerResponseExceptionHolder();
caughtEx.setException(myLastSeenException);
myResponseMap.put(myResponseOrder, caughtEx);
}
} }
} }
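The retry flow introduced in RetriableBundleTask reduces to a compact, reusable pattern: run the work, retry only when the failure is recognized as a retryable tag-storage race, and give up after a fixed number of attempts. A minimal standalone sketch of that pattern, with illustrative names that are not part of this commit:

import java.util.function.Predicate;

public class BoundedRetry {

	// Runs theTask up to theMaxAttempts times, retrying only when
	// theIsRetryable accepts the thrown failure; rethrows otherwise.
	static void runWithRetry(Runnable theTask, Predicate<Throwable> theIsRetryable, int theMaxAttempts) {
		for (int attempt = 1; ; attempt++) {
			try {
				theTask.run();
				return;
			} catch (RuntimeException e) {
				// Give up on failures we do not recognize, or once attempts are exhausted
				if (!theIsRetryable.test(e) || attempt >= theMaxAttempts) {
					throw e;
				}
			}
		}
	}

	public static void main(String[] args) {
		// Retry only failures whose message names the tag tables, in the
		// spirit of DaoFailureUtil.isTagStorageFailure
		runWithRetry(
			() -> System.out.println("processing batch entry"),
			t -> t.getMessage() != null && t.getMessage().toLowerCase().contains("hfj_tag_def"),
			3);
	}
}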

View File

@ -0,0 +1,38 @@
package ca.uhn.fhir.jpa.dao;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.apache.commons.lang3.StringUtils;
/**
* Utility class to help identify classes of failure.
*/
public class DaoFailureUtil {
public static boolean isTagStorageFailure(Throwable t) {
if (StringUtils.isBlank(t.getMessage())) {
return false;
} else {
String msg = t.getMessage().toLowerCase();
return msg.contains("hfj_tag_def") || msg.contains("hfj_res_tag");
}
}
}
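A quick illustration of how isTagStorageFailure classifies exceptions; the messages below are fabricated stand-ins for real constraint-violation text, and the import assumes the new class above is on the classpath:

import ca.uhn.fhir.jpa.dao.DaoFailureUtil;

public class TagFailureDemo {
	public static void main(String[] args) {
		Exception raced = new RuntimeException("could not execute statement: constraint [HFJ_TAG_DEF]");
		Exception other = new RuntimeException("connection refused");
		// Matching lower-cases the message, so HFJ_TAG_DEF and hfj_tag_def both count
		System.out.println(DaoFailureUtil.isTagStorageFailure(raced)); // true
		System.out.println(DaoFailureUtil.isTagStorageFailure(other)); // false
	}
}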

View File

@ -100,6 +100,16 @@ public interface IResourceTableDao extends JpaRepository<ResourceTable, Long> {
@Query("SELECT t.myVersion FROM ResourceTable t WHERE t.myId = :pid") @Query("SELECT t.myVersion FROM ResourceTable t WHERE t.myId = :pid")
Long findCurrentVersionByPid(@Param("pid") Long thePid); Long findCurrentVersionByPid(@Param("pid") Long thePid);
/**
* This query returns rows with the following values, in order: Id (resource pid, long), ResourceType ("Patient", etc.), Version (long).
* Order matters!
* @param pid - list of pids to get versions for
* @return a collection of Object[] rows in the form [pid, resourceType, version]
*/
@Query("SELECT t.myId, t.myResourceType, t.myVersion FROM ResourceTable t WHERE t.myId IN ( :pid )")
Collection<Object[]> getResourceVersionsForPid(@Param("pid") List<Long> pid);
@Query("SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myPartitionId.myPartitionId IS NULL AND t.myId = :pid") @Query("SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myPartitionId.myPartitionId IS NULL AND t.myId = :pid")
Optional<ResourceTable> readByPartitionIdNull(@Param("pid") Long theResourceId); Optional<ResourceTable> readByPartitionIdNull(@Param("pid") Long theResourceId);
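Because getResourceVersionsForPid returns a raw projection, callers must rely on the documented column order. A hedged sketch of a consumer; VersionRowMapper is a hypothetical helper, not part of this commit:

import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

public class VersionRowMapper {

	// Column order matches the SELECT clause: [0]=myId, [1]=myResourceType, [2]=myVersion
	static Map<Long, Long> toVersionMap(Collection<Object[]> theRows) {
		Map<Long, Long> pidToVersion = new HashMap<>();
		for (Object[] row : theRows) {
			Long pid = (Long) row[0];
			Long version = (Long) row[2];
			pidToVersion.put(pid, version);
		}
		return pidToVersion;
	}

	public static void main(String[] args) {
		Collection<Object[]> rows = Arrays.asList(
			new Object[]{1L, "Patient", 3L},
			new Object[]{2L, "Observation", 1L});
		System.out.println(toVersionMap(rows)); // e.g. {1=3, 2=1}
	}
}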

View File

@ -94,7 +94,6 @@ public class DaoResourceLinkResolver implements IResourceLinkResolver {
throw new InvalidRequestException("Resource " + resName + "/" + idPart + " not found, specified in path: " + theSourcePath); throw new InvalidRequestException("Resource " + resName + "/" + idPart + " not found, specified in path: " + theSourcePath);
} }
resolvedResource = createdTableOpt.get(); resolvedResource = createdTableOpt.get();
} }

View File

@ -34,6 +34,7 @@ import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.util.QueryChunker; import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
@ -204,7 +205,11 @@ public class IdHelperService {
*/ */
@Nonnull @Nonnull
public List<ResourcePersistentId> resolveResourcePersistentIdsWithCache(RequestPartitionId theRequestPartitionId, List<IIdType> theIds) { public List<ResourcePersistentId> resolveResourcePersistentIdsWithCache(RequestPartitionId theRequestPartitionId, List<IIdType> theIds) {
theIds.forEach(id -> Validate.isTrue(id.hasIdPart())); for (IIdType id : theIds) {
if (!id.hasIdPart()) {
throw new InvalidRequestException("Parameter value missing in request");
}
}
if (theIds.isEmpty()) { if (theIds.isEmpty()) {
return Collections.emptyList(); return Collections.emptyList();
@ -303,7 +308,7 @@ public class IdHelperService {
if (forcedId.isPresent()) { if (forcedId.isPresent()) {
retVal.setValue(theResourceType + '/' + forcedId.get()); retVal.setValue(theResourceType + '/' + forcedId.get());
} else { } else {
retVal.setValue(theResourceType + '/' + theId.toString()); retVal.setValue(theResourceType + '/' + theId);
} }
return retVal; return retVal;
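The net effect of the resolveResourcePersistentIdsWithCache change is that an id with a missing id part now surfaces as a client error (InvalidRequestException, HTTP 400) rather than an internal Validate.isTrue failure (HTTP 500). A reduced sketch of that control flow, using plain strings in place of IIdType:

import java.util.Arrays;
import java.util.List;

public class MissingIdPartDemo {
	public static void main(String[] args) {
		// Stand-ins for IIdType values; the empty string plays the role of a missing id part
		List<String> idParts = Arrays.asList("123", "");
		for (String idPart : idParts) {
			if (idPart.isEmpty()) {
				// In the server this now becomes InvalidRequestException (HTTP 400)
				System.out.println("rejected: Parameter value missing in request");
			} else {
				System.out.println("resolved id part: " + idPart);
			}
		}
	}
}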

View File

@ -559,11 +559,6 @@ class PredicateBuilderReference extends BasePredicateBuilder {
myPredicateBuilder.addPredicateResourceId(theAndOrParams, theResourceName, theRequestPartitionId); myPredicateBuilder.addPredicateResourceId(theAndOrParams, theResourceName, theRequestPartitionId);
break; break;
case IAnyResource.SP_RES_LANGUAGE:
addPredicateLanguage(theAndOrParams,
null);
break;
case Constants.PARAM_HAS: case Constants.PARAM_HAS:
addPredicateHas(theResourceName, theAndOrParams, theRequest, theRequestPartitionId); addPredicateHas(theResourceName, theAndOrParams, theRequest, theRequestPartitionId);
break; break;
@ -733,9 +728,6 @@ class PredicateBuilderReference extends BasePredicateBuilder {
null, null,
theFilter.getValue()); theFilter.getValue());
return myPredicateBuilder.addPredicateResourceId(Collections.singletonList(Collections.singletonList(param)), myResourceName, theFilter.getOperation(), theRequestPartitionId); return myPredicateBuilder.addPredicateResourceId(Collections.singletonList(Collections.singletonList(param)), myResourceName, theFilter.getOperation(), theRequestPartitionId);
} else if (theFilter.getParamPath().getName().equals(IAnyResource.SP_RES_LANGUAGE)) {
return addPredicateLanguage(Collections.singletonList(Collections.singletonList(new StringParam(theFilter.getValue()))),
theFilter.getOperation());
} }
RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(theResourceName, theFilter.getParamPath().getName()); RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(theResourceName, theFilter.getParamPath().getName());
@ -828,45 +820,6 @@ class PredicateBuilderReference extends BasePredicateBuilder {
return qp; return qp;
} }
private Predicate addPredicateLanguage(List<List<IQueryParameterType>> theList,
SearchFilterParser.CompareOperation operation) {
for (List<? extends IQueryParameterType> nextList : theList) {
Set<String> values = new HashSet<>();
for (IQueryParameterType next : nextList) {
if (next instanceof StringParam) {
String nextValue = ((StringParam) next).getValue();
if (isBlank(nextValue)) {
continue;
}
values.add(nextValue);
} else {
throw new InternalErrorException("Language parameter must be of type " + StringParam.class.getCanonicalName() + " - Got " + next.getClass().getCanonicalName());
}
}
if (values.isEmpty()) {
continue;
}
Predicate predicate;
if ((operation == null) ||
(operation == SearchFilterParser.CompareOperation.eq)) {
predicate = myQueryStack.get("myLanguage").as(String.class).in(values);
} else if (operation == SearchFilterParser.CompareOperation.ne) {
predicate = myQueryStack.get("myLanguage").as(String.class).in(values).not();
} else {
throw new InvalidRequestException("Unsupported operator specified in language query, only \"eq\" and \"ne\" are supported");
}
myQueryStack.addPredicate(predicate);
if (operation != null) {
return predicate;
}
}
return null;
}
private void addPredicateSource(List<List<IQueryParameterType>> theAndOrParams, RequestDetails theRequest) { private void addPredicateSource(List<List<IQueryParameterType>> theAndOrParams, RequestDetails theRequest) {
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) { for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
addPredicateSource(nextAnd, SearchFilterParser.CompareOperation.eq, theRequest); addPredicateSource(nextAnd, SearchFilterParser.CompareOperation.eq, theRequest);

View File

@ -261,6 +261,9 @@ class PredicateBuilderToken extends BasePredicateBuilder implements IPredicateBu
if (theSearchParam != null) { if (theSearchParam != null) {
Set<String> valueSetUris = Sets.newHashSet(); Set<String> valueSetUris = Sets.newHashSet();
for (String nextPath : theSearchParam.getPathsSplit()) { for (String nextPath : theSearchParam.getPathsSplit()) {
if (!nextPath.startsWith(myResourceType + ".")) {
continue;
}
BaseRuntimeChildDefinition def = myContext.newTerser().getDefinition(myResourceType, nextPath); BaseRuntimeChildDefinition def = myContext.newTerser().getDefinition(myResourceType, nextPath);
if (def instanceof BaseRuntimeDeclaredChildDefinition) { if (def instanceof BaseRuntimeDeclaredChildDefinition) {
String valueSet = ((BaseRuntimeDeclaredChildDefinition) def).getBindingValueSet(); String valueSet = ((BaseRuntimeDeclaredChildDefinition) def).getBindingValueSet();
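The new startsWith guard simply skips search-parameter paths rooted at other resource types before the terser is consulted. Roughly, with illustrative paths:

import java.util.Arrays;
import java.util.List;

public class PathFilterDemo {
	public static void main(String[] args) {
		String resourceType = "Patient";
		// A multi-base search parameter can carry paths for several resource types
		List<String> paths = Arrays.asList("Patient.gender", "Person.gender");
		paths.stream()
			.filter(p -> p.startsWith(resourceType + "."))
			.forEach(System.out::println); // prints only Patient.gender
	}
}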

View File

@ -92,11 +92,15 @@ public class FhirResourceDaoSearchParameterR4 extends BaseHapiFhirResourceDao<Se
for (IPrimitiveType<?> nextBaseType : theResource.getBase()) { for (IPrimitiveType<?> nextBaseType : theResource.getBase()) {
String nextBase = nextBaseType.getValueAsString(); String nextBase = nextBaseType.getValueAsString();
RuntimeSearchParam existingSearchParam = theSearchParamRegistry.getActiveSearchParam(nextBase, theResource.getCode()); RuntimeSearchParam existingSearchParam = theSearchParamRegistry.getActiveSearchParam(nextBase, theResource.getCode());
if (existingSearchParam != null && existingSearchParam.getId() == null) { if (existingSearchParam != null) {
boolean isBuiltIn = existingSearchParam.getId() == null;
isBuiltIn |= existingSearchParam.getUri().startsWith("http://hl7.org/fhir/SearchParameter/");
if (isBuiltIn) {
throw new UnprocessableEntityException("Can not override built-in search parameter " + nextBase + ":" + theResource.getCode() + " because overriding is disabled on this server"); throw new UnprocessableEntityException("Can not override built-in search parameter " + nextBase + ":" + theResource.getCode() + " because overriding is disabled on this server");
} }
} }
} }
}
/* /*
* Everything below is validating that the SP is actually valid. We'll only do that if the * Everything below is validating that the SP is actually valid. We'll only do that if the

View File

@ -25,6 +25,7 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.model.ResourceVersionConflictResolutionStrategy; import ca.uhn.fhir.jpa.api.model.ResourceVersionConflictResolutionStrategy;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao; import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.DaoFailureUtil;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
@ -93,10 +94,9 @@ public class HapiTransactionService {
* known to the system already, they'll both try to create a row in HFJ_TAG_DEF, * known to the system already, they'll both try to create a row in HFJ_TAG_DEF,
* which is the tag definition table. In that case, a constraint error will be * which is the tag definition table. In that case, a constraint error will be
* thrown by one of the client threads, so we auto-retry in order to avoid * thrown by one of the client threads, so we auto-retry in order to avoid
* annopying spurious failures for the client. * annoying spurious failures for the client.
*/ */
if (e.getMessage().contains("HFJ_TAG_DEF") || e.getMessage().contains("hfj_tag_def") || if (DaoFailureUtil.isTagStorageFailure(e)) {
e.getMessage().contains("HFJ_RES_TAG") || e.getMessage().contains("hfj_res_tag")) {
maxRetries = 3; maxRetries = 3;
} }

View File

@ -82,7 +82,6 @@ import org.springframework.data.domain.Sort;
import org.springframework.orm.jpa.JpaDialect; import org.springframework.orm.jpa.JpaDialect;
import org.springframework.orm.jpa.JpaTransactionManager; import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.vendor.HibernateJpaDialect; import org.springframework.orm.jpa.vendor.HibernateJpaDialect;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.TransactionDefinition;
@ -111,7 +110,6 @@ import java.util.UUID;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
@ -123,7 +121,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
public static final Integer INTEGER_0 = 0; public static final Integer INTEGER_0 = 0;
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchCoordinatorSvcImpl.class); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchCoordinatorSvcImpl.class);
private final ConcurrentHashMap<String, SearchTask> myIdToSearchTask = new ConcurrentHashMap<>(); private final ConcurrentHashMap<String, SearchTask> myIdToSearchTask = new ConcurrentHashMap<>();
private final ExecutorService myExecutor;
@Autowired @Autowired
private FhirContext myContext; private FhirContext myContext;
@Autowired @Autowired
@ -162,8 +159,13 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
* Constructor * Constructor
*/ */
@Autowired @Autowired
public SearchCoordinatorSvcImpl(ThreadPoolTaskExecutor searchCoordinatorThreadFactory) { public SearchCoordinatorSvcImpl() {
myExecutor = searchCoordinatorThreadFactory.getThreadPoolExecutor(); super();
}
@VisibleForTesting
Set<String> getActiveSearchIds() {
return myIdToSearchTask.keySet();
} }
@VisibleForTesting @VisibleForTesting
@ -274,7 +276,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(theRequestDetails, resourceType, params, null); RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(theRequestDetails, resourceType, params, null);
SearchContinuationTask task = new SearchContinuationTask(search, resourceDao, params, resourceType, theRequestDetails, requestPartitionId); SearchContinuationTask task = new SearchContinuationTask(search, resourceDao, params, resourceType, theRequestDetails, requestPartitionId);
myIdToSearchTask.put(search.getUuid(), task); myIdToSearchTask.put(search.getUuid(), task);
myExecutor.submit(task); task.call();
} }
} }
@ -406,7 +408,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
SearchTask task = new SearchTask(theSearch, theCallingDao, theParams, theResourceType, theRequestDetails, theRequestPartitionId); SearchTask task = new SearchTask(theSearch, theCallingDao, theParams, theResourceType, theRequestDetails, theRequestPartitionId);
myIdToSearchTask.put(theSearch.getUuid(), task); myIdToSearchTask.put(theSearch.getUuid(), task);
myExecutor.submit(task); task.call();
PersistedJpaSearchFirstPageBundleProvider retVal = myPersistedJpaBundleProviderFactory.newInstanceFirstPage(theRequestDetails, theSearch, task, theSb); PersistedJpaSearchFirstPageBundleProvider retVal = myPersistedJpaBundleProviderFactory.newInstanceFirstPage(theRequestDetails, theSearch, task, theSb);
@ -1087,7 +1089,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
ourLog.trace("Performing count"); ourLog.trace("Performing count");
ISearchBuilder sb = newSearchBuilder(); ISearchBuilder sb = newSearchBuilder();
Iterator<Long> countIterator = sb.createCountQuery(myParams, mySearch.getUuid(), myRequest, myRequestPartitionId); Iterator<Long> countIterator = sb.createCountQuery(myParams, mySearch.getUuid(), myRequest, myRequestPartitionId);
Long count = countIterator.hasNext() ? countIterator.next() : 0; Long count = countIterator.hasNext() ? countIterator.next() : 0L;
ourLog.trace("Got count {}", count); ourLog.trace("Got count {}", count);
TransactionTemplate txTemplate = new TransactionTemplate(myManagedTxManager); TransactionTemplate txTemplate = new TransactionTemplate(myManagedTxManager);

View File

@ -434,9 +434,6 @@ public class QueryStack {
param.setValueAsQueryToken(null, null, null, theFilter.getValue()); param.setValueAsQueryToken(null, null, null, theFilter.getValue());
return theQueryStack3.createPredicateResourceId(null, Collections.singletonList(Collections.singletonList(param)), theResourceName, theFilter.getOperation(), theRequestPartitionId); return theQueryStack3.createPredicateResourceId(null, Collections.singletonList(Collections.singletonList(param)), theResourceName, theFilter.getOperation(), theRequestPartitionId);
} }
case IAnyResource.SP_RES_LANGUAGE: {
return theQueryStack3.createPredicateLanguage(Collections.singletonList(Collections.singletonList(new StringParam(theFilter.getValue()))), theFilter.getOperation());
}
case Constants.PARAM_SOURCE: { case Constants.PARAM_SOURCE: {
TokenParam param = new TokenParam(); TokenParam param = new TokenParam();
param.setValueAsQueryToken(null, null, null, theFilter.getValue()); param.setValueAsQueryToken(null, null, null, theFilter.getValue());
@ -579,44 +576,6 @@ public class QueryStack {
return toAndPredicate(andPredicates); return toAndPredicate(andPredicates);
} }
public Condition createPredicateLanguage(List<List<IQueryParameterType>> theList, Object theOperation) {
ResourceTablePredicateBuilder rootTable = mySqlBuilder.getOrCreateResourceTablePredicateBuilder();
List<Condition> predicates = new ArrayList<>();
for (List<? extends IQueryParameterType> nextList : theList) {
Set<String> values = new HashSet<>();
for (IQueryParameterType next : nextList) {
if (next instanceof StringParam) {
String nextValue = ((StringParam) next).getValue();
if (isBlank(nextValue)) {
continue;
}
values.add(nextValue);
} else {
throw new InternalErrorException("Language parameter must be of type " + StringParam.class.getCanonicalName() + " - Got " + next.getClass().getCanonicalName());
}
}
if (values.isEmpty()) {
continue;
}
if ((theOperation == null) ||
(theOperation == SearchFilterParser.CompareOperation.eq)) {
predicates.add(rootTable.createLanguagePredicate(values, false));
} else if (theOperation == SearchFilterParser.CompareOperation.ne) {
predicates.add(rootTable.createLanguagePredicate(values, true));
} else {
throw new InvalidRequestException("Unsupported operator specified in language query, only \"eq\" and \"ne\" are supported");
}
}
return toAndPredicate(predicates);
}
public Condition createPredicateNumber(@Nullable DbColumn theSourceJoinColumn, String theResourceName, public Condition createPredicateNumber(@Nullable DbColumn theSourceJoinColumn, String theResourceName,
String theSpnamePrefix, RuntimeSearchParam theSearchParam, List<? extends IQueryParameterType> theList, String theSpnamePrefix, RuntimeSearchParam theSearchParam, List<? extends IQueryParameterType> theList,
SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) {
@ -729,7 +688,7 @@ public class QueryStack {
return predicateBuilder.createPredicate(theRequest, theResourceName, theParamName, theList, theOperation, theRequestPartitionId); return predicateBuilder.createPredicate(theRequest, theResourceName, theParamName, theList, theOperation, theRequestPartitionId);
} }
private Condition createPredicateReferenceForContainedResource(@Nullable DbColumn theSourceJoinColumn, public Condition createPredicateReferenceForContainedResource(@Nullable DbColumn theSourceJoinColumn,
String theResourceName, String theParamName, RuntimeSearchParam theSearchParam, String theResourceName, String theParamName, RuntimeSearchParam theSearchParam,
List<? extends IQueryParameterType> theList, SearchFilterParser.CompareOperation theOperation, List<? extends IQueryParameterType> theList, SearchFilterParser.CompareOperation theOperation,
RequestDetails theRequest, RequestPartitionId theRequestPartitionId) { RequestDetails theRequest, RequestPartitionId theRequestPartitionId) {
@ -794,31 +753,31 @@ public class QueryStack {
switch (targetParamDefinition.getParamType()) { switch (targetParamDefinition.getParamType()) {
case DATE: case DATE:
containedCondition = createPredicateDate(null, theResourceName, spnamePrefix, targetParamDefinition, containedCondition = createPredicateDate(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
orValues, theOperation, theRequestPartitionId); orValues, theOperation, theRequestPartitionId);
break; break;
case NUMBER: case NUMBER:
containedCondition = createPredicateNumber(null, theResourceName, spnamePrefix, targetParamDefinition, containedCondition = createPredicateNumber(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
orValues, theOperation, theRequestPartitionId); orValues, theOperation, theRequestPartitionId);
break; break;
case QUANTITY: case QUANTITY:
containedCondition = createPredicateQuantity(null, theResourceName, spnamePrefix, targetParamDefinition, containedCondition = createPredicateQuantity(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
orValues, theOperation, theRequestPartitionId); orValues, theOperation, theRequestPartitionId);
break; break;
case STRING: case STRING:
containedCondition = createPredicateString(null, theResourceName, spnamePrefix, targetParamDefinition, containedCondition = createPredicateString(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
orValues, theOperation, theRequestPartitionId); orValues, theOperation, theRequestPartitionId);
break; break;
case TOKEN: case TOKEN:
containedCondition = createPredicateToken(null, theResourceName, spnamePrefix, targetParamDefinition, containedCondition = createPredicateToken(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
orValues, theOperation, theRequestPartitionId); orValues, theOperation, theRequestPartitionId);
break; break;
case COMPOSITE: case COMPOSITE:
containedCondition = createPredicateComposite(null, theResourceName, spnamePrefix, targetParamDefinition, containedCondition = createPredicateComposite(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
orValues, theRequestPartitionId); orValues, theRequestPartitionId);
break; break;
case URI: case URI:
containedCondition = createPredicateUri(null, theResourceName, spnamePrefix, targetParamDefinition, containedCondition = createPredicateUri(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
orValues, theOperation, theRequest, theRequestPartitionId); orValues, theOperation, theRequest, theRequestPartitionId);
break; break;
case HAS: case HAS:
@ -1099,9 +1058,6 @@ public class QueryStack {
case IAnyResource.SP_RES_ID: case IAnyResource.SP_RES_ID:
return createPredicateResourceId(theSourceJoinColumn, theAndOrParams, theResourceName, null, theRequestPartitionId); return createPredicateResourceId(theSourceJoinColumn, theAndOrParams, theResourceName, null, theRequestPartitionId);
case IAnyResource.SP_RES_LANGUAGE:
return createPredicateLanguage(theAndOrParams, null);
case Constants.PARAM_HAS: case Constants.PARAM_HAS:
return createPredicateHas(theSourceJoinColumn, theResourceName, theAndOrParams, theRequest, theRequestPartitionId); return createPredicateHas(theSourceJoinColumn, theResourceName, theAndOrParams, theRequest, theRequestPartitionId);
@ -1162,11 +1118,25 @@ public class QueryStack {
break; break;
case REFERENCE: case REFERENCE:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) { for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
if (theSearchContainedMode.equals(SearchContainedModeEnum.TRUE)) if (theSearchContainedMode.equals(SearchContainedModeEnum.TRUE)) {
andPredicates.add(createPredicateReferenceForContainedResource(theSourceJoinColumn, theResourceName, theParamName, nextParamDef, nextAnd, null, theRequest, theRequestPartitionId)); // TODO: The _contained parameter is not intended to control search chain interpretation like this.
else // See SMILE-2898 for details.
// For now, leave the incorrect implementation alone, just in case someone is relying on it,
// until the complete fix is available.
andPredicates.add(createPredicateReferenceForContainedResource(null, theResourceName, theParamName, nextParamDef, nextAnd, null, theRequest, theRequestPartitionId));
} else if (isEligibleForContainedResourceSearch(nextAnd)) {
// TODO for now, restrict contained reference traversal to the last reference in the chain
// We don't seem to be indexing the outbound references of a contained resource, so we can't
// include them in search chains.
// It would be nice to eventually relax this constraint, but no client seems to be asking for it.
andPredicates.add(toOrPredicate(
createPredicateReference(theSourceJoinColumn, theResourceName, theParamName, nextAnd, null, theRequest, theRequestPartitionId),
createPredicateReferenceForContainedResource(theSourceJoinColumn, theResourceName, theParamName, nextParamDef, nextAnd, null, theRequest, theRequestPartitionId)
));
} else {
andPredicates.add(createPredicateReference(theSourceJoinColumn, theResourceName, theParamName, nextAnd, null, theRequest, theRequestPartitionId)); andPredicates.add(createPredicateReference(theSourceJoinColumn, theResourceName, theParamName, nextAnd, null, theRequest, theRequestPartitionId));
} }
}
break; break;
case STRING: case STRING:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) { for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
@ -1243,6 +1213,14 @@ public class QueryStack {
return toAndPredicate(andPredicates); return toAndPredicate(andPredicates);
} }
private boolean isEligibleForContainedResourceSearch(List<? extends IQueryParameterType> nextAnd) {
return myModelConfig.isIndexOnContainedResources() &&
nextAnd.stream()
.filter(t -> t instanceof ReferenceParam)
.map(t -> (ReferenceParam) t)
.noneMatch(t -> t.getChain().contains("."));
}
public void addPredicateCompositeUnique(String theIndexString, RequestPartitionId theRequestPartitionId) { public void addPredicateCompositeUnique(String theIndexString, RequestPartitionId theRequestPartitionId) {
ComboUniqueSearchParameterPredicateBuilder predicateBuilder = mySqlBuilder.addComboUniquePredicateBuilder(); ComboUniqueSearchParameterPredicateBuilder predicateBuilder = mySqlBuilder.addComboUniquePredicateBuilder();
Condition predicate = predicateBuilder.createPredicateIndexString(theRequestPartitionId, theIndexString); Condition predicate = predicateBuilder.createPredicateIndexString(theRequestPartitionId, theIndexString);
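The eligibility test above admits the combined reference-or-contained predicate only when indexing of contained resources is enabled and no reference chain spans more than one hop. A reduced sketch with chains as plain strings instead of ReferenceParam objects:

import java.util.Arrays;
import java.util.List;

public class ContainedEligibilityDemo {

	// Mirrors the noneMatch(t -> t.getChain().contains(".")) test using plain strings
	static boolean eligible(boolean theIndexOnContainedResources, List<String> theChains) {
		return theIndexOnContainedResources
			&& theChains.stream().noneMatch(chain -> chain.contains("."));
	}

	public static void main(String[] args) {
		System.out.println(eligible(true, Arrays.asList("organization")));      // true: single hop
		System.out.println(eligible(true, Arrays.asList("organization.name"))); // false: multi-hop chain
	}
}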

View File

@ -377,7 +377,7 @@ public class SearchBuilder implements ISearchBuilder {
SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(myContext, myDaoConfig.getModelConfig(), myPartitionSettings, myRequestPartitionId, sqlBuilderResourceName, mySqlBuilderFactory, myDialectProvider, theCount); SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(myContext, myDaoConfig.getModelConfig(), myPartitionSettings, myRequestPartitionId, sqlBuilderResourceName, mySqlBuilderFactory, myDialectProvider, theCount);
QueryStack queryStack3 = new QueryStack(theParams, myDaoConfig, myDaoConfig.getModelConfig(), myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings); QueryStack queryStack3 = new QueryStack(theParams, myDaoConfig, myDaoConfig.getModelConfig(), myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);
if (theParams.keySet().size() > 1 || theParams.getSort() != null || theParams.keySet().contains(Constants.PARAM_HAS)) { if (theParams.keySet().size() > 1 || theParams.getSort() != null || theParams.keySet().contains(Constants.PARAM_HAS) || isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) {
List<RuntimeSearchParam> activeComboParams = mySearchParamRegistry.getActiveComboSearchParams(myResourceName, theParams.keySet()); List<RuntimeSearchParam> activeComboParams = mySearchParamRegistry.getActiveComboSearchParams(myResourceName, theParams.keySet());
if (activeComboParams.isEmpty()) { if (activeComboParams.isEmpty()) {
sqlBuilder.setNeedResourceTableRoot(true); sqlBuilder.setNeedResourceTableRoot(true);
@ -487,6 +487,13 @@ public class SearchBuilder implements ISearchBuilder {
return Optional.of(executor); return Optional.of(executor);
} }
private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) {
return myModelConfig.isIndexOnContainedResources() && theParams.values().stream()
.flatMap(Collection::stream)
.flatMap(Collection::stream)
.anyMatch(t -> t instanceof ReferenceParam);
}
private List<Long> normalizeIdListForLastNInClause(List<Long> lastnResourceIds) { private List<Long> normalizeIdListForLastNInClause(List<Long> lastnResourceIds) {
/* /*
The following is a workaround to a known issue involving Hibernate. If queries are used with "in" clauses with large and varying The following is a workaround to a known issue involving Hibernate. If queries are used with "in" clauses with large and varying

View File

@ -38,20 +38,18 @@ import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderReference; import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderReference;
import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser; import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
import ca.uhn.fhir.jpa.search.builder.QueryStack;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.search.builder.QueryStack;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams; import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams;
import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.param.CompositeParam; import ca.uhn.fhir.rest.param.CompositeParam;
@ -66,6 +64,8 @@ import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.healthmarketscience.sqlbuilder.BinaryCondition; import com.healthmarketscience.sqlbuilder.BinaryCondition;
import com.healthmarketscience.sqlbuilder.ComboCondition; import com.healthmarketscience.sqlbuilder.ComboCondition;
@ -341,7 +341,7 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
List<Condition> orPredicates = new ArrayList<>(); List<Condition> orPredicates = new ArrayList<>();
boolean paramInverted = false; boolean paramInverted = false;
QueryStack childQueryFactory = myQueryStack.newChildQueryFactoryWithFullBuilderReuse(); QueryStack childQueryFactory = myQueryStack.newChildQueryFactoryWithFullBuilderReuse();
for (String nextType : resourceTypes) {
String chain = theReferenceParam.getChain(); String chain = theReferenceParam.getChain();
String remainingChain = null; String remainingChain = null;
@ -351,15 +351,6 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
chain = chain.substring(0, chainDotIndex); chain = chain.substring(0, chainDotIndex);
} }
RuntimeResourceDefinition typeDef = getFhirContext().getResourceDefinition(nextType);
String subResourceName = typeDef.getName();
IDao dao = myDaoRegistry.getResourceDao(nextType);
if (dao == null) {
ourLog.debug("Don't have a DAO for type {}", nextType);
continue;
}
int qualifierIndex = chain.indexOf(':'); int qualifierIndex = chain.indexOf(':');
String qualifier = null; String qualifier = null;
if (qualifierIndex != -1) { if (qualifierIndex != -1) {
@ -368,6 +359,18 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
} }
boolean isMeta = ResourceMetaParams.RESOURCE_META_PARAMS.containsKey(chain); boolean isMeta = ResourceMetaParams.RESOURCE_META_PARAMS.containsKey(chain);
for (String nextType : resourceTypes) {
RuntimeResourceDefinition typeDef = getFhirContext().getResourceDefinition(nextType);
String subResourceName = typeDef.getName();
IDao dao = myDaoRegistry.getResourceDao(nextType);
if (dao == null) {
ourLog.debug("Don't have a DAO for type {}", nextType);
continue;
}
RuntimeSearchParam param = null; RuntimeSearchParam param = null;
if (!isMeta) { if (!isMeta) {
param = mySearchParamRegistry.getActiveSearchParam(nextType, chain); param = mySearchParamRegistry.getActiveSearchParam(nextType, chain);
@ -408,7 +411,6 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
andPredicates.add(childQueryFactory.searchForIdsWithAndOr(myColumnTargetResourceId, subResourceName, chain, chainParamValues, theRequest, theRequestPartitionId, SearchContainedModeEnum.FALSE)); andPredicates.add(childQueryFactory.searchForIdsWithAndOr(myColumnTargetResourceId, subResourceName, chain, chainParamValues, theRequest, theRequestPartitionId, SearchContainedModeEnum.FALSE));
orPredicates.add(toAndPredicate(andPredicates)); orPredicates.add(toAndPredicate(andPredicates));
} }
if (candidateTargetTypes.isEmpty()) { if (candidateTargetTypes.isEmpty()) {

View File

@ -222,7 +222,7 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder {
if (retVal == null) { if (retVal == null) {
if (theSearchParam != null) { if (theSearchParam != null) {
Set<String> valueSetUris = Sets.newHashSet(); Set<String> valueSetUris = Sets.newHashSet();
for (String nextPath : theSearchParam.getPathsSplit()) { for (String nextPath : theSearchParam.getPathsSplitForResourceType(getResourceType())) {
Class<? extends IBaseResource> type = getFhirContext().getResourceDefinition(getResourceType()).getImplementingClass(); Class<? extends IBaseResource> type = getFhirContext().getResourceDefinition(getResourceType()).getImplementingClass();
BaseRuntimeChildDefinition def = getFhirContext().newTerser().getDefinition(type, nextPath); BaseRuntimeChildDefinition def = getFhirContext().newTerser().getDefinition(type, nextPath);
if (def instanceof BaseRuntimeDeclaredChildDefinition) { if (def instanceof BaseRuntimeDeclaredChildDefinition) {

View File

@ -549,7 +549,7 @@ public class SearchQueryBuilder {
} }
public ComboCondition addPredicateLastUpdated(DateRangeParam theDateRange) { public ComboCondition addPredicateLastUpdated(DateRangeParam theDateRange) {
ResourceTablePredicateBuilder resourceTableRoot = getOrCreateResourceTablePredicateBuilder(); ResourceTablePredicateBuilder resourceTableRoot = getOrCreateResourceTablePredicateBuilder(false);
List<Condition> conditions = new ArrayList<>(2); List<Condition> conditions = new ArrayList<>(2);
if (theDateRange.getLowerBoundAsInstant() != null) { if (theDateRange.getLowerBoundAsInstant() != null) {

View File

@ -37,6 +37,7 @@ import org.hibernate.search.backend.elasticsearch.cfg.ElasticsearchIndexSettings
import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName; import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
import org.slf4j.Logger; import org.slf4j.Logger;
import javax.annotation.Nullable;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
import java.util.Properties; import java.util.Properties;
@ -52,10 +53,10 @@ public class ElasticsearchHibernatePropertiesBuilder {
private static final Logger ourLog = getLogger(ElasticsearchHibernatePropertiesBuilder.class); private static final Logger ourLog = getLogger(ElasticsearchHibernatePropertiesBuilder.class);
private IndexStatus myRequiredIndexStatus = IndexStatus.YELLOW.YELLOW; private IndexStatus myRequiredIndexStatus = IndexStatus.YELLOW;
private SchemaManagementStrategyName myIndexSchemaManagementStrategy = SchemaManagementStrategyName.CREATE; private SchemaManagementStrategyName myIndexSchemaManagementStrategy = SchemaManagementStrategyName.CREATE;
private String myRestUrl; private String myHosts;
private String myUsername; private String myUsername;
private String myPassword; private String myPassword;
private long myIndexManagementWaitTimeoutMillis = 10000L; private long myIndexManagementWaitTimeoutMillis = 10000L;
@ -77,11 +78,8 @@ public class ElasticsearchHibernatePropertiesBuilder {
// the below properties are used for ElasticSearch integration // the below properties are used for ElasticSearch integration
theProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "elasticsearch"); theProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "elasticsearch");
theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.ANALYSIS_CONFIGURER), HapiElasticsearchAnalysisConfigurer.class.getName()); theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.ANALYSIS_CONFIGURER), HapiElasticsearchAnalysisConfigurer.class.getName());
theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.HOSTS), myHosts);
theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.HOSTS), myRestUrl);
theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.PROTOCOL), myProtocol); theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.PROTOCOL), myProtocol);
if (StringUtils.isNotBlank(myUsername)) { if (StringUtils.isNotBlank(myUsername)) {
@ -99,8 +97,10 @@ public class ElasticsearchHibernatePropertiesBuilder {
theProperties.put(HibernateOrmMapperSettings.AUTOMATIC_INDEXING_SYNCHRONIZATION_STRATEGY, myDebugSyncStrategy); theProperties.put(HibernateOrmMapperSettings.AUTOMATIC_INDEXING_SYNCHRONIZATION_STRATEGY, myDebugSyncStrategy);
theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LOG_JSON_PRETTY_PRINTING), Boolean.toString(myDebugPrettyPrintJsonLog)); theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LOG_JSON_PRETTY_PRINTING), Boolean.toString(myDebugPrettyPrintJsonLog));
injectStartupTemplate(myProtocol, myRestUrl, myUsername, myPassword); //This tells elasticsearch to use our custom index naming strategy.
theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LAYOUT_STRATEGY), IndexNamePrefixLayoutStrategy.class.getName());
injectStartupTemplate(myProtocol, myHosts, myUsername, myPassword);
} }
public ElasticsearchHibernatePropertiesBuilder setRequiredIndexStatus(IndexStatus theRequiredIndexStatus) { public ElasticsearchHibernatePropertiesBuilder setRequiredIndexStatus(IndexStatus theRequiredIndexStatus) {
@ -108,11 +108,8 @@ public class ElasticsearchHibernatePropertiesBuilder {
return this; return this;
} }
public ElasticsearchHibernatePropertiesBuilder setRestUrl(String theRestUrl) { public ElasticsearchHibernatePropertiesBuilder setHosts(String hosts) {
if (theRestUrl.contains("://")) { myHosts = hosts;
throw new ConfigurationException("Elasticsearch URL cannot include a protocol, that is a separate property. Remove http:// or https:// from this URL.");
}
myRestUrl = theRestUrl;
return this; return this;
} }
@ -147,18 +144,13 @@ public class ElasticsearchHibernatePropertiesBuilder {
* TODO GGG HS: In HS6.1, we should have a native way of performing index settings manipulation at bootstrap time, so this should * TODO GGG HS: In HS6.1, we should have a native way of performing index settings manipulation at bootstrap time, so this should
* eventually be removed in favour of whatever solution they come up with. * eventually be removed in favour of whatever solution they come up with.
*/ */
void injectStartupTemplate(String theProtocol, String theHostAndPort, String theUsername, String thePassword) { void injectStartupTemplate(String theProtocol, String theHosts, @Nullable String theUsername, @Nullable String thePassword) {
PutIndexTemplateRequest ngramTemplate = new PutIndexTemplateRequest("ngram-template") PutIndexTemplateRequest ngramTemplate = new PutIndexTemplateRequest("ngram-template")
.patterns(Arrays.asList("resourcetable-*", "termconcept-*")) .patterns(Arrays.asList("*resourcetable-*", "*termconcept-*"))
.settings(Settings.builder().put("index.max_ngram_diff", 50)); .settings(Settings.builder().put("index.max_ngram_diff", 50));
int colonIndex = theHostAndPort.indexOf(":");
String host = theHostAndPort.substring(0, colonIndex);
Integer port = Integer.valueOf(theHostAndPort.substring(colonIndex + 1));
String qualifiedHost = theProtocol + "://" + host;
try { try {
RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(qualifiedHost, port, theUsername, thePassword); RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(theProtocol, theHosts, theUsername, thePassword);
ourLog.info("Adding starter template for large ngram diffs"); ourLog.info("Adding starter template for large ngram diffs");
AcknowledgedResponse acknowledgedResponse = elasticsearchHighLevelRestClient.indices().putTemplate(ngramTemplate, RequestOptions.DEFAULT); AcknowledgedResponse acknowledgedResponse = elasticsearchHighLevelRestClient.indices().putTemplate(ngramTemplate, RequestOptions.DEFAULT);
assert acknowledgedResponse.isAcknowledged(); assert acknowledgedResponse.isAcknowledged();
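The leading '*' added to the template patterns matters once index names can carry a prefix (see IndexNamePrefixLayoutStrategy below): a name like myprefix-resourcetable-000001 must still match the ngram template. A small check that approximates Elasticsearch's glob matching with a regex:

public class TemplatePatternDemo {

	// Approximates Elasticsearch index-pattern globbing for this comparison only
	static boolean globMatches(String theGlob, String theName) {
		return theName.matches(theGlob.replace("*", ".*"));
	}

	public static void main(String[] args) {
		String name = "myprefix-resourcetable-000001";
		System.out.println(globMatches("resourcetable-*", name));  // false: prefix breaks the old pattern
		System.out.println(globMatches("*resourcetable-*", name)); // true: new pattern tolerates a prefix
	}
}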

View File

@ -0,0 +1,99 @@
package ca.uhn.fhir.jpa.search.elastic;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.search.backend.elasticsearch.index.layout.IndexLayoutStrategy;
import org.hibernate.search.backend.elasticsearch.logging.impl.Log;
import org.hibernate.search.util.common.logging.impl.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.lang.invoke.MethodHandles;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* This class instructs hibernate search on how to create index names for indexed entities.
* In our case, we use this class to add an optional prefix to all indices which are created, which can be controlled via
* {@link DaoConfig#setElasticSearchIndexPrefix(String)}.
*/
@Service
public class IndexNamePrefixLayoutStrategy implements IndexLayoutStrategy {
@Autowired
private DaoConfig myDaoConfig;
static final Log log = LoggerFactory.make(Log.class, MethodHandles.lookup());
public static final String NAME = "prefix";
public static final Pattern UNIQUE_KEY_EXTRACTION_PATTERN = Pattern.compile("(.*)-\\d{6}");
public String createInitialElasticsearchIndexName(String hibernateSearchIndexName) {
return addPrefixIfNecessary(hibernateSearchIndexName + "-000001");
}
public String createWriteAlias(String hibernateSearchIndexName) {
return addPrefixIfNecessary(hibernateSearchIndexName + "-write");
}
public String createReadAlias(String hibernateSearchIndexName) {
return addPrefixIfNecessary(hibernateSearchIndexName + "-read");
}
private String addPrefixIfNecessary(String theCandidateName) {
validateDaoConfigIsPresent();
if (!StringUtils.isBlank(myDaoConfig.getElasticSearchIndexPrefix())) {
return myDaoConfig.getElasticSearchIndexPrefix() + "-" + theCandidateName;
} else {
return theCandidateName;
}
}
public String extractUniqueKeyFromHibernateSearchIndexName(String hibernateSearchIndexName) {
return hibernateSearchIndexName;
}
public String extractUniqueKeyFromElasticsearchIndexName(String elasticsearchIndexName) {
Matcher matcher = UNIQUE_KEY_EXTRACTION_PATTERN.matcher(elasticsearchIndexName);
if (!matcher.matches()) {
throw log.invalidIndexPrimaryName(elasticsearchIndexName, UNIQUE_KEY_EXTRACTION_PATTERN);
} else {
String candidateUniqueKey = matcher.group(1);
return removePrefixIfNecessary(candidateUniqueKey);
}
}
private String removePrefixIfNecessary(String theCandidateUniqueKey) {
validateDaoConfigIsPresent();
if (!StringUtils.isBlank(myDaoConfig.getElasticSearchIndexPrefix())) {
return theCandidateUniqueKey.replace(myDaoConfig.getElasticSearchIndexPrefix() + "-", "");
} else {
return theCandidateUniqueKey;
}
}
private void validateDaoConfigIsPresent() {
if (myDaoConfig == null) {
throw new ConfigurationException("While attempting to boot HAPI FHIR, the Hibernate Search bootstrapper failed to find the DaoConfig. This probably means Hibernate Search has been recently upgraded, or somebody modified HapiFhirLocalContainerEntityManagerFactoryBean.");
}
}
}
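Concretely, with a prefix configured the three naming methods produce prefixed initial, write-alias, and read-alias names. A plain-string re-creation of the scheme; "hapi_dev" is a made-up value for DaoConfig#setElasticSearchIndexPrefix:

public class PrefixNamingDemo {

	// Reproduces addPrefixIfNecessary with a plain-string prefix instead of DaoConfig
	static String prefixed(String thePrefix, String theName) {
		return (thePrefix == null || thePrefix.isEmpty()) ? theName : thePrefix + "-" + theName;
	}

	public static void main(String[] args) {
		String prefix = "hapi_dev";
		System.out.println(prefixed(prefix, "resourcetable-000001")); // hapi_dev-resourcetable-000001
		System.out.println(prefixed(prefix, "resourcetable-write"));  // hapi_dev-resourcetable-write
		System.out.println(prefixed(prefix, "resourcetable-read"));   // hapi_dev-resourcetable-read
	}
}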

View File

@ -20,6 +20,8 @@ package ca.uhn.fhir.jpa.search.lastn;
* #L% * #L%
*/ */
import ca.uhn.fhir.context.ConfigurationException;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.Header; import org.apache.http.Header;
import org.apache.http.HttpHost; import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope; import org.apache.http.auth.AuthScope;
@ -27,41 +29,47 @@ import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider; import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.message.BasicHeader; import org.apache.http.message.BasicHeader;
import org.elasticsearch.client.Node;
import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.RestHighLevelClient;
import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
public class ElasticsearchRestClientFactory { public class ElasticsearchRestClientFactory {
private static String determineScheme(String theHostname) { static public RestHighLevelClient createElasticsearchHighLevelRestClient(
int schemeIdx = theHostname.indexOf("://"); String protocol, String hosts, @Nullable String theUsername, @Nullable String thePassword) {
if (schemeIdx > 0) {
return theHostname.substring(0, schemeIdx); if (hosts.contains("://")) {
} else { throw new ConfigurationException("Elasticsearch URLs cannot include a protocol, that is a separate property. Remove http:// or https:// from this URL.");
return "http";
} }
String[] hostArray = hosts.split(",");
List<Node> clientNodes = Arrays.stream(hostArray)
.map(String::trim)
.filter(s -> s.contains(":"))
.map(h -> {
int colonIndex = h.indexOf(":");
String host = h.substring(0, colonIndex);
int port = Integer.parseInt(h.substring(colonIndex + 1));
return new Node(new HttpHost(host, port, protocol));
})
.collect(Collectors.toList());
if (hostArray.length != clientNodes.size()) {
throw new ConfigurationException("Elasticsearch URLs have to contain ':' as a host:port separator. Example: localhost:9200,localhost:9201,localhost:9202");
} }
private static String stripHostOfScheme(String theHostname) { RestClientBuilder clientBuilder = RestClient.builder(clientNodes.toArray(new Node[0]));
int schemeIdx = theHostname.indexOf("://"); if (StringUtils.isNotBlank(theUsername) && StringUtils.isNotBlank(thePassword)) {
if (schemeIdx > 0) { final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
return theHostname.substring(schemeIdx + 3); credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(theUsername, thePassword));
} else { clientBuilder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder
return theHostname;
}
}
static public RestHighLevelClient createElasticsearchHighLevelRestClient(String theHostname, int thePort, String theUsername, String thePassword) {
final CredentialsProvider credentialsProvider =
new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY,
new UsernamePasswordCredentials(theUsername, thePassword));
RestClientBuilder clientBuilder = RestClient.builder(
new HttpHost(stripHostOfScheme(theHostname), thePort, determineScheme(theHostname)))
.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder
.setDefaultCredentialsProvider(credentialsProvider)); .setDefaultCredentialsProvider(credentialsProvider));
}
Header[] defaultHeaders = new Header[]{new BasicHeader("Content-Type", "application/json")}; Header[] defaultHeaders = new Header[]{new BasicHeader("Content-Type", "application/json")};
clientBuilder.setDefaultHeaders(defaultHeaders); clientBuilder.setDefaultHeaders(defaultHeaders);
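The factory now accepts a comma-separated host:port list, with the protocol supplied separately. A minimal sketch of the accepted format and its validation, mirroring the parsing above without the Elasticsearch client types:

public class HostsFormatDemo {
	public static void main(String[] args) {
		String hosts = "localhost:9200, localhost:9201"; // no http:// or https:// allowed here
		for (String h : hosts.split(",")) {
			h = h.trim();
			int colon = h.indexOf(':');
			if (h.contains("://") || colon < 0) {
				throw new IllegalArgumentException("Expected host:port, got: " + h);
			}
			System.out.println("host=" + h.substring(0, colon) + " port=" + Integer.parseInt(h.substring(colon + 1)));
		}
	}
}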

View File

@ -68,11 +68,11 @@ import org.elasticsearch.search.aggregations.bucket.terms.ParsedTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.ParsedTopHits; import org.elasticsearch.search.aggregations.metrics.ParsedTopHits;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nullable;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.IOException; import java.io.IOException;
import java.io.InputStreamReader; import java.io.InputStreamReader;
@ -125,13 +125,13 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
private PartitionSettings myPartitionSettings; private PartitionSettings myPartitionSettings;
// This constructor is used to inject a dummy PartitionSettings in tests. // This constructor is used to inject a dummy PartitionSettings in tests.
public ElasticsearchSvcImpl(PartitionSettings thePartitionSetings, String theHostname, int thePort, String theUsername, String thePassword) { public ElasticsearchSvcImpl(PartitionSettings thePartitionSetings, String theHostname, @Nullable String theUsername, @Nullable String thePassword) {
this(theHostname, thePort, theUsername, thePassword); this(theHostname, theUsername, thePassword);
this.myPartitionSettings = thePartitionSetings; this.myPartitionSettings = thePartitionSetings;
} }
public ElasticsearchSvcImpl(String theHostname, int thePort, String theUsername, String thePassword) { public ElasticsearchSvcImpl(String theHostname, @Nullable String theUsername, @Nullable String thePassword) {
myRestHighLevelClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(theHostname, thePort, theUsername, thePassword); myRestHighLevelClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient("http", theHostname, theUsername, thePassword);
try { try {
createObservationIndexIfMissing(); createObservationIndexIfMissing();

View File

@ -172,6 +172,10 @@ public class MemoryCacheService {
return (Cache<K, T>) myCaches.get(theCache); return (Cache<K, T>) myCaches.get(theCache);
} }
public long getEstimatedSize(CacheEnum theCache) {
return getCache(theCache).estimatedSize();
}
public enum CacheEnum { public enum CacheEnum {
TAG_DEFINITION(TagDefinitionCacheKey.class), TAG_DEFINITION(TagDefinitionCacheKey.class),

View File

@ -126,5 +126,8 @@ public class SqlQuery {
} }
@Override
public String toString() {
return getSql(true, true);
}
} }

View File

@ -1,16 +1,15 @@
package ca.uhn.fhir.jpa.bulk; package ca.uhn.fhir.jpa.bulk;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider; import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.client.apache.ResourceEntity; import ca.uhn.fhir.rest.client.apache.ResourceEntity;
import ca.uhn.fhir.rest.server.RestfulServer; import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
@ -55,11 +54,8 @@ import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.nullValue;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.isNull;
import static org.mockito.ArgumentMatchers.nullable; import static org.mockito.ArgumentMatchers.nullable;
import static org.mockito.Mockito.eq; import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.times; import static org.mockito.Mockito.times;
@ -74,7 +70,7 @@ public class BulkDataExportProviderTest {
private static final String GROUP_ID = "Group/G2401"; private static final String GROUP_ID = "Group/G2401";
private static final String G_JOB_ID = "0000000-GGGGGG"; private static final String G_JOB_ID = "0000000-GGGGGG";
private Server myServer; private Server myServer;
private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4); private final FhirContext myCtx = FhirContext.forR4Cached();
private int myPort; private int myPort;
@Mock @Mock
private IBulkDataExportSvc myBulkDataExportSvc; private IBulkDataExportSvc myBulkDataExportSvc;
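The context field above switches from forCached(FhirVersionEnum.R4) to the forR4Cached() shorthand; both resolve to the same process-wide cached instance, so the expensive model scan happens at most once per JVM rather than once per test class. A small sketch:

    import ca.uhn.fhir.context.FhirContext;
    import ca.uhn.fhir.context.FhirVersionEnum;

    public class CachedContextSketch {
        public static void main(String[] args) {
            FhirContext viaEnum = FhirContext.forCached(FhirVersionEnum.R4);
            FhirContext viaShortcut = FhirContext.forR4Cached();
            // Both calls should return the shared cached R4 instance.
            System.out.println(viaEnum == viaShortcut); // expected: true
        }
    }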
@ -478,7 +474,7 @@ public class BulkDataExportProviderTest {
when(myBulkDataExportSvc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(jobInfo); when(myBulkDataExportSvc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(jobInfo);
String url = "http://localhost:" + myPort + "/" + "Group/123/" +JpaConstants.OPERATION_EXPORT String url = "http://localhost:" + myPort + "/" + "Group/123/" +JpaConstants.OPERATION_EXPORT
+ "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON);; + "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON);
HttpGet get = new HttpGet(url); HttpGet get = new HttpGet(url);
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);

View File

@ -21,12 +21,12 @@ public class ResourceVersionCacheSvcTest extends BaseJpaR4Test {
IIdType patientId = myPatientDao.create(patient).getId(); IIdType patientId = myPatientDao.create(patient).getId();
ResourceVersionMap versionMap = myResourceVersionCacheSvc.getVersionMap("Patient", SearchParameterMap.newSynchronous()); ResourceVersionMap versionMap = myResourceVersionCacheSvc.getVersionMap("Patient", SearchParameterMap.newSynchronous());
assertEquals(1, versionMap.size()); assertEquals(1, versionMap.size());
assertEquals("1", versionMap.getVersion(patientId)); assertEquals(1L, versionMap.getVersion(patientId));
patient.setGender(Enumerations.AdministrativeGender.MALE); patient.setGender(Enumerations.AdministrativeGender.MALE);
myPatientDao.update(patient); myPatientDao.update(patient);
versionMap = myResourceVersionCacheSvc.getVersionMap("Patient", SearchParameterMap.newSynchronous()); versionMap = myResourceVersionCacheSvc.getVersionMap("Patient", SearchParameterMap.newSynchronous());
assertEquals(1, versionMap.size()); assertEquals(1, versionMap.size());
assertEquals("2", versionMap.getVersion(patientId)); assertEquals(2L, versionMap.getVersion(patientId));
} }
} }
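The updated assertions reflect that ResourceVersionMap.getVersion() now returns the numeric version as a Long rather than the String form. A small sketch of a caller; the import path is an assumption:

    import ca.uhn.fhir.jpa.cache.ResourceVersionMap;
    import org.hl7.fhir.instance.model.api.IIdType;

    public class VersionCheckSketch {
        public static boolean wasUpdatedOnce(ResourceVersionMap theMap, IIdType theId) {
            // getVersion() now yields e.g. 2L, not the String "2".
            Long version = theMap.getVersion(theId);
            return version != null && version == 2L;
        }
    }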

View File

@ -0,0 +1,147 @@
package ca.uhn.fhir.jpa.config;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.config.BlockLargeNumbersOfParamsListener;
import ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect;
import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
import ca.uhn.fhir.jpa.dao.r4.ElasticsearchPrefixTest;
import ca.uhn.fhir.jpa.search.elastic.HapiElasticsearchAnalysisConfigurer;
import ca.uhn.fhir.jpa.search.elastic.IndexNamePrefixLayoutStrategy;
import ca.uhn.fhir.jpa.search.lastn.ElasticsearchRestClientFactory;
import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchContainerHelper;
import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.PutIndexTemplateRequest;
import org.elasticsearch.common.settings.Settings;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.hibernate.search.backend.elasticsearch.cfg.ElasticsearchBackendSettings;
import org.hibernate.search.backend.elasticsearch.cfg.ElasticsearchIndexSettings;
import org.hibernate.search.backend.elasticsearch.index.IndexStatus;
import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.testcontainers.elasticsearch.ElasticsearchContainer;
import javax.sql.DataSource;
import java.io.IOException;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
/**
* The only reason this is its own class is so that we can set a dao config setting before the whole test framework comes online.
* We need to do this because Hibernate Search (HS) bootstrapping occurs during bean creation.
*/
@Configuration
public class ElasticsearchWithPrefixConfig {
@Bean
public DaoConfig daoConfig() {
DaoConfig daoConfig = new DaoConfig();
daoConfig.setElasticSearchIndexPrefix(ElasticsearchPrefixTest.ELASTIC_PREFIX);
return daoConfig;
}
@Bean
public IndexNamePrefixLayoutStrategy indexNamePrefixLayoutStrategy() {
return new IndexNamePrefixLayoutStrategy();
}
@Bean
public FhirContext fhirContext() {
return FhirContext.forR4();
}
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = new HapiFhirLocalContainerEntityManagerFactoryBean(theConfigurableListableBeanFactory);
retVal.setJpaDialect(new HapiFhirHibernateJpaDialect(fhirContext().getLocalizer()));
retVal.setPackagesToScan("ca.uhn.fhir.jpa.model.entity", "ca.uhn.fhir.jpa.entity");
retVal.setPersistenceProvider(new HibernatePersistenceProvider());
retVal.setPersistenceUnitName("PU_HapiFhirJpaR4");
retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties());
return retVal;
}
@Bean
public DataSource dataSource() {
BasicDataSource retVal = new BasicDataSource();
retVal.setDriver(new org.h2.Driver());
retVal.setUrl("jdbc:h2:mem:testdb_r4");
retVal.setMaxWaitMillis(30000);
retVal.setUsername("");
retVal.setPassword("");
retVal.setMaxTotal(5);
SLF4JLogLevel level = SLF4JLogLevel.INFO;
DataSource dataSource = ProxyDataSourceBuilder
.create(retVal)
.logSlowQueryBySlf4j(10, TimeUnit.SECONDS, level)
.beforeQuery(new BlockLargeNumbersOfParamsListener())
.afterQuery(new CurrentThreadCaptureQueriesListener())
.build();
return dataSource;
}
@Bean
public Properties jpaProperties() {
Properties extraProperties = new Properties();
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
//Override default lucene settings
// Force elasticsearch to start first
int httpPort = elasticContainer().getMappedPort(9200);//9200 is the HTTP port
String host = elasticContainer().getHost();
// the below properties are used for ElasticSearch integration
extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "elasticsearch");
extraProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.ANALYSIS_CONFIGURER), HapiElasticsearchAnalysisConfigurer.class.getName());
extraProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.HOSTS), host + ":" + httpPort);
extraProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.PROTOCOL), "http");
extraProperties.put(HibernateOrmMapperSettings.SCHEMA_MANAGEMENT_STRATEGY, SchemaManagementStrategyName.CREATE.externalRepresentation());
extraProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.SCHEMA_MANAGEMENT_MINIMAL_REQUIRED_STATUS_WAIT_TIMEOUT), Long.toString(10000));
extraProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.SCHEMA_MANAGEMENT_MINIMAL_REQUIRED_STATUS), IndexStatus.YELLOW.externalRepresentation());
// Need the mapping to be dynamic because of terminology indexes.
extraProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.DYNAMIC_MAPPING), "true");
// Only for unit tests
extraProperties.put(HibernateOrmMapperSettings.AUTOMATIC_INDEXING_SYNCHRONIZATION_STRATEGY, "read-sync");
extraProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LOG_JSON_PRETTY_PRINTING), Boolean.toString(true));
//This tells the Hibernate Search Elasticsearch backend to use our custom index naming strategy.
extraProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LAYOUT_STRATEGY), IndexNamePrefixLayoutStrategy.class.getName());
PutIndexTemplateRequest ngramTemplate = new PutIndexTemplateRequest("ngram-template")
.patterns(Arrays.asList("*resourcetable-*", "*termconcept-*"))
.settings(Settings.builder().put("index.max_ngram_diff", 50));
try {
RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient("http", host + ":" + httpPort, "", "");
AcknowledgedResponse acknowledgedResponse = elasticsearchHighLevelRestClient.indices().putTemplate(ngramTemplate, RequestOptions.DEFAULT);
assert acknowledgedResponse.isAcknowledged();
} catch (IOException theE) {
theE.printStackTrace();
throw new ConfigurationException("Couldn't connect to the elasticsearch server to create necessary templates. Ensure the Elasticsearch user has permissions to create templates.");
}
return extraProperties;
}
@Bean
public ElasticsearchContainer elasticContainer() {
ElasticsearchContainer embeddedElasticSearch = TestElasticsearchContainerHelper.getEmbeddedElasticSearch();
embeddedElasticSearch.start();
return embeddedElasticSearch;
}
}
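A sketch of how a test might consume this configuration; the real ElasticsearchPrefixTest imported above may wire things differently, so treat the class and assertion here as assumptions:

    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.extension.ExtendWith;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.test.context.ContextConfiguration;
    import org.springframework.test.context.junit.jupiter.SpringExtension;
    import org.testcontainers.elasticsearch.ElasticsearchContainer;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    @ExtendWith(SpringExtension.class)
    @ContextConfiguration(classes = ElasticsearchWithPrefixConfig.class)
    public class ElasticsearchPrefixSketchTest {
        @Autowired
        private ElasticsearchContainer myElasticContainer;

        @Test
        public void containerIsStartedByTheConfig() {
            // elasticContainer() starts the Testcontainers instance during bean
            // creation, so the container should already be up when tests run.
            assertTrue(myElasticContainer.isRunning());
        }
    }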

View File

@ -16,6 +16,7 @@ import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Import;
@ -134,8 +135,8 @@ public class TestDstu2Config extends BaseJavaConfigDstu2 {
@Override @Override
@Bean @Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() { public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(); LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setPersistenceUnitName("PU_HapiFhirJpaDstu2"); retVal.setPersistenceUnitName("PU_HapiFhirJpaDstu2");
retVal.setDataSource(dataSource()); retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties()); retVal.setJpaProperties(jpaProperties());
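The same signature change repeats in the Dstu3, R4, R5, and Lucene-disabled configs below: entityManagerFactory() now receives the ConfigurableListableBeanFactory so that HapiFhirLocalContainerEntityManagerFactoryBean can hand it to Hibernate for bean resolution during bootstrap. The shape of the pattern, with a hypothetical persistence unit name:

    import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
    import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
    import org.springframework.context.annotation.Bean;
    import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;

    public class EntityManagerFactorySketchConfig {
        @Bean
        public LocalContainerEntityManagerFactoryBean entityManagerFactory(
                ConfigurableListableBeanFactory theBeanFactory) {
            // Spring injects the bean factory into @Bean method parameters automatically.
            LocalContainerEntityManagerFactoryBean retVal =
                new HapiFhirLocalContainerEntityManagerFactoryBean(theBeanFactory);
            retVal.setPersistenceUnitName("PU_Sketch"); // hypothetical
            return retVal;
        }
    }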

View File

@ -15,6 +15,7 @@ import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings; import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings; import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Import;
@ -138,8 +139,8 @@ public class TestDstu3Config extends BaseJavaConfigDstu3 {
@Override @Override
@Bean @Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() { public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(); LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setPersistenceUnitName("PU_HapiFhirJpaDstu3"); retVal.setPersistenceUnitName("PU_HapiFhirJpaDstu3");
retVal.setDataSource(dataSource()); retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties()); retVal.setJpaProperties(jpaProperties());

View File

@ -3,12 +3,14 @@ package ca.uhn.fhir.jpa.config;
import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.search.elastic.IndexNamePrefixLayoutStrategy;
import ca.uhn.fhir.jpa.subscription.SubscriptionTestUtil; import ca.uhn.fhir.jpa.subscription.SubscriptionTestUtil;
import ca.uhn.fhir.jpa.subscription.channel.config.SubscriptionChannelConfig; import ca.uhn.fhir.jpa.subscription.channel.config.SubscriptionChannelConfig;
import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig; import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig;
import ca.uhn.fhir.jpa.subscription.match.deliver.resthook.SubscriptionDeliveringRestHookSubscriber; import ca.uhn.fhir.jpa.subscription.match.deliver.resthook.SubscriptionDeliveringRestHookSubscriber;
import ca.uhn.fhir.jpa.subscription.submit.config.SubscriptionSubmitterConfig; import ca.uhn.fhir.jpa.subscription.submit.config.SubscriptionSubmitterConfig;
import ca.uhn.fhir.test.utilities.BatchJobHelper; import ca.uhn.fhir.test.utilities.BatchJobHelper;
import org.hibernate.search.backend.elasticsearch.index.layout.IndexLayoutStrategy;
import org.springframework.batch.core.explore.JobExplorer; import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;

View File

@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.config; package ca.uhn.fhir.jpa.config;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig; import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl; import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl;
@ -14,6 +15,8 @@ import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder; import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource; import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.H2Dialect; import org.hibernate.dialect.H2Dialect;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Import;
@ -138,10 +141,15 @@ public class TestR4Config extends BaseJavaConfigR4 {
return new SingleQueryCountHolder(); return new SingleQueryCountHolder();
} }
@Override @Override
@Bean @Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() { public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(); LocalContainerEntityManagerFactoryBean retVal = new HapiFhirLocalContainerEntityManagerFactoryBean(theConfigurableListableBeanFactory);
configureEntityManagerFactory(retVal, fhirContext());
retVal.setJpaDialect(new HapiFhirHibernateJpaDialect(fhirContext().getLocalizer()));
retVal.setPackagesToScan("ca.uhn.fhir.jpa.model.entity", "ca.uhn.fhir.jpa.entity");
retVal.setPersistenceProvider(new HibernatePersistenceProvider());
retVal.setPersistenceUnitName("PU_HapiFhirJpaR4"); retVal.setPersistenceUnitName("PU_HapiFhirJpaR4");
retVal.setDataSource(dataSource()); retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties()); retVal.setJpaProperties(jpaProperties());

View File

@ -1,13 +1,19 @@
package ca.uhn.fhir.jpa.config; package ca.uhn.fhir.jpa.config;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.search.elastic.ElasticsearchHibernatePropertiesBuilder; import ca.uhn.fhir.jpa.search.elastic.ElasticsearchHibernatePropertiesBuilder;
import ca.uhn.fhir.jpa.search.elastic.IndexNamePrefixLayoutStrategy;
import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchContainerHelper; import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchContainerHelper;
import org.h2.index.Index;
import org.hibernate.search.backend.elasticsearch.index.IndexStatus; import org.hibernate.search.backend.elasticsearch.index.IndexStatus;
import org.hibernate.search.backend.elasticsearch.index.layout.IndexLayoutStrategy;
import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName; import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.testcontainers.elasticsearch.ElasticsearchContainer; import org.testcontainers.elasticsearch.ElasticsearchContainer;
import javax.annotation.PreDestroy; import javax.annotation.PreDestroy;
@ -35,7 +41,7 @@ public class TestR4ConfigWithElasticSearch extends TestR4Config {
.setIndexSchemaManagementStrategy(SchemaManagementStrategyName.CREATE) .setIndexSchemaManagementStrategy(SchemaManagementStrategyName.CREATE)
.setIndexManagementWaitTimeoutMillis(10000) .setIndexManagementWaitTimeoutMillis(10000)
.setRequiredIndexStatus(IndexStatus.YELLOW) .setRequiredIndexStatus(IndexStatus.YELLOW)
.setRestUrl(host+ ":" + httpPort) .setHosts(host + ":" + httpPort)
.setProtocol("http") .setProtocol("http")
.setUsername("") .setUsername("")
.setPassword("") .setPassword("")

View File

@ -21,7 +21,7 @@ public class TestR4ConfigWithElasticsearchClient extends TestR4ConfigWithElastic
public ElasticsearchSvcImpl myElasticsearchSvc() { public ElasticsearchSvcImpl myElasticsearchSvc() {
int elasticsearchPort = elasticContainer().getMappedPort(9200); int elasticsearchPort = elasticContainer().getMappedPort(9200);
String host = elasticContainer().getHost(); String host = elasticContainer().getHost();
return new ElasticsearchSvcImpl(host, elasticsearchPort, "", ""); return new ElasticsearchSvcImpl(host + ":" + elasticsearchPort, null, null);
} }
@PreDestroy @PreDestroy

View File

@ -3,9 +3,11 @@ package ca.uhn.fhir.jpa.config;
import java.util.Properties; import java.util.Properties;
import org.hibernate.dialect.H2Dialect; import org.hibernate.dialect.H2Dialect;
import org.hibernate.search.backend.elasticsearch.index.layout.IndexLayoutStrategy;
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings; import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.engine.cfg.BackendSettings; import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
@ -27,8 +29,8 @@ public class TestR4WithLuceneDisabledConfig extends TestR4Config {
@Override @Override
@Bean @Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() { public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(); LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setDataSource(dataSource()); retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties()); retVal.setJpaProperties(jpaProperties());
return retVal; return retVal;

View File

@ -16,6 +16,7 @@ import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings; import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Import;
@ -138,8 +139,8 @@ public class TestR5Config extends BaseJavaConfigR5 {
@Override @Override
@Bean @Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() { public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(); LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setPersistenceUnitName("PU_HapiFhirJpaR5"); retVal.setPersistenceUnitName("PU_HapiFhirJpaR5");
retVal.setDataSource(dataSource()); retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties()); retVal.setJpaProperties(jpaProperties());

View File

@ -13,6 +13,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assertions.fail;
class BaseHapiFhirResourceDaoTest { class BaseHapiFhirResourceDaoTest {
TestResourceDao mySvc = new TestResourceDao(); TestResourceDao mySvc = new TestResourceDao();
@Test @Test

View File

@ -721,6 +721,4 @@ public abstract class BaseJpaTest extends BaseTest {
} }
Thread.sleep(500); Thread.sleep(500);
} }
} }

View File

@ -1,19 +1,18 @@
package ca.uhn.fhir.jpa.dao; package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.parser.LenientErrorHandler; import ca.uhn.fhir.parser.LenientErrorHandler;
import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.Observation;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.hamcrest.MatcherAssert.assertThat;
public class TolerantJsonParserR4Test { public class TolerantJsonParserR4Test {
private FhirContext myFhirContext = FhirContext.forCached(FhirVersionEnum.R4); private final FhirContext myFhirContext = FhirContext.forR4Cached();
@Test @Test
public void testParseInvalidNumeric_LeadingDecimal() { public void testParseInvalidNumeric_LeadingDecimal() {

View File

@ -1,10 +1,10 @@
package ca.uhn.fhir.jpa.dao; package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.interceptor.executor.InterceptorService; import ca.uhn.fhir.interceptor.executor.InterceptorService;
import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4; import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
@ -13,6 +13,7 @@ import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher; import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher;
import ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hibernate.Session; import org.hibernate.Session;
import org.hibernate.internal.SessionImpl; import org.hibernate.internal.SessionImpl;
@ -70,6 +71,10 @@ public class TransactionProcessorTest {
private MatchUrlService myMatchUrlService; private MatchUrlService myMatchUrlService;
@MockBean @MockBean
private IRequestPartitionHelperSvc myRequestPartitionHelperSvc; private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
@MockBean
private IResourceVersionSvc myResourceVersionSvc;
@MockBean
private SearchParamMatcher mySearchParamMatcher;
@MockBean(answer = Answers.RETURNS_DEEP_STUBS) @MockBean(answer = Answers.RETURNS_DEEP_STUBS)
private SessionImpl mySession; private SessionImpl mySession;
@ -120,7 +125,7 @@ public class TransactionProcessorTest {
@Bean @Bean
public FhirContext fhirContext() { public FhirContext fhirContext() {
return FhirContext.forCached(FhirVersionEnum.R4); return FhirContext.forR4Cached();
} }
@Bean @Bean

View File

@ -1033,7 +1033,7 @@ public class FhirResourceDaoDstu2SearchCustomSearchParamTest extends BaseJpaDstu
myPatientDao.search(map).size(); myPatientDao.search(map).size();
fail(); fail();
} catch (InvalidRequestException e) { } catch (InvalidRequestException e) {
assertEquals("Unknown search parameter \"foo\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _language, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, careprovider, deathdate, deceased, email, family, gender, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage()); assertEquals("Unknown search parameter \"foo\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, careprovider, deathdate, deceased, email, family, gender, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
} }
} }
@ -1070,7 +1070,7 @@ public class FhirResourceDaoDstu2SearchCustomSearchParamTest extends BaseJpaDstu
myPatientDao.search(map).size(); myPatientDao.search(map).size();
fail(); fail();
} catch (InvalidRequestException e) { } catch (InvalidRequestException e) {
assertEquals("Unknown search parameter \"foo\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _language, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, careprovider, deathdate, deceased, email, family, gender, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage()); assertEquals("Unknown search parameter \"foo\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, careprovider, deathdate, deceased, email, family, gender, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
} }
// Try with normal gender SP // Try with normal gender SP

View File

@ -727,9 +727,6 @@ public class FhirResourceDaoDstu2SearchNoFtTest extends BaseJpaDstu2Test {
params.add("_id", new StringDt("TEST")); params.add("_id", new StringDt("TEST"));
assertEquals(1, toList(myPatientDao.search(params)).size()); assertEquals(1, toList(myPatientDao.search(params)).size());
params.add("_language", new StringParam("TEST"));
assertEquals(1, toList(myPatientDao.search(params)).size());
params.add(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST")); params.add(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST"));
assertEquals(1, toList(myPatientDao.search(params)).size()); assertEquals(1, toList(myPatientDao.search(params)).size());
@ -744,9 +741,6 @@ public class FhirResourceDaoDstu2SearchNoFtTest extends BaseJpaDstu2Test {
params.add("_id", new StringDt("TEST")); params.add("_id", new StringDt("TEST"));
assertEquals(0, toList(myPatientDao.search(params)).size()); assertEquals(0, toList(myPatientDao.search(params)).size());
params.add("_language", new StringParam("TEST"));
assertEquals(0, toList(myPatientDao.search(params)).size());
params.add(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST")); params.add(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST"));
assertEquals(0, toList(myPatientDao.search(params)).size()); assertEquals(0, toList(myPatientDao.search(params)).size());
@ -766,148 +760,6 @@ public class FhirResourceDaoDstu2SearchNoFtTest extends BaseJpaDstu2Test {
} }
} }
@Test
public void testSearchLanguageParam() {
IIdType id1;
{
Patient patient = new Patient();
patient.getLanguage().setValue("en_CA");
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().addFamily("testSearchLanguageParam").addGiven("Joe");
id1 = myPatientDao.create(patient, mySrd).getId();
}
IIdType id2;
{
Patient patient = new Patient();
patient.getLanguage().setValue("en_US");
patient.addIdentifier().setSystem("urn:system").setValue("002");
patient.addName().addFamily("testSearchLanguageParam").addGiven("John");
id2 = myPatientDao.create(patient, mySrd).getId();
}
{
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(BaseResource.SP_RES_LANGUAGE, new StringParam("en_CA"));
List<IResource> patients = toList(myPatientDao.search(params));
assertEquals(1, patients.size());
assertEquals(id1.toUnqualifiedVersionless(), patients.get(0).getId().toUnqualifiedVersionless());
}
{
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(BaseResource.SP_RES_LANGUAGE, new StringParam("en_US"));
List<Patient> patients = toList(myPatientDao.search(params));
assertEquals(1, patients.size());
assertEquals(id2.toUnqualifiedVersionless(), patients.get(0).getId().toUnqualifiedVersionless());
}
{
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(BaseResource.SP_RES_LANGUAGE, new StringParam("en_GB"));
List<Patient> patients = toList(myPatientDao.search(params));
assertEquals(0, patients.size());
}
}
@Test
public void testSearchLanguageParamAndOr() {
IIdType id1;
{
Patient patient = new Patient();
patient.getLanguage().setValue("en_CA");
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().addFamily("testSearchLanguageParam").addGiven("Joe");
id1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
Date betweenTime = new Date();
IIdType id2;
{
Patient patient = new Patient();
patient.getLanguage().setValue("en_US");
patient.addIdentifier().setSystem("urn:system").setValue("002");
patient.addName().addFamily("testSearchLanguageParam").addGiven("John");
id2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
{
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(BaseResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("en_US")));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1, id2));
}
{
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(BaseResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("en_US")));
params.setLastUpdated(new DateRangeParam(betweenTime, null));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id2));
}
{
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(BaseResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
}
{
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
StringAndListParam and = new StringAndListParam();
and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")));
params.add(BaseResource.SP_RES_LANGUAGE, and);
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
}
{
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
StringAndListParam and = new StringAndListParam();
and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
and.addAnd(new StringOrListParam().addOr(new StringParam("ZZZZZ")));
params.add(BaseResource.SP_RES_LANGUAGE, and);
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), empty());
}
{
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
StringAndListParam and = new StringAndListParam();
and.addAnd(new StringOrListParam().addOr(new StringParam("ZZZZZ")));
and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
params.add(BaseResource.SP_RES_LANGUAGE, and);
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), empty());
}
{
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
StringAndListParam and = new StringAndListParam();
and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
and.addAnd(new StringOrListParam().addOr(new StringParam("")).addOr(new StringParam(null)));
params.add(BaseResource.SP_RES_LANGUAGE, and);
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
}
{
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add("_id", new StringParam(id1.getIdPart()));
StringAndListParam and = new StringAndListParam();
and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
and.addAnd(new StringOrListParam().addOr(new StringParam("")).addOr(new StringParam(null)));
params.add(BaseResource.SP_RES_LANGUAGE, and);
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
}
{
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
StringAndListParam and = new StringAndListParam();
and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
and.addAnd(new StringOrListParam().addOr(new StringParam("")).addOr(new StringParam(null)));
params.add(BaseResource.SP_RES_LANGUAGE, and);
params.add("_id", new StringParam(id1.getIdPart()));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
}
}
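The two tests deleted above exercised the built-in _language search parameter, which this release removes. If searching on Resource.language is still required, one option (not part of this commit; sketched against the R4 model as an assumption) is to register a custom SearchParameter for that element:

    import org.hl7.fhir.r4.model.Enumerations;
    import org.hl7.fhir.r4.model.SearchParameter;

    public class LanguageSearchParameterSketch {
        public static SearchParameter buildLanguageSp() {
            SearchParameter sp = new SearchParameter();
            sp.setName("patient-resource-language");   // hypothetical name
            sp.setCode("resource-language");           // hypothetical code; avoids Patient's existing "language" SP
            sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
            sp.addBase("Patient");
            sp.setType(Enumerations.SearchParamType.TOKEN);
            sp.setExpression("Patient.language");
            return sp;
        }
    }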
@Test @Test
public void testSearchLastUpdatedParam() throws InterruptedException { public void testSearchLastUpdatedParam() throws InterruptedException {
String methodName = "testSearchLastUpdatedParam"; String methodName = "testSearchLastUpdatedParam";

View File

@ -224,7 +224,7 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
} }
@Test @Test
public void testCantSearchForDeletedResourceByLanguageOrTag() { public void testCantSearchForDeletedResourceByTag() {
String methodName = "testCantSearchForDeletedResourceByLanguageOrTag"; String methodName = "testCantSearchForDeletedResourceByLanguageOrTag";
Organization org = new Organization(); Organization org = new Organization();
org.setLanguage(new CodeDt("EN_ca")); org.setLanguage(new CodeDt("EN_ca"));
@ -236,9 +236,7 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
IIdType orgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless(); IIdType orgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
SearchParameterMap map = new SearchParameterMap(); SearchParameterMap map;
map.add("_language", new StringParam("EN_ca"));
assertEquals(1, myOrganizationDao.search(map).size().intValue());
map = new SearchParameterMap(); map = new SearchParameterMap();
map.add("_tag", new TokenParam(methodName, methodName)); map.add("_tag", new TokenParam(methodName, methodName));
@ -246,10 +244,6 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
myOrganizationDao.delete(orgId, mySrd); myOrganizationDao.delete(orgId, mySrd);
map = new SearchParameterMap();
map.add("_language", new StringParam("EN_ca"));
assertEquals(0, myOrganizationDao.search(map).size().intValue());
map = new SearchParameterMap(); map = new SearchParameterMap();
map.add("_tag", new TokenParam(methodName, methodName)); map.add("_tag", new TokenParam(methodName, methodName));
assertEquals(0, myOrganizationDao.search(map).size().intValue()); assertEquals(0, myOrganizationDao.search(map).size().intValue());
@ -1603,7 +1597,7 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
found = toList(myPatientDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Patient.SP_BIRTHDATE + "AAAA", new DateParam(ParamPrefixEnum.GREATERTHAN, "2000-01-01")))); found = toList(myPatientDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Patient.SP_BIRTHDATE + "AAAA", new DateParam(ParamPrefixEnum.GREATERTHAN, "2000-01-01"))));
assertEquals(0, found.size()); assertEquals(0, found.size());
} catch (InvalidRequestException e) { } catch (InvalidRequestException e) {
assertEquals("Unknown search parameter \"birthdateAAAA\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _language, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, careprovider, deathdate, deceased, email, family, gender, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage()); assertEquals("Unknown search parameter \"birthdateAAAA\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, careprovider, deathdate, deceased, email, family, gender, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
} }
} }

View File

@ -1015,7 +1015,7 @@ public class FhirResourceDaoDstu3SearchCustomSearchParamTest extends BaseJpaDstu
myPatientDao.search(map).size(); myPatientDao.search(map).size();
fail(); fail();
} catch (InvalidRequestException e) { } catch (InvalidRequestException e) {
assertEquals("Unknown search parameter \"foo\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _language, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, death-date, deceased, email, family, gender, general-practitioner, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage()); assertEquals("Unknown search parameter \"foo\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, death-date, deceased, email, family, gender, general-practitioner, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
} }
} }
@ -1053,7 +1053,7 @@ public class FhirResourceDaoDstu3SearchCustomSearchParamTest extends BaseJpaDstu
myPatientDao.search(map).size(); myPatientDao.search(map).size();
fail(); fail();
} catch (InvalidRequestException e) { } catch (InvalidRequestException e) {
assertEquals("Unknown search parameter \"foo\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _language, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, death-date, deceased, email, family, gender, general-practitioner, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage()); assertEquals("Unknown search parameter \"foo\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, death-date, deceased, email, family, gender, general-practitioner, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
} }
// Try with normal gender SP // Try with normal gender SP

View File

@ -1192,11 +1192,6 @@ public class FhirResourceDaoDstu3SearchNoFtTest extends BaseJpaDstu3Test {
params.add("_id", new StringParam("TEST")); params.add("_id", new StringParam("TEST"));
assertEquals(1, toList(myPatientDao.search(params)).size()); assertEquals(1, toList(myPatientDao.search(params)).size());
params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add("_language", new StringParam("TEST"));
assertEquals(1, toList(myPatientDao.search(params)).size());
params = new SearchParameterMap(); params = new SearchParameterMap();
params.setLoadSynchronous(true); params.setLoadSynchronous(true);
params.add(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST")); params.add(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST"));
@ -1214,11 +1209,6 @@ public class FhirResourceDaoDstu3SearchNoFtTest extends BaseJpaDstu3Test {
params.add("_id", new StringParam("TEST")); params.add("_id", new StringParam("TEST"));
assertEquals(0, toList(myPatientDao.search(params)).size()); assertEquals(0, toList(myPatientDao.search(params)).size());
params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add("_language", new StringParam("TEST"));
assertEquals(0, toList(myPatientDao.search(params)).size());
params = new SearchParameterMap(); params = new SearchParameterMap();
params.setLoadSynchronous(true); params.setLoadSynchronous(true);
params.add(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST")); params.add(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST"));
@ -1241,143 +1231,6 @@ public class FhirResourceDaoDstu3SearchNoFtTest extends BaseJpaDstu3Test {
} }
} }
@Test
public void testSearchLanguageParam() {
IIdType id1;
{
Patient patient = new Patient();
patient.getLanguageElement().setValue("en_CA");
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().setFamily("testSearchLanguageParam").addGiven("Joe");
id1 = myPatientDao.create(patient, mySrd).getId();
}
IIdType id2;
{
Patient patient = new Patient();
patient.getLanguageElement().setValue("en_US");
patient.addIdentifier().setSystem("urn:system").setValue("002");
patient.addName().setFamily("testSearchLanguageParam").addGiven("John");
id2 = myPatientDao.create(patient, mySrd).getId();
}
SearchParameterMap params;
{
params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(IAnyResource.SP_RES_LANGUAGE, new StringParam("en_CA"));
List<IBaseResource> patients = toList(myPatientDao.search(params));
assertEquals(1, patients.size());
assertEquals(id1.toUnqualifiedVersionless(), patients.get(0).getIdElement().toUnqualifiedVersionless());
}
{
params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(IAnyResource.SP_RES_LANGUAGE, new StringParam("en_US"));
List<Patient> patients = toList(myPatientDao.search(params));
assertEquals(1, patients.size());
assertEquals(id2.toUnqualifiedVersionless(), patients.get(0).getIdElement().toUnqualifiedVersionless());
}
{
params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(IAnyResource.SP_RES_LANGUAGE, new StringParam("en_GB"));
List<Patient> patients = toList(myPatientDao.search(params));
assertEquals(0, patients.size());
}
}
@Test
public void testSearchLanguageParamAndOr() {
IIdType id1;
{
Patient patient = new Patient();
patient.getLanguageElement().setValue("en_CA");
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().setFamily("testSearchLanguageParam").addGiven("Joe");
id1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
TestUtil.sleepOneClick();
Date betweenTime = new Date();
IIdType id2;
{
Patient patient = new Patient();
patient.getLanguageElement().setValue("en_US");
patient.addIdentifier().setSystem("urn:system").setValue("002");
patient.addName().setFamily("testSearchLanguageParam").addGiven("John");
id2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
{
SearchParameterMap params = new SearchParameterMap();
params.add(IAnyResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("en_US")));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1, id2));
}
{
SearchParameterMap params = new SearchParameterMap();
params.add(IAnyResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("en_US")));
params.setLastUpdated(new DateRangeParam(betweenTime, null));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id2));
}
{
SearchParameterMap params = new SearchParameterMap();
params.add(IAnyResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
}
{
SearchParameterMap params = new SearchParameterMap();
StringAndListParam and = new StringAndListParam();
and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")));
params.add(IAnyResource.SP_RES_LANGUAGE, and);
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
}
{
SearchParameterMap params = new SearchParameterMap();
StringAndListParam and = new StringAndListParam();
and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
and.addAnd(new StringOrListParam().addOr(new StringParam("ZZZZZ")));
params.add(IAnyResource.SP_RES_LANGUAGE, and);
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), empty());
}
{
SearchParameterMap params = new SearchParameterMap();
StringAndListParam and = new StringAndListParam();
and.addAnd(new StringOrListParam().addOr(new StringParam("ZZZZZ")));
and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
params.add(IAnyResource.SP_RES_LANGUAGE, and);
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), empty());
}
{
SearchParameterMap params = new SearchParameterMap();
StringAndListParam and = new StringAndListParam();
and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
and.addAnd(new StringOrListParam().addOr(new StringParam("")).addOr(new StringParam(null)));
params.add(IAnyResource.SP_RES_LANGUAGE, and);
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
}
{
SearchParameterMap params = new SearchParameterMap();
params.add("_id", new StringParam(id1.getIdPart()));
StringAndListParam and = new StringAndListParam();
and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
and.addAnd(new StringOrListParam().addOr(new StringParam("")).addOr(new StringParam(null)));
params.add(IAnyResource.SP_RES_LANGUAGE, and);
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
}
{
SearchParameterMap params = new SearchParameterMap();
StringAndListParam and = new StringAndListParam();
and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
and.addAnd(new StringOrListParam().addOr(new StringParam("")).addOr(new StringParam(null)));
params.add(IAnyResource.SP_RES_LANGUAGE, and);
params.add("_id", new StringParam(id1.getIdPart()));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
}
}
@Test @Test
public void testSearchLastUpdatedParam() { public void testSearchLastUpdatedParam() {
String methodName = "testSearchLastUpdatedParam"; String methodName = "testSearchLastUpdatedParam";

View File

@ -206,21 +206,12 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test {
IIdType orgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless(); IIdType orgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
SearchParameterMap map = new SearchParameterMap(); SearchParameterMap map = new SearchParameterMap();
map.add("_language", new StringParam("EN_ca"));
assertEquals(1, myOrganizationDao.search(map).size().intValue());
map = new SearchParameterMap();
map.setLoadSynchronous(true); map.setLoadSynchronous(true);
map.add("_tag", new TokenParam(methodName, methodName)); map.add("_tag", new TokenParam(methodName, methodName));
assertEquals(1, myOrganizationDao.search(map).size().intValue()); assertEquals(1, myOrganizationDao.search(map).size().intValue());
myOrganizationDao.delete(orgId, mySrd); myOrganizationDao.delete(orgId, mySrd);
map = new SearchParameterMap();
map.setLoadSynchronous(true);
map.add("_language", new StringParam("EN_ca"));
assertEquals(0, myOrganizationDao.search(map).size().intValue());
map = new SearchParameterMap(); map = new SearchParameterMap();
map.setLoadSynchronous(true); map.setLoadSynchronous(true);
map.add("_tag", new TokenParam(methodName, methodName)); map.add("_tag", new TokenParam(methodName, methodName));
@ -2014,7 +2005,7 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test {
found = toList(myPatientDao.search(new SearchParameterMap(Patient.SP_BIRTHDATE + "AAAA", new DateParam(ParamPrefixEnum.GREATERTHAN, "2000-01-01")).setLoadSynchronous(true))); found = toList(myPatientDao.search(new SearchParameterMap(Patient.SP_BIRTHDATE + "AAAA", new DateParam(ParamPrefixEnum.GREATERTHAN, "2000-01-01")).setLoadSynchronous(true)));
assertEquals(0, found.size()); assertEquals(0, found.size());
} catch (InvalidRequestException e) { } catch (InvalidRequestException e) {
assertEquals("Unknown search parameter \"birthdateAAAA\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _language, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, death-date, deceased, email, family, gender, general-practitioner, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage()); assertEquals("Unknown search parameter \"birthdateAAAA\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, death-date, deceased, email, family, gender, general-practitioner, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
} }
} }

View File

@ -1,35 +0,0 @@
package ca.uhn.fhir.jpa.dao.index;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
public class IdHelperServiceTest {
@Test
public void testReplaceDefault_AllPartitions() {
IdHelperService svc = new IdHelperService();
PartitionSettings partitionSettings = new PartitionSettings();
partitionSettings.setDefaultPartitionId(1);
svc.setPartitionSettingsForUnitTest(partitionSettings);
RequestPartitionId outcome = svc.replaceDefault(RequestPartitionId.allPartitions());
assertSame(RequestPartitionId.allPartitions(), outcome);
}
@Test
public void testReplaceDefault_DefaultPartition() {
IdHelperService svc = new IdHelperService();
PartitionSettings partitionSettings = new PartitionSettings();
partitionSettings.setDefaultPartitionId(1);
svc.setPartitionSettingsForUnitTest(partitionSettings);
RequestPartitionId outcome = svc.replaceDefault(RequestPartitionId.defaultPartition());
assertEquals(1, outcome.getPartitionIds().get(0));
}
}

View File

@ -0,0 +1,209 @@
package ca.uhn.fhir.jpa.dao.index;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Path;
import javax.persistence.criteria.Root;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
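// These tests exercise ResourceVersionSvcDaoImpl#getLatestVersionIdsForResourceIds directly, with its
// collaborators (DaoRegistry, IResourceTableDao, IdHelperService) replaced by Mockito mocks.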
@ExtendWith(MockitoExtension.class)
public class ResourceVersionSvcTest {
// helper class to package up data for helper methods
private class ResourceIdPackage {
public IIdType MyResourceId;
public ResourcePersistentId MyPid;
public Long MyVersion;
public ResourceIdPackage(IIdType id,
ResourcePersistentId pid,
Long version) {
MyResourceId = id;
MyPid = pid;
MyVersion = version;
}
}
@Mock
DaoRegistry myDaoRegistry;
@Mock
IResourceTableDao myResourceTableDao;
@Mock
IdHelperService myIdHelperService;
// TODO KHS move the methods that use this out to a separate test class
@InjectMocks
private ResourceVersionSvcDaoImpl myResourceVersionSvc;
/**
* Gets a ResourceTable record for getResourceVersionsForPid
* Order matters!
* @param resourceType the resource type name, e.g. "Patient"
* @param pid the resource's persistent id
* @param version the resource's version number
* @return an Object[] of { pid, resourceType, version }, in that order
*/
private Object[] getResourceTableRecordForResourceTypeAndPid(String resourceType, long pid, long version) {
return new Object[] {
pid, // long
resourceType, // string
version // long
};
}
/**
* Helper function to mock out resolveResourcePersistentIdsWithCache
* to return empty lists (as if no resources were found).
*/
private void mock_resolveResourcePersistentIdsWithCache_toReturnNothing() {
// Nothing is stubbed here on purpose: with no when(...) in place, Mockito's default answer for
// myIdHelperService.resolveResourcePersistentIdsWithCache is an empty list, i.e. "no resources found".
// The local mocks below are unused scaffolding for the underlying criteria query and are kept as-is.
CriteriaBuilder cb = Mockito.mock(CriteriaBuilder.class);
CriteriaQuery<ForcedId> criteriaQuery = Mockito.mock(CriteriaQuery.class);
Root<ForcedId> from = Mockito.mock(Root.class);
Path path = Mockito.mock(Path.class);
TypedQuery<ForcedId> queryMock = Mockito.mock(TypedQuery.class);
}
/**
* Helper function to mock out getIdsOfExistingResources
* to return the matches and resources matching those provided
* by parameters.
* @param theResourcePacks
*/
private void mockReturnsFor_getIdsOfExistingResources(ResourceIdPackage... theResourcePacks) {
List<ResourcePersistentId> resourcePersistentIds = new ArrayList<>();
List<Object[]> matches = new ArrayList<>();
for (ResourceIdPackage pack : theResourcePacks) {
resourcePersistentIds.add(pack.MyPid);
matches.add(getResourceTableRecordForResourceTypeAndPid(
pack.MyResourceId.getResourceType(),
pack.MyPid.getIdAsLong(),
pack.MyVersion
));
}
// Stub the lookup to return every pid supplied; the previous first/rest split silently dropped the
// first pid whenever more than one ResourceIdPackage was passed in. (The "matches" rows built above
// are not wired into any stub at present.)
when(myIdHelperService.resolveResourcePersistentIdsWithCache(any(), any())).thenReturn(resourcePersistentIds);
}
@Test
public void getLatestVersionIdsForResourceIds_whenResourceExists_returnsMapWithPIDAndVersion() {
IIdType type = new IdDt("Patient/RED");
ResourcePersistentId pid = new ResourcePersistentId(1L);
pid.setAssociatedResourceId(type);
ResourceIdPackage pack = new ResourceIdPackage(type, pid, 2L);
// when
mockReturnsFor_getIdsOfExistingResources(pack);
// test
ResourcePersistentIdMap retMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(),
Collections.singletonList(type));
Assertions.assertTrue(retMap.containsKey(type));
// Assert against the map that was actually returned (the original compared the input map with itself);
// ResourcePersistentIdMap#getResourcePersistentId(IIdType) is assumed to be the lookup accessor here.
Assertions.assertEquals(pid.getVersion(), retMap.getResourcePersistentId(type).getVersion());
}
@Test
public void getLatestVersionIdsForResourceIds_whenResourceDoesNotExist_returnsEmptyMap() {
IIdType type = new IdDt("Patient/RED");
// when
mock_resolveResourcePersistentIdsWithCache_toReturnNothing();
// test
ResourcePersistentIdMap retMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(),
Collections.singletonList(type));
Assertions.assertTrue(retMap.isEmpty());
}
@Test
public void getLatestVersionIdsForResourceIds_whenSomeResourcesDoNotExist_returnsOnlyExistingElements() {
// resource to be found
IIdType type = new IdDt("Patient/RED");
ResourcePersistentId pid = new ResourcePersistentId(1L);
pid.setAssociatedResourceId(type);
ResourceIdPackage pack = new ResourceIdPackage(type, pid, 2L);
// resource that won't be found
IIdType type2 = new IdDt("Patient/BLUE");
// when
mock_resolveResourcePersistentIdsWithCache_toReturnNothing();
mockReturnsFor_getIdsOfExistingResources(pack);
// test
ResourcePersistentIdMap retMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(
RequestPartitionId.allPartitions(),
Arrays.asList(type, type2)
);
// verify
Assertions.assertEquals(1, retMap.size());
Assertions.assertTrue(retMap.containsKey(type));
Assertions.assertFalse(retMap.containsKey(type2));
}
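// The two tests below were moved here verbatim from the now-deleted IdHelperServiceTest (see the TODO above).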
@Test
public void testReplaceDefault_AllPartitions() {
IdHelperService svc = new IdHelperService();
PartitionSettings partitionSettings = new PartitionSettings();
partitionSettings.setDefaultPartitionId(1);
svc.setPartitionSettingsForUnitTest(partitionSettings);
RequestPartitionId outcome = svc.replaceDefault(RequestPartitionId.allPartitions());
assertSame(RequestPartitionId.allPartitions(), outcome);
}
@Test
public void testReplaceDefault_DefaultPartition() {
IdHelperService svc = new IdHelperService();
PartitionSettings partitionSettings = new PartitionSettings();
partitionSettings.setDefaultPartitionId(1);
svc.setPartitionSettingsForUnitTest(partitionSettings);
RequestPartitionId outcome = svc.replaceDefault(RequestPartitionId.defaultPartition());
assertEquals(1, outcome.getPartitionIds().get(0));
}
}

View File

@ -0,0 +1,388 @@
package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class ChainingR4SearchTest extends BaseJpaR4Test {
@Autowired
MatchUrlService myMatchUrlService;
@AfterEach
public void after() throws Exception {
myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete());
myDaoConfig.setAllowExternalReferences(new DaoConfig().isAllowExternalReferences());
myDaoConfig.setReuseCachedSearchResultsForMillis(new DaoConfig().getReuseCachedSearchResultsForMillis());
myDaoConfig.setCountSearchResultsUpTo(new DaoConfig().getCountSearchResultsUpTo());
myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
myDaoConfig.setAllowContainsSearches(new DaoConfig().isAllowContainsSearches());
myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
myModelConfig.setIndexOnContainedResources(new ModelConfig().isIndexOnContainedResources());
}
@BeforeEach
public void before() throws Exception {
myFhirCtx.setParserErrorHandler(new StrictErrorHandler());
myDaoConfig.setAllowMultipleDelete(true);
myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
myModelConfig.setIndexOnContainedResources(true);
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
}
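// setIndexOnContainedResources(true) above is what allows the chained searches in these tests to
// descend into contained resources; the stand-alone-resource tests do not depend on it.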
@Test
public void testShouldResolveATwoLinkChainWithStandAloneResources() throws Exception {
// setup
IIdType oid1;
{
Patient p = new Patient();
p.setId(IdType.newRandomUuid());
p.addName().setFamily("Smith").addGiven("John");
myPatientDao.create(p, mySrd);
Observation obs = new Observation();
obs.getCode().setText("Observation 1");
obs.getSubject().setReference(p.getId());
oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
}
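// "subject.name=Smith" is a two-link chain: resolve Observation.subject to its Patient, then match Patient.name.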
String url = "/Observation?subject.name=Smith";
// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
// validate
assertEquals(1L, oids.size());
assertThat(oids, contains(oid1.getIdPart()));
}
@Test
public void testShouldResolveATwoLinkChainWithAContainedResource() throws Exception {
// setup
IIdType oid1;
{
Patient p = new Patient();
p.setId("pat");
p.addName().setFamily("Smith").addGiven("John");
Observation obs = new Observation();
obs.getContained().add(p);
obs.getCode().setText("Observation 1");
obs.setValue(new StringType("Test"));
obs.getSubject().setReference("#pat");
oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
}
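// Same two-link chain, but here the Patient exists only as a contained resource ("#pat") inside the Observation.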
String url = "/Observation?subject.name=Smith";
// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
// validate
assertEquals(1L, oids.size());
assertThat(oids, contains(oid1.getIdPart()));
}
@Test
public void testShouldResolveAThreeLinkChainWhereAllResourcesStandAlone() throws Exception {
// setup
IIdType oid1;
{
Organization org = new Organization();
org.setId(IdType.newRandomUuid());
org.setName("HealthCo");
myOrganizationDao.create(org, mySrd);
Patient p = new Patient();
p.setId(IdType.newRandomUuid());
p.addName().setFamily("Smith").addGiven("John");
p.getManagingOrganization().setReference(org.getId());
myPatientDao.create(p, mySrd);
Observation obs = new Observation();
obs.getCode().setText("Observation 1");
obs.getSubject().setReference(p.getId());
oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
}
String url = "/Observation?subject.organization.name=HealthCo";
// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
// validate
assertEquals(1L, oids.size());
assertThat(oids, contains(oid1.getIdPart()));
}
@Test
public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheEndOfTheChain() throws Exception {
// This is the case that is most relevant to SMILE-2899
// setup
IIdType oid1;
{
Organization org = new Organization();
org.setId("org");
org.setName("HealthCo");
Patient p = new Patient();
p.setId(IdType.newRandomUuid());
p.getContained().add(org);
p.addName().setFamily("Smith").addGiven("John");
p.getManagingOrganization().setReference("#org");
myPatientDao.create(p, mySrd);
Observation obs = new Observation();
obs.getCode().setText("Observation 1");
obs.getSubject().setReference(p.getId());
oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
}
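// Here only the final link differs: the Organization is contained ("#org") inside a stand-alone Patient.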
String url = "/Observation?subject.organization.name=HealthCo";
// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
// validate
assertEquals(1L, oids.size());
assertThat(oids, contains(oid1.getIdPart()));
}
@Test
@Disabled
public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheBeginningOfTheChain() throws Exception {
// We do not currently support this case - we may not be indexing the references of contained resources
// setup
IIdType oid1;
{
Organization org = new Organization();
org.setId(IdType.newRandomUuid());
org.setName("HealthCo");
myOrganizationDao.create(org, mySrd);
Patient p = new Patient();
p.setId("pat");
p.addName().setFamily("Smith").addGiven("John");
p.getManagingOrganization().setReference(org.getId());
Observation obs = new Observation();
obs.getContained().add(p);
obs.getCode().setText("Observation 1");
obs.getSubject().setReference("#pat");
oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
}
String url = "/Observation?subject.organization.name=HealthCo";
// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
// validate
assertEquals(1L, oids.size());
assertThat(oids, contains(oid1.getIdPart()));
}
@Test
public void testShouldResolveAThreeLinkChainWithQualifiersWhereAllResourcesStandAlone() throws Exception {
// setup
IIdType oid1;
{
Organization org = new Organization();
org.setId(IdType.newRandomUuid());
org.setName("HealthCo");
myOrganizationDao.create(org, mySrd);
Patient p = new Patient();
p.setId(IdType.newRandomUuid());
p.addName().setFamily("Smith").addGiven("John");
p.getManagingOrganization().setReference(org.getId());
myPatientDao.create(p, mySrd);
Observation obs = new Observation();
obs.getCode().setText("Observation 1");
obs.getSubject().setReference(p.getId());
oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
}
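// The :Patient and :Organization qualifiers pin each link of the chain to a concrete target resource type.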
String url = "/Observation?subject:Patient.organization:Organization.name=HealthCo";
// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
// validate
assertEquals(1L, oids.size());
assertThat(oids, contains(oid1.getIdPart()));
}
@Test
public void testShouldResolveAThreeLinkChainWithQualifiersWithAContainedResourceAtTheEndOfTheChain() throws Exception {
// This is the case that is most relevant to SMILE-2899
// setup
IIdType oid1;
{
Organization org = new Organization();
org.setId("org");
org.setName("HealthCo");
Patient p = new Patient();
p.setId(IdType.newRandomUuid());
p.getContained().add(org);
p.addName().setFamily("Smith").addGiven("John");
p.getManagingOrganization().setReference("#org");
myPatientDao.create(p, mySrd);
Observation obs = new Observation();
obs.getCode().setText("Observation 1");
obs.getSubject().setReference(p.getId());
oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
}
String url = "/Observation?subject:Patient.organization:Organization.name=HealthCo";
// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
// validate
assertEquals(1L, oids.size());
assertThat(oids, contains(oid1.getIdPart()));
}
@Test
public void testShouldResolveAFourLinkChainWhereAllResourcesStandAlone() throws Exception {
// setup
IIdType oid1;
{
Organization org = new Organization();
org.setId(IdType.newRandomUuid());
org.setName("HealthCo");
myOrganizationDao.create(org, mySrd);
Organization partOfOrg = new Organization();
partOfOrg.setId(IdType.newRandomUuid());
partOfOrg.getPartOf().setReference(org.getId());
myOrganizationDao.create(partOfOrg, mySrd);
Patient p = new Patient();
p.setId(IdType.newRandomUuid());
p.addName().setFamily("Smith").addGiven("John");
p.getManagingOrganization().setReference(partOfOrg.getId());
myPatientDao.create(p, mySrd);
Observation obs = new Observation();
obs.getCode().setText("Observation 1");
obs.getSubject().setReference(p.getId());
oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
}
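// Four links: Observation.subject -> Patient.managingOrganization (organization) -> Organization.partOf (partof) -> Organization.name.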
String url = "/Observation?subject.organization.partof.name=HealthCo";
// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
// validate
assertEquals(1L, oids.size());
assertThat(oids, contains(oid1.getIdPart()));
}
@Test
public void testShouldResolveAFourLinkChainWhereTheLastReferenceIsContained() throws Exception {
// setup
IIdType oid1;
{
Organization org = new Organization();
org.setId("parent");
org.setName("HealthCo");
Organization partOfOrg = new Organization();
partOfOrg.setId(IdType.newRandomUuid());
partOfOrg.getContained().add(org);
partOfOrg.getPartOf().setReference("#parent");
myOrganizationDao.create(partOfOrg, mySrd);
Patient p = new Patient();
p.setId(IdType.newRandomUuid());
p.addName().setFamily("Smith").addGiven("John");
p.getManagingOrganization().setReference(partOfOrg.getId());
myPatientDao.create(p, mySrd);
Observation obs = new Observation();
obs.getCode().setText("Observation 1");
obs.getSubject().setReference(p.getId());
oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
}
String url = "/Observation?subject.organization.partof.name=HealthCo";
// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
// validate
assertEquals(1L, oids.size());
assertThat(oids, contains(oid1.getIdPart()));
}
private List<String> searchAndReturnUnqualifiedVersionlessIdValues(String theUrl) throws IOException {
ResourceSearch search = myMatchUrlService.getResourceSearch(theUrl);
SearchParameterMap map = search.getSearchParameterMap();
map.setLoadSynchronous(true);
IBundleProvider result = myObservationDao.search(map);
return result.getAllResourceIds();
}
}

Some files were not shown because too many files have changed in this diff