Merge branch 'master' into 2849_add_new_mdm_param

commit 8bb9aca8df

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -225,6 +225,14 @@ public class FhirContext {

 	}

+	/**
+	 * @since 5.6.0
+	 */
+	public static FhirContext forDstu2Cached() {
+		return forCached(FhirVersionEnum.DSTU2);
+	}
+
 	/**
 	 * @since 5.5.0
 	 */

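Illustration (not part of the commit): a minimal sketch of how the new cached factory method relates to the existing `forCached(...)` helper it delegates to.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;

public class ForDstu2CachedExample {
	public static void main(String[] args) {
		// Both calls return the same process-wide cached DSTU2 context
		FhirContext viaShortcut = FhirContext.forDstu2Cached();
		FhirContext viaEnum = FhirContext.forCached(FhirVersionEnum.DSTU2);
		System.out.println(viaShortcut == viaEnum); // expected: true
	}
}
```
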
@@ -73,7 +73,6 @@ import static org.apache.commons.lang3.StringUtils.isBlank;

 class ModelScanner {
 	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ModelScanner.class);
-
 	private Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> myClassToElementDefinitions = new HashMap<>();
 	private FhirContext myContext;
 	private Map<String, RuntimeResourceDefinition> myIdToResourceDefinition = new HashMap<>();

@@ -90,6 +89,7 @@ class ModelScanner {
 	             @Nonnull Collection<Class<? extends IBase>> theResourceTypes) throws ConfigurationException {
 		myContext = theContext;
 		myVersion = theVersion;
+
 		Set<Class<? extends IBase>> toScan = new HashSet<>(theResourceTypes);
 		init(theExistingDefinitions, toScan);
 	}

@@ -405,8 +405,8 @@ class ModelScanner {
 		List<RuntimeSearchParam.Component> components = null;
 		if (paramType == RestSearchParameterTypeEnum.COMPOSITE) {
 			components = new ArrayList<>();
-			for (String next : searchParam.compositeOf()) {
-				String ref = "http://hl7.org/fhir/SearchParameter/" + theResourceDef.getName().toLowerCase() + "-" + next;
+			for (String name : searchParam.compositeOf()) {
+				String ref = toCanonicalSearchParameterUri(theResourceDef, name);
 				components.add(new RuntimeSearchParam.Component(null, ref));
 			}
 		}

@@ -414,7 +414,8 @@ class ModelScanner {
 		Collection<String> base = Collections.singletonList(theResourceDef.getName());
 		String url = null;
 		if (theResourceDef.isStandardType()) {
-			url = "http://hl7.org/fhir/SearchParameter/" + theResourceDef.getName().toLowerCase() + "-" + searchParam.name();
+			String name = searchParam.name();
+			url = toCanonicalSearchParameterUri(theResourceDef, name);
 		}
 		RuntimeSearchParam param = new RuntimeSearchParam(null, url, searchParam.name(), searchParam.description(), searchParam.path(), paramType, providesMembershipInCompartments, toTargetList(searchParam.target()), RuntimeSearchParamStatusEnum.ACTIVE, null, components, base);
 		theResourceDef.addSearchParam(param);

@@ -424,6 +425,10 @@ class ModelScanner {

 	}

+	private String toCanonicalSearchParameterUri(RuntimeResourceDefinition theResourceDef, String theName) {
+		return "http://hl7.org/fhir/SearchParameter/" + theResourceDef.getName() + "-" + theName;
+	}
+
 	private Set<String> toTargetList(Class<? extends IBaseResource>[] theTarget) {
 		HashSet<String> retVal = new HashSet<>();

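Illustration (not part of the commit): the effect of the new helper on generated canonical URLs, assuming a standard resource definition named `Patient` and a search parameter named `name`.

```java
// toCanonicalSearchParameterUri(patientDef, "name") now yields
//   "http://hl7.org/fhir/SearchParameter/Patient-name"
// whereas the previous inline concatenation lower-cased the resource name:
//   "http://hl7.org/fhir/SearchParameter/patient-name"
```
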
@@ -233,18 +233,7 @@ public class RuntimeSearchParam {
 	}

 	public List<String> getPathsSplit() {
-		String path = getPath();
-		if (path.indexOf('|') == -1) {
-			return Collections.singletonList(path);
-		}
-
-		List<String> retVal = new ArrayList<>();
-		StringTokenizer tok = new StringTokenizer(path, "|");
-		while (tok.hasMoreElements()) {
-			String nextPath = tok.nextToken().trim();
-			retVal.add(nextPath.trim());
-		}
-		return retVal;
+		return getPathsSplitForResourceType(null);
 	}

 	/**

@@ -266,6 +255,41 @@ public class RuntimeSearchParam {
 		return myPhoneticEncoder.encode(theString);
 	}

+	public List<String> getPathsSplitForResourceType(@Nullable String theResourceName) {
+		String path = getPath();
+		if (path.indexOf('|') == -1) {
+			if (theResourceName != null && !pathMatchesResourceType(theResourceName, path)) {
+				return Collections.emptyList();
+			}
+			return Collections.singletonList(path);
+		}
+
+		List<String> retVal = new ArrayList<>();
+		StringTokenizer tok = new StringTokenizer(path, "|");
+		while (tok.hasMoreElements()) {
+			String nextPath = tok.nextToken().trim();
+			if (theResourceName != null && !pathMatchesResourceType(theResourceName, nextPath)) {
+				continue;
+			}
+			retVal.add(nextPath.trim());
+		}
+		return retVal;
+	}
+
+	private boolean pathMatchesResourceType(String theResourceName, String thePath) {
+		if (thePath.startsWith(theResourceName + ".")) {
+			return true;
+		}
+		if (thePath.startsWith("Resource.") || thePath.startsWith("DomainResource.")) {
+			return true;
+		}
+		if (Character.isLowerCase(thePath.charAt(0))) {
+			return true;
+		}
+
+		return false;
+	}
+
 	public enum RuntimeSearchParamStatusEnum {
 		ACTIVE,
 		DRAFT,

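Illustration (not part of the commit): a sketch of the new resource-aware path splitting; `param` is a hypothetical `RuntimeSearchParam` instance and the path value is an example.

```java
// Assuming a search parameter whose path is
//   "AllergyIntolerance.code | Condition.code"
List<String> all = param.getPathsSplit();
// -> ["AllergyIntolerance.code", "Condition.code"]

List<String> conditionOnly = param.getPathsSplitForResourceType("Condition");
// -> ["Condition.code"]  (paths for other resource types are filtered out;
//     paths rooted at "Resource.", "DomainResource." or a lowercase FHIRPath
//     root are kept for any resource type)
```
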
@@ -961,6 +961,7 @@ public class FhirTerser {
 		for (BaseRuntimeChildDefinition nextChild : childDef.getChildrenAndExtension()) {
+
 			List<?> values = nextChild.getAccessor().getValues(theElement);

 			if (values != null) {
 				for (Object nextValueObject : values) {
 					IBase nextValue;

@@ -28,13 +28,6 @@ import ca.uhn.fhir.rest.gclient.TokenClientParam;
  */
 public interface IAnyResource extends IBaseResource {

-	/**
-	 * Search parameter constant for <b>_language</b>
-	 */
-	@SearchParamDefinition(name="_language", path="", description="The language of the resource", type="string" )
-	String SP_RES_LANGUAGE = "_language";
-
-
 	/**
 	 * Search parameter constant for <b>_id</b>
 	 */

@@ -3,14 +3,14 @@
 <modelVersion>4.0.0</modelVersion>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-bom</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <packaging>pom</packaging>
 <name>HAPI FHIR BOM</name>

 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -282,7 +282,7 @@ public abstract class BaseApp {
 	}

 	private Optional<BaseCommand> parseCommand(String[] theArgs) {
-		Optional<BaseCommand> commandOpt = getNextCommand(theArgs);
+		Optional<BaseCommand> commandOpt = getNextCommand(theArgs, 0);

 		if (! commandOpt.isPresent()) {
 			String message = "Unrecognized command: " + ansi().bold().fg(Ansi.Color.RED) + theArgs[0] + ansi().boldOff().fg(Ansi.Color.WHITE);

@@ -294,8 +294,8 @@ public abstract class BaseApp {
 		return commandOpt;
 	}

-	private Optional<BaseCommand> getNextCommand(String[] theArgs) {
-		return ourCommands.stream().filter(cmd -> cmd.getCommandName().equals(theArgs[0])).findFirst();
+	private Optional<BaseCommand> getNextCommand(String[] theArgs, int thePosition) {
+		return ourCommands.stream().filter(cmd -> cmd.getCommandName().equals(theArgs[thePosition])).findFirst();
 	}

 	private void processHelp(String[] theArgs) {

@@ -303,7 +303,7 @@ public abstract class BaseApp {
 			logUsage();
 			return;
 		}
-		Optional<BaseCommand> commandOpt = getNextCommand(theArgs);
+		Optional<BaseCommand> commandOpt = getNextCommand(theArgs, 1);
 		if (! commandOpt.isPresent()) {
 			String message = "Unknown command: " + theArgs[1];
 			System.err.println(message);

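Illustration (not part of the commit): why the extra position argument matters for `smileutil help {command}` (see also changelog 2973 below); the command name is an example.

```java
// Before: processHelp() called getNextCommand(theArgs), which always matched
// against theArgs[0] ("help"), so
//   smileutil help create-package
// reported "Unknown command: create-package".
//
// After: parseCommand() resolves theArgs[0] while processHelp() resolves
// theArgs[1], so the same invocation prints the usage for "create-package".
```
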
@@ -0,0 +1,31 @@
+package ca.uhn.fhir.cli;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+public class BaseAppTest {
+
+	private final PrintStream standardOut = System.out;
+	private final ByteArrayOutputStream outputStreamCaptor = new ByteArrayOutputStream();
+
+	@BeforeEach
+	public void setUp() {
+		System.setOut(new PrintStream(outputStreamCaptor));
+	}
+
+	@AfterEach
+	public void tearDown() {
+		System.setOut(standardOut);
+	}
+
+	@Test
+	public void testHelpOption() {
+		App.main(new String[]{"help", "create-package"});
+		assertThat(outputStreamCaptor.toString().trim(), containsString("Usage"));
+	}
+}

@@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-cli</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../../hapi-deployable-pom</relativePath>
 </parent>

@@ -30,6 +30,7 @@ import org.apache.commons.lang3.time.DateUtils;
 import org.springframework.beans.factory.annotation.Autowire;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;

@@ -65,8 +66,8 @@ public class FhirServerConfig extends BaseJavaConfigDstu2 {

 	@Override
 	@Bean
-	public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
-		LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
+	public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
+		LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
 		retVal.setPersistenceUnitName("HAPI_PU");
 		retVal.setDataSource(myDataSource);
 		retVal.setJpaProperties(myJpaProperties);

@@ -30,6 +30,7 @@ import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
 import org.springframework.beans.factory.annotation.Autowire;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;

@@ -62,8 +63,8 @@ public class FhirServerConfigDstu3 extends BaseJavaConfigDstu3 {

 	@Override
 	@Bean
-	public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
-		LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
+	public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
+		LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
 		retVal.setPersistenceUnitName("HAPI_PU");
 		retVal.setDataSource(myDataSource);
 		retVal.setJpaProperties(myJpaProperties);

@@ -28,6 +28,7 @@ import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
 import org.springframework.beans.factory.annotation.Autowire;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;

@@ -60,8 +61,8 @@ public class FhirServerConfigR4 extends BaseJavaConfigR4 {

 	@Override
 	@Bean
-	public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
-		LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
+	public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
+		LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
 		retVal.setPersistenceUnitName("HAPI_PU");
 		retVal.setDataSource(myDataSource);
 		retVal.setJpaProperties(myJpaProperties);

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -0,0 +1,5 @@
+---
+type: perf
+issue: 2457
+title: "A regression in HAPI FHIR 5.3.0 resulted in concurrent searches being executed in a sequential
+  (and not parallel) fashion in some circumstances."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 2790
+title: "The SearchParameter canonical URLs exported by the JPA server have been adjusted to match the URLs
+  specified in the FHIR specification."

@@ -0,0 +1,7 @@
+---
+type: change
+issue: 2790
+title: "Support for the `_language` search parameter has been dropped from the JPA server. This search parameter
+  was specified in FHIR DSTU1 but was dropped in later versions. It is rarely used in practice and imposes
+  an indexing cost, so it has now been removed. A custom search parameter may be used in order to achieve
+  the same functionality if needed."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 2901
+jira: SMILE-3004
+title: "Processing transactions with AutoversionAtPaths set should create those resources (if AutoCreatePlaceholders is set) and use latest version as expected"

@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 2958
+jira: SMILE-643
+title: "Fixed issue where the processing of queries like Procedure?patient= before a cache search would cause the parameter key to be removed.
+  Additionally, ensured that requests like Procedure?patient= cause HTTP 400 Bad Request instead of HTTP 500 Internal Error."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 2962
+jira: SMILE-720
+title: "Added a new DaoConfig setting called `setElasticSearchIndexPrefix(String prefix)` which will cause Hibernate search to prefix all of its tables with the provided value."

@@ -0,0 +1,7 @@
+---
+type: fix
+issue: 2967
+jira: SMILE-2899
+title: "Previously, the system would only traverse references to discrete resources while performing a chained search.
+  This fix adds support for traversing references to contained resources as well, with the limitation that the reference
+  to the contained resource must be the last reference in the chain."

@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 2973
+title: "CLI `smileutil help {command}` returned `Unknown command` instead of the usage for `{command}`. This has been corrected."

@@ -0,0 +1,5 @@
+---
+type: add
+issue: 2975
+title: "Two improvements have been made to the connection to Elasticsearch. First, null username and password values are now permitted. Second, multiple hosts are now permitted via the `setHosts()` method on the ElasticHibernatePropertiesBuilder, allowing you to
+  connect to multiple elasticsearch clusters at once. Thanks to Dušan Marković for the contribution!"

@@ -0,0 +1,3 @@
+---
+type: fix
+title: "Fixed a bug where two identical tags in parallel entries being created in a batch would fail."

@@ -0,0 +1,5 @@
+---
+type: change
+jira: SMILE-2927
+title: "During transactions, any resources that were PUT or POSTed with a conditional URL now receive extra validation. There is now a final
+  storage step which ensures that the stored resource actually matches the conditional URL."

@@ -0,0 +1,6 @@
+---
+type: change
+issue: 2991
+title: "This PR eliminates the search coordinator threadpool, and executes searches synchronously on the HTTP client
+  thread. The idea of using a separate pool was supposed to help improve server scalability, but ultimately created
+  false bottlenecks and reduced the utility of monitoring infrastructure so it has been eliminated."

@@ -31,12 +31,11 @@ In addition, the Elasticsearch client service, `ElasticsearchSvcImpl` will need
 ```java
 @Bean()
 public ElasticsearchSvcImpl elasticsearchSvc() {
-	String elasticsearchHost = "localhost";
-	String elasticsearchUserId = "elastic";
+	String elasticsearchHost = "localhost:9200";
+	String elasticsearchUsername = "elastic";
 	String elasticsearchPassword = "changeme";
-	int elasticsearchPort = 9301;

-	return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUserId, elasticsearchPassword);
+	return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchUsername, elasticsearchPassword);
 }
 ```

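Illustration (not part of the commit): with the revised constructor, a cluster without authentication can be wired by passing null credentials (permitted as of this release, per changelog 2975); this bean is a sketch, not the documented configuration.

```java
@Bean()
public ElasticsearchSvcImpl elasticsearchSvc() {
	// The host string now carries the port; null username/password are
	// accepted when the cluster has no authentication configured.
	return new ElasticsearchSvcImpl("localhost:9200", null, null);
}
```
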
@@ -302,6 +302,14 @@ If the server has been configured with a [Resource Server ID Strategy](/apidocs/
             Contains the specific version (starting with 1) of the resource that this row corresponds to.
         </td>
     </tr>
+    <tr>
+        <td>RESOURCE_TYPE</td>
+        <td></td>
+        <td>String</td>
+        <td>
+            Contains the string specifying the type of the resource (Patient, Observation, etc).
+        </td>
+    </tr>
 </tbody>
 </table>

@@ -476,7 +484,7 @@ The following columns are common to **all HFJ_SPIDX_xxx tables**.
     <tr>
         <td>RES_ID</td>
         <td>FK to <a href="#HFJ_RESOURCE">HFJ_RESOURCE</a></td>
-        <td>String</td>
+        <td>Long</td>
         <td></td>
         <td>
             Contains the PID of the resource being indexed.

@@ -11,7 +11,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -258,6 +258,11 @@ public class DaoConfig {
 	private boolean myAccountForDateIndexNulls;
 	private boolean myTriggerSubscriptionsForNonVersioningChanges;

+	/**
+	 * @since 5.6.0
+	 */
+	private String myElasicSearchIndexPrefix;
+
 	/**
 	 * @since 5.6.0
 	 */

@@ -269,6 +274,7 @@ public class DaoConfig {
 	private Integer myBundleBatchPoolSize = DEFAULT_BUNDLE_BATCH_POOL_SIZE;
 	private Integer myBundleBatchMaxPoolSize = DEFAULT_BUNDLE_BATCH_MAX_POOL_SIZE;

+
 	/**
 	 * Constructor
 	 */

@@ -2643,7 +2649,29 @@ public class DaoConfig {
 		return retval;
 	}

+	/**
+	 * Returns the prefix applied to any indexes created when interacting with elasticsearch. This applies to fulltext search indexes
+	 * and terminology expansion indexes.
+	 *
+	 * @since 5.6.0
+	 */
+	public String getElasticSearchIndexPrefix() {
+		return myElasicSearchIndexPrefix;
+	}
+
+	/**
+	 * Sets a prefix for any indexes created when interacting with elasticsearch. This will apply to fulltext search indexes
+	 * and terminology expansion indexes.
+	 *
+	 * @since 5.6.0
+	 */
+	public void setElasticSearchIndexPrefix(String thePrefix) {
+		myElasicSearchIndexPrefix = thePrefix;
+	}
+
 	public enum StoreMetaSourceInformationEnum {
 		NONE(false, false),
 		SOURCE_URI(true, false),
 		REQUEST_ID(false, true),

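Illustration (not part of the commit): a minimal sketch of enabling the new index prefix on an existing `DaoConfig` bean.

```java
@Bean
public DaoConfig daoConfig() {
	DaoConfig daoConfig = new DaoConfig();
	// Fulltext search and terminology expansion indexes created in
	// Elasticsearch will be created with this prefix.
	daoConfig.setElasticSearchIndexPrefix("tenant1-");
	return daoConfig;
}
```
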
@@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.api.dao;

 import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
 import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
-import ca.uhn.fhir.rest.annotation.Offset;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import org.hl7.fhir.instance.model.api.IBaseBundle;

@@ -52,13 +52,11 @@ public class LazyDaoMethodOutcome extends DaoMethodOutcome {

 	private void tryToRunSupplier() {
 		if (myEntitySupplier != null) {
-
 			EntityAndResource entityAndResource = myEntitySupplier.get();
 			setEntity(entityAndResource.getEntity());
 			setResource(entityAndResource.getResource());
 			setId(entityAndResource.getResource().getIdElement());
 			myEntitySupplierUseCallback.run();
-
 		}
 	}

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>5.6.0-PRE3-SNAPSHOT</version>
+<version>5.6.0-PRE5-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -815,7 +815,7 @@
 </execution>
 </executions>
 </plugin>
 </plugins>
 <resources>
 <resource>
 <directory>${project.basedir}/src/main/resources</directory>

@@ -54,4 +54,8 @@ public class PartitionedUrl implements IModelJson {
 	public void setRequestPartitionId(RequestPartitionId theRequestPartitionId) {
 		myRequestPartitionId = theRequestPartitionId;
 	}
+
+	public boolean isPartitioned() {
+		return myRequestPartitionId != null && !myRequestPartitionId.isDefaultPartition();
+	}
 }

@@ -1,5 +1,25 @@
 package ca.uhn.fhir.jpa.batch.mdm;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
 import ca.uhn.fhir.mdm.api.IMdmBatchJobSubmitterFactory;
 import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
 import org.springframework.beans.factory.annotation.Autowired;

@@ -69,7 +69,7 @@ public class PidToIBaseResourceProcessor implements ItemProcessor<List<ResourceP
 		List<IBaseResource> outgoing = new ArrayList<>();
 		sb.loadResourcesByPid(theResourcePersistentId, Collections.emptyList(), outgoing, false, null);

-		ourLog.trace("Loaded resources: {}", outgoing.stream().map(t->t.getIdElement().getValue()).collect(Collectors.joining(", ")));
+		ourLog.trace("Loaded resources: {}", outgoing.stream().filter(t -> t != null).map(t -> t.getIdElement().getValue()).collect(Collectors.joining(", ")));

 		return outgoing;

@@ -1,5 +1,25 @@
 package ca.uhn.fhir.jpa.batch.reader;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;

@@ -24,18 +24,23 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
+import ca.uhn.fhir.jpa.dao.index.IdHelperService;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.jpa.util.QueryChunker;
 import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
+import org.hl7.fhir.instance.model.api.IIdType;
 import org.slf4j.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;

 import javax.annotation.Nonnull;
 import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Optional;
 import java.util.stream.Collectors;

 import static org.slf4j.LoggerFactory.getLogger;

@@ -52,17 +57,19 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc {
 	DaoRegistry myDaoRegistry;
 	@Autowired
 	IResourceTableDao myResourceTableDao;
+	@Autowired
+	IdHelperService myIdHelperService;

 	@Override
 	@Nonnull
-	public ResourceVersionMap getVersionMap(String theResourceName, SearchParameterMap theSearchParamMap) {
+	public ResourceVersionMap getVersionMap(RequestPartitionId theRequestPartitionId, String theResourceName, SearchParameterMap theSearchParamMap) {
 		IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(theResourceName);

 		if (ourLog.isDebugEnabled()) {
 			ourLog.debug("About to retrieve version map for resource type: {}", theResourceName);
 		}

-		List<Long> matchingIds = dao.searchForIds(theSearchParamMap, new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.allPartitions())).stream()
+		List<Long> matchingIds = dao.searchForIds(theSearchParamMap, new SystemRequestDetails().setRequestPartitionId(theRequestPartitionId)).stream()
 			.map(ResourcePersistentId::getIdAsLong)
 			.collect(Collectors.toList());

@@ -74,4 +81,95 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc {

 		return ResourceVersionMap.fromResourceTableEntities(allById);
 	}

+	/**
+	 * Retrieves the latest versions for any resourceid that are found.
+	 * If they are not found, they will not be contained in the returned map.
+	 * The key should be the same value that was passed in to allow
+	 * consumer to look up the value using the id they already have.
+	 *
+	 * This method should not throw, so it can safely be consumed in
+	 * transactions.
+	 *
+	 * @param theRequestPartitionId - request partition id
+	 * @param theIds - list of IIdTypes for resources of interest.
+	 * @return
+	 */
+	@Override
+	public ResourcePersistentIdMap getLatestVersionIdsForResourceIds(RequestPartitionId theRequestPartitionId, List<IIdType> theIds) {
+		ResourcePersistentIdMap idToPID = new ResourcePersistentIdMap();
+		HashMap<String, List<IIdType>> resourceTypeToIds = new HashMap<>();
+
+		for (IIdType id : theIds) {
+			String resourceType = id.getResourceType();
+			if (!resourceTypeToIds.containsKey(resourceType)) {
+				resourceTypeToIds.put(resourceType, new ArrayList<>());
+			}
+			resourceTypeToIds.get(resourceType).add(id);
+		}
+
+		for (String resourceType : resourceTypeToIds.keySet()) {
+			ResourcePersistentIdMap idAndPID = getIdsOfExistingResources(theRequestPartitionId,
+				resourceTypeToIds.get(resourceType));
+			idToPID.putAll(idAndPID);
+		}
+
+		return idToPID;
+	}
+
+	/**
+	 * Helper method to determine if some resources exist in the DB (without throwing).
+	 * Returns a set that contains the IIdType for every resource found.
+	 * If it's not found, it won't be included in the set.
+	 *
+	 * @param theIds - list of IIdType ids (for the same resource)
+	 * @return
+	 */
+	private ResourcePersistentIdMap getIdsOfExistingResources(RequestPartitionId thePartitionId,
+															  Collection<IIdType> theIds) {
+		// these are the found Ids that were in the db
+		ResourcePersistentIdMap retval = new ResourcePersistentIdMap();

+		if (theIds == null || theIds.isEmpty()) {
+			return retval;
+		}
+
+		List<ResourcePersistentId> resourcePersistentIds = myIdHelperService.resolveResourcePersistentIdsWithCache(thePartitionId,
+			theIds.stream().collect(Collectors.toList()));
+
+		// we'll use this map to fetch pids that require versions
+		HashMap<Long, ResourcePersistentId> pidsToVersionToResourcePid = new HashMap<>();
+
+		// fill in our map
+		for (ResourcePersistentId pid : resourcePersistentIds) {
+			if (pid.getVersion() == null) {
+				pidsToVersionToResourcePid.put(pid.getIdAsLong(), pid);
+			}
+			Optional<IIdType> idOp = theIds.stream()
+				.filter(i -> i.getIdPart().equals(pid.getAssociatedResourceId().getIdPart()))
+				.findFirst();
+			// this should always be present
+			// since it was passed in.
+			// but land of optionals...
+			idOp.ifPresent(id -> {
+				retval.put(id, pid);
+			});
+		}
+
+		// set any versions we don't already have
+		if (!pidsToVersionToResourcePid.isEmpty()) {
+			Collection<Object[]> resourceEntries = myResourceTableDao
+				.getResourceVersionsForPid(new ArrayList<>(pidsToVersionToResourcePid.keySet()));
+
+			for (Object[] record : resourceEntries) {
+				// order matters!
+				Long retPid = (Long) record[0];
+				String resType = (String) record[1];
+				Long version = (Long) record[2];
+				pidsToVersionToResourcePid.get(retPid).setVersion(version);
+			}
+		}
+
+		return retval;
+	}
 }

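Illustration (not part of the commit): a sketch of how a caller might use the new lookup. The `IdType` values and the `myResourceVersionSvc` wiring are assumptions for the example only.

```java
List<IIdType> ids = Arrays.asList(
	new IdType("Patient/123"),
	new IdType("Observation/456"));

ResourcePersistentIdMap found = myResourceVersionSvc
	.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(), ids);

// Per the javadoc above, ids that do not exist are simply absent from the
// returned map, so callers can probe it without handling exceptions.
```
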
@@ -122,6 +122,7 @@ import ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl;
 import ca.uhn.fhir.jpa.search.cache.DatabaseSearchResultCacheSvcImpl;
 import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
 import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
+import ca.uhn.fhir.jpa.search.elastic.IndexNamePrefixLayoutStrategy;
 import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
 import ca.uhn.fhir.jpa.search.reindex.ResourceReindexer;
 import ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl;

@@ -155,6 +156,7 @@ import org.hl7.fhir.utilities.npm.FilesystemPackageCacheManager;
 import org.springframework.batch.core.configuration.annotation.BatchConfigurer;
 import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
 import org.springframework.context.ApplicationContext;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;

@@ -288,8 +290,8 @@ public abstract class BaseConfig {
 	 * bean, but it provides a partially completed entity manager
 	 * factory with HAPI FHIR customizations
 	 */
-	protected LocalContainerEntityManagerFactoryBean entityManagerFactory() {
-		LocalContainerEntityManagerFactoryBean retVal = new HapiFhirLocalContainerEntityManagerFactoryBean();
+	protected LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory myConfigurableListableBeanFactory) {
+		LocalContainerEntityManagerFactoryBean retVal = new HapiFhirLocalContainerEntityManagerFactoryBean(myConfigurableListableBeanFactory);
 		configureEntityManagerFactory(retVal, fhirContext());
 		return retVal;
 	}

@@ -378,17 +380,6 @@ public abstract class BaseConfig {
 		return new TermConceptMappingSvcImpl();
 	}

-	@Bean
-	public ThreadPoolTaskExecutor searchCoordinatorThreadFactory() {
-		final ThreadPoolTaskExecutor threadPoolTaskExecutor = new ThreadPoolTaskExecutor();
-		threadPoolTaskExecutor.setThreadNamePrefix("search_coord_");
-		threadPoolTaskExecutor.setCorePoolSize(searchCoordCorePoolSize);
-		threadPoolTaskExecutor.setMaxPoolSize(searchCoordMaxPoolSize);
-		threadPoolTaskExecutor.setQueueCapacity(searchCoordQueueCapacity);
-		threadPoolTaskExecutor.initialize();
-		return threadPoolTaskExecutor;
-	}
-
 	@Bean
 	public TaskScheduler taskScheduler() {
 		ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler();

@@ -849,8 +840,8 @@ public abstract class BaseConfig {
 	}

 	@Bean
-	public ISearchCoordinatorSvc searchCoordinatorSvc(ThreadPoolTaskExecutor searchCoordinatorThreadFactory) {
-		return new SearchCoordinatorSvcImpl(searchCoordinatorThreadFactory);
+	public ISearchCoordinatorSvc searchCoordinatorSvc() {
+		return new SearchCoordinatorSvcImpl();
 	}

 	@Bean

@@ -919,6 +910,11 @@ public abstract class BaseConfig {
 		return new PredicateBuilderFactory(theApplicationContext);
 	}

+	@Bean
+	public IndexNamePrefixLayoutStrategy indexLayoutStrategy() {
+		return new IndexNamePrefixLayoutStrategy();
+	}
+
 	@Bean
 	public JpaResourceLoader jpaResourceLoader() {
 		return new JpaResourceLoader();

@@ -23,6 +23,9 @@ package ca.uhn.fhir.jpa.config;
 import org.hibernate.cfg.AvailableSettings;
 import org.hibernate.query.criteria.LiteralHandlingMode;
 import org.hibernate.resource.jdbc.spi.PhysicalConnectionHandlingMode;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
+import org.springframework.orm.hibernate5.SpringBeanContainer;
 import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;

 import java.util.Map;

@@ -32,6 +35,14 @@ import java.util.Map;
 * that sets some sensible default property values
 */
 public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContainerEntityManagerFactoryBean {
+
+	// https://stackoverflow.com/questions/57902388/how-to-inject-spring-beans-into-the-hibernate-envers-revisionlistener
+	ConfigurableListableBeanFactory myConfigurableListableBeanFactory;
+
+	public HapiFhirLocalContainerEntityManagerFactoryBean(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
+		myConfigurableListableBeanFactory = theConfigurableListableBeanFactory;
+	}
+
 	@Override
 	public Map<String, Object> getJpaPropertyMap() {
 		Map<String, Object> retVal = super.getJpaPropertyMap();

@@ -63,6 +74,11 @@ public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContain
 		if (!retVal.containsKey(AvailableSettings.BATCH_VERSIONED_DATA)) {
 			retVal.put(AvailableSettings.BATCH_VERSIONED_DATA, "true");
 		}
+		// Why is this here, you ask? LocalContainerEntityManagerFactoryBean actually clobbers the setting hibernate needs
+		// in order to be able to resolve beans, so we add it back in manually here
+		if (!retVal.containsKey(AvailableSettings.BEAN_CONTAINER)) {
+			retVal.put(AvailableSettings.BEAN_CONTAINER, new SpringBeanContainer(myConfigurableListableBeanFactory));
+		}

 		return retVal;
 	}

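Illustration (not part of the commit): the net effect of the changes above is that the Spring bean factory reaches Hibernate, so Hibernate-managed components can be resolved as Spring beans. A condensed sketch of the wiring, using only names that appear in the hunks above:

```java
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory(
		ConfigurableListableBeanFactory beanFactory) {
	// The HAPI subclass re-adds AvailableSettings.BEAN_CONTAINER with a
	// SpringBeanContainer wrapping this bean factory (see getJpaPropertyMap()),
	// a setting LocalContainerEntityManagerFactoryBean would otherwise clobber.
	return new HapiFhirLocalContainerEntityManagerFactoryBean(beanFactory);
}
```
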
@@ -1207,7 +1206,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		if (thePerformIndexing || ((ResourceTable) theEntity).getVersion() == 1) {
-
 			newParams = new ResourceIndexedSearchParams();

 			mySearchParamWithInlineReferencesExtractor.populateFromResource(newParams, theTransactionDetails, entity, theResource, existingParams, theRequest, thePerformIndexing);

 			changed = populateResourceIntoEntity(theTransactionDetails, theRequest, theResource, entity, true);

@@ -1229,12 +1228,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		}

 		entity.setUpdated(theTransactionDetails.getTransactionDate());
-		if (theResource instanceof IResource) {
-			entity.setLanguage(((IResource) theResource).getLanguage().getValue());
-		} else {
-			entity.setLanguage(((IAnyResource) theResource).getLanguageElement().getValue());
-		}
-
 		newParams.populateResourceTableSearchParamsPresentFlags(entity);
 		entity.setIndexStatus(INDEX_STATUS_INDEXED);
 	}

@@ -136,9 +136,12 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
 import java.util.UUID;
@@ -7,6 +7,7 @@ import ca.uhn.fhir.jpa.util.ResourceCountCache;
 import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
 import ca.uhn.fhir.util.StopWatch;
 import com.google.common.annotations.VisibleForTesting;
@@ -25,19 +25,18 @@ import ca.uhn.fhir.context.RuntimeSearchParam;
 import ca.uhn.fhir.interceptor.api.HookParams;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
 import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
 import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
+import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
+import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
 import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
 import ca.uhn.fhir.jpa.model.entity.ModelConfig;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
-import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
-import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
-import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
 import ca.uhn.fhir.model.api.IQueryParameterAnd;
 import ca.uhn.fhir.rest.api.QualifiedParamList;
 import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
@@ -45,12 +44,16 @@ import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.SimplePreResourceAccessDetails;
 import ca.uhn.fhir.rest.api.server.SimplePreResourceShowDetails;
+import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
 import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
 import ca.uhn.fhir.rest.param.QualifierDetails;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
+import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
 import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
+import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
 import ca.uhn.fhir.util.BundleUtil;
 import ca.uhn.fhir.util.FhirTerser;
 import ca.uhn.fhir.util.OperationOutcomeUtil;
@@ -91,6 +94,10 @@ public abstract class BaseStorageDao {
   protected DaoRegistry myDaoRegistry;
   @Autowired
   protected ModelConfig myModelConfig;
+  @Autowired
+  protected IResourceVersionSvc myResourceVersionSvc;
+  @Autowired
+  protected DaoConfig myDaoConfig;

   @VisibleForTesting
   public void setSearchParamRegistry(ISearchParamRegistry theSearchParamRegistry) {
@@ -204,10 +211,33 @@ public abstract class BaseStorageDao {
     for (IBaseReference nextReference : referencesToVersion) {
       IIdType referenceElement = nextReference.getReferenceElement();
       if (!referenceElement.hasBaseUrl()) {
-        String resourceType = referenceElement.getResourceType();
-        IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(resourceType);
-        String targetVersionId = dao.getCurrentVersionId(referenceElement);
-        String newTargetReference = referenceElement.withVersion(targetVersionId).getValue();
+        ResourcePersistentIdMap resourceVersionMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(),
+          Collections.singletonList(referenceElement)
+        );
+
+        // 3 cases:
+        // 1) there exists a resource in the db with some version (use this version)
+        // 2) no resource exists, but we will create one (eventually). The version is 1
+        // 3) no resource exists, and none will be made -> throw
+        Long version;
+        if (resourceVersionMap.containsKey(referenceElement)) {
+          // the resource exists... latest id
+          // will be the value in the ResourcePersistentId
+          version = resourceVersionMap.getResourcePersistentId(referenceElement).getVersion();
+        } else if (myDaoConfig.isAutoCreatePlaceholderReferenceTargets()) {
+          // if idToPID doesn't contain object
+          // but autcreateplaceholders is on
+          // then the version will be 1 (the first version)
+          version = 1L;
+        }
+        else {
+          // resource not found
+          // and no autocreateplaceholders set...
+          // we throw
+          throw new ResourceNotFoundException(referenceElement);
+        }
+        String newTargetReference = referenceElement.withVersion(version.toString()).getValue();
         nextReference.setReference(newTargetReference);
       }
     }
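The rewritten block above resolves the version to pin on an auto-versioned reference from a bulk lookup instead of a per-reference DAO call. A standalone sketch of the same three-case decision is below; the Map, the method name, and the exception type are stand-ins chosen for illustration (the real code uses ResourcePersistentIdMap, DaoConfig, and ResourceNotFoundException).

import java.util.Map;

class ReferenceVersionExample {
  // Illustrative only: the same decision tree as the hunk above, on plain types.
  static long resolveVersion(Map<String, Long> latestVersionByReference, String reference, boolean autoCreatePlaceholders) {
    Long version = latestVersionByReference.get(reference);
    if (version != null) {
      // case 1: the target resource exists, reuse its latest version
      return version;
    }
    if (autoCreatePlaceholders) {
      // case 2: a placeholder target will be created, so its first version is 1
      return 1L;
    }
    // case 3: the target does not exist and will not be created
    throw new IllegalStateException("Reference target not found: " + reference);
  }
}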
(The diff of one file in this commit was suppressed by the viewer because it is too large to display.)
@@ -0,0 +1,38 @@
+package ca.uhn.fhir.jpa.dao;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import org.apache.commons.lang3.StringUtils;
+
+/**
+ * Utility class to help identify classes of failure.
+ */
+public class DaoFailureUtil {
+
+  public static boolean isTagStorageFailure(Throwable t) {
+    if (StringUtils.isBlank(t.getMessage())) {
+      return false;
+    } else {
+      String msg = t.getMessage().toLowerCase();
+      return msg.contains("hfj_tag_def") || msg.contains("hfj_res_tag");
+    }
+  }
+}
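The new DaoFailureUtil above centralizes the check for HFJ_TAG_DEF / HFJ_RES_TAG constraint collisions. A small, hypothetical demonstration of the helper follows; the exception messages are made up for the example.

import ca.uhn.fhir.jpa.dao.DaoFailureUtil;

public class DaoFailureUtilDemo {
  public static void main(String[] args) {
    // Simulated constraint-violation message; the helper lower-cases it before matching
    Exception tagCollision = new RuntimeException("could not execute statement; constraint [HFJ_TAG_DEF]");
    Exception unrelated = new RuntimeException("connection refused");
    System.out.println(DaoFailureUtil.isTagStorageFailure(tagCollision)); // true
    System.out.println(DaoFailureUtil.isTagStorageFailure(unrelated));    // false
  }
}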
@@ -100,6 +100,16 @@ public interface IResourceTableDao extends JpaRepository<ResourceTable, Long> {
   @Query("SELECT t.myVersion FROM ResourceTable t WHERE t.myId = :pid")
   Long findCurrentVersionByPid(@Param("pid") Long thePid);

+  /**
+   * This query will return rows with the following values:
+   * Id (resource pid - long), ResourceType (Patient, etc), version (long)
+   * Order matters!
+   * @param pid - list of pids to get versions for
+   * @return
+   */
+  @Query("SELECT t.myId, t.myResourceType, t.myVersion FROM ResourceTable t WHERE t.myId IN ( :pid )")
+  Collection<Object[]> getResourceVersionsForPid(@Param("pid") List<Long> pid);
+
   @Query("SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myPartitionId.myPartitionId IS NULL AND t.myId = :pid")
   Optional<ResourceTable> readByPartitionIdNull(@Param("pid") Long theResourceId);
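getResourceVersionsForPid() returns raw Object[] rows in the documented column order (id, resource type, version). A hypothetical sketch of mapping those rows into a pid-to-version map follows; the real consumer builds a ResourcePersistentIdMap, which is not shown here.

import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class VersionRowMappingExample {
  // Each row is { myId (Long), myResourceType (String), myVersion (Long) } - order matters
  static Map<Long, Long> toPidVersionMap(Collection<Object[]> theRows) {
    Map<Long, Long> retVal = new HashMap<>();
    for (Object[] row : theRows) {
      retVal.put((Long) row[0], (Long) row[2]); // row[1] (the resource type) is unused here
    }
    return retVal;
  }

  public static void main(String[] args) {
    Collection<Object[]> rows = Collections.singletonList(new Object[]{42L, "Patient", 3L});
    System.out.println(toPidVersionMap(rows)); // {42=3}
  }
}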
@@ -94,7 +94,6 @@ public class DaoResourceLinkResolver implements IResourceLinkResolver {
       throw new InvalidRequestException("Resource " + resName + "/" + idPart + " not found, specified in path: " + theSourcePath);
-
     }

     resolvedResource = createdTableOpt.get();
   }
@@ -34,6 +34,7 @@ import ca.uhn.fhir.jpa.util.MemoryCacheService;
 import ca.uhn.fhir.jpa.util.QueryChunker;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import com.google.common.annotations.VisibleForTesting;
@@ -204,7 +205,11 @@ public class IdHelperService {
    */
   @Nonnull
   public List<ResourcePersistentId> resolveResourcePersistentIdsWithCache(RequestPartitionId theRequestPartitionId, List<IIdType> theIds) {
-    theIds.forEach(id -> Validate.isTrue(id.hasIdPart()));
+    for (IIdType id : theIds) {
+      if (!id.hasIdPart()) {
+        throw new InvalidRequestException("Parameter value missing in request");
+      }
+    }

     if (theIds.isEmpty()) {
       return Collections.emptyList();
@@ -303,7 +308,7 @@
     if (forcedId.isPresent()) {
       retVal.setValue(theResourceType + '/' + forcedId.get());
     } else {
-      retVal.setValue(theResourceType + '/' + theId.toString());
+      retVal.setValue(theResourceType + '/' + theId);
     }

     return retVal;
@@ -559,11 +559,6 @@ class PredicateBuilderReference extends BasePredicateBuilder {
       myPredicateBuilder.addPredicateResourceId(theAndOrParams, theResourceName, theRequestPartitionId);
       break;

-    case IAnyResource.SP_RES_LANGUAGE:
-      addPredicateLanguage(theAndOrParams,
-        null);
-      break;
-
     case Constants.PARAM_HAS:
       addPredicateHas(theResourceName, theAndOrParams, theRequest, theRequestPartitionId);
       break;
@@ -733,9 +728,6 @@ class PredicateBuilderReference extends BasePredicateBuilder {
       null,
       theFilter.getValue());
     return myPredicateBuilder.addPredicateResourceId(Collections.singletonList(Collections.singletonList(param)), myResourceName, theFilter.getOperation(), theRequestPartitionId);
-  } else if (theFilter.getParamPath().getName().equals(IAnyResource.SP_RES_LANGUAGE)) {
-    return addPredicateLanguage(Collections.singletonList(Collections.singletonList(new StringParam(theFilter.getValue()))),
-      theFilter.getOperation());
   }

   RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(theResourceName, theFilter.getParamPath().getName());
@@ -828,45 +820,6 @@ class PredicateBuilderReference extends BasePredicateBuilder {
     return qp;
   }

-  private Predicate addPredicateLanguage(List<List<IQueryParameterType>> theList,
-      SearchFilterParser.CompareOperation operation) {
-    for (List<? extends IQueryParameterType> nextList : theList) {
-
-      Set<String> values = new HashSet<>();
-      for (IQueryParameterType next : nextList) {
-        if (next instanceof StringParam) {
-          String nextValue = ((StringParam) next).getValue();
-          if (isBlank(nextValue)) {
-            continue;
-          }
-          values.add(nextValue);
-        } else {
-          throw new InternalErrorException("Language parameter must be of type " + StringParam.class.getCanonicalName() + " - Got " + next.getClass().getCanonicalName());
-        }
-      }
-
-      if (values.isEmpty()) {
-        continue;
-      }
-
-      Predicate predicate;
-      if ((operation == null) ||
-        (operation == SearchFilterParser.CompareOperation.eq)) {
-        predicate = myQueryStack.get("myLanguage").as(String.class).in(values);
-      } else if (operation == SearchFilterParser.CompareOperation.ne) {
-        predicate = myQueryStack.get("myLanguage").as(String.class).in(values).not();
-      } else {
-        throw new InvalidRequestException("Unsupported operator specified in language query, only \"eq\" and \"ne\" are supported");
-      }
-      myQueryStack.addPredicate(predicate);
-      if (operation != null) {
-        return predicate;
-      }
-    }
-
-    return null;
-  }
-
   private void addPredicateSource(List<List<IQueryParameterType>> theAndOrParams, RequestDetails theRequest) {
     for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
       addPredicateSource(nextAnd, SearchFilterParser.CompareOperation.eq, theRequest);
@@ -261,6 +261,9 @@ class PredicateBuilderToken extends BasePredicateBuilder implements IPredicateBu
     if (theSearchParam != null) {
       Set<String> valueSetUris = Sets.newHashSet();
       for (String nextPath : theSearchParam.getPathsSplit()) {
+        if (!nextPath.startsWith(myResourceType + ".")) {
+          continue;
+        }
         BaseRuntimeChildDefinition def = myContext.newTerser().getDefinition(myResourceType, nextPath);
         if (def instanceof BaseRuntimeDeclaredChildDefinition) {
           String valueSet = ((BaseRuntimeDeclaredChildDefinition) def).getBindingValueSet();
@@ -92,8 +92,12 @@ public class FhirResourceDaoSearchParameterR4 extends BaseHapiFhirResourceDao<Se
     for (IPrimitiveType<?> nextBaseType : theResource.getBase()) {
       String nextBase = nextBaseType.getValueAsString();
       RuntimeSearchParam existingSearchParam = theSearchParamRegistry.getActiveSearchParam(nextBase, theResource.getCode());
-      if (existingSearchParam != null && existingSearchParam.getId() == null) {
-        throw new UnprocessableEntityException("Can not override built-in search parameter " + nextBase + ":" + theResource.getCode() + " because overriding is disabled on this server");
+      if (existingSearchParam != null) {
+        boolean isBuiltIn = existingSearchParam.getId() == null;
+        isBuiltIn |= existingSearchParam.getUri().startsWith("http://hl7.org/fhir/SearchParameter/");
+        if (isBuiltIn) {
+          throw new UnprocessableEntityException("Can not override built-in search parameter " + nextBase + ":" + theResource.getCode() + " because overriding is disabled on this server");
+        }
       }
     }
   }
@@ -25,6 +25,7 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
 import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.jpa.api.model.ResourceVersionConflictResolutionStrategy;
 import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
+import ca.uhn.fhir.jpa.dao.DaoFailureUtil;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
@@ -93,10 +94,9 @@ public class HapiTransactionService {
      * known to the system already, they'll both try to create a row in HFJ_TAG_DEF,
      * which is the tag definition table. In that case, a constraint error will be
      * thrown by one of the client threads, so we auto-retry in order to avoid
-     * annopying spurious failures for the client.
+     * annoying spurious failures for the client.
      */
-    if (e.getMessage().contains("HFJ_TAG_DEF") || e.getMessage().contains("hfj_tag_def") ||
-      e.getMessage().contains("HFJ_RES_TAG") || e.getMessage().contains("hfj_res_tag")) {
+    if (DaoFailureUtil.isTagStorageFailure(e)) {
       maxRetries = 3;
     }
@@ -82,7 +82,6 @@ import org.springframework.data.domain.Sort;
 import org.springframework.orm.jpa.JpaDialect;
 import org.springframework.orm.jpa.JpaTransactionManager;
 import org.springframework.orm.jpa.vendor.HibernateJpaDialect;
-import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
 import org.springframework.stereotype.Component;
 import org.springframework.transaction.PlatformTransactionManager;
 import org.springframework.transaction.TransactionDefinition;
@@ -111,7 +110,6 @@ import java.util.UUID;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutorService;
 import java.util.concurrent.TimeUnit;

 import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
@@ -123,7 +121,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
   public static final Integer INTEGER_0 = 0;
   private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchCoordinatorSvcImpl.class);
   private final ConcurrentHashMap<String, SearchTask> myIdToSearchTask = new ConcurrentHashMap<>();
-  private final ExecutorService myExecutor;
   @Autowired
   private FhirContext myContext;
   @Autowired
@@ -162,8 +159,13 @@
    * Constructor
    */
   @Autowired
-  public SearchCoordinatorSvcImpl(ThreadPoolTaskExecutor searchCoordinatorThreadFactory) {
-    myExecutor = searchCoordinatorThreadFactory.getThreadPoolExecutor();
+  public SearchCoordinatorSvcImpl() {
+    super();
+  }
+
+  @VisibleForTesting
+  Set<String> getActiveSearchIds() {
+    return myIdToSearchTask.keySet();
   }

   @VisibleForTesting
@@ -274,7 +276,7 @@
       RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(theRequestDetails, resourceType, params, null);
       SearchContinuationTask task = new SearchContinuationTask(search, resourceDao, params, resourceType, theRequestDetails, requestPartitionId);
       myIdToSearchTask.put(search.getUuid(), task);
-      myExecutor.submit(task);
+      task.call();
     }
   }

@@ -406,7 +408,7 @@

     SearchTask task = new SearchTask(theSearch, theCallingDao, theParams, theResourceType, theRequestDetails, theRequestPartitionId);
     myIdToSearchTask.put(theSearch.getUuid(), task);
-    myExecutor.submit(task);
+    task.call();

     PersistedJpaSearchFirstPageBundleProvider retVal = myPersistedJpaBundleProviderFactory.newInstanceFirstPage(theRequestDetails, theSearch, task, theSb);

@@ -1087,7 +1089,7 @@
       ourLog.trace("Performing count");
       ISearchBuilder sb = newSearchBuilder();
       Iterator<Long> countIterator = sb.createCountQuery(myParams, mySearch.getUuid(), myRequest, myRequestPartitionId);
-      Long count = countIterator.hasNext() ? countIterator.next() : 0;
+      Long count = countIterator.hasNext() ? countIterator.next() : 0L;
       ourLog.trace("Got count {}", count);

       TransactionTemplate txTemplate = new TransactionTemplate(myManagedTxManager);
@@ -434,9 +434,6 @@ public class QueryStack {
       param.setValueAsQueryToken(null, null, null, theFilter.getValue());
       return theQueryStack3.createPredicateResourceId(null, Collections.singletonList(Collections.singletonList(param)), theResourceName, theFilter.getOperation(), theRequestPartitionId);
     }
-    case IAnyResource.SP_RES_LANGUAGE: {
-      return theQueryStack3.createPredicateLanguage(Collections.singletonList(Collections.singletonList(new StringParam(theFilter.getValue()))), theFilter.getOperation());
-    }
     case Constants.PARAM_SOURCE: {
       TokenParam param = new TokenParam();
       param.setValueAsQueryToken(null, null, null, theFilter.getValue());
@@ -579,44 +576,6 @@
     return toAndPredicate(andPredicates);
   }

-  public Condition createPredicateLanguage(List<List<IQueryParameterType>> theList, Object theOperation) {
-
-    ResourceTablePredicateBuilder rootTable = mySqlBuilder.getOrCreateResourceTablePredicateBuilder();
-
-    List<Condition> predicates = new ArrayList<>();
-    for (List<? extends IQueryParameterType> nextList : theList) {
-
-      Set<String> values = new HashSet<>();
-      for (IQueryParameterType next : nextList) {
-        if (next instanceof StringParam) {
-          String nextValue = ((StringParam) next).getValue();
-          if (isBlank(nextValue)) {
-            continue;
-          }
-          values.add(nextValue);
-        } else {
-          throw new InternalErrorException("Language parameter must be of type " + StringParam.class.getCanonicalName() + " - Got " + next.getClass().getCanonicalName());
-        }
-      }
-
-      if (values.isEmpty()) {
-        continue;
-      }
-
-      if ((theOperation == null) ||
-        (theOperation == SearchFilterParser.CompareOperation.eq)) {
-        predicates.add(rootTable.createLanguagePredicate(values, false));
-      } else if (theOperation == SearchFilterParser.CompareOperation.ne) {
-        predicates.add(rootTable.createLanguagePredicate(values, true));
-      } else {
-        throw new InvalidRequestException("Unsupported operator specified in language query, only \"eq\" and \"ne\" are supported");
-      }
-
-    }
-
-    return toAndPredicate(predicates);
-  }
-
   public Condition createPredicateNumber(@Nullable DbColumn theSourceJoinColumn, String theResourceName,
     String theSpnamePrefix, RuntimeSearchParam theSearchParam, List<? extends IQueryParameterType> theList,
     SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) {
@@ -729,10 +688,10 @@
     return predicateBuilder.createPredicate(theRequest, theResourceName, theParamName, theList, theOperation, theRequestPartitionId);
   }

-  private Condition createPredicateReferenceForContainedResource(@Nullable DbColumn theSourceJoinColumn,
+  public Condition createPredicateReferenceForContainedResource(@Nullable DbColumn theSourceJoinColumn,
     String theResourceName, String theParamName, RuntimeSearchParam theSearchParam,
     List<? extends IQueryParameterType> theList, SearchFilterParser.CompareOperation theOperation,
     RequestDetails theRequest, RequestPartitionId theRequestPartitionId) {

     String spnamePrefix = theParamName;

@@ -794,31 +753,31 @@

     switch (targetParamDefinition.getParamType()) {
       case DATE:
-        containedCondition = createPredicateDate(null, theResourceName, spnamePrefix, targetParamDefinition,
+        containedCondition = createPredicateDate(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
           orValues, theOperation, theRequestPartitionId);
         break;
       case NUMBER:
-        containedCondition = createPredicateNumber(null, theResourceName, spnamePrefix, targetParamDefinition,
+        containedCondition = createPredicateNumber(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
          orValues, theOperation, theRequestPartitionId);
         break;
       case QUANTITY:
-        containedCondition = createPredicateQuantity(null, theResourceName, spnamePrefix, targetParamDefinition,
+        containedCondition = createPredicateQuantity(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
           orValues, theOperation, theRequestPartitionId);
         break;
       case STRING:
-        containedCondition = createPredicateString(null, theResourceName, spnamePrefix, targetParamDefinition,
+        containedCondition = createPredicateString(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
           orValues, theOperation, theRequestPartitionId);
         break;
       case TOKEN:
-        containedCondition = createPredicateToken(null, theResourceName, spnamePrefix, targetParamDefinition,
+        containedCondition = createPredicateToken(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
          orValues, theOperation, theRequestPartitionId);
         break;
       case COMPOSITE:
-        containedCondition = createPredicateComposite(null, theResourceName, spnamePrefix, targetParamDefinition,
+        containedCondition = createPredicateComposite(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
          orValues, theRequestPartitionId);
         break;
       case URI:
-        containedCondition = createPredicateUri(null, theResourceName, spnamePrefix, targetParamDefinition,
+        containedCondition = createPredicateUri(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
          orValues, theOperation, theRequest, theRequestPartitionId);
         break;
       case HAS:
@@ -1099,9 +1058,6 @@
       case IAnyResource.SP_RES_ID:
         return createPredicateResourceId(theSourceJoinColumn, theAndOrParams, theResourceName, null, theRequestPartitionId);

-      case IAnyResource.SP_RES_LANGUAGE:
-        return createPredicateLanguage(theAndOrParams, null);
-
       case Constants.PARAM_HAS:
         return createPredicateHas(theSourceJoinColumn, theResourceName, theAndOrParams, theRequest, theRequestPartitionId);

@@ -1162,10 +1118,24 @@
         break;
       case REFERENCE:
         for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
-          if (theSearchContainedMode.equals(SearchContainedModeEnum.TRUE))
-            andPredicates.add(createPredicateReferenceForContainedResource(theSourceJoinColumn, theResourceName, theParamName, nextParamDef, nextAnd, null, theRequest, theRequestPartitionId));
-          else
+          if (theSearchContainedMode.equals(SearchContainedModeEnum.TRUE)) {
+            // TODO: The _contained parameter is not intended to control search chain interpretation like this.
+            // See SMILE-2898 for details.
+            // For now, leave the incorrect implementation alone, just in case someone is relying on it,
+            // until the complete fix is available.
+            andPredicates.add(createPredicateReferenceForContainedResource(null, theResourceName, theParamName, nextParamDef, nextAnd, null, theRequest, theRequestPartitionId));
+          } else if (isEligibleForContainedResourceSearch(nextAnd)) {
+            // TODO for now, restrict contained reference traversal to the last reference in the chain
+            // We don't seem to be indexing the outbound references of a contained resource, so we can't
+            // include them in search chains.
+            // It would be nice to eventually relax this constraint, but no client seems to be asking for it.
+            andPredicates.add(toOrPredicate(
+              createPredicateReference(theSourceJoinColumn, theResourceName, theParamName, nextAnd, null, theRequest, theRequestPartitionId),
+              createPredicateReferenceForContainedResource(theSourceJoinColumn, theResourceName, theParamName, nextParamDef, nextAnd, null, theRequest, theRequestPartitionId)
+            ));
+          } else {
             andPredicates.add(createPredicateReference(theSourceJoinColumn, theResourceName, theParamName, nextAnd, null, theRequest, theRequestPartitionId));
+          }
         }
         break;
       case STRING:
@@ -1243,6 +1213,14 @@
     return toAndPredicate(andPredicates);
   }

+  private boolean isEligibleForContainedResourceSearch(List<? extends IQueryParameterType> nextAnd) {
+    return myModelConfig.isIndexOnContainedResources() &&
+      nextAnd.stream()
+        .filter(t -> t instanceof ReferenceParam)
+        .map(t -> (ReferenceParam) t)
+        .noneMatch(t -> t.getChain().contains("."));
+  }
+
   public void addPredicateCompositeUnique(String theIndexString, RequestPartitionId theRequestPartitionId) {
     ComboUniqueSearchParameterPredicateBuilder predicateBuilder = mySqlBuilder.addComboUniquePredicateBuilder();
     Condition predicate = predicateBuilder.createPredicateIndexString(theRequestPartitionId, theIndexString);
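isEligibleForContainedResourceSearch() above only admits reference parameters whose chain has a single hop, because outbound references of contained resources are not indexed yet. A tiny illustration of the chain test follows; plain strings stand in for ReferenceParam#getChain(), and the parameter names are made up for the example.

public class ChainEligibilityExample {
  public static void main(String[] args) {
    // chain values as they might appear for ?subject.organization versus ?subject.organization.name
    String singleHop = "organization";
    String multiHop = "organization.name";
    System.out.println(singleHop.contains("."));  // false -> eligible for the contained-resource OR branch
    System.out.println(multiHop.contains("."));   // true  -> falls through to the plain reference predicate
  }
}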
@@ -377,7 +377,7 @@ public class SearchBuilder implements ISearchBuilder {
     SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(myContext, myDaoConfig.getModelConfig(), myPartitionSettings, myRequestPartitionId, sqlBuilderResourceName, mySqlBuilderFactory, myDialectProvider, theCount);
     QueryStack queryStack3 = new QueryStack(theParams, myDaoConfig, myDaoConfig.getModelConfig(), myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);

-    if (theParams.keySet().size() > 1 || theParams.getSort() != null || theParams.keySet().contains(Constants.PARAM_HAS)) {
+    if (theParams.keySet().size() > 1 || theParams.getSort() != null || theParams.keySet().contains(Constants.PARAM_HAS) || isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) {
       List<RuntimeSearchParam> activeComboParams = mySearchParamRegistry.getActiveComboSearchParams(myResourceName, theParams.keySet());
       if (activeComboParams.isEmpty()) {
         sqlBuilder.setNeedResourceTableRoot(true);
@@ -487,6 +487,13 @@ public class SearchBuilder implements ISearchBuilder {
     return Optional.of(executor);
   }

+  private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) {
+    return myModelConfig.isIndexOnContainedResources() && theParams.values().stream()
+      .flatMap(Collection::stream)
+      .flatMap(Collection::stream)
+      .anyMatch(t -> t instanceof ReferenceParam);
+  }
+
   private List<Long> normalizeIdListForLastNInClause(List<Long> lastnResourceIds) {
     /*
     The following is a workaround to a known issue involving Hibernate. If queries are used with "in" clauses with large and varying
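isPotentiallyContainedReferenceParameterExistsAtRoot() flattens two collection levels because SearchParameterMap values are and-lists of or-lists of parameters. The sketch below shows the same double flatMap on simplified types; it is an illustration only, not HAPI FHIR API.

import java.util.Collection;
import java.util.List;

public class DoubleFlatMapExample {
  public static void main(String[] args) {
    // Simplified stand-in for Collection<List<List<IQueryParameterType>>>
    List<List<List<String>>> andOrValues = List.of(
      List.of(List.of("Organization/1"), List.of("Organization/2")));
    boolean anyReference = andOrValues.stream()
      .flatMap(Collection::stream)  // flatten the "and" level
      .flatMap(Collection::stream)  // flatten the "or" level
      .anyMatch(v -> v.startsWith("Organization/"));
    System.out.println(anyReference); // true
  }
}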
@@ -38,20 +38,18 @@ import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
 import ca.uhn.fhir.jpa.dao.index.IdHelperService;
 import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderReference;
 import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
-import ca.uhn.fhir.jpa.search.builder.QueryStack;
 import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
+import ca.uhn.fhir.jpa.search.builder.QueryStack;
 import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
 import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
 import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams;
 import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
-import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
-import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
-import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
 import ca.uhn.fhir.model.api.IQueryParameterType;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.parser.DataFormatException;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
+import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
 import ca.uhn.fhir.rest.param.CompositeParam;
@@ -66,6 +64,8 @@ import ca.uhn.fhir.rest.param.TokenParamModifier;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
+import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
 import com.google.common.collect.Lists;
 import com.healthmarketscience.sqlbuilder.BinaryCondition;
 import com.healthmarketscience.sqlbuilder.ComboCondition;
@@ -341,15 +341,26 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
     List<Condition> orPredicates = new ArrayList<>();
     boolean paramInverted = false;
     QueryStack childQueryFactory = myQueryStack.newChildQueryFactoryWithFullBuilderReuse();
-    for (String nextType : resourceTypes) {
-      String chain = theReferenceParam.getChain();
-
-      String remainingChain = null;
-      int chainDotIndex = chain.indexOf('.');
-      if (chainDotIndex != -1) {
-        remainingChain = chain.substring(chainDotIndex + 1);
-        chain = chain.substring(0, chainDotIndex);
-      }
+
+    String chain = theReferenceParam.getChain();
+
+    String remainingChain = null;
+    int chainDotIndex = chain.indexOf('.');
+    if (chainDotIndex != -1) {
+      remainingChain = chain.substring(chainDotIndex + 1);
+      chain = chain.substring(0, chainDotIndex);
+    }
+
+    int qualifierIndex = chain.indexOf(':');
+    String qualifier = null;
+    if (qualifierIndex != -1) {
+      qualifier = chain.substring(qualifierIndex);
+      chain = chain.substring(0, qualifierIndex);
+    }
+
+    boolean isMeta = ResourceMetaParams.RESOURCE_META_PARAMS.containsKey(chain);
+
+    for (String nextType : resourceTypes) {
+
       RuntimeResourceDefinition typeDef = getFhirContext().getResourceDefinition(nextType);
       String subResourceName = typeDef.getName();
@@ -360,14 +371,6 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
         continue;
       }

-      int qualifierIndex = chain.indexOf(':');
-      String qualifier = null;
-      if (qualifierIndex != -1) {
-        qualifier = chain.substring(qualifierIndex);
-        chain = chain.substring(0, qualifierIndex);
-      }
-
-      boolean isMeta = ResourceMetaParams.RESOURCE_META_PARAMS.containsKey(chain);
       RuntimeSearchParam param = null;
       if (!isMeta) {
         param = mySearchParamRegistry.getActiveSearchParam(nextType, chain);
@@ -408,7 +411,6 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
       andPredicates.add(childQueryFactory.searchForIdsWithAndOr(myColumnTargetResourceId, subResourceName, chain, chainParamValues, theRequest, theRequestPartitionId, SearchContainedModeEnum.FALSE));
-
       orPredicates.add(toAndPredicate(andPredicates));

     }

     if (candidateTargetTypes.isEmpty()) {
@@ -222,7 +222,7 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder {
     if (retVal == null) {
       if (theSearchParam != null) {
         Set<String> valueSetUris = Sets.newHashSet();
-        for (String nextPath : theSearchParam.getPathsSplit()) {
+        for (String nextPath : theSearchParam.getPathsSplitForResourceType(getResourceType())) {
           Class<? extends IBaseResource> type = getFhirContext().getResourceDefinition(getResourceType()).getImplementingClass();
           BaseRuntimeChildDefinition def = getFhirContext().newTerser().getDefinition(type, nextPath);
           if (def instanceof BaseRuntimeDeclaredChildDefinition) {
@@ -549,7 +549,7 @@ public class SearchQueryBuilder {
   }

   public ComboCondition addPredicateLastUpdated(DateRangeParam theDateRange) {
-    ResourceTablePredicateBuilder resourceTableRoot = getOrCreateResourceTablePredicateBuilder();
+    ResourceTablePredicateBuilder resourceTableRoot = getOrCreateResourceTablePredicateBuilder(false);

     List<Condition> conditions = new ArrayList<>(2);
     if (theDateRange.getLowerBoundAsInstant() != null) {
@ -37,6 +37,7 @@ import org.hibernate.search.backend.elasticsearch.cfg.ElasticsearchIndexSettings
|
||||||
import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
|
import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
|
|
||||||
|
import javax.annotation.Nullable;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Properties;
|
import java.util.Properties;
|
||||||
|
@ -49,13 +50,13 @@ import static org.slf4j.LoggerFactory.getLogger;
|
||||||
* FHIR JPA server. This class also injects a starter template into the ES cluster.
|
* FHIR JPA server. This class also injects a starter template into the ES cluster.
|
||||||
*/
|
*/
|
||||||
public class ElasticsearchHibernatePropertiesBuilder {
|
public class ElasticsearchHibernatePropertiesBuilder {
|
||||||
private static final Logger ourLog = getLogger(ElasticsearchHibernatePropertiesBuilder.class);
|
private static final Logger ourLog = getLogger(ElasticsearchHibernatePropertiesBuilder.class);
|
||||||
|
|
||||||
|
|
||||||
private IndexStatus myRequiredIndexStatus = IndexStatus.YELLOW.YELLOW;
|
private IndexStatus myRequiredIndexStatus = IndexStatus.YELLOW;
|
||||||
private SchemaManagementStrategyName myIndexSchemaManagementStrategy = SchemaManagementStrategyName.CREATE;
|
private SchemaManagementStrategyName myIndexSchemaManagementStrategy = SchemaManagementStrategyName.CREATE;
|
||||||
|
|
||||||
private String myRestUrl;
|
private String myHosts;
|
||||||
private String myUsername;
|
private String myUsername;
|
||||||
private String myPassword;
|
private String myPassword;
|
||||||
private long myIndexManagementWaitTimeoutMillis = 10000L;
|
private long myIndexManagementWaitTimeoutMillis = 10000L;
|
||||||
|
@ -77,11 +78,8 @@ public class ElasticsearchHibernatePropertiesBuilder {
|
||||||
|
|
||||||
// the below properties are used for ElasticSearch integration
|
// the below properties are used for ElasticSearch integration
|
||||||
theProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "elasticsearch");
|
theProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "elasticsearch");
|
||||||
|
|
||||||
|
|
||||||
theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.ANALYSIS_CONFIGURER), HapiElasticsearchAnalysisConfigurer.class.getName());
|
theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.ANALYSIS_CONFIGURER), HapiElasticsearchAnalysisConfigurer.class.getName());
|
||||||
|
theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.HOSTS), myHosts);
|
||||||
theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.HOSTS), myRestUrl);
|
|
||||||
theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.PROTOCOL), myProtocol);
|
theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.PROTOCOL), myProtocol);
|
||||||
|
|
||||||
if (StringUtils.isNotBlank(myUsername)) {
|
if (StringUtils.isNotBlank(myUsername)) {
|
||||||
|
@ -99,8 +97,10 @@ public class ElasticsearchHibernatePropertiesBuilder {
|
||||||
theProperties.put(HibernateOrmMapperSettings.AUTOMATIC_INDEXING_SYNCHRONIZATION_STRATEGY, myDebugSyncStrategy);
|
theProperties.put(HibernateOrmMapperSettings.AUTOMATIC_INDEXING_SYNCHRONIZATION_STRATEGY, myDebugSyncStrategy);
|
||||||
theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LOG_JSON_PRETTY_PRINTING), Boolean.toString(myDebugPrettyPrintJsonLog));
|
theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LOG_JSON_PRETTY_PRINTING), Boolean.toString(myDebugPrettyPrintJsonLog));
|
||||||
|
|
||||||
injectStartupTemplate(myProtocol, myRestUrl, myUsername, myPassword);
|
//This tells elasticsearch to use our custom index naming strategy.
|
||||||
|
theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LAYOUT_STRATEGY), IndexNamePrefixLayoutStrategy.class.getName());
|
||||||
|
|
||||||
|
injectStartupTemplate(myProtocol, myHosts, myUsername, myPassword);
|
||||||
}
|
}
|
||||||
|
|
||||||
public ElasticsearchHibernatePropertiesBuilder setRequiredIndexStatus(IndexStatus theRequiredIndexStatus) {
|
public ElasticsearchHibernatePropertiesBuilder setRequiredIndexStatus(IndexStatus theRequiredIndexStatus) {
|
||||||
|
@ -108,11 +108,8 @@ public class ElasticsearchHibernatePropertiesBuilder {
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
public ElasticsearchHibernatePropertiesBuilder setRestUrl(String theRestUrl) {
|
public ElasticsearchHibernatePropertiesBuilder setHosts(String hosts) {
|
||||||
if (theRestUrl.contains("://")) {
|
myHosts = hosts;
|
||||||
throw new ConfigurationException("Elasticsearch URL cannot include a protocol, that is a separate property. Remove http:// or https:// from this URL.");
|
|
||||||
}
|
|
||||||
myRestUrl = theRestUrl;
|
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -147,18 +144,13 @@ public class ElasticsearchHibernatePropertiesBuilder {
|
||||||
* TODO GGG HS: In HS6.1, we should have a native way of performing index settings manipulation at bootstrap time, so this should
|
* TODO GGG HS: In HS6.1, we should have a native way of performing index settings manipulation at bootstrap time, so this should
|
||||||
* eventually be removed in favour of whatever solution they come up with.
|
* eventually be removed in favour of whatever solution they come up with.
|
||||||
*/
|
*/
|
||||||
void injectStartupTemplate(String theProtocol, String theHostAndPort, String theUsername, String thePassword) {
|
void injectStartupTemplate(String theProtocol, String theHosts, @Nullable String theUsername, @Nullable String thePassword) {
|
||||||
PutIndexTemplateRequest ngramTemplate = new PutIndexTemplateRequest("ngram-template")
|
PutIndexTemplateRequest ngramTemplate = new PutIndexTemplateRequest("ngram-template")
|
||||||
.patterns(Arrays.asList("resourcetable-*", "termconcept-*"))
|
.patterns(Arrays.asList("*resourcetable-*", "*termconcept-*"))
|
||||||
.settings(Settings.builder().put("index.max_ngram_diff", 50));
|
.settings(Settings.builder().put("index.max_ngram_diff", 50));
|
||||||
|
|
||||||
int colonIndex = theHostAndPort.indexOf(":");
|
|
||||||
String host = theHostAndPort.substring(0, colonIndex);
|
|
||||||
Integer port = Integer.valueOf(theHostAndPort.substring(colonIndex + 1));
|
|
||||||
String qualifiedHost = theProtocol + "://" + host;
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(qualifiedHost, port, theUsername, thePassword);
|
RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(theProtocol, theHosts, theUsername, thePassword);
|
||||||
ourLog.info("Adding starter template for large ngram diffs");
|
ourLog.info("Adding starter template for large ngram diffs");
|
||||||
AcknowledgedResponse acknowledgedResponse = elasticsearchHighLevelRestClient.indices().putTemplate(ngramTemplate, RequestOptions.DEFAULT);
|
AcknowledgedResponse acknowledgedResponse = elasticsearchHighLevelRestClient.indices().putTemplate(ngramTemplate, RequestOptions.DEFAULT);
|
||||||
assert acknowledgedResponse.isAcknowledged();
|
assert acknowledgedResponse.isAcknowledged();
|
||||||
|
|
|
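The hunk above replaces the single setRestUrl(String) property with setHosts(String), which accepts a comma-separated list of host:port pairs while the protocol stays a separate property. A minimal sketch of the parsing this implies is shown below; the HostListParser class is purely illustrative and not part of HAPI FHIR:

import java.util.ArrayList;
import java.util.List;

class HostListParser {
   // Illustrative only: turns "localhost:9200,localhost:9201" plus a protocol into qualified URLs.
   static List<String> toQualifiedUrls(String theProtocol, String theHosts) {
      List<String> urls = new ArrayList<>();
      for (String entry : theHosts.split(",")) {
         String hostAndPort = entry.trim();
         int colonIndex = hostAndPort.indexOf(':');
         if (colonIndex < 0) {
            throw new IllegalArgumentException("Expected host:port but got: " + hostAndPort);
         }
         String host = hostAndPort.substring(0, colonIndex);
         int port = Integer.parseInt(hostAndPort.substring(colonIndex + 1));
         urls.add(theProtocol + "://" + host + ":" + port);
      }
      return urls;
   }
}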
@ -0,0 +1,99 @@
package ca.uhn.fhir.jpa.search.elastic;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.search.backend.elasticsearch.index.layout.IndexLayoutStrategy;
import org.hibernate.search.backend.elasticsearch.logging.impl.Log;
import org.hibernate.search.util.common.logging.impl.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.lang.invoke.MethodHandles;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * This class instructs hibernate search on how to create index names for indexed entities.
 * In our case, we use this class to add an optional prefix to all indices which are created, which can be controlled via
 * {@link DaoConfig#setElasticSearchIndexPrefix(String)}.
 */
@Service
public class IndexNamePrefixLayoutStrategy implements IndexLayoutStrategy {

   @Autowired
   private DaoConfig myDaoConfig;

   static final Log log = LoggerFactory.make(Log.class, MethodHandles.lookup());
   public static final String NAME = "prefix";
   public static final Pattern UNIQUE_KEY_EXTRACTION_PATTERN = Pattern.compile("(.*)-\\d{6}");

   public String createInitialElasticsearchIndexName(String hibernateSearchIndexName) {
      return addPrefixIfNecessary(hibernateSearchIndexName + "-000001");
   }

   public String createWriteAlias(String hibernateSearchIndexName) {
      return addPrefixIfNecessary(hibernateSearchIndexName + "-write");
   }

   public String createReadAlias(String hibernateSearchIndexName) {
      return addPrefixIfNecessary(hibernateSearchIndexName + "-read");
   }

   private String addPrefixIfNecessary(String theCandidateName) {
      validateDaoConfigIsPresent();
      if (!StringUtils.isBlank(myDaoConfig.getElasticSearchIndexPrefix())) {
         return myDaoConfig.getElasticSearchIndexPrefix() + "-" + theCandidateName;
      } else {
         return theCandidateName;
      }
   }

   public String extractUniqueKeyFromHibernateSearchIndexName(String hibernateSearchIndexName) {
      return hibernateSearchIndexName;
   }

   public String extractUniqueKeyFromElasticsearchIndexName(String elasticsearchIndexName) {
      Matcher matcher = UNIQUE_KEY_EXTRACTION_PATTERN.matcher(elasticsearchIndexName);
      if (!matcher.matches()) {
         throw log.invalidIndexPrimaryName(elasticsearchIndexName, UNIQUE_KEY_EXTRACTION_PATTERN);
      } else {
         String candidateUniqueKey = matcher.group(1);
         return removePrefixIfNecessary(candidateUniqueKey);
      }
   }

   private String removePrefixIfNecessary(String theCandidateUniqueKey) {
      validateDaoConfigIsPresent();
      if (!StringUtils.isBlank(myDaoConfig.getElasticSearchIndexPrefix())) {
         return theCandidateUniqueKey.replace(myDaoConfig.getElasticSearchIndexPrefix() + "-", "");
      } else {
         return theCandidateUniqueKey;
      }
   }

   private void validateDaoConfigIsPresent() {
      if (myDaoConfig == null) {
         throw new ConfigurationException("While attempting to boot HAPI FHIR, the Hibernate Search bootstrapper failed to find the DaoConfig. This probably means Hibernate Search has been recently upgraded, or somebody modified HapiFhirLocalContainerEntityManagerFactoryBean.");
      }
   }
}
|
|
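To make the naming rules of the new layout strategy concrete: with an index prefix of "hapi" and a Hibernate Search index named "resourcetable", the strategy produces "hapi-resourcetable-000001" for the initial index and "hapi-resourcetable-write" / "hapi-resourcetable-read" for the aliases, and strips the prefix again when extracting the unique key. The sketch below mirrors that arithmetic in a standalone class; it is not the HAPI FHIR class itself, which obtains the prefix from an autowired DaoConfig:

class PrefixNamingSketch {
   // Mirrors addPrefixIfNecessary(): an optional "<prefix>-" in front of the generated name.
   static String addPrefix(String prefix, String candidate) {
      return (prefix == null || prefix.isEmpty()) ? candidate : prefix + "-" + candidate;
   }

   static String initialIndexName(String prefix, String hibernateSearchIndexName) {
      return addPrefix(prefix, hibernateSearchIndexName + "-000001");
   }

   static String writeAlias(String prefix, String hibernateSearchIndexName) {
      return addPrefix(prefix, hibernateSearchIndexName + "-write");
   }

   public static void main(String[] args) {
      System.out.println(initialIndexName("hapi", "resourcetable")); // hapi-resourcetable-000001
      System.out.println(writeAlias("hapi", "resourcetable"));       // hapi-resourcetable-write
   }
}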
@ -20,6 +20,8 @@ package ca.uhn.fhir.jpa.search.lastn;
|
||||||
* #L%
|
* #L%
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
import ca.uhn.fhir.context.ConfigurationException;
|
||||||
|
import org.apache.commons.lang3.StringUtils;
|
||||||
import org.apache.http.Header;
|
import org.apache.http.Header;
|
||||||
import org.apache.http.HttpHost;
|
import org.apache.http.HttpHost;
|
||||||
import org.apache.http.auth.AuthScope;
|
import org.apache.http.auth.AuthScope;
|
||||||
|
@ -27,45 +29,51 @@ import org.apache.http.auth.UsernamePasswordCredentials;
|
||||||
import org.apache.http.client.CredentialsProvider;
|
import org.apache.http.client.CredentialsProvider;
|
||||||
import org.apache.http.impl.client.BasicCredentialsProvider;
|
import org.apache.http.impl.client.BasicCredentialsProvider;
|
||||||
import org.apache.http.message.BasicHeader;
|
import org.apache.http.message.BasicHeader;
|
||||||
|
import org.elasticsearch.client.Node;
|
||||||
import org.elasticsearch.client.RestClient;
|
import org.elasticsearch.client.RestClient;
|
||||||
import org.elasticsearch.client.RestClientBuilder;
|
import org.elasticsearch.client.RestClientBuilder;
|
||||||
import org.elasticsearch.client.RestHighLevelClient;
|
import org.elasticsearch.client.RestHighLevelClient;
|
||||||
|
|
||||||
|
import javax.annotation.Nullable;
|
||||||
|
import java.util.Arrays;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
public class ElasticsearchRestClientFactory {
|
public class ElasticsearchRestClientFactory {
|
||||||
|
|
||||||
|
|
||||||
private static String determineScheme(String theHostname) {
|
static public RestHighLevelClient createElasticsearchHighLevelRestClient(
|
||||||
int schemeIdx = theHostname.indexOf("://");
|
String protocol, String hosts, @Nullable String theUsername, @Nullable String thePassword) {
|
||||||
if (schemeIdx > 0) {
|
|
||||||
return theHostname.substring(0, schemeIdx);
|
if (hosts.contains("://")) {
|
||||||
} else {
|
throw new ConfigurationException("Elasticsearch URLs cannot include a protocol, that is a separate property. Remove http:// or https:// from this URL.");
|
||||||
return "http";
|
|
||||||
}
|
}
|
||||||
}
|
String[] hostArray = hosts.split(",");
|
||||||
|
List<Node> clientNodes = Arrays.stream(hostArray)
|
||||||
private static String stripHostOfScheme(String theHostname) {
|
.map(String::trim)
|
||||||
int schemeIdx = theHostname.indexOf("://");
|
.filter(s -> s.contains(":"))
|
||||||
if (schemeIdx > 0) {
|
.map(h -> {
|
||||||
return theHostname.substring(schemeIdx + 3);
|
int colonIndex = h.indexOf(":");
|
||||||
} else {
|
String host = h.substring(0, colonIndex);
|
||||||
return theHostname;
|
int port = Integer.parseInt(h.substring(colonIndex + 1));
|
||||||
|
return new Node(new HttpHost(host, port, protocol));
|
||||||
|
})
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
if (hostArray.length != clientNodes.size()) {
|
||||||
|
throw new ConfigurationException("Elasticsearch URLs have to contain ':' as a host:port separator. Example: localhost:9200,localhost:9201,localhost:9202");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
RestClientBuilder clientBuilder = RestClient.builder(clientNodes.toArray(new Node[0]));
|
||||||
|
if (StringUtils.isNotBlank(theUsername) && StringUtils.isNotBlank(thePassword)) {
|
||||||
|
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
|
||||||
|
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(theUsername, thePassword));
|
||||||
|
clientBuilder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder
|
||||||
|
.setDefaultCredentialsProvider(credentialsProvider));
|
||||||
|
}
|
||||||
|
Header[] defaultHeaders = new Header[]{new BasicHeader("Content-Type", "application/json")};
|
||||||
|
clientBuilder.setDefaultHeaders(defaultHeaders);
|
||||||
|
|
||||||
|
return new RestHighLevelClient(clientBuilder);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
static public RestHighLevelClient createElasticsearchHighLevelRestClient(String theHostname, int thePort, String theUsername, String thePassword) {
|
|
||||||
final CredentialsProvider credentialsProvider =
|
|
||||||
new BasicCredentialsProvider();
|
|
||||||
credentialsProvider.setCredentials(AuthScope.ANY,
|
|
||||||
new UsernamePasswordCredentials(theUsername, thePassword));
|
|
||||||
RestClientBuilder clientBuilder = RestClient.builder(
|
|
||||||
new HttpHost(stripHostOfScheme(theHostname), thePort, determineScheme(theHostname)))
|
|
||||||
.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder
|
|
||||||
.setDefaultCredentialsProvider(credentialsProvider));
|
|
||||||
|
|
||||||
Header[] defaultHeaders = new Header[]{new BasicHeader("Content-Type", "application/json")};
|
|
||||||
clientBuilder.setDefaultHeaders(defaultHeaders);
|
|
||||||
|
|
||||||
return new RestHighLevelClient(clientBuilder);
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
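The factory above now takes the protocol, a comma-separated host list, and nullable credentials, and it rejects host strings that embed a protocol. A usage sketch under those assumptions; the host names and credentials are placeholders:

import ca.uhn.fhir.jpa.search.lastn.ElasticsearchRestClientFactory;
import org.elasticsearch.client.RestHighLevelClient;

class ElasticsearchClientFactoryUsageSketch {
   public static void main(String[] args) {
      // Multiple nodes, no authentication: username and password may be null.
      RestHighLevelClient unauthenticated = ElasticsearchRestClientFactory
         .createElasticsearchHighLevelRestClient("http", "es-node1:9200,es-node2:9200", null, null);

      // Single node with basic authentication; note the host carries no "http://" prefix.
      RestHighLevelClient authenticated = ElasticsearchRestClientFactory
         .createElasticsearchHighLevelRestClient("https", "localhost:9200", "elastic", "changeme");
   }
}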
@ -68,11 +68,11 @@ import org.elasticsearch.search.aggregations.bucket.terms.ParsedTerms;
|
||||||
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
|
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
|
||||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
|
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
|
||||||
import org.elasticsearch.search.aggregations.metrics.ParsedTopHits;
|
import org.elasticsearch.search.aggregations.metrics.ParsedTopHits;
|
||||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
|
||||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||||
import org.elasticsearch.search.sort.SortOrder;
|
import org.elasticsearch.search.sort.SortOrder;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
|
||||||
|
import javax.annotation.Nullable;
|
||||||
import java.io.BufferedReader;
|
import java.io.BufferedReader;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.InputStreamReader;
|
import java.io.InputStreamReader;
|
||||||
|
@ -125,13 +125,13 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
|
||||||
private PartitionSettings myPartitionSettings;
|
private PartitionSettings myPartitionSettings;
|
||||||
|
|
||||||
//This constructor used to inject a dummy partitionsettings in test.
|
//This constructor used to inject a dummy partitionsettings in test.
|
||||||
public ElasticsearchSvcImpl(PartitionSettings thePartitionSetings, String theHostname, int thePort, String theUsername, String thePassword) {
|
public ElasticsearchSvcImpl(PartitionSettings thePartitionSetings, String theHostname, @Nullable String theUsername, @Nullable String thePassword) {
|
||||||
this(theHostname, thePort, theUsername, thePassword);
|
this(theHostname, theUsername, thePassword);
|
||||||
this.myPartitionSettings = thePartitionSetings;
|
this.myPartitionSettings = thePartitionSetings;
|
||||||
}
|
}
|
||||||
|
|
||||||
public ElasticsearchSvcImpl(String theHostname, int thePort, String theUsername, String thePassword) {
|
public ElasticsearchSvcImpl(String theHostname, @Nullable String theUsername, @Nullable String thePassword) {
|
||||||
myRestHighLevelClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(theHostname, thePort, theUsername, thePassword);
|
myRestHighLevelClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient("http", theHostname, theUsername, thePassword);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
createObservationIndexIfMissing();
|
createObservationIndexIfMissing();
|
||||||
|
|
|
@ -172,6 +172,10 @@ public class MemoryCacheService {
|
||||||
return (Cache<K, T>) myCaches.get(theCache);
|
return (Cache<K, T>) myCaches.get(theCache);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public long getEstimatedSize(CacheEnum theCache) {
|
||||||
|
return getCache(theCache).estimatedSize();
|
||||||
|
}
|
||||||
|
|
||||||
public enum CacheEnum {
|
public enum CacheEnum {
|
||||||
|
|
||||||
TAG_DEFINITION(TagDefinitionCacheKey.class),
|
TAG_DEFINITION(TagDefinitionCacheKey.class),
|
||||||
|
|
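The new getEstimatedSize(CacheEnum) accessor simply exposes the underlying cache's estimatedSize() for one of the named caches. A small usage fragment, assuming a MemoryCacheService instance is available (for example injected into a test):

// Fragment only: myMemoryCacheService is assumed to be an injected MemoryCacheService bean.
long tagCacheSize = myMemoryCacheService.getEstimatedSize(MemoryCacheService.CacheEnum.TAG_DEFINITION);
System.out.println("Tag definition cache holds approximately " + tagCacheSize + " entries");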
|
@ -126,5 +126,8 @@ public class SqlQuery {
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String toString() {
|
||||||
|
return getSql(true, true);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,16 +1,15 @@
|
||||||
package ca.uhn.fhir.jpa.bulk;
|
package ca.uhn.fhir.jpa.bulk;
|
||||||
|
|
||||||
import ca.uhn.fhir.context.FhirContext;
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
import ca.uhn.fhir.context.FhirVersionEnum;
|
|
||||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||||
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
|
|
||||||
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
|
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
|
||||||
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
|
|
||||||
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
|
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
|
||||||
|
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
|
||||||
import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
|
import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
|
||||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||||
import ca.uhn.fhir.rest.api.Constants;
|
import ca.uhn.fhir.rest.api.Constants;
|
||||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||||
|
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
|
||||||
import ca.uhn.fhir.rest.client.apache.ResourceEntity;
|
import ca.uhn.fhir.rest.client.apache.ResourceEntity;
|
||||||
import ca.uhn.fhir.rest.server.RestfulServer;
|
import ca.uhn.fhir.rest.server.RestfulServer;
|
||||||
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||||
|
@ -55,11 +54,8 @@ import static org.hamcrest.Matchers.is;
|
||||||
import static org.hamcrest.Matchers.notNullValue;
|
import static org.hamcrest.Matchers.notNullValue;
|
||||||
import static org.hamcrest.Matchers.nullValue;
|
import static org.hamcrest.Matchers.nullValue;
|
||||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
|
||||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
|
||||||
import static org.mockito.ArgumentMatchers.any;
|
import static org.mockito.ArgumentMatchers.any;
|
||||||
import static org.mockito.ArgumentMatchers.anyBoolean;
|
import static org.mockito.ArgumentMatchers.anyBoolean;
|
||||||
import static org.mockito.ArgumentMatchers.isNull;
|
|
||||||
import static org.mockito.ArgumentMatchers.nullable;
|
import static org.mockito.ArgumentMatchers.nullable;
|
||||||
import static org.mockito.Mockito.eq;
|
import static org.mockito.Mockito.eq;
|
||||||
import static org.mockito.Mockito.times;
|
import static org.mockito.Mockito.times;
|
||||||
|
@ -74,7 +70,7 @@ public class BulkDataExportProviderTest {
|
||||||
private static final String GROUP_ID = "Group/G2401";
|
private static final String GROUP_ID = "Group/G2401";
|
||||||
private static final String G_JOB_ID = "0000000-GGGGGG";
|
private static final String G_JOB_ID = "0000000-GGGGGG";
|
||||||
private Server myServer;
|
private Server myServer;
|
||||||
private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4);
|
private final FhirContext myCtx = FhirContext.forR4Cached();
|
||||||
private int myPort;
|
private int myPort;
|
||||||
@Mock
|
@Mock
|
||||||
private IBulkDataExportSvc myBulkDataExportSvc;
|
private IBulkDataExportSvc myBulkDataExportSvc;
|
||||||
|
@ -478,9 +474,9 @@ public class BulkDataExportProviderTest {
|
||||||
when(myBulkDataExportSvc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(jobInfo);
|
when(myBulkDataExportSvc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(jobInfo);
|
||||||
|
|
||||||
String url = "http://localhost:" + myPort + "/" + "Group/123/" +JpaConstants.OPERATION_EXPORT
|
String url = "http://localhost:" + myPort + "/" + "Group/123/" +JpaConstants.OPERATION_EXPORT
|
||||||
+ "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON);;
|
+ "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON);
|
||||||
|
|
||||||
HttpGet get = new HttpGet(url);
|
HttpGet get = new HttpGet(url);
|
||||||
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
|
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
|
||||||
CloseableHttpResponse execute = myClient.execute(get);
|
CloseableHttpResponse execute = myClient.execute(get);
|
||||||
|
|
||||||
|
|
|
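Several tests in this commit switch from FhirContext.forCached(FhirVersionEnum.R4) to the shorter FhirContext.forR4Cached(); the two calls are interchangeable and both reuse one context instance per FHIR version. A minimal sketch:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;

class CachedContextSketch {
   public static void main(String[] args) {
      FhirContext viaEnum = FhirContext.forCached(FhirVersionEnum.R4);
      FhirContext viaShortcut = FhirContext.forR4Cached();
      // Both resolve through the same per-version cache, so the same instance is expected.
      System.out.println(viaEnum == viaShortcut);
   }
}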
@ -21,12 +21,12 @@ public class ResourceVersionCacheSvcTest extends BaseJpaR4Test {
|
||||||
IIdType patientId = myPatientDao.create(patient).getId();
|
IIdType patientId = myPatientDao.create(patient).getId();
|
||||||
ResourceVersionMap versionMap = myResourceVersionCacheSvc.getVersionMap("Patient", SearchParameterMap.newSynchronous());
|
ResourceVersionMap versionMap = myResourceVersionCacheSvc.getVersionMap("Patient", SearchParameterMap.newSynchronous());
|
||||||
assertEquals(1, versionMap.size());
|
assertEquals(1, versionMap.size());
|
||||||
assertEquals("1", versionMap.getVersion(patientId));
|
assertEquals(1L, versionMap.getVersion(patientId));
|
||||||
|
|
||||||
patient.setGender(Enumerations.AdministrativeGender.MALE);
|
patient.setGender(Enumerations.AdministrativeGender.MALE);
|
||||||
myPatientDao.update(patient);
|
myPatientDao.update(patient);
|
||||||
versionMap = myResourceVersionCacheSvc.getVersionMap("Patient", SearchParameterMap.newSynchronous());
|
versionMap = myResourceVersionCacheSvc.getVersionMap("Patient", SearchParameterMap.newSynchronous());
|
||||||
assertEquals(1, versionMap.size());
|
assertEquals(1, versionMap.size());
|
||||||
assertEquals("2", versionMap.getVersion(patientId));
|
assertEquals(2L, versionMap.getVersion(patientId));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,147 @@
package ca.uhn.fhir.jpa.config;

import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.config.BlockLargeNumbersOfParamsListener;
import ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect;
import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
import ca.uhn.fhir.jpa.dao.r4.ElasticsearchPrefixTest;
import ca.uhn.fhir.jpa.search.elastic.HapiElasticsearchAnalysisConfigurer;
import ca.uhn.fhir.jpa.search.elastic.IndexNamePrefixLayoutStrategy;
import ca.uhn.fhir.jpa.search.lastn.ElasticsearchRestClientFactory;
import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchContainerHelper;
import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.PutIndexTemplateRequest;
import org.elasticsearch.common.settings.Settings;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.hibernate.search.backend.elasticsearch.cfg.ElasticsearchBackendSettings;
import org.hibernate.search.backend.elasticsearch.cfg.ElasticsearchIndexSettings;
import org.hibernate.search.backend.elasticsearch.index.IndexStatus;
import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.testcontainers.elasticsearch.ElasticsearchContainer;

import javax.sql.DataSource;
import java.io.IOException;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

/**
 * The only reason this is its own class is so that we can set a dao config setting before the whole test framework comes online.
 * We need to do this as it is during bean creation that HS bootstrapping occurs.
 */
@Configuration
public class ElasticsearchWithPrefixConfig {

   @Bean
   public DaoConfig daoConfig() {
      DaoConfig daoConfig = new DaoConfig();
      daoConfig.setElasticSearchIndexPrefix(ElasticsearchPrefixTest.ELASTIC_PREFIX);
      return daoConfig;
   }

   @Bean
   public IndexNamePrefixLayoutStrategy indexNamePrefixLayoutStrategy() {
      return new IndexNamePrefixLayoutStrategy();
   }

   @Bean
   public FhirContext fhirContext() {
      return FhirContext.forR4();
   }

   @Bean
   public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
      LocalContainerEntityManagerFactoryBean retVal = new HapiFhirLocalContainerEntityManagerFactoryBean(theConfigurableListableBeanFactory);
      retVal.setJpaDialect(new HapiFhirHibernateJpaDialect(fhirContext().getLocalizer()));
      retVal.setPackagesToScan("ca.uhn.fhir.jpa.model.entity", "ca.uhn.fhir.jpa.entity");
      retVal.setPersistenceProvider(new HibernatePersistenceProvider());
      retVal.setPersistenceUnitName("PU_HapiFhirJpaR4");
      retVal.setDataSource(dataSource());
      retVal.setJpaProperties(jpaProperties());
      return retVal;
   }

   @Bean
   public DataSource dataSource() {
      BasicDataSource retVal = new BasicDataSource();
      retVal.setDriver(new org.h2.Driver());
      retVal.setUrl("jdbc:h2:mem:testdb_r4");
      retVal.setMaxWaitMillis(30000);
      retVal.setUsername("");
      retVal.setPassword("");
      retVal.setMaxTotal(5);

      SLF4JLogLevel level = SLF4JLogLevel.INFO;
      DataSource dataSource = ProxyDataSourceBuilder
         .create(retVal)
         .logSlowQueryBySlf4j(10, TimeUnit.SECONDS, level)
         .beforeQuery(new BlockLargeNumbersOfParamsListener())
         .afterQuery(new CurrentThreadCaptureQueriesListener())
         .build();

      return dataSource;
   }

   @Bean
   public Properties jpaProperties() {
      Properties extraProperties = new Properties();
      extraProperties.put("hibernate.format_sql", "false");
      extraProperties.put("hibernate.show_sql", "false");
      extraProperties.put("hibernate.hbm2ddl.auto", "update");
      extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
      //Override default lucene settings
      // Force elasticsearch to start first
      int httpPort = elasticContainer().getMappedPort(9200); //9200 is the HTTP port
      String host = elasticContainer().getHost();
      // the below properties are used for ElasticSearch integration
      extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "elasticsearch");
      extraProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.ANALYSIS_CONFIGURER), HapiElasticsearchAnalysisConfigurer.class.getName());
      extraProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.HOSTS), host + ":" + httpPort);
      extraProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.PROTOCOL), "http");
      extraProperties.put(HibernateOrmMapperSettings.SCHEMA_MANAGEMENT_STRATEGY, SchemaManagementStrategyName.CREATE.externalRepresentation());
      extraProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.SCHEMA_MANAGEMENT_MINIMAL_REQUIRED_STATUS_WAIT_TIMEOUT), Long.toString(10000));
      extraProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.SCHEMA_MANAGEMENT_MINIMAL_REQUIRED_STATUS), IndexStatus.YELLOW.externalRepresentation());
      // Need the mapping to be dynamic because of terminology indexes.
      extraProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.DYNAMIC_MAPPING), "true");
      // Only for unit tests
      extraProperties.put(HibernateOrmMapperSettings.AUTOMATIC_INDEXING_SYNCHRONIZATION_STRATEGY, "read-sync");
      extraProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LOG_JSON_PRETTY_PRINTING), Boolean.toString(true));

      //This tells elasticsearch to use our custom index naming strategy.
      extraProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LAYOUT_STRATEGY), IndexNamePrefixLayoutStrategy.class.getName());

      PutIndexTemplateRequest ngramTemplate = new PutIndexTemplateRequest("ngram-template")
         .patterns(Arrays.asList("*resourcetable-*", "*termconcept-*"))
         .settings(Settings.builder().put("index.max_ngram_diff", 50));

      try {
         RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient("http", host + ":" + httpPort, "", "");
         AcknowledgedResponse acknowledgedResponse = elasticsearchHighLevelRestClient.indices().putTemplate(ngramTemplate, RequestOptions.DEFAULT);
         assert acknowledgedResponse.isAcknowledged();
      } catch (IOException theE) {
         theE.printStackTrace();
         throw new ConfigurationException("Couldn't connect to the elasticsearch server to create necessary templates. Ensure the Elasticsearch user has permissions to create templates.");
      }
      return extraProperties;
   }

   @Bean
   public ElasticsearchContainer elasticContainer() {
      ElasticsearchContainer embeddedElasticSearch = TestElasticsearchContainerHelper.getEmbeddedElasticSearch();
      embeddedElasticSearch.start();
      return embeddedElasticSearch;
   }
}
|
|
@ -16,6 +16,7 @@ import org.hibernate.search.engine.cfg.BackendSettings;
|
||||||
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
|
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
|
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
|
||||||
import org.springframework.context.annotation.Bean;
|
import org.springframework.context.annotation.Bean;
|
||||||
import org.springframework.context.annotation.Configuration;
|
import org.springframework.context.annotation.Configuration;
|
||||||
import org.springframework.context.annotation.Import;
|
import org.springframework.context.annotation.Import;
|
||||||
|
@ -134,8 +135,8 @@ public class TestDstu2Config extends BaseJavaConfigDstu2 {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@Bean
|
@Bean
|
||||||
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
|
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
|
||||||
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
|
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
|
||||||
retVal.setPersistenceUnitName("PU_HapiFhirJpaDstu2");
|
retVal.setPersistenceUnitName("PU_HapiFhirJpaDstu2");
|
||||||
retVal.setDataSource(dataSource());
|
retVal.setDataSource(dataSource());
|
||||||
retVal.setJpaProperties(jpaProperties());
|
retVal.setJpaProperties(jpaProperties());
|
||||||
|
|
|
@ -15,6 +15,7 @@ import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
|
||||||
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
|
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
|
||||||
import org.hibernate.search.engine.cfg.BackendSettings;
|
import org.hibernate.search.engine.cfg.BackendSettings;
|
||||||
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
|
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
|
||||||
|
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
|
||||||
import org.springframework.context.annotation.Bean;
|
import org.springframework.context.annotation.Bean;
|
||||||
import org.springframework.context.annotation.Configuration;
|
import org.springframework.context.annotation.Configuration;
|
||||||
import org.springframework.context.annotation.Import;
|
import org.springframework.context.annotation.Import;
|
||||||
|
@ -138,8 +139,8 @@ public class TestDstu3Config extends BaseJavaConfigDstu3 {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@Bean
|
@Bean
|
||||||
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
|
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
|
||||||
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
|
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
|
||||||
retVal.setPersistenceUnitName("PU_HapiFhirJpaDstu3");
|
retVal.setPersistenceUnitName("PU_HapiFhirJpaDstu3");
|
||||||
retVal.setDataSource(dataSource());
|
retVal.setDataSource(dataSource());
|
||||||
retVal.setJpaProperties(jpaProperties());
|
retVal.setJpaProperties(jpaProperties());
|
||||||
|
|
|
@ -3,12 +3,14 @@ package ca.uhn.fhir.jpa.config;
|
||||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||||
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
||||||
|
import ca.uhn.fhir.jpa.search.elastic.IndexNamePrefixLayoutStrategy;
|
||||||
import ca.uhn.fhir.jpa.subscription.SubscriptionTestUtil;
|
import ca.uhn.fhir.jpa.subscription.SubscriptionTestUtil;
|
||||||
import ca.uhn.fhir.jpa.subscription.channel.config.SubscriptionChannelConfig;
|
import ca.uhn.fhir.jpa.subscription.channel.config.SubscriptionChannelConfig;
|
||||||
import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig;
|
import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig;
|
||||||
import ca.uhn.fhir.jpa.subscription.match.deliver.resthook.SubscriptionDeliveringRestHookSubscriber;
|
import ca.uhn.fhir.jpa.subscription.match.deliver.resthook.SubscriptionDeliveringRestHookSubscriber;
|
||||||
import ca.uhn.fhir.jpa.subscription.submit.config.SubscriptionSubmitterConfig;
|
import ca.uhn.fhir.jpa.subscription.submit.config.SubscriptionSubmitterConfig;
|
||||||
import ca.uhn.fhir.test.utilities.BatchJobHelper;
|
import ca.uhn.fhir.test.utilities.BatchJobHelper;
|
||||||
|
import org.hibernate.search.backend.elasticsearch.index.layout.IndexLayoutStrategy;
|
||||||
import org.springframework.batch.core.explore.JobExplorer;
|
import org.springframework.batch.core.explore.JobExplorer;
|
||||||
import org.springframework.context.annotation.Bean;
|
import org.springframework.context.annotation.Bean;
|
||||||
import org.springframework.context.annotation.Configuration;
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
package ca.uhn.fhir.jpa.config;
|
package ca.uhn.fhir.jpa.config;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
|
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
|
||||||
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
|
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
|
||||||
import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl;
|
import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl;
|
||||||
|
@ -14,6 +15,8 @@ import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
|
||||||
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
|
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
|
||||||
import org.apache.commons.dbcp2.BasicDataSource;
|
import org.apache.commons.dbcp2.BasicDataSource;
|
||||||
import org.hibernate.dialect.H2Dialect;
|
import org.hibernate.dialect.H2Dialect;
|
||||||
|
import org.hibernate.jpa.HibernatePersistenceProvider;
|
||||||
|
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
|
||||||
import org.springframework.context.annotation.Bean;
|
import org.springframework.context.annotation.Bean;
|
||||||
import org.springframework.context.annotation.Configuration;
|
import org.springframework.context.annotation.Configuration;
|
||||||
import org.springframework.context.annotation.Import;
|
import org.springframework.context.annotation.Import;
|
||||||
|
@ -138,10 +141,15 @@ public class TestR4Config extends BaseJavaConfigR4 {
|
||||||
return new SingleQueryCountHolder();
|
return new SingleQueryCountHolder();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@Bean
|
@Bean
|
||||||
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
|
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
|
||||||
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
|
LocalContainerEntityManagerFactoryBean retVal = new HapiFhirLocalContainerEntityManagerFactoryBean(theConfigurableListableBeanFactory);
|
||||||
|
configureEntityManagerFactory(retVal, fhirContext());
|
||||||
|
retVal.setJpaDialect(new HapiFhirHibernateJpaDialect(fhirContext().getLocalizer()));
|
||||||
|
retVal.setPackagesToScan("ca.uhn.fhir.jpa.model.entity", "ca.uhn.fhir.jpa.entity");
|
||||||
|
retVal.setPersistenceProvider(new HibernatePersistenceProvider());
|
||||||
retVal.setPersistenceUnitName("PU_HapiFhirJpaR4");
|
retVal.setPersistenceUnitName("PU_HapiFhirJpaR4");
|
||||||
retVal.setDataSource(dataSource());
|
retVal.setDataSource(dataSource());
|
||||||
retVal.setJpaProperties(jpaProperties());
|
retVal.setJpaProperties(jpaProperties());
|
||||||
|
|
|
@ -1,13 +1,19 @@
|
||||||
package ca.uhn.fhir.jpa.config;
|
package ca.uhn.fhir.jpa.config;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||||
import ca.uhn.fhir.jpa.search.elastic.ElasticsearchHibernatePropertiesBuilder;
|
import ca.uhn.fhir.jpa.search.elastic.ElasticsearchHibernatePropertiesBuilder;
|
||||||
|
import ca.uhn.fhir.jpa.search.elastic.IndexNamePrefixLayoutStrategy;
|
||||||
import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchContainerHelper;
|
import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchContainerHelper;
|
||||||
|
import org.h2.index.Index;
|
||||||
import org.hibernate.search.backend.elasticsearch.index.IndexStatus;
|
import org.hibernate.search.backend.elasticsearch.index.IndexStatus;
|
||||||
|
import org.hibernate.search.backend.elasticsearch.index.layout.IndexLayoutStrategy;
|
||||||
import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
|
import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
import org.springframework.context.annotation.Bean;
|
import org.springframework.context.annotation.Bean;
|
||||||
import org.springframework.context.annotation.Configuration;
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.DependsOn;
|
||||||
import org.testcontainers.elasticsearch.ElasticsearchContainer;
|
import org.testcontainers.elasticsearch.ElasticsearchContainer;
|
||||||
|
|
||||||
import javax.annotation.PreDestroy;
|
import javax.annotation.PreDestroy;
|
||||||
|
@ -35,7 +41,7 @@ public class TestR4ConfigWithElasticSearch extends TestR4Config {
|
||||||
.setIndexSchemaManagementStrategy(SchemaManagementStrategyName.CREATE)
|
.setIndexSchemaManagementStrategy(SchemaManagementStrategyName.CREATE)
|
||||||
.setIndexManagementWaitTimeoutMillis(10000)
|
.setIndexManagementWaitTimeoutMillis(10000)
|
||||||
.setRequiredIndexStatus(IndexStatus.YELLOW)
|
.setRequiredIndexStatus(IndexStatus.YELLOW)
|
||||||
.setRestUrl(host+ ":" + httpPort)
|
.setHosts(host + ":" + httpPort)
|
||||||
.setProtocol("http")
|
.setProtocol("http")
|
||||||
.setUsername("")
|
.setUsername("")
|
||||||
.setPassword("")
|
.setPassword("")
|
||||||
|
|
|
@ -21,7 +21,7 @@ public class TestR4ConfigWithElasticsearchClient extends TestR4ConfigWithElastic
|
||||||
public ElasticsearchSvcImpl myElasticsearchSvc() {
|
public ElasticsearchSvcImpl myElasticsearchSvc() {
|
||||||
int elasticsearchPort = elasticContainer().getMappedPort(9200);
|
int elasticsearchPort = elasticContainer().getMappedPort(9200);
|
||||||
String host = elasticContainer().getHost();
|
String host = elasticContainer().getHost();
|
||||||
return new ElasticsearchSvcImpl(host, elasticsearchPort, "", "");
|
return new ElasticsearchSvcImpl(host + ":" + elasticsearchPort, null, null);
|
||||||
}
|
}
|
||||||
|
|
||||||
@PreDestroy
|
@PreDestroy
|
||||||
|
|
|
@ -3,9 +3,11 @@ package ca.uhn.fhir.jpa.config;
|
||||||
import java.util.Properties;
|
import java.util.Properties;
|
||||||
|
|
||||||
import org.hibernate.dialect.H2Dialect;
|
import org.hibernate.dialect.H2Dialect;
|
||||||
|
import org.hibernate.search.backend.elasticsearch.index.layout.IndexLayoutStrategy;
|
||||||
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
|
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
|
||||||
import org.hibernate.search.engine.cfg.BackendSettings;
|
import org.hibernate.search.engine.cfg.BackendSettings;
|
||||||
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
|
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
|
||||||
|
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
|
||||||
import org.springframework.context.annotation.Bean;
|
import org.springframework.context.annotation.Bean;
|
||||||
import org.springframework.context.annotation.Configuration;
|
import org.springframework.context.annotation.Configuration;
|
||||||
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
|
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
|
||||||
|
@ -27,8 +29,8 @@ public class TestR4WithLuceneDisabledConfig extends TestR4Config {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@Bean
|
@Bean
|
||||||
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
|
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
|
||||||
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
|
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
|
||||||
retVal.setDataSource(dataSource());
|
retVal.setDataSource(dataSource());
|
||||||
retVal.setJpaProperties(jpaProperties());
|
retVal.setJpaProperties(jpaProperties());
|
||||||
return retVal;
|
return retVal;
|
||||||
|
|
|
@ -16,6 +16,7 @@ import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
|
||||||
import org.hibernate.search.engine.cfg.BackendSettings;
|
import org.hibernate.search.engine.cfg.BackendSettings;
|
||||||
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
|
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
|
||||||
import org.springframework.context.annotation.Bean;
|
import org.springframework.context.annotation.Bean;
|
||||||
import org.springframework.context.annotation.Configuration;
|
import org.springframework.context.annotation.Configuration;
|
||||||
import org.springframework.context.annotation.Import;
|
import org.springframework.context.annotation.Import;
|
||||||
|
@ -138,8 +139,8 @@ public class TestR5Config extends BaseJavaConfigR5 {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@Bean
|
@Bean
|
||||||
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
|
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
|
||||||
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
|
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
|
||||||
retVal.setPersistenceUnitName("PU_HapiFhirJpaR5");
|
retVal.setPersistenceUnitName("PU_HapiFhirJpaR5");
|
||||||
retVal.setDataSource(dataSource());
|
retVal.setDataSource(dataSource());
|
||||||
retVal.setJpaProperties(jpaProperties());
|
retVal.setJpaProperties(jpaProperties());
|
||||||
|
|
|
@ -13,6 +13,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||||
import static org.junit.jupiter.api.Assertions.fail;
|
import static org.junit.jupiter.api.Assertions.fail;
|
||||||
|
|
||||||
class BaseHapiFhirResourceDaoTest {
|
class BaseHapiFhirResourceDaoTest {
|
||||||
|
|
||||||
TestResourceDao mySvc = new TestResourceDao();
|
TestResourceDao mySvc = new TestResourceDao();
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
|
|
@ -721,6 +721,4 @@ public abstract class BaseJpaTest extends BaseTest {
|
||||||
}
|
}
|
||||||
Thread.sleep(500);
|
Thread.sleep(500);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,19 +1,18 @@
|
||||||
package ca.uhn.fhir.jpa.dao;
|
package ca.uhn.fhir.jpa.dao;
|
||||||
|
|
||||||
import ca.uhn.fhir.context.FhirContext;
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
import ca.uhn.fhir.context.FhirVersionEnum;
|
|
||||||
import ca.uhn.fhir.parser.DataFormatException;
|
import ca.uhn.fhir.parser.DataFormatException;
|
||||||
import ca.uhn.fhir.parser.LenientErrorHandler;
|
import ca.uhn.fhir.parser.LenientErrorHandler;
|
||||||
import org.hl7.fhir.r4.model.Observation;
|
import org.hl7.fhir.r4.model.Observation;
|
||||||
import org.junit.jupiter.api.Test;
|
import org.junit.jupiter.api.Test;
|
||||||
|
|
||||||
|
import static org.hamcrest.MatcherAssert.assertThat;
|
||||||
import static org.hamcrest.Matchers.containsString;
|
import static org.hamcrest.Matchers.containsString;
|
||||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||||
import static org.hamcrest.MatcherAssert.assertThat;
|
|
||||||
|
|
||||||
public class TolerantJsonParserR4Test {
|
public class TolerantJsonParserR4Test {
|
||||||
|
|
||||||
private FhirContext myFhirContext = FhirContext.forCached(FhirVersionEnum.R4);
|
private final FhirContext myFhirContext = FhirContext.forR4Cached();
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testParseInvalidNumeric_LeadingDecimal() {
|
public void testParseInvalidNumeric_LeadingDecimal() {
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
package ca.uhn.fhir.jpa.dao;
|
package ca.uhn.fhir.jpa.dao;
|
||||||
|
|
||||||
import ca.uhn.fhir.context.FhirContext;
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
import ca.uhn.fhir.context.FhirVersionEnum;
|
|
||||||
import ca.uhn.fhir.interceptor.executor.InterceptorService;
|
import ca.uhn.fhir.interceptor.executor.InterceptorService;
|
||||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||||
|
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
|
||||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||||
import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
|
import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
|
||||||
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
|
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
|
||||||
|
@ -13,6 +13,7 @@ import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
||||||
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
|
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
|
||||||
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
||||||
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher;
|
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher;
|
||||||
|
import ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher;
|
||||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||||
import org.hibernate.Session;
|
import org.hibernate.Session;
|
||||||
import org.hibernate.internal.SessionImpl;
|
import org.hibernate.internal.SessionImpl;
|
||||||
|
@ -70,6 +71,10 @@ public class TransactionProcessorTest {
|
||||||
private MatchUrlService myMatchUrlService;
|
private MatchUrlService myMatchUrlService;
|
||||||
@MockBean
|
@MockBean
|
||||||
private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
|
private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
|
||||||
|
@MockBean
|
||||||
|
private IResourceVersionSvc myResourceVersionSvc;
|
||||||
|
@MockBean
|
||||||
|
private SearchParamMatcher mySearchParamMatcher;
|
||||||
|
|
||||||
@MockBean(answer = Answers.RETURNS_DEEP_STUBS)
|
@MockBean(answer = Answers.RETURNS_DEEP_STUBS)
|
||||||
private SessionImpl mySession;
|
private SessionImpl mySession;
|
||||||
|
@ -120,7 +125,7 @@ public class TransactionProcessorTest {
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public FhirContext fhirContext() {
|
public FhirContext fhirContext() {
|
||||||
return FhirContext.forCached(FhirVersionEnum.R4);
|
return FhirContext.forR4Cached();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
|
|
|
@ -1033,7 +1033,7 @@ public class FhirResourceDaoDstu2SearchCustomSearchParamTest extends BaseJpaDstu
|
||||||
myPatientDao.search(map).size();
|
myPatientDao.search(map).size();
|
||||||
fail();
|
fail();
|
||||||
} catch (InvalidRequestException e) {
|
} catch (InvalidRequestException e) {
|
||||||
assertEquals("Unknown search parameter \"foo\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _language, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, careprovider, deathdate, deceased, email, family, gender, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
|
assertEquals("Unknown search parameter \"foo\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, careprovider, deathdate, deceased, email, family, gender, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1070,7 +1070,7 @@ public class FhirResourceDaoDstu2SearchCustomSearchParamTest extends BaseJpaDstu
|
||||||
myPatientDao.search(map).size();
|
myPatientDao.search(map).size();
|
||||||
fail();
|
fail();
|
||||||
} catch (InvalidRequestException e) {
|
} catch (InvalidRequestException e) {
|
||||||
assertEquals("Unknown search parameter \"foo\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _language, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, careprovider, deathdate, deceased, email, family, gender, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
|
assertEquals("Unknown search parameter \"foo\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, careprovider, deathdate, deceased, email, family, gender, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
|
||||||
}
|
}
|
||||||
|
|
||||||
// Try with normal gender SP
|
// Try with normal gender SP
|
||||||
|
|
|
@ -727,9 +727,6 @@ public class FhirResourceDaoDstu2SearchNoFtTest extends BaseJpaDstu2Test {
|
||||||
params.add("_id", new StringDt("TEST"));
|
params.add("_id", new StringDt("TEST"));
|
||||||
assertEquals(1, toList(myPatientDao.search(params)).size());
|
assertEquals(1, toList(myPatientDao.search(params)).size());
|
||||||
|
|
||||||
params.add("_language", new StringParam("TEST"));
|
|
||||||
assertEquals(1, toList(myPatientDao.search(params)).size());
|
|
||||||
|
|
||||||
params.add(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST"));
|
params.add(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST"));
|
||||||
assertEquals(1, toList(myPatientDao.search(params)).size());
|
assertEquals(1, toList(myPatientDao.search(params)).size());
|
||||||
|
|
||||||
|
@ -744,9 +741,6 @@ public class FhirResourceDaoDstu2SearchNoFtTest extends BaseJpaDstu2Test {
|
||||||
params.add("_id", new StringDt("TEST"));
|
params.add("_id", new StringDt("TEST"));
|
||||||
assertEquals(0, toList(myPatientDao.search(params)).size());
|
assertEquals(0, toList(myPatientDao.search(params)).size());
|
||||||
|
|
||||||
params.add("_language", new StringParam("TEST"));
|
|
||||||
assertEquals(0, toList(myPatientDao.search(params)).size());
|
|
||||||
|
|
||||||
params.add(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST"));
|
params.add(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST"));
|
||||||
assertEquals(0, toList(myPatientDao.search(params)).size());
|
assertEquals(0, toList(myPatientDao.search(params)).size());
|
||||||
|
|
||||||
|
@@ -766,148 +760,6 @@ public class FhirResourceDaoDstu2SearchNoFtTest extends BaseJpaDstu2Test {
 		}
 	}
 
-	@Test
-	public void testSearchLanguageParam() {
-		IIdType id1;
-		{
-			Patient patient = new Patient();
-			patient.getLanguage().setValue("en_CA");
-			patient.addIdentifier().setSystem("urn:system").setValue("001");
-			patient.addName().addFamily("testSearchLanguageParam").addGiven("Joe");
-			id1 = myPatientDao.create(patient, mySrd).getId();
-		}
-		IIdType id2;
-		{
-			Patient patient = new Patient();
-			patient.getLanguage().setValue("en_US");
-			patient.addIdentifier().setSystem("urn:system").setValue("002");
-			patient.addName().addFamily("testSearchLanguageParam").addGiven("John");
-			id2 = myPatientDao.create(patient, mySrd).getId();
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			params.setLoadSynchronous(true);
-			params.add(BaseResource.SP_RES_LANGUAGE, new StringParam("en_CA"));
-			List<IResource> patients = toList(myPatientDao.search(params));
-			assertEquals(1, patients.size());
-			assertEquals(id1.toUnqualifiedVersionless(), patients.get(0).getId().toUnqualifiedVersionless());
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			params.setLoadSynchronous(true);
-			params.add(BaseResource.SP_RES_LANGUAGE, new StringParam("en_US"));
-			List<Patient> patients = toList(myPatientDao.search(params));
-			assertEquals(1, patients.size());
-			assertEquals(id2.toUnqualifiedVersionless(), patients.get(0).getId().toUnqualifiedVersionless());
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			params.setLoadSynchronous(true);
-			params.add(BaseResource.SP_RES_LANGUAGE, new StringParam("en_GB"));
-			List<Patient> patients = toList(myPatientDao.search(params));
-			assertEquals(0, patients.size());
-		}
-	}
-
-	@Test
-	public void testSearchLanguageParamAndOr() {
-		IIdType id1;
-		{
-			Patient patient = new Patient();
-			patient.getLanguage().setValue("en_CA");
-			patient.addIdentifier().setSystem("urn:system").setValue("001");
-			patient.addName().addFamily("testSearchLanguageParam").addGiven("Joe");
-			id1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
-		}
-
-		Date betweenTime = new Date();
-
-		IIdType id2;
-		{
-			Patient patient = new Patient();
-			patient.getLanguage().setValue("en_US");
-			patient.addIdentifier().setSystem("urn:system").setValue("002");
-			patient.addName().addFamily("testSearchLanguageParam").addGiven("John");
-			id2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			params.setLoadSynchronous(true);
-			params.add(BaseResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("en_US")));
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1, id2));
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			params.setLoadSynchronous(true);
-			params.add(BaseResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("en_US")));
-			params.setLastUpdated(new DateRangeParam(betweenTime, null));
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id2));
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			params.setLoadSynchronous(true);
-			params.add(BaseResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			params.setLoadSynchronous(true);
-			StringAndListParam and = new StringAndListParam();
-			and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
-			and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")));
-			params.add(BaseResource.SP_RES_LANGUAGE, and);
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			params.setLoadSynchronous(true);
-			StringAndListParam and = new StringAndListParam();
-			and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
-			and.addAnd(new StringOrListParam().addOr(new StringParam("ZZZZZ")));
-			params.add(BaseResource.SP_RES_LANGUAGE, and);
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), empty());
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			params.setLoadSynchronous(true);
-			StringAndListParam and = new StringAndListParam();
-			and.addAnd(new StringOrListParam().addOr(new StringParam("ZZZZZ")));
-			and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
-			params.add(BaseResource.SP_RES_LANGUAGE, and);
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), empty());
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			params.setLoadSynchronous(true);
-			StringAndListParam and = new StringAndListParam();
-			and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
-			and.addAnd(new StringOrListParam().addOr(new StringParam("")).addOr(new StringParam(null)));
-			params.add(BaseResource.SP_RES_LANGUAGE, and);
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			params.setLoadSynchronous(true);
-			params.add("_id", new StringParam(id1.getIdPart()));
-			StringAndListParam and = new StringAndListParam();
-			and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
-			and.addAnd(new StringOrListParam().addOr(new StringParam("")).addOr(new StringParam(null)));
-			params.add(BaseResource.SP_RES_LANGUAGE, and);
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			params.setLoadSynchronous(true);
-			StringAndListParam and = new StringAndListParam();
-			and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
-			and.addAnd(new StringOrListParam().addOr(new StringParam("")).addOr(new StringParam(null)));
-			params.add(BaseResource.SP_RES_LANGUAGE, and);
-			params.add("_id", new StringParam(id1.getIdPart()));
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
-		}
-
-	}
-
 	@Test
 	public void testSearchLastUpdatedParam() throws InterruptedException {
 		String methodName = "testSearchLastUpdatedParam";
@@ -224,7 +224,7 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
 	}
 
 	@Test
-	public void testCantSearchForDeletedResourceByLanguageOrTag() {
+	public void testCantSearchForDeletedResourceByTag() {
 		String methodName = "testCantSearchForDeletedResourceByLanguageOrTag";
 		Organization org = new Organization();
 		org.setLanguage(new CodeDt("EN_ca"));
@@ -236,9 +236,7 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
 
 		IIdType orgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
 
-		SearchParameterMap map = new SearchParameterMap();
-		map.add("_language", new StringParam("EN_ca"));
-		assertEquals(1, myOrganizationDao.search(map).size().intValue());
+		SearchParameterMap map;
 
 		map = new SearchParameterMap();
 		map.add("_tag", new TokenParam(methodName, methodName));
@@ -246,10 +244,6 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
 
 		myOrganizationDao.delete(orgId, mySrd);
 
-		map = new SearchParameterMap();
-		map.add("_language", new StringParam("EN_ca"));
-		assertEquals(0, myOrganizationDao.search(map).size().intValue());
-
 		map = new SearchParameterMap();
 		map.add("_tag", new TokenParam(methodName, methodName));
 		assertEquals(0, myOrganizationDao.search(map).size().intValue());
@@ -1603,7 +1597,7 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
 			found = toList(myPatientDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Patient.SP_BIRTHDATE + "AAAA", new DateParam(ParamPrefixEnum.GREATERTHAN, "2000-01-01"))));
 			assertEquals(0, found.size());
 		} catch (InvalidRequestException e) {
-			assertEquals("Unknown search parameter \"birthdateAAAA\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _language, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, careprovider, deathdate, deceased, email, family, gender, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
+			assertEquals("Unknown search parameter \"birthdateAAAA\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, careprovider, deathdate, deceased, email, family, gender, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
 		}
 
 	}
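
The hunks above remove "_language" from the DSTU2 DAO tests, so a search that still supplies that parameter is now handled like any other unknown parameter. A minimal sketch of the behaviour the updated assertions imply, reusing only the DAO API already shown in these tests (the language value is illustrative):

    SearchParameterMap params = new SearchParameterMap();
    params.setLoadSynchronous(true);
    params.add("_language", new StringParam("en_CA"));
    try {
        // "_language" is no longer among the valid Patient search parameters
        myPatientDao.search(params);
        fail();
    } catch (InvalidRequestException e) {
        // the exception message lists the remaining valid parameters, without _language
    }
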
@@ -1015,7 +1015,7 @@ public class FhirResourceDaoDstu3SearchCustomSearchParamTest extends BaseJpaDstu3Test {
 			myPatientDao.search(map).size();
 			fail();
 		} catch (InvalidRequestException e) {
-			assertEquals("Unknown search parameter \"foo\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _language, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, death-date, deceased, email, family, gender, general-practitioner, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
+			assertEquals("Unknown search parameter \"foo\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, death-date, deceased, email, family, gender, general-practitioner, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
 		}
 	}
 
@@ -1053,7 +1053,7 @@ public class FhirResourceDaoDstu3SearchCustomSearchParamTest extends BaseJpaDstu3Test {
 			myPatientDao.search(map).size();
 			fail();
 		} catch (InvalidRequestException e) {
-			assertEquals("Unknown search parameter \"foo\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _language, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, death-date, deceased, email, family, gender, general-practitioner, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
+			assertEquals("Unknown search parameter \"foo\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, death-date, deceased, email, family, gender, general-practitioner, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
 		}
 
 		// Try with normal gender SP
@@ -1192,11 +1192,6 @@ public class FhirResourceDaoDstu3SearchNoFtTest extends BaseJpaDstu3Test {
 		params.add("_id", new StringParam("TEST"));
 		assertEquals(1, toList(myPatientDao.search(params)).size());
 
-		params = new SearchParameterMap();
-		params.setLoadSynchronous(true);
-		params.add("_language", new StringParam("TEST"));
-		assertEquals(1, toList(myPatientDao.search(params)).size());
-
 		params = new SearchParameterMap();
 		params.setLoadSynchronous(true);
 		params.add(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST"));
@@ -1214,11 +1209,6 @@ public class FhirResourceDaoDstu3SearchNoFtTest extends BaseJpaDstu3Test {
 		params.add("_id", new StringParam("TEST"));
 		assertEquals(0, toList(myPatientDao.search(params)).size());
 
-		params = new SearchParameterMap();
-		params.setLoadSynchronous(true);
-		params.add("_language", new StringParam("TEST"));
-		assertEquals(0, toList(myPatientDao.search(params)).size());
-
 		params = new SearchParameterMap();
 		params.setLoadSynchronous(true);
 		params.add(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST"));
@@ -1241,143 +1231,6 @@ public class FhirResourceDaoDstu3SearchNoFtTest extends BaseJpaDstu3Test {
 		}
 	}
 
-	@Test
-	public void testSearchLanguageParam() {
-		IIdType id1;
-		{
-			Patient patient = new Patient();
-			patient.getLanguageElement().setValue("en_CA");
-			patient.addIdentifier().setSystem("urn:system").setValue("001");
-			patient.addName().setFamily("testSearchLanguageParam").addGiven("Joe");
-			id1 = myPatientDao.create(patient, mySrd).getId();
-		}
-		IIdType id2;
-		{
-			Patient patient = new Patient();
-			patient.getLanguageElement().setValue("en_US");
-			patient.addIdentifier().setSystem("urn:system").setValue("002");
-			patient.addName().setFamily("testSearchLanguageParam").addGiven("John");
-			id2 = myPatientDao.create(patient, mySrd).getId();
-		}
-		SearchParameterMap params;
-		{
-			params = new SearchParameterMap();
-			params.setLoadSynchronous(true);
-
-			params.add(IAnyResource.SP_RES_LANGUAGE, new StringParam("en_CA"));
-			List<IBaseResource> patients = toList(myPatientDao.search(params));
-			assertEquals(1, patients.size());
-			assertEquals(id1.toUnqualifiedVersionless(), patients.get(0).getIdElement().toUnqualifiedVersionless());
-		}
-		{
-			params = new SearchParameterMap();
-			params.setLoadSynchronous(true);
-
-			params.add(IAnyResource.SP_RES_LANGUAGE, new StringParam("en_US"));
-			List<Patient> patients = toList(myPatientDao.search(params));
-			assertEquals(1, patients.size());
-			assertEquals(id2.toUnqualifiedVersionless(), patients.get(0).getIdElement().toUnqualifiedVersionless());
-		}
-		{
-			params = new SearchParameterMap();
-			params.setLoadSynchronous(true);
-
-			params.add(IAnyResource.SP_RES_LANGUAGE, new StringParam("en_GB"));
-			List<Patient> patients = toList(myPatientDao.search(params));
-			assertEquals(0, patients.size());
-		}
-	}
-
-	@Test
-	public void testSearchLanguageParamAndOr() {
-		IIdType id1;
-		{
-			Patient patient = new Patient();
-			patient.getLanguageElement().setValue("en_CA");
-			patient.addIdentifier().setSystem("urn:system").setValue("001");
-			patient.addName().setFamily("testSearchLanguageParam").addGiven("Joe");
-			id1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
-		}
-		TestUtil.sleepOneClick();
-		Date betweenTime = new Date();
-
-		IIdType id2;
-		{
-			Patient patient = new Patient();
-			patient.getLanguageElement().setValue("en_US");
-			patient.addIdentifier().setSystem("urn:system").setValue("002");
-			patient.addName().setFamily("testSearchLanguageParam").addGiven("John");
-			id2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			params.add(IAnyResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("en_US")));
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1, id2));
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			params.add(IAnyResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("en_US")));
-			params.setLastUpdated(new DateRangeParam(betweenTime, null));
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id2));
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			params.add(IAnyResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			StringAndListParam and = new StringAndListParam();
-			and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
-			and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")));
-			params.add(IAnyResource.SP_RES_LANGUAGE, and);
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			StringAndListParam and = new StringAndListParam();
-			and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
-			and.addAnd(new StringOrListParam().addOr(new StringParam("ZZZZZ")));
-			params.add(IAnyResource.SP_RES_LANGUAGE, and);
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), empty());
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			StringAndListParam and = new StringAndListParam();
-			and.addAnd(new StringOrListParam().addOr(new StringParam("ZZZZZ")));
-			and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
-			params.add(IAnyResource.SP_RES_LANGUAGE, and);
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), empty());
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			StringAndListParam and = new StringAndListParam();
-			and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
-			and.addAnd(new StringOrListParam().addOr(new StringParam("")).addOr(new StringParam(null)));
-			params.add(IAnyResource.SP_RES_LANGUAGE, and);
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			params.add("_id", new StringParam(id1.getIdPart()));
-			StringAndListParam and = new StringAndListParam();
-			and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
-			and.addAnd(new StringOrListParam().addOr(new StringParam("")).addOr(new StringParam(null)));
-			params.add(IAnyResource.SP_RES_LANGUAGE, and);
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
-		}
-		{
-			SearchParameterMap params = new SearchParameterMap();
-			StringAndListParam and = new StringAndListParam();
-			and.addAnd(new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
-			and.addAnd(new StringOrListParam().addOr(new StringParam("")).addOr(new StringParam(null)));
-			params.add(IAnyResource.SP_RES_LANGUAGE, and);
-			params.add("_id", new StringParam(id1.getIdPart()));
-			assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
-		}
-
-	}
-
 	@Test
 	public void testSearchLastUpdatedParam() {
 		String methodName = "testSearchLastUpdatedParam";
@@ -206,21 +206,12 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test {
 		IIdType orgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
 
 		SearchParameterMap map = new SearchParameterMap();
-		map.add("_language", new StringParam("EN_ca"));
-		assertEquals(1, myOrganizationDao.search(map).size().intValue());
-
-		map = new SearchParameterMap();
 		map.setLoadSynchronous(true);
 		map.add("_tag", new TokenParam(methodName, methodName));
 		assertEquals(1, myOrganizationDao.search(map).size().intValue());
 
 		myOrganizationDao.delete(orgId, mySrd);
 
-		map = new SearchParameterMap();
-		map.setLoadSynchronous(true);
-		map.add("_language", new StringParam("EN_ca"));
-		assertEquals(0, myOrganizationDao.search(map).size().intValue());
-
 		map = new SearchParameterMap();
 		map.setLoadSynchronous(true);
 		map.add("_tag", new TokenParam(methodName, methodName));
@@ -2014,7 +2005,7 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test {
 			found = toList(myPatientDao.search(new SearchParameterMap(Patient.SP_BIRTHDATE + "AAAA", new DateParam(ParamPrefixEnum.GREATERTHAN, "2000-01-01")).setLoadSynchronous(true)));
 			assertEquals(0, found.size());
 		} catch (InvalidRequestException e) {
-			assertEquals("Unknown search parameter \"birthdateAAAA\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _language, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, death-date, deceased, email, family, gender, general-practitioner, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
+			assertEquals("Unknown search parameter \"birthdateAAAA\" for resource type \"Patient\". Valid search parameters for this search are: [_id, _lastUpdated, active, address, address-city, address-country, address-postalcode, address-state, address-use, animal-breed, animal-species, birthdate, death-date, deceased, email, family, gender, general-practitioner, given, identifier, language, link, name, organization, phone, phonetic, telecom]", e.getMessage());
 		}
 	}
 
@@ -1,35 +0,0 @@
-package ca.uhn.fhir.jpa.dao.index;
-
-import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.model.config.PartitionSettings;
-import org.junit.jupiter.api.Test;
-
-import static org.junit.jupiter.api.Assertions.*;
-
-public class IdHelperServiceTest {
-
-	@Test
-	public void testReplaceDefault_AllPartitions() {
-
-		IdHelperService svc = new IdHelperService();
-		PartitionSettings partitionSettings = new PartitionSettings();
-		partitionSettings.setDefaultPartitionId(1);
-		svc.setPartitionSettingsForUnitTest(partitionSettings);
-
-		RequestPartitionId outcome = svc.replaceDefault(RequestPartitionId.allPartitions());
-		assertSame(RequestPartitionId.allPartitions(), outcome);
-	}
-
-	@Test
-	public void testReplaceDefault_DefaultPartition() {
-
-		IdHelperService svc = new IdHelperService();
-		PartitionSettings partitionSettings = new PartitionSettings();
-		partitionSettings.setDefaultPartitionId(1);
-		svc.setPartitionSettingsForUnitTest(partitionSettings);
-
-		RequestPartitionId outcome = svc.replaceDefault(RequestPartitionId.defaultPartition());
-		assertEquals(1, outcome.getPartitionIds().get(0));
-	}
-
-}
@@ -0,0 +1,209 @@
+package ca.uhn.fhir.jpa.dao.index;
+
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
+import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl;
+import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.entity.ForcedId;
+import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import javax.persistence.TypedQuery;
+import javax.persistence.criteria.CriteriaBuilder;
+import javax.persistence.criteria.CriteriaQuery;
+import javax.persistence.criteria.Path;
+import javax.persistence.criteria.Root;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertSame;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.when;
+
+@ExtendWith(MockitoExtension.class)
+public class ResourceVersionSvcTest {
+
+	// helper class to package up data for helper methods
+	private class ResourceIdPackage {
+		public IIdType MyResourceId;
+		public ResourcePersistentId MyPid;
+		public Long MyVersion;
+
+		public ResourceIdPackage(IIdType id,
+										 ResourcePersistentId pid,
+										 Long version) {
+			MyResourceId = id;
+			MyPid = pid;
+			MyVersion = version;
+		}
+	}
+
+	@Mock
+	DaoRegistry myDaoRegistry;
+	@Mock
+	IResourceTableDao myResourceTableDao;
+	@Mock
+	IdHelperService myIdHelperService;
+
+	// TODO KHS move the methods that use this out to a separate test class
+	@InjectMocks
+	private ResourceVersionSvcDaoImpl myResourceVersionSvc;
+
+	/**
+	 * Gets a ResourceTable record for getResourceVersionsForPid
+	 * Order matters!
+	 * @param resourceType
+	 * @param pid
+	 * @param version
+	 * @return
+	 */
+	private Object[] getResourceTableRecordForResourceTypeAndPid(String resourceType, long pid, long version) {
+		return new Object[] {
+			pid, // long
+			resourceType, // string
+			version // long
+		};
+	}
+
+	/**
+	 * Helper function to mock out resolveResourcePersistentIdsWithCache
+	 * to return empty lists (as if no resources were found).
+	 */
+	private void mock_resolveResourcePersistentIdsWithCache_toReturnNothing() {
+		CriteriaBuilder cb = Mockito.mock(CriteriaBuilder.class);
+		CriteriaQuery<ForcedId> criteriaQuery = Mockito.mock(CriteriaQuery.class);
+		Root<ForcedId> from = Mockito.mock(Root.class);
+		Path path = Mockito.mock(Path.class);
+
+		TypedQuery<ForcedId> queryMock = Mockito.mock(TypedQuery.class);
+	}
+
+	/**
+	 * Helper function to mock out getIdsOfExistingResources
+	 * to return the matches and resources matching those provided
+	 * by parameters.
+	 * @param theResourcePacks
+	 */
+	private void mockReturnsFor_getIdsOfExistingResources(ResourceIdPackage... theResourcePacks) {
+		List<ResourcePersistentId> resourcePersistentIds = new ArrayList<>();
+		List<Object[]> matches = new ArrayList<>();
+
+		for (ResourceIdPackage pack : theResourcePacks) {
+			resourcePersistentIds.add(pack.MyPid);
+
+			matches.add(getResourceTableRecordForResourceTypeAndPid(
+				pack.MyResourceId.getResourceType(),
+				pack.MyPid.getIdAsLong(),
+				pack.MyVersion
+			));
+		}
+
+		ResourcePersistentId first = resourcePersistentIds.remove(0);
+		if (resourcePersistentIds.isEmpty()) {
+			when(myIdHelperService.resolveResourcePersistentIdsWithCache(any(), any())).thenReturn(Collections.singletonList(first));
+		}
+		else {
+			when(myIdHelperService.resolveResourcePersistentIdsWithCache(any(), any())).thenReturn(resourcePersistentIds);
+		}
+	}
+
+	@Test
+	public void getLatestVersionIdsForResourceIds_whenResourceExists_returnsMapWithPIDAndVersion() {
+		IIdType type = new IdDt("Patient/RED");
+		ResourcePersistentId pid = new ResourcePersistentId(1L);
+		pid.setAssociatedResourceId(type);
+		HashMap<IIdType, ResourcePersistentId> map = new HashMap<>();
+		map.put(type, pid);
+		ResourceIdPackage pack = new ResourceIdPackage(type, pid, 2L);
+
+		// when
+		mockReturnsFor_getIdsOfExistingResources(pack);
+
+		// test
+		ResourcePersistentIdMap retMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(),
+			Collections.singletonList(type));
+
+		Assertions.assertTrue(retMap.containsKey(type));
+		Assertions.assertEquals(pid.getVersion(), map.get(type).getVersion());
+	}
+
+	@Test
+	public void getLatestVersionIdsForResourceIds_whenResourceDoesNotExist_returnsEmptyMap() {
+		IIdType type = new IdDt("Patient/RED");
+
+		// when
+		mock_resolveResourcePersistentIdsWithCache_toReturnNothing();
+
+		// test
+		ResourcePersistentIdMap retMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(),
+			Collections.singletonList(type));
+
+		Assertions.assertTrue(retMap.isEmpty());
+	}
+
+	@Test
+	public void getLatestVersionIdsForResourceIds_whenSomeResourcesDoNotExist_returnsOnlyExistingElements() {
+		// resource to be found
+		IIdType type = new IdDt("Patient/RED");
+		ResourcePersistentId pid = new ResourcePersistentId(1L);
+		pid.setAssociatedResourceId(type);
+		ResourceIdPackage pack = new ResourceIdPackage(type, pid, 2L);
+
+		// resource that won't be found
+		IIdType type2 = new IdDt("Patient/BLUE");
+
+		// when
+		mock_resolveResourcePersistentIdsWithCache_toReturnNothing();
+		mockReturnsFor_getIdsOfExistingResources(pack);
+
+		// test
+		ResourcePersistentIdMap retMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(
+			RequestPartitionId.allPartitions(),
+			Arrays.asList(type, type2)
+		);
+
+		// verify
+		Assertions.assertEquals(1, retMap.size());
+		Assertions.assertTrue(retMap.containsKey(type));
+		Assertions.assertFalse(retMap.containsKey(type2));
+	}
+
+	@Test
+	public void testReplaceDefault_AllPartitions() {
+
+		IdHelperService svc = new IdHelperService();
+		PartitionSettings partitionSettings = new PartitionSettings();
+		partitionSettings.setDefaultPartitionId(1);
+		svc.setPartitionSettingsForUnitTest(partitionSettings);
+
+		RequestPartitionId outcome = svc.replaceDefault(RequestPartitionId.allPartitions());
+		assertSame(RequestPartitionId.allPartitions(), outcome);
+	}
+
+	@Test
+	public void testReplaceDefault_DefaultPartition() {
+
+		IdHelperService svc = new IdHelperService();
+		PartitionSettings partitionSettings = new PartitionSettings();
+		partitionSettings.setDefaultPartitionId(1);
+		svc.setPartitionSettingsForUnitTest(partitionSettings);
+
+		RequestPartitionId outcome = svc.replaceDefault(RequestPartitionId.defaultPartition());
+		assertEquals(1, outcome.getPartitionIds().get(0));
+	}
+
+}
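
The mocked service above is consumed as a simple lookup; a minimal usage sketch, reusing only names and calls that already appear in the test class (the resource ID is illustrative):

    IIdType id = new IdDt("Patient/RED");
    ResourcePersistentIdMap versions = myResourceVersionSvc.getLatestVersionIdsForResourceIds(
        RequestPartitionId.allPartitions(), Collections.singletonList(id));
    // the key is present only when the ID could be resolved to a persistent ID and version
    boolean known = versions.containsKey(id);
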
@@ -0,0 +1,388 @@
+package ca.uhn.fhir.jpa.dao.r4;
+
+import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.model.entity.ModelConfig;
+import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
+import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.parser.StrictErrorHandler;
+import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.IdType;
+import org.hl7.fhir.r4.model.Observation;
+import org.hl7.fhir.r4.model.Organization;
+import org.hl7.fhir.r4.model.Patient;
+import org.hl7.fhir.r4.model.StringType;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.contains;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+
+public class ChainingR4SearchTest extends BaseJpaR4Test {
+
+	@Autowired
+	MatchUrlService myMatchUrlService;
+
+	@AfterEach
+	public void after() throws Exception {
+
+		myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete());
+		myDaoConfig.setAllowExternalReferences(new DaoConfig().isAllowExternalReferences());
+		myDaoConfig.setReuseCachedSearchResultsForMillis(new DaoConfig().getReuseCachedSearchResultsForMillis());
+		myDaoConfig.setCountSearchResultsUpTo(new DaoConfig().getCountSearchResultsUpTo());
+		myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
+		myDaoConfig.setAllowContainsSearches(new DaoConfig().isAllowContainsSearches());
+		myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
+
+		myModelConfig.setIndexOnContainedResources(false);
+		myModelConfig.setIndexOnContainedResources(new ModelConfig().isIndexOnContainedResources());
+	}
+
+	@BeforeEach
+	public void before() throws Exception {
+		myFhirCtx.setParserErrorHandler(new StrictErrorHandler());
+
+		myDaoConfig.setAllowMultipleDelete(true);
+		myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
+		myModelConfig.setIndexOnContainedResources(true);
+		myDaoConfig.setReuseCachedSearchResultsForMillis(null);
+	}
+
+	@Test
+	public void testShouldResolveATwoLinkChainWithStandAloneResources() throws Exception {
+
+		// setup
+		IIdType oid1;
+
+		{
+			Patient p = new Patient();
+			p.setId(IdType.newRandomUuid());
+			p.addName().setFamily("Smith").addGiven("John");
+			myPatientDao.create(p, mySrd);
+
+			Observation obs = new Observation();
+			obs.getCode().setText("Observation 1");
+			obs.getSubject().setReference(p.getId());
+
+			oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
+		}
+
+		String url = "/Observation?subject.name=Smith";
+
+		// execute
+		List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
+
+		// validate
+		assertEquals(1L, oids.size());
+		assertThat(oids, contains(oid1.getIdPart()));
+	}
+
+	@Test
+	public void testShouldResolveATwoLinkChainWithAContainedResource() throws Exception {
+		// setup
+		IIdType oid1;
+
+		{
+			Patient p = new Patient();
+			p.setId("pat");
+			p.addName().setFamily("Smith").addGiven("John");
+
+			Observation obs = new Observation();
+			obs.getContained().add(p);
+			obs.getCode().setText("Observation 1");
+			obs.setValue(new StringType("Test"));
+			obs.getSubject().setReference("#pat");
+
+			oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
+		}
+
+		String url = "/Observation?subject.name=Smith";
+
+		// execute
+		List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
+
+		// validate
+		assertEquals(1L, oids.size());
+		assertThat(oids, contains(oid1.getIdPart()));
+	}
+
+	@Test
+	public void testShouldResolveAThreeLinkChainWhereAllResourcesStandAlone() throws Exception {
+
+		// setup
+		IIdType oid1;
+
+		{
+			Organization org = new Organization();
+			org.setId(IdType.newRandomUuid());
+			org.setName("HealthCo");
+			myOrganizationDao.create(org, mySrd);
+
+			Patient p = new Patient();
+			p.setId(IdType.newRandomUuid());
+			p.addName().setFamily("Smith").addGiven("John");
+			p.getManagingOrganization().setReference(org.getId());
+			myPatientDao.create(p, mySrd);
+
+			Observation obs = new Observation();
+			obs.getCode().setText("Observation 1");
+			obs.getSubject().setReference(p.getId());
+
+			oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
+		}
+
+		String url = "/Observation?subject.organization.name=HealthCo";
+
+		// execute
+		List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
+
+		// validate
+		assertEquals(1L, oids.size());
+		assertThat(oids, contains(oid1.getIdPart()));
+	}
+
+	@Test
+	public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheEndOfTheChain() throws Exception {
+		// This is the case that is most relevant to SMILE-2899
+
+		// setup
+		IIdType oid1;
+
+		{
+			Organization org = new Organization();
+			org.setId("org");
+			org.setName("HealthCo");
+
+			Patient p = new Patient();
+			p.setId(IdType.newRandomUuid());
+			p.getContained().add(org);
+			p.addName().setFamily("Smith").addGiven("John");
+			p.getManagingOrganization().setReference("#org");
+			myPatientDao.create(p, mySrd);
+
+			Observation obs = new Observation();
+			obs.getCode().setText("Observation 1");
+			obs.getSubject().setReference(p.getId());
+
+			oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
+		}
+
+		String url = "/Observation?subject.organization.name=HealthCo";
+
+		// execute
+		List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
+
+		// validate
+		assertEquals(1L, oids.size());
+		assertThat(oids, contains(oid1.getIdPart()));
+	}
+
+	@Test
+	@Disabled
+	public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheBeginningOfTheChain() throws Exception {
+		// We do not currently support this case - we may not be indexing the references of contained resources
+
+		// setup
+		IIdType oid1;
+
+		{
+			Organization org = new Organization();
+			org.setId(IdType.newRandomUuid());
+			org.setName("HealthCo");
+			myOrganizationDao.create(org, mySrd);
+
+			Patient p = new Patient();
+			p.setId("pat");
+			p.addName().setFamily("Smith").addGiven("John");
+			p.getManagingOrganization().setReference(org.getId());
+
+			Observation obs = new Observation();
+			obs.getContained().add(p);
+			obs.getCode().setText("Observation 1");
+			obs.getSubject().setReference("#pat");
+
+			oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
+		}
+
+		String url = "/Observation?subject.organization.name=HealthCo";
+
+		// execute
+		List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
+
+		// validate
+		assertEquals(1L, oids.size());
+		assertThat(oids, contains(oid1.getIdPart()));
+	}
+
+	@Test
+	public void testShouldResolveAThreeLinkChainWithQualifiersWhereAllResourcesStandAlone() throws Exception {
+
+		// setup
+		IIdType oid1;
+
+		{
+			Organization org = new Organization();
+			org.setId(IdType.newRandomUuid());
+			org.setName("HealthCo");
+			myOrganizationDao.create(org, mySrd);
+
+			Patient p = new Patient();
+			p.setId(IdType.newRandomUuid());
+			p.addName().setFamily("Smith").addGiven("John");
+			p.getManagingOrganization().setReference(org.getId());
+			myPatientDao.create(p, mySrd);
+
+			Observation obs = new Observation();
+			obs.getCode().setText("Observation 1");
+			obs.getSubject().setReference(p.getId());
+
+			oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
+		}
+
+		String url = "/Observation?subject:Patient.organization:Organization.name=HealthCo";
+
+		// execute
+		List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
+
+		// validate
+		assertEquals(1L, oids.size());
+		assertThat(oids, contains(oid1.getIdPart()));
+	}
+
+	@Test
+	public void testShouldResolveAThreeLinkChainWithQualifiersWithAContainedResourceAtTheEndOfTheChain() throws Exception {
+		// This is the case that is most relevant to SMILE-2899
+
+		// setup
+		IIdType oid1;
+
+		{
+			Organization org = new Organization();
+			org.setId("org");
+			org.setName("HealthCo");
+
+			Patient p = new Patient();
+			p.setId(IdType.newRandomUuid());
+			p.getContained().add(org);
+			p.addName().setFamily("Smith").addGiven("John");
+			p.getManagingOrganization().setReference("#org");
+			myPatientDao.create(p, mySrd);
+
+			Observation obs = new Observation();
+			obs.getCode().setText("Observation 1");
+			obs.getSubject().setReference(p.getId());
+
+			oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
+		}
+
+		String url = "/Observation?subject:Patient.organization:Organization.name=HealthCo";
+
+		// execute
+		List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
+
+		// validate
+		assertEquals(1L, oids.size());
+		assertThat(oids, contains(oid1.getIdPart()));
+	}
+
+	@Test
+	public void testShouldResolveAFourLinkChainWhereAllResourcesStandAlone() throws Exception {
+
+		// setup
+		IIdType oid1;
+
+		{
+			Organization org = new Organization();
+			org.setId(IdType.newRandomUuid());
+			org.setName("HealthCo");
+			myOrganizationDao.create(org, mySrd);
+
+			Organization partOfOrg = new Organization();
+			partOfOrg.setId(IdType.newRandomUuid());
+			partOfOrg.getPartOf().setReference(org.getId());
+			myOrganizationDao.create(partOfOrg, mySrd);
+
+			Patient p = new Patient();
+			p.setId(IdType.newRandomUuid());
+			p.addName().setFamily("Smith").addGiven("John");
+			p.getManagingOrganization().setReference(partOfOrg.getId());
+			myPatientDao.create(p, mySrd);
+
+			Observation obs = new Observation();
+			obs.getCode().setText("Observation 1");
+			obs.getSubject().setReference(p.getId());
+
+			oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
+		}
+
+		String url = "/Observation?subject.organization.partof.name=HealthCo";
+
+		// execute
+		List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
+
+		// validate
+		assertEquals(1L, oids.size());
+		assertThat(oids, contains(oid1.getIdPart()));
+	}
+
+	@Test
+	public void testShouldResolveAFourLinkChainWhereTheLastReferenceIsContained() throws Exception {
+
+		// setup
+		IIdType oid1;
+
+		{
+			Organization org = new Organization();
+			org.setId("parent");
+			org.setName("HealthCo");
+
+			Organization partOfOrg = new Organization();
+			partOfOrg.setId(IdType.newRandomUuid());
+			partOfOrg.getContained().add(org);
+			partOfOrg.getPartOf().setReference("#parent");
+			myOrganizationDao.create(partOfOrg, mySrd);
+
+			Patient p = new Patient();
+			p.setId(IdType.newRandomUuid());
+			p.addName().setFamily("Smith").addGiven("John");
+			p.getManagingOrganization().setReference(partOfOrg.getId());
+			myPatientDao.create(p, mySrd);
+
+			Observation obs = new Observation();
+			obs.getCode().setText("Observation 1");
+			obs.getSubject().setReference(p.getId());
+
+			oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
+		}
+
+		String url = "/Observation?subject.organization.partof.name=HealthCo";
+
+		// execute
+		List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
+
+		// validate
+		assertEquals(1L, oids.size());
+		assertThat(oids, contains(oid1.getIdPart()));
+	}
+
+	private List<String> searchAndReturnUnqualifiedVersionlessIdValues(String theUrl) throws IOException {
+		List<String> ids = new ArrayList<>();
+
+		ResourceSearch search = myMatchUrlService.getResourceSearch(theUrl);
+		SearchParameterMap map = search.getSearchParameterMap();
+		map.setLoadSynchronous(true);
+		IBundleProvider result = myObservationDao.search(map);
+		return result.getAllResourceIds();
+	}
+
+}
Some files were not shown because too many files have changed in this diff.
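
All of the chained-search tests above funnel through the private helper at the end of the class; a minimal sketch of a call site under the same assumptions (the expected ID part is illustrative and would be whatever the Observation create returned):

    // hypothetical additional test body, mirroring the existing ones
    List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues("/Observation?subject.organization.name=HealthCo");
    assertEquals(1L, oids.size());
    assertThat(oids, contains(expectedObservationIdPart)); // expectedObservationIdPart is illustrative
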