Merge branch 'master' into ja_20200206_multitenancy

This commit is contained in:
James Agnew 2020-03-03 09:25:19 -05:00
commit 4a160a2df3
182 changed files with 2318 additions and 712 deletions

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId> <artifactId>hapi-fhir</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -1,7 +1,7 @@
package example; package example;
import org.hl7.fhir.converter.NullVersionConverterAdvisor30; import org.hl7.fhir.convertors.conv10_30.Observation10_30;
import org.hl7.fhir.convertors.*; import org.hl7.fhir.convertors.conv14_30.Questionnaire14_30;
import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.FHIRException;
public class ConverterExamples { public class ConverterExamples {
@ -9,16 +9,12 @@ public class ConverterExamples {
@SuppressWarnings("unused") @SuppressWarnings("unused")
public void c1020() throws FHIRException { public void c1020() throws FHIRException {
//START SNIPPET: 1020 //START SNIPPET: 1020
// Create a converter
NullVersionConverterAdvisor30 advisor = new NullVersionConverterAdvisor30();
VersionConvertor_10_30 converter = new VersionConvertor_10_30(advisor);
// Create an input resource to convert // Create an input resource to convert
org.hl7.fhir.dstu2.model.Observation input = new org.hl7.fhir.dstu2.model.Observation(); org.hl7.fhir.dstu2.model.Observation input = new org.hl7.fhir.dstu2.model.Observation();
input.setEncounter(new org.hl7.fhir.dstu2.model.Reference("Encounter/123")); input.setEncounter(new org.hl7.fhir.dstu2.model.Reference("Encounter/123"));
// Convert the resource // Convert the resource
org.hl7.fhir.dstu3.model.Observation output = converter.convertObservation(input); org.hl7.fhir.dstu3.model.Observation output = Observation10_30.convertObservation(input);
String context = output.getContext().getReference(); String context = output.getContext().getReference();
//END SNIPPET: 1020 //END SNIPPET: 1020
} }
@ -31,7 +27,7 @@ public class ConverterExamples {
input.setTitle("My title"); input.setTitle("My title");
// Convert the resource // Convert the resource
org.hl7.fhir.dstu3.model.Questionnaire output = VersionConvertor_14_30.convertQuestionnaire(input); org.hl7.fhir.dstu3.model.Questionnaire output = Questionnaire14_30.convertQuestionnaire(input);
String context = output.getTitle(); String context = output.getTitle();
//END SNIPPET: 1420 //END SNIPPET: 1420
} }
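As a point of reference for the reworked API shown above, the snippet can be run standalone. A minimal sketch, assuming the 4.3.0-era org.hl7.fhir.convertors artifacts from this diff are on the classpath:

import org.hl7.fhir.convertors.conv10_30.Observation10_30;
import org.hl7.fhir.exceptions.FHIRException;

public class ObservationConversionSketch {
   public static void main(String[] args) throws FHIRException {
      // Build a DSTU2 Observation with an encounter reference
      org.hl7.fhir.dstu2.model.Observation input = new org.hl7.fhir.dstu2.model.Observation();
      input.setEncounter(new org.hl7.fhir.dstu2.model.Reference("Encounter/123"));

      // The reworked converters expose static per-resource methods, so no advisor
      // or converter instance is needed for this simple case
      org.hl7.fhir.dstu3.model.Observation output = Observation10_30.convertObservation(input);

      // DSTU2 "encounter" maps to "context" in DSTU3
      System.out.println(output.getContext().getReference()); // Encounter/123
   }
}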

View File

@ -4,7 +4,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId> <artifactId>hapi-fhir</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -22,8 +22,11 @@ package ca.uhn.fhir.context;
import ca.uhn.fhir.util.UrlUtil; import ca.uhn.fhir.util.UrlUtil;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBase;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.lang.reflect.Constructor; import java.lang.reflect.Constructor;
import java.util.*; import java.util.*;
@ -33,11 +36,11 @@ public abstract class BaseRuntimeElementDefinition<T extends IBase> {
private final Class<? extends T> myImplementingClass; private final Class<? extends T> myImplementingClass;
private final String myName; private final String myName;
private final boolean myStandardType; private final boolean myStandardType;
private Map<Class<?>, Constructor<T>> myConstructors = Collections.synchronizedMap(new HashMap<Class<?>, Constructor<T>>()); private Map<Class<?>, Constructor<T>> myConstructors = Collections.synchronizedMap(new HashMap<>());
private List<RuntimeChildDeclaredExtensionDefinition> myExtensions = new ArrayList<RuntimeChildDeclaredExtensionDefinition>(); private List<RuntimeChildDeclaredExtensionDefinition> myExtensions = new ArrayList<>();
private List<RuntimeChildDeclaredExtensionDefinition> myExtensionsModifier = new ArrayList<RuntimeChildDeclaredExtensionDefinition>(); private List<RuntimeChildDeclaredExtensionDefinition> myExtensionsModifier = new ArrayList<>();
private List<RuntimeChildDeclaredExtensionDefinition> myExtensionsNonModifier = new ArrayList<RuntimeChildDeclaredExtensionDefinition>(); private List<RuntimeChildDeclaredExtensionDefinition> myExtensionsNonModifier = new ArrayList<>();
private Map<String, RuntimeChildDeclaredExtensionDefinition> myUrlToExtension = new HashMap<String, RuntimeChildDeclaredExtensionDefinition>(); private Map<String, RuntimeChildDeclaredExtensionDefinition> myUrlToExtension = new HashMap<>();
private BaseRuntimeElementDefinition<?> myRootParentDefinition; private BaseRuntimeElementDefinition<?> myRootParentDefinition;
public BaseRuntimeElementDefinition(String theName, Class<? extends T> theImplementingClass, boolean theStandardType) { public BaseRuntimeElementDefinition(String theName, Class<? extends T> theImplementingClass, boolean theStandardType) {
@ -56,19 +59,17 @@ public abstract class BaseRuntimeElementDefinition<T extends IBase> {
myImplementingClass = theImplementingClass; myImplementingClass = theImplementingClass;
} }
public void addExtension(RuntimeChildDeclaredExtensionDefinition theExtension) { public void addExtension(@Nonnull RuntimeChildDeclaredExtensionDefinition theExtension) {
if (theExtension == null) { Validate.notNull(theExtension, "theExtension must not be null");
throw new NullPointerException();
}
myExtensions.add(theExtension); myExtensions.add(theExtension);
} }
public abstract ChildTypeEnum getChildType(); public abstract ChildTypeEnum getChildType();
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
private Constructor<T> getConstructor(Object theArgument) { private Constructor<T> getConstructor(@Nullable Object theArgument) {
Class<? extends Object> argumentType; Class<?> argumentType;
if (theArgument == null) { if (theArgument == null) {
argumentType = VOID_CLASS; argumentType = VOID_CLASS;
} else { } else {

View File

@ -377,9 +377,8 @@ public class FhirContext {
*/ */
public RuntimeResourceDefinition getResourceDefinition(final Class<? extends IBaseResource> theResourceType) { public RuntimeResourceDefinition getResourceDefinition(final Class<? extends IBaseResource> theResourceType) {
validateInitialized(); validateInitialized();
if (theResourceType == null) { Validate.notNull(theResourceType, "theResourceType can not be null");
throw new NullPointerException("theResourceType can not be null");
}
if (Modifier.isAbstract(theResourceType.getModifiers())) { if (Modifier.isAbstract(theResourceType.getModifiers())) {
throw new IllegalArgumentException("Can not scan abstract or interface class (resource definitions must be concrete classes): " + theResourceType.getName()); throw new IllegalArgumentException("Can not scan abstract or interface class (resource definitions must be concrete classes): " + theResourceType.getName());
} }

View File

@ -0,0 +1,21 @@
package ca.uhn.fhir.rest.param;
import com.google.common.collect.Sets;
import org.junit.Test;
import static org.junit.Assert.*;
public class QualifierDetailsTest {
@Test
public void testBlacklist() {
QualifierDetails details = new QualifierDetails();
details.setColonQualifier(":Patient");
assertFalse(details.passes(null, Sets.newHashSet(":Patient")));
assertTrue(details.passes(null, Sets.newHashSet(":Observation")));
}
}

View File

@ -3,14 +3,14 @@
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId> <artifactId>hapi-fhir-bom</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<packaging>pom</packaging> <packaging>pom</packaging>
<name>HAPI FHIR BOM</name> <name>HAPI FHIR BOM</name>
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -4,7 +4,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -26,7 +26,6 @@ import org.apache.commons.cli.Options;
import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter; import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.QuoteMode; import org.apache.commons.csv.QuoteMode;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.r4.model.Bundle; import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.ConceptMap; import org.hl7.fhir.r4.model.ConceptMap;
@ -43,6 +42,7 @@ import java.util.List;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
import static org.apache.commons.lang3.StringUtils.defaultString; import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.hl7.fhir.convertors.conv30_40.ConceptMap30_40.convertConceptMap;
public class ExportConceptMapToCsvCommand extends AbstractImportExportCsvConceptMapCommand { public class ExportConceptMapToCsvCommand extends AbstractImportExportCsvConceptMapCommand {
// TODO: Don't use qualified names for loggers in HAPI CLI. // TODO: Don't use qualified names for loggers in HAPI CLI.
@ -114,7 +114,7 @@ public class ExportConceptMapToCsvCommand extends AbstractImportExportCsvConcept
private void convertConceptMapToCsv(org.hl7.fhir.dstu3.model.ConceptMap theConceptMap) throws ExecutionException { private void convertConceptMapToCsv(org.hl7.fhir.dstu3.model.ConceptMap theConceptMap) throws ExecutionException {
try { try {
convertConceptMapToCsv(VersionConvertor_30_40.convertConceptMap(theConceptMap)); convertConceptMapToCsv(convertConceptMap(theConceptMap));
} catch (FHIRException fe) { } catch (FHIRException fe) {
throw new ExecutionException(fe); throw new ExecutionException(fe);
} }

View File

@ -28,7 +28,6 @@ import org.apache.commons.cli.Options;
import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser; import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord; import org.apache.commons.csv.CSVRecord;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.r4.model.ConceptMap; import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ConceptMap.ConceptMapGroupComponent; import org.hl7.fhir.r4.model.ConceptMap.ConceptMapGroupComponent;
@ -45,7 +44,10 @@ import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
import static org.apache.commons.lang3.StringUtils.*; import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hl7.fhir.convertors.conv30_40.ConceptMap30_40.convertConceptMap;
public class ImportCsvToConceptMapCommand extends AbstractImportExportCsvConceptMapCommand { public class ImportCsvToConceptMapCommand extends AbstractImportExportCsvConceptMapCommand {
// TODO: Don't use qualified names for loggers in HAPI CLI. // TODO: Don't use qualified names for loggers in HAPI CLI.
@ -154,7 +156,7 @@ public class ImportCsvToConceptMapCommand extends AbstractImportExportCsvConcept
private org.hl7.fhir.dstu3.model.ConceptMap convertCsvToConceptMapDstu3() throws ExecutionException { private org.hl7.fhir.dstu3.model.ConceptMap convertCsvToConceptMapDstu3() throws ExecutionException {
try { try {
return VersionConvertor_30_40.convertConceptMap(convertCsvToConceptMapR4()); return convertConceptMap(convertCsvToConceptMapR4());
} catch (FHIRException fe) { } catch (FHIRException fe) {
throw new ExecutionException(fe); throw new ExecutionException(fe);
} }
@ -174,7 +176,7 @@ public class ImportCsvToConceptMapCommand extends AbstractImportExportCsvConcept
.withFirstRecordAsHeader() .withFirstRecordAsHeader()
.withIgnoreHeaderCase() .withIgnoreHeaderCase()
.withIgnoreEmptyLines() .withIgnoreEmptyLines()
.withTrim()); .withTrim())
) { ) {
retVal.setUrl(conceptMapUrl); retVal.setUrl(conceptMapUrl);

View File

@ -6,7 +6,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId> <artifactId>hapi-fhir-cli</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -6,7 +6,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom</relativePath> <relativePath>../../hapi-deployable-pom</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId> <artifactId>hapi-fhir</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -4,7 +4,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -4,7 +4,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -31,7 +31,11 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.interceptor.InterceptorAdapter; import ca.uhn.fhir.rest.server.interceptor.InterceptorAdapter;
import org.hl7.fhir.converter.NullVersionConverterAdvisor30; import org.hl7.fhir.converter.NullVersionConverterAdvisor30;
import org.hl7.fhir.converter.NullVersionConverterAdvisor40; import org.hl7.fhir.converter.NullVersionConverterAdvisor40;
import org.hl7.fhir.convertors.*; import org.hl7.fhir.convertors.VersionConvertorAdvisor30;
import org.hl7.fhir.convertors.VersionConvertorAdvisor40;
import org.hl7.fhir.convertors.VersionConvertor_10_30;
import org.hl7.fhir.convertors.VersionConvertor_10_40;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.model.Resource; import org.hl7.fhir.dstu3.model.Resource;
import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
@ -40,7 +44,9 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import java.util.StringTokenizer; import java.util.StringTokenizer;
import static org.apache.commons.lang3.StringUtils.*; import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/** /**
* <b>This is an experimental interceptor! Use with caution as * <b>This is an experimental interceptor! Use with caution as
@ -54,16 +60,12 @@ import static org.apache.commons.lang3.StringUtils.*;
public class VersionedApiConverterInterceptor extends InterceptorAdapter { public class VersionedApiConverterInterceptor extends InterceptorAdapter {
private final FhirContext myCtxDstu2; private final FhirContext myCtxDstu2;
private final FhirContext myCtxDstu2Hl7Org; private final FhirContext myCtxDstu2Hl7Org;
private VersionConvertor_30_40 myVersionConvertor_30_40; private final NullVersionConverterAdvisor40 advisor40;
private VersionConvertor_10_40 myVersionConvertor_10_40; private final NullVersionConverterAdvisor30 advisor30;
private VersionConvertor_10_30 myVersionConvertor_10_30;
public VersionedApiConverterInterceptor() { public VersionedApiConverterInterceptor() {
myVersionConvertor_30_40 = new VersionConvertor_30_40(); advisor40 = new NullVersionConverterAdvisor40();
VersionConvertorAdvisor40 advisor40 = new NullVersionConverterAdvisor40(); advisor30 = new NullVersionConverterAdvisor30();
myVersionConvertor_10_40 = new VersionConvertor_10_40(advisor40);
VersionConvertorAdvisor30 advisor30 = new NullVersionConverterAdvisor30();
myVersionConvertor_10_30 = new VersionConvertor_10_30(advisor30);
myCtxDstu2 = FhirContext.forDstu2(); myCtxDstu2 = FhirContext.forDstu2();
myCtxDstu2Hl7Org = FhirContext.forDstu2Hl7Org(); myCtxDstu2Hl7Org = FhirContext.forDstu2Hl7Org();
@ -104,17 +106,17 @@ public class VersionedApiConverterInterceptor extends InterceptorAdapter {
IBaseResource converted = null; IBaseResource converted = null;
try { try {
if (wantVersion == FhirVersionEnum.R4 && haveVersion == FhirVersionEnum.DSTU3) { if (wantVersion == FhirVersionEnum.R4 && haveVersion == FhirVersionEnum.DSTU3) {
converted = myVersionConvertor_30_40.convertResource(toDstu3(responseResource), true); converted = VersionConvertor_30_40.convertResource(toDstu3(responseResource), true);
} else if (wantVersion == FhirVersionEnum.DSTU3 && haveVersion == FhirVersionEnum.R4) { } else if (wantVersion == FhirVersionEnum.DSTU3 && haveVersion == FhirVersionEnum.R4) {
converted = myVersionConvertor_30_40.convertResource(toR4(responseResource), true); converted = VersionConvertor_30_40.convertResource(toR4(responseResource), true);
} else if (wantVersion == FhirVersionEnum.DSTU2 && haveVersion == FhirVersionEnum.R4) { } else if (wantVersion == FhirVersionEnum.DSTU2 && haveVersion == FhirVersionEnum.R4) {
converted = myVersionConvertor_10_40.convertResource(toR4(responseResource)); converted = VersionConvertor_10_40.convertResource(toR4(responseResource), advisor40);
} else if (wantVersion == FhirVersionEnum.R4 && haveVersion == FhirVersionEnum.DSTU2) { } else if (wantVersion == FhirVersionEnum.R4 && haveVersion == FhirVersionEnum.DSTU2) {
converted = myVersionConvertor_10_40.convertResource(toDstu2(responseResource)); converted = VersionConvertor_10_40.convertResource(toDstu2(responseResource), advisor40);
} else if (wantVersion == FhirVersionEnum.DSTU2 && haveVersion == FhirVersionEnum.DSTU3) { } else if (wantVersion == FhirVersionEnum.DSTU2 && haveVersion == FhirVersionEnum.DSTU3) {
converted = myVersionConvertor_10_30.convertResource(toDstu3(responseResource)); converted = VersionConvertor_10_30.convertResource(toDstu3(responseResource), advisor30);
} else if (wantVersion == FhirVersionEnum.DSTU3 && haveVersion == FhirVersionEnum.DSTU2) { } else if (wantVersion == FhirVersionEnum.DSTU3 && haveVersion == FhirVersionEnum.DSTU2) {
converted = myVersionConvertor_10_30.convertResource(toDstu2(responseResource)); converted = VersionConvertor_10_30.convertResource(toDstu2(responseResource), advisor30);
} }
} catch (FHIRException e) { } catch (FHIRException e) {
throw new InternalErrorException(e); throw new InternalErrorException(e);
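The 30/40 conversions above now go through static methods as well. A minimal sketch under the same assumptions; the Patient content is illustrative, and the boolean flag is simply passed through the same way the interceptor does:

import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBaseResource;

public class Dstu3ToR4Sketch {
   public static void main(String[] args) throws FHIRException {
      // Build a simple DSTU3 resource to convert
      org.hl7.fhir.dstu3.model.Patient dstu3 = new org.hl7.fhir.dstu3.model.Patient();
      dstu3.addName().setFamily("Smith");

      // Static call, mirroring the interceptor above
      IBaseResource r4 = VersionConvertor_30_40.convertResource(dstu3, true);
      System.out.println(r4.fhirType()); // Patient
   }
}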

View File

@ -27,8 +27,12 @@ import org.hl7.fhir.r5.model.Bundle;
import org.hl7.fhir.r5.model.CodeSystem; import org.hl7.fhir.r5.model.CodeSystem;
import org.hl7.fhir.r5.model.ValueSet; import org.hl7.fhir.r5.model.ValueSet;
import java.util.IdentityHashMap;
public class NullVersionConverterAdvisor50 implements VersionConvertorAdvisor50 { public class NullVersionConverterAdvisor50 implements VersionConvertorAdvisor50 {
private IdentityHashMap<ValueSet, CodeSystem> myCodeSystems = new IdentityHashMap<>();
@Override @Override
public boolean ignoreEntry(Bundle.BundleEntryComponent src) { public boolean ignoreEntry(Bundle.BundleEntryComponent src) {
return false; return false;
@ -56,11 +60,11 @@ public class NullVersionConverterAdvisor50 implements VersionConvertorAdvisor50
@Override @Override
public void handleCodeSystem(CodeSystem tgtcs, ValueSet source) throws FHIRException { public void handleCodeSystem(CodeSystem tgtcs, ValueSet source) throws FHIRException {
myCodeSystems.put(source, tgtcs);
} }
@Override @Override
public CodeSystem getCodeSystem(ValueSet src) throws FHIRException { public CodeSystem getCodeSystem(ValueSet src) throws FHIRException {
return null; return myCodeSystems.get(src);
} }
} }
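The advisor now remembers each CodeSystem keyed by the exact ValueSet instance handed to handleCodeSystem(), so getCodeSystem() can return it later for that same object. An IdentityHashMap compares keys by reference rather than by equals(), which is what makes this per-instance lookup work; a small self-contained illustration:

import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Map;

public class IdentityMapSketch {
   public static void main(String[] args) {
      // Two distinct but equal keys
      String a = new String("vs");
      String b = new String("vs");

      Map<String, String> byEquals = new HashMap<>();
      byEquals.put(a, "first");
      byEquals.put(b, "second"); // replaces the first entry because a.equals(b)

      Map<String, String> byIdentity = new IdentityHashMap<>();
      byIdentity.put(a, "first");
      byIdentity.put(b, "second"); // kept as a separate entry because a != b

      System.out.println(byEquals.size());   // 1
      System.out.println(byIdentity.size()); // 2
   }
}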

View File

@ -16,13 +16,10 @@ public class VersionConvertor_10_30Test {
@Test @Test
public void testConvert() throws FHIRException { public void testConvert() throws FHIRException {
NullVersionConverterAdvisor30 advisor = new NullVersionConverterAdvisor30();
VersionConvertor_10_30 converter = new VersionConvertor_10_30(advisor);
org.hl7.fhir.dstu2.model.Observation input = new org.hl7.fhir.dstu2.model.Observation(); org.hl7.fhir.dstu2.model.Observation input = new org.hl7.fhir.dstu2.model.Observation();
input.setEncounter(new org.hl7.fhir.dstu2.model.Reference("Encounter/123")); input.setEncounter(new org.hl7.fhir.dstu2.model.Reference("Encounter/123"));
org.hl7.fhir.dstu3.model.Observation output = converter.convertObservation(input); org.hl7.fhir.dstu3.model.Observation output = (Observation) VersionConvertor_10_30.convertResource(input);
String context = output.getContext().getReference(); String context = output.getContext().getReference();
assertEquals("Encounter/123", context); assertEquals("Encounter/123", context);
@ -31,9 +28,6 @@ public class VersionConvertor_10_30Test {
@Test @Test
public void testConvertSpecimen() throws FHIRException { public void testConvertSpecimen() throws FHIRException {
NullVersionConverterAdvisor30 advisor = new NullVersionConverterAdvisor30();
VersionConvertor_10_30 converter = new VersionConvertor_10_30(advisor);
Specimen spec = new Specimen(); Specimen spec = new Specimen();
CodeableConcept cc = new CodeableConcept(); CodeableConcept cc = new CodeableConcept();
Coding coding = new Coding(); Coding coding = new Coding();
@ -58,7 +52,7 @@ public class VersionConvertor_10_30Test {
Specimen.SpecimenContainerComponent specimenContainerComponent = new Specimen.SpecimenContainerComponent(); Specimen.SpecimenContainerComponent specimenContainerComponent = new Specimen.SpecimenContainerComponent();
specimenContainerComponent.getExtension().add(new Extension().setUrl("some_url").setValue(new StringType("some_value"))); specimenContainerComponent.getExtension().add(new Extension().setUrl("some_url").setValue(new StringType("some_value")));
spec.setContainer(Collections.singletonList(specimenContainerComponent)); spec.setContainer(Collections.singletonList(specimenContainerComponent));
Resource resource = converter.convertResource(spec); Resource resource = VersionConvertor_10_30.convertResource(spec);
} }

View File

@ -3,6 +3,7 @@ package org.hl7.fhir.converter;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import org.hl7.fhir.convertors.VersionConvertor_14_30; import org.hl7.fhir.convertors.VersionConvertor_14_30;
import org.hl7.fhir.dstu3.model.Questionnaire;
import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.FHIRException;
import org.junit.Test; import org.junit.Test;
@ -14,7 +15,7 @@ public class VersionConvertor_14_30Test {
org.hl7.fhir.dstu2016may.model.Questionnaire input = new org.hl7.fhir.dstu2016may.model.Questionnaire(); org.hl7.fhir.dstu2016may.model.Questionnaire input = new org.hl7.fhir.dstu2016may.model.Questionnaire();
input.setTitle("My title"); input.setTitle("My title");
org.hl7.fhir.dstu3.model.Questionnaire output = VersionConvertor_14_30.convertQuestionnaire(input); org.hl7.fhir.dstu3.model.Questionnaire output = (Questionnaire) VersionConvertor_14_30.convertResource(input);
String context = output.getTitle(); String context = output.getTitle();
assertEquals("My title", context); assertEquals("My title", context);

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId> <artifactId>hapi-fhir</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>
@ -73,13 +73,13 @@
<dependency> <dependency>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-dstu2</artifactId> <artifactId>hapi-fhir-structures-dstu2</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<scope>compile</scope> <scope>compile</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-subscription</artifactId> <artifactId>hapi-fhir-jpaserver-subscription</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<scope>compile</scope> <scope>compile</scope>
</dependency> </dependency>
<dependency> <dependency>
@ -96,7 +96,7 @@
<dependency> <dependency>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-testpage-overlay</artifactId> <artifactId>hapi-fhir-testpage-overlay</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<classifier>classes</classifier> <classifier>classes</classifier>
</dependency> </dependency>
<dependency> <dependency>

View File

@ -20,9 +20,8 @@ package ca.uhn.hapi.fhir.docs;
* #L% * #L%
*/ */
import org.hl7.fhir.converter.NullVersionConverterAdvisor30; import org.hl7.fhir.convertors.conv10_30.Observation10_30;
import org.hl7.fhir.convertors.VersionConvertor_10_30; import org.hl7.fhir.convertors.conv14_30.Questionnaire14_30;
import org.hl7.fhir.convertors.VersionConvertor_14_30;
import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.FHIRException;
public class ConverterExamples { public class ConverterExamples {
@ -30,16 +29,12 @@ public class ConverterExamples {
@SuppressWarnings("unused") @SuppressWarnings("unused")
public void c1020() throws FHIRException { public void c1020() throws FHIRException {
//START SNIPPET: 1020 //START SNIPPET: 1020
// Create a converter
NullVersionConverterAdvisor30 advisor = new NullVersionConverterAdvisor30();
VersionConvertor_10_30 converter = new VersionConvertor_10_30(advisor);
// Create an input resource to convert // Create an input resource to convert
org.hl7.fhir.dstu2.model.Observation input = new org.hl7.fhir.dstu2.model.Observation(); org.hl7.fhir.dstu2.model.Observation input = new org.hl7.fhir.dstu2.model.Observation();
input.setEncounter(new org.hl7.fhir.dstu2.model.Reference("Encounter/123")); input.setEncounter(new org.hl7.fhir.dstu2.model.Reference("Encounter/123"));
// Convert the resource // Convert the resource
org.hl7.fhir.dstu3.model.Observation output = converter.convertObservation(input); org.hl7.fhir.dstu3.model.Observation output = Observation10_30.convertObservation(input);
String context = output.getContext().getReference(); String context = output.getContext().getReference();
//END SNIPPET: 1020 //END SNIPPET: 1020
} }
@ -52,7 +47,7 @@ public class ConverterExamples {
input.setTitle("My title"); input.setTitle("My title");
// Convert the resource // Convert the resource
org.hl7.fhir.dstu3.model.Questionnaire output = VersionConvertor_14_30.convertQuestionnaire(input); org.hl7.fhir.dstu3.model.Questionnaire output = Questionnaire14_30.convertQuestionnaire(input);
String context = output.getTitle(); String context = output.getTitle();
//END SNIPPET: 1420 //END SNIPPET: 1420
} }

View File

@ -1,2 +1,3 @@
--- ---
release-date: "TBD" release-date: "2020-02-15"
codename: "Koala"

View File

@ -0,0 +1,5 @@
---
type: change
issue: 1693
title: Adjusted schema definitions for Resource and Resource History tables to eliminate circular
dependencies with the Forced ID table and to improve performance when expunging large numbers of resources.

View File

@ -0,0 +1,4 @@
---
type: perf
issue: 1702
title: "Loading of _include and _revinclude values has been optimized to be slightly faster"

View File

@ -0,0 +1,6 @@
---
type: change
issue: 1715
title: The version converters for all versions except R4/R5 have been reworked to be split into individual
classes per resource type (the R4/R5 converters were already organized this way). Thanks to Mark Iantorno
for a huge effort to write a Java source parser/serializer to accomplish this task.

View File

@ -0,0 +1,6 @@
---
type: fix
issue: 1717
title: ValueSets containing lists of terms did not expand correctly when backed by
ElasticSearch, due to the use of a feature not supported in ES. Thanks to Jens Villadsen for
reporting!

View File

@ -0,0 +1,6 @@
---
type: fix
issue: 1721
title: When performing a terminology delta ADD operation, existing parent-child links were often
deleted and recreated needlessly, which could result in a deadlock. This has
been resolved.

View File

@ -0,0 +1,6 @@
---
type: perf
issue: 1726
title: When performing date range searches in the JPA server, the server was adding
unnecessary joins to the generated SQL. This has been streamlined, which should result in
faster date range searches.

View File

@ -0,0 +1,5 @@
---
type: add
issue: 1728
title: "Fields of type `canonical` were not previously indexed by the JPA server, meaning that some
default search parameters could not be honoured (e.g. StructureDefinition:valueset). This is now corrected."
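As an illustration of the kind of search this change enables, the sketch below uses the HAPI generic client to request the _include named in the changelog entry; the base URL is made up, and the `valueset` parameter is the standard canonical-typed search parameter on StructureDefinition:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.StructureDefinition;

public class CanonicalIncludeSketch {
   public static void main(String[] args) {
      FhirContext ctx = FhirContext.forR4();
      IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir"); // hypothetical base URL

      // Equivalent to GET [base]/StructureDefinition?_include=StructureDefinition:valueset
      // The referenced element is a canonical, which is what is now being indexed
      Bundle result = client.search()
         .forResource(StructureDefinition.class)
         .include(new Include("StructureDefinition:valueset"))
         .returnBundle(Bundle.class)
         .execute();

      System.out.println(result.getEntry().size());
   }
}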

View File

@ -0,0 +1,6 @@
---
type: fix
issue: 1732
title: In the JPA server, quickly deleting a resource and then performing a search that had recently returned that
resource could cause a cached stub resource (containing no data but with an ID and metadata populated) to
be returned. This has been corrected.

View File

@ -0,0 +1,7 @@
---
type: add
issue: 1736
title: When performing large terminology concept additions via the delta addition service, concepts will
now be added via the deferred storage service, meaning that they will be added in small incremental batches
instead of as part of one large transaction. This helps to avoid timeouts and memory issues when uploading
large collections of concepts.

View File

@ -0,0 +1,6 @@
---
type: fix
issue: 1742
title: "When validating a resource, the validator will now report an error if the resource declares conformance
to an unknown or invalid profile URL via the `Resource.meta.profile` declaration. Previously this was a warning
and did not block successful validation."

View File

@ -0,0 +1,6 @@
---
type: fix
issue: 1742
title: "When performing a search in the JPA server where the only parameter was a `_has` parameter,
the server did not respect the resource type name being searched for, causing false-positive
search results. This has been corrected."
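A sketch of a `_has`-only search of the kind this fix addresses, issued through the HAPI generic client; the server base URL and the LOINC code are made up:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;

public class HasSearchSketch {
   public static void main(String[] args) {
      IGenericClient client = FhirContext.forR4()
         .newRestfulGenericClient("http://localhost:8080/fhir"); // hypothetical base URL

      // Only Patient resources referenced by a matching Observation should come back;
      // before this fix the resource type name was not always respected
      Bundle patients = client.search()
         .byUrl("Patient?_has:Observation:patient:code=http://loinc.org|1234")
         .returnBundle(Bundle.class)
         .execute();

      System.out.println(patients.getEntry().size());
   }
}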

View File

@ -0,0 +1,6 @@
---
type: add
issue: 1742
title: When performing a terminology delta ADD operation, if the number of codes being added is large,
the codes will be added in small batches via an asynchronous scheduled task in order to avoid overwhelming
the database with a large operation.

View File

@ -5,7 +5,7 @@
<parent> <parent>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -11,7 +11,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -4,7 +4,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -6,7 +6,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId> <artifactId>hapi-fhir</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -4,7 +4,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version> <version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>
@ -48,6 +48,11 @@
<artifactId>commons-csv</artifactId> <artifactId>commons-csv</artifactId>
</dependency> </dependency>
<dependency>
<groupId>co.elastic.apm</groupId>
<artifactId>apm-agent-api</artifactId>
</dependency>
<dependency> <dependency>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-base</artifactId> <artifactId>hapi-fhir-base</artifactId>

View File

@ -33,6 +33,7 @@ import ca.uhn.fhir.jpa.subscription.dbmatcher.CompositeInMemoryDaoSubscriptionMa
import ca.uhn.fhir.jpa.subscription.dbmatcher.DaoSubscriptionMatcher; import ca.uhn.fhir.jpa.subscription.dbmatcher.DaoSubscriptionMatcher;
import ca.uhn.fhir.jpa.subscription.module.cache.LinkedBlockingQueueSubscribableChannelFactory; import ca.uhn.fhir.jpa.subscription.module.cache.LinkedBlockingQueueSubscribableChannelFactory;
import ca.uhn.fhir.jpa.subscription.module.channel.ISubscribableChannelFactory; import ca.uhn.fhir.jpa.subscription.module.channel.ISubscribableChannelFactory;
import ca.uhn.fhir.jpa.subscription.module.channel.SubscriptionChannelFactory;
import ca.uhn.fhir.jpa.subscription.module.matcher.ISubscriptionMatcher; import ca.uhn.fhir.jpa.subscription.module.matcher.ISubscriptionMatcher;
import ca.uhn.fhir.jpa.subscription.module.matcher.InMemorySubscriptionMatcher; import ca.uhn.fhir.jpa.subscription.module.matcher.InMemorySubscriptionMatcher;
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices; import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
@ -204,10 +205,15 @@ public abstract class BaseConfig {
* Create a @Primary @Bean if you need a different implementation * Create a @Primary @Bean if you need a different implementation
*/ */
@Bean @Bean
public ISubscribableChannelFactory linkedBlockingQueueSubscribableChannelFactory() { public ISubscribableChannelFactory subscribableChannelFactory() {
return new LinkedBlockingQueueSubscribableChannelFactory(); return new LinkedBlockingQueueSubscribableChannelFactory();
} }
@Bean
public SubscriptionChannelFactory subscriptionChannelFactory() {
return new SubscriptionChannelFactory();
}
@Bean @Bean
@Primary @Primary
public ISubscriptionMatcher subscriptionMatcherCompositeInMemoryDatabase() { public ISubscriptionMatcher subscriptionMatcherCompositeInMemoryDatabase() {
@ -280,6 +286,4 @@ public abstract class BaseConfig {
private static HapiFhirHibernateJpaDialect hibernateJpaDialect(HapiLocalizer theLocalizer) { private static HapiFhirHibernateJpaDialect hibernateJpaDialect(HapiLocalizer theLocalizer) {
return new HapiFhirHibernateJpaDialect(theLocalizer); return new HapiFhirHibernateJpaDialect(theLocalizer);
} }
} }
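As the comment in BaseConfig notes ("Create a @Primary @Bean if you need a different implementation"), an application can override the renamed subscribableChannelFactory() bean. A sketch of such a config; it returns the stock implementation only for illustration, and a real override would supply its own ISubscribableChannelFactory:

import ca.uhn.fhir.jpa.subscription.module.cache.LinkedBlockingQueueSubscribableChannelFactory;
import ca.uhn.fhir.jpa.subscription.module.channel.ISubscribableChannelFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;

@Configuration
public class CustomChannelConfig {

   // @Primary makes this bean win over the default defined in BaseConfig
   @Bean
   @Primary
   public ISubscribableChannelFactory mySubscribableChannelFactory() {
      return new LinkedBlockingQueueSubscribableChannelFactory();
   }
}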

View File

@ -153,8 +153,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
@Autowired @Autowired
protected ISearchCoordinatorSvc mySearchCoordinatorSvc; protected ISearchCoordinatorSvc mySearchCoordinatorSvc;
@Autowired @Autowired
protected ISearchParamRegistry mySerarchParamRegistry;
@Autowired
protected ITermReadSvc myTerminologySvc; protected ITermReadSvc myTerminologySvc;
@Autowired @Autowired
protected IResourceHistoryTableDao myResourceHistoryTableDao; protected IResourceHistoryTableDao myResourceHistoryTableDao;
@ -162,8 +160,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
protected IResourceTableDao myResourceTableDao; protected IResourceTableDao myResourceTableDao;
@Autowired @Autowired
protected IResourceTagDao myResourceTagDao; protected IResourceTagDao myResourceTagDao;
@Autowired
protected ISearchParamRegistry mySearchParamRegistry;
@Autowired @Autowired
protected DeleteConflictService myDeleteConflictService; protected DeleteConflictService myDeleteConflictService;
@Autowired @Autowired
@ -1550,4 +1547,5 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
"Resource with ID " + theEntity.getIdDt().getIdPart() + " exists but it is not of type " + theResourceName + ", found resource of type " + theEntity.getResourceType()); "Resource with ID " + theEntity.getIdDt().getIdPart() + " exists but it is not of type " + theResourceName + ", found resource of type " + theEntity.getResourceType());
} }
} }
} }

View File

@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.delete.DeleteConflictList; import ca.uhn.fhir.jpa.delete.DeleteConflictList;
@ -39,16 +38,12 @@ import ca.uhn.fhir.jpa.util.ExpungeOutcome;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster; import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils; import ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils;
import ca.uhn.fhir.jpa.util.xmlpatch.XmlPatchUtils; import ca.uhn.fhir.jpa.util.xmlpatch.XmlPatchUtils;
import ca.uhn.fhir.model.api.IQueryParameterAnd;
import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.api.server.*; import ca.uhn.fhir.rest.api.server.*;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.param.QualifierDetails;
import ca.uhn.fhir.rest.server.exceptions.*; import ca.uhn.fhir.rest.server.exceptions.*;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.rest.server.method.SearchMethodBinding;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.ObjectUtil; import ca.uhn.fhir.util.ObjectUtil;
import ca.uhn.fhir.util.OperationOutcomeUtil; import ca.uhn.fhir.util.OperationOutcomeUtil;
@ -1164,38 +1159,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
return retVal; return retVal;
} }
@Transactional(propagation = Propagation.SUPPORTS)
@Override
public void translateRawParameters(Map<String, List<String>> theSource, SearchParameterMap theTarget) {
if (theSource == null || theSource.isEmpty()) {
return;
}
Map<String, RuntimeSearchParam> searchParams = mySerarchParamRegistry.getActiveSearchParams(getResourceName());
Set<String> paramNames = theSource.keySet();
for (String nextParamName : paramNames) {
QualifierDetails qualifiedParamName = SearchMethodBinding.extractQualifiersFromParameterName(nextParamName);
RuntimeSearchParam param = searchParams.get(qualifiedParamName.getParamName());
if (param == null) {
String msg = getContext().getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidSearchParameter", qualifiedParamName.getParamName(), new TreeSet<>(searchParams.keySet()));
throw new InvalidRequestException(msg);
}
// Should not be null since the check above would have caught it
RuntimeResourceDefinition resourceDef = getContext().getResourceDefinition(myResourceName);
RuntimeSearchParam paramDef = mySearchParamRegistry.getSearchParamByName(resourceDef, qualifiedParamName.getParamName());
for (String nextValue : theSource.get(nextParamName)) {
QualifiedParamList qualifiedParam = QualifiedParamList.splitQueryStringByCommasIgnoreEscape(qualifiedParamName.getWholeQualifier(), nextValue);
List<QualifiedParamList> paramList = Collections.singletonList(qualifiedParam);
IQueryParameterAnd<?> parsedParam = ParameterUtil.parseQueryParams(getContext(), paramDef, nextParamName, paramList);
theTarget.add(qualifiedParamName.getParamName(), parsedParam);
}
}
}
@Override @Override
public DaoMethodOutcome update(T theResource) { public DaoMethodOutcome update(T theResource) {
return update(theResource, null, null); return update(theResource, null, null);

View File

@ -21,19 +21,24 @@ package ca.uhn.fhir.jpa.dao;
*/ */
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster; import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails; import ca.uhn.fhir.model.api.IQueryParameterAnd;
import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails; import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.*;
import ca.uhn.fhir.rest.api.server.SimplePreResourceAccessDetails; import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.api.server.SimplePreResourceShowDetails; import ca.uhn.fhir.rest.param.QualifierDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException; import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.method.SearchMethodBinding;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.BundleUtil; import ca.uhn.fhir.util.BundleUtil;
import ca.uhn.fhir.util.FhirTerser; import ca.uhn.fhir.util.FhirTerser;
@ -44,12 +49,14 @@ import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.InstantType; import org.hl7.fhir.r4.model.InstantType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Nonnull; import javax.annotation.Nonnull;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import javax.validation.constraints.NotNull; import javax.validation.constraints.NotNull;
import java.util.List; import java.util.*;
import java.util.Set;
import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.OO_SEVERITY_ERROR; import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.OO_SEVERITY_ERROR;
import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.OO_SEVERITY_INFO; import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.OO_SEVERITY_INFO;
@ -57,6 +64,8 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank;
public abstract class BaseStorageDao { public abstract class BaseStorageDao {
@Autowired
protected ISearchParamRegistry mySearchParamRegistry;
/** /**
* May be overridden by subclasses to validate resources prior to storage * May be overridden by subclasses to validate resources prior to storage
@ -206,4 +215,36 @@ public abstract class BaseStorageDao {
*/ */
protected abstract FhirContext getContext(); protected abstract FhirContext getContext();
@Transactional(propagation = Propagation.SUPPORTS)
public void translateRawParameters(Map<String, List<String>> theSource, SearchParameterMap theTarget) {
if (theSource == null || theSource.isEmpty()) {
return;
}
Map<String, RuntimeSearchParam> searchParams = mySearchParamRegistry.getActiveSearchParams(getResourceName());
Set<String> paramNames = theSource.keySet();
for (String nextParamName : paramNames) {
QualifierDetails qualifiedParamName = SearchMethodBinding.extractQualifiersFromParameterName(nextParamName);
RuntimeSearchParam param = searchParams.get(qualifiedParamName.getParamName());
if (param == null) {
String msg = getContext().getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidSearchParameter", qualifiedParamName.getParamName(), new TreeSet<>(searchParams.keySet()));
throw new InvalidRequestException(msg);
}
// Should not be null since the check above would have caught it
RuntimeResourceDefinition resourceDef = getContext().getResourceDefinition(getResourceName());
RuntimeSearchParam paramDef = mySearchParamRegistry.getSearchParamByName(resourceDef, qualifiedParamName.getParamName());
for (String nextValue : theSource.get(nextParamName)) {
QualifiedParamList qualifiedParam = QualifiedParamList.splitQueryStringByCommasIgnoreEscape(qualifiedParamName.getWholeQualifier(), nextValue);
List<QualifiedParamList> paramList = Collections.singletonList(qualifiedParam);
IQueryParameterAnd<?> parsedParam = ParameterUtil.parseQueryParams(getContext(), paramDef, nextParamName, paramList);
theTarget.add(qualifiedParamName.getParamName(), parsedParam);
}
}
}
} }

View File

@ -102,7 +102,7 @@ public class DaoConfig {
/** /**
* update setter javadoc if default changes * update setter javadoc if default changes
*/ */
private int myDeferIndexingForCodesystemsOfSize = 2000; private int myDeferIndexingForCodesystemsOfSize = 100;
private boolean myDeleteStaleSearches = true; private boolean myDeleteStaleSearches = true;
private boolean myEnforceReferentialIntegrityOnDelete = true; private boolean myEnforceReferentialIntegrityOnDelete = true;
private boolean myUniqueIndexesEnabled = true; private boolean myUniqueIndexesEnabled = true;
@ -397,7 +397,7 @@ public class DaoConfig {
* the code system will be indexed later in an incremental process in order to * the code system will be indexed later in an incremental process in order to
* avoid overwhelming Lucene with a huge number of codes in a single operation. * avoid overwhelming Lucene with a huge number of codes in a single operation.
* <p> * <p>
* Defaults to 2000 * Defaults to 100
* </p> * </p>
*/ */
public int getDeferIndexingForCodesystemsOfSize() { public int getDeferIndexingForCodesystemsOfSize() {
@ -409,7 +409,7 @@ public class DaoConfig {
* the code system will be indexed later in an incremental process in order to * the code system will be indexed later in an incremental process in order to
* avoid overwhelming Lucene with a huge number of codes in a single operation. * avoid overwhelming Lucene with a huge number of codes in a single operation.
* <p> * <p>
* Defaults to 2000 * Defaults to 100
* </p> * </p>
*/ */
public void setDeferIndexingForCodesystemsOfSize(int theDeferIndexingForCodesystemsOfSize) { public void setDeferIndexingForCodesystemsOfSize(int theDeferIndexingForCodesystemsOfSize) {
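A short sketch of reading and adjusting the new default; it assumes the 4.x package location ca.uhn.fhir.jpa.dao.DaoConfig:

import ca.uhn.fhir.jpa.dao.DaoConfig;

public class DeferIndexingSketch {
   public static void main(String[] args) {
      DaoConfig daoConfig = new DaoConfig();

      // New default is 100: code systems larger than this are indexed incrementally
      // by the deferred storage service instead of inside the upload transaction
      System.out.println(daoConfig.getDeferIndexingForCodesystemsOfSize());

      // Restore the previous threshold of 2000 if the old behaviour is preferred
      daoConfig.setDeferIndexingForCodesystemsOfSize(2000);
   }
}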

View File

@ -29,4 +29,6 @@ public interface IResultIterator extends Iterator<ResourcePersistentId>, Closeab
int getSkippedCount(); int getSkippedCount();
int getNonSkippedCount();
} }

View File

@ -29,17 +29,29 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao; import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.predicate.*; import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilder;
import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderFactory;
import ca.uhn.fhir.jpa.dao.predicate.QueryRoot;
import ca.uhn.fhir.jpa.dao.predicate.SearchBuilderJoinEnum;
import ca.uhn.fhir.jpa.dao.predicate.SearchBuilderJoinKey;
import ca.uhn.fhir.jpa.entity.ResourceSearchView; import ca.uhn.fhir.jpa.entity.ResourceSearchView;
import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails; import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId; import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.entity.*; import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedCompositeStringUnique;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam; import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
import ca.uhn.fhir.jpa.util.*; import ca.uhn.fhir.jpa.util.BaseIterator;
import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.jpa.util.ScrollableResultsIterator;
import ca.uhn.fhir.jpa.util.SqlQueryList;
import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.Include; import ca.uhn.fhir.model.api.Include;
@ -80,10 +92,27 @@ import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext; import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType; import javax.persistence.PersistenceContextType;
import javax.persistence.TypedQuery; import javax.persistence.TypedQuery;
import javax.persistence.criteria.*; import javax.persistence.criteria.CriteriaBuilder;
import java.util.*; import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.From;
import javax.persistence.criteria.Join;
import javax.persistence.criteria.JoinType;
import javax.persistence.criteria.Order;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.apache.commons.lang3.StringUtils.*; import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/** /**
* The SearchBuilder is responsible for actually forming the SQL query that handles * The SearchBuilder is responsible for actually forming the SQL query that handles
@ -103,8 +132,9 @@ public class SearchBuilder implements ISearchBuilder {
private static final List<ResourcePersistentId> EMPTY_LONG_LIST = Collections.unmodifiableList(new ArrayList<>()); private static final List<ResourcePersistentId> EMPTY_LONG_LIST = Collections.unmodifiableList(new ArrayList<>());
private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class); private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class);
private static ResourcePersistentId NO_MORE = new ResourcePersistentId(-1L); private static ResourcePersistentId NO_MORE = new ResourcePersistentId(-1L);
@Autowired private final QueryRoot myQueryRoot = new QueryRoot();
private DaoConfig myDaoConfig; private final String myResourceName;
private final Class<? extends IBaseResource> myResourceType;
@Autowired @Autowired
protected IInterceptorBroadcaster myInterceptorBroadcaster; protected IInterceptorBroadcaster myInterceptorBroadcaster;
@Autowired @Autowired
@ -112,6 +142,8 @@ public class SearchBuilder implements ISearchBuilder {
@PersistenceContext(type = PersistenceContextType.TRANSACTION) @PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager; protected EntityManager myEntityManager;
@Autowired @Autowired
private DaoConfig myDaoConfig;
@Autowired
private IResourceSearchViewDao myResourceSearchViewDao; private IResourceSearchViewDao myResourceSearchViewDao;
@Autowired @Autowired
private FhirContext myContext; private FhirContext myContext;
@ -123,7 +155,6 @@ public class SearchBuilder implements ISearchBuilder {
private ISearchParamRegistry mySearchParamRegistry; private ISearchParamRegistry mySearchParamRegistry;
@Autowired @Autowired
private PredicateBuilderFactory myPredicateBuilderFactory; private PredicateBuilderFactory myPredicateBuilderFactory;
private List<ResourcePersistentId> myAlsoIncludePids; private List<ResourcePersistentId> myAlsoIncludePids;
private CriteriaBuilder myBuilder; private CriteriaBuilder myBuilder;
private IDao myCallingDao; private IDao myCallingDao;
@ -133,9 +164,6 @@ public class SearchBuilder implements ISearchBuilder {
private Integer myMaxResultsToFetch; private Integer myMaxResultsToFetch;
private Set<ResourcePersistentId> myPidSet; private Set<ResourcePersistentId> myPidSet;
private PredicateBuilder myPredicateBuilder; private PredicateBuilder myPredicateBuilder;
private final QueryRoot myQueryRoot = new QueryRoot();
private final String myResourceName;
private final Class<? extends IBaseResource> myResourceType;
/** /**
* Constructor * Constructor
@ -254,7 +282,8 @@ public class SearchBuilder implements ISearchBuilder {
outerQuery.multiselect(myBuilder.countDistinct(myQueryRoot.getRoot())); outerQuery.multiselect(myBuilder.countDistinct(myQueryRoot.getRoot()));
} else { } else {
outerQuery.multiselect(myQueryRoot.get("myId").as(Long.class)); outerQuery.multiselect(myQueryRoot.get("myId").as(Long.class));
outerQuery.distinct(true); // KHS This distinct call is causing performance issues in large installations
// outerQuery.distinct(true);
} }
} }
@ -475,6 +504,9 @@ public class SearchBuilder implements ISearchBuilder {
ResourcePersistentId resourceId; ResourcePersistentId resourceId;
for (ResourceSearchView next : resourceSearchViewList) { for (ResourceSearchView next : resourceSearchViewList) {
if (next.getDeleted() != null) {
continue;
}
Class<? extends IBaseResource> resourceType = myContext.getResourceDefinition(next.getResourceType()).getImplementingClass(); Class<? extends IBaseResource> resourceType = myContext.getResourceDefinition(next.getResourceType()).getImplementingClass();
@ -594,6 +626,7 @@ public class SearchBuilder implements ISearchBuilder {
return new HashSet<>(); return new HashSet<>();
} }
String searchFieldName = theReverseMode ? "myTargetResourcePid" : "mySourceResourcePid"; String searchFieldName = theReverseMode ? "myTargetResourcePid" : "mySourceResourcePid";
String findFieldName = theReverseMode ? "mySourceResourcePid" : "myTargetResourcePid";
Collection<ResourcePersistentId> nextRoundMatches = theMatches; Collection<ResourcePersistentId> nextRoundMatches = theMatches;
HashSet<ResourcePersistentId> allAdded = new HashSet<>(); HashSet<ResourcePersistentId> allAdded = new HashSet<>();
@ -618,17 +651,17 @@ public class SearchBuilder implements ISearchBuilder {
boolean matchAll = "*".equals(nextInclude.getValue()); boolean matchAll = "*".equals(nextInclude.getValue());
if (matchAll) { if (matchAll) {
String sql; String sql;
sql = "SELECT r FROM ResourceLink r WHERE r." + searchFieldName + " IN (:target_pids) "; sql = "SELECT r." + findFieldName + " FROM ResourceLink r WHERE r." + searchFieldName + " IN (:target_pids) ";
List<Collection<ResourcePersistentId>> partitions = partition(nextRoundMatches, MAXIMUM_PAGE_SIZE); List<Collection<ResourcePersistentId>> partitions = partition(nextRoundMatches, MAXIMUM_PAGE_SIZE);
for (Collection<ResourcePersistentId> nextPartition : partitions) { for (Collection<ResourcePersistentId> nextPartition : partitions) {
TypedQuery<ResourceLink> q = theEntityManager.createQuery(sql, ResourceLink.class); TypedQuery<Long> q = theEntityManager.createQuery(sql, Long.class);
q.setParameter("target_pids", ResourcePersistentId.toLongList(nextPartition)); q.setParameter("target_pids", ResourcePersistentId.toLongList(nextPartition));
List<ResourceLink> results = q.getResultList(); List<Long> results = q.getResultList();
for (ResourceLink resourceLink : results) { for (Long resourceLink : results) {
if (theReverseMode) { if (theReverseMode) {
pidsToInclude.add(new ResourcePersistentId(resourceLink.getSourceResourcePid())); pidsToInclude.add(new ResourcePersistentId(resourceLink));
} else { } else {
pidsToInclude.add(new ResourcePersistentId(resourceLink.getTargetResourcePid())); pidsToInclude.add(new ResourcePersistentId(resourceLink));
} }
} }
} }
@ -665,16 +698,16 @@ public class SearchBuilder implements ISearchBuilder {
boolean haveTargetTypesDefinedByParam = param.hasTargets(); boolean haveTargetTypesDefinedByParam = param.hasTargets();
if (targetResourceType != null) { if (targetResourceType != null) {
sql = "SELECT r FROM ResourceLink r WHERE r.mySourcePath = :src_path AND r." + searchFieldName + " IN (:target_pids) AND r.myTargetResourceType = :target_resource_type"; sql = "SELECT r." + findFieldName + " FROM ResourceLink r WHERE r.mySourcePath = :src_path AND r." + searchFieldName + " IN (:target_pids) AND r.myTargetResourceType = :target_resource_type";
} else if (haveTargetTypesDefinedByParam) { } else if (haveTargetTypesDefinedByParam) {
sql = "SELECT r FROM ResourceLink r WHERE r.mySourcePath = :src_path AND r." + searchFieldName + " IN (:target_pids) AND r.myTargetResourceType in (:target_resource_types)"; sql = "SELECT r." + findFieldName + " FROM ResourceLink r WHERE r.mySourcePath = :src_path AND r." + searchFieldName + " IN (:target_pids) AND r.myTargetResourceType in (:target_resource_types)";
} else { } else {
sql = "SELECT r FROM ResourceLink r WHERE r.mySourcePath = :src_path AND r." + searchFieldName + " IN (:target_pids)"; sql = "SELECT r." + findFieldName + " FROM ResourceLink r WHERE r.mySourcePath = :src_path AND r." + searchFieldName + " IN (:target_pids)";
} }
List<Collection<ResourcePersistentId>> partitions = partition(nextRoundMatches, MAXIMUM_PAGE_SIZE); List<Collection<ResourcePersistentId>> partitions = partition(nextRoundMatches, MAXIMUM_PAGE_SIZE);
for (Collection<ResourcePersistentId> nextPartition : partitions) { for (Collection<ResourcePersistentId> nextPartition : partitions) {
TypedQuery<ResourceLink> q = theEntityManager.createQuery(sql, ResourceLink.class); TypedQuery<Long> q = theEntityManager.createQuery(sql, Long.class);
q.setParameter("src_path", nextPath); q.setParameter("src_path", nextPath);
q.setParameter("target_pids", ResourcePersistentId.toLongList(nextPartition)); q.setParameter("target_pids", ResourcePersistentId.toLongList(nextPartition));
if (targetResourceType != null) { if (targetResourceType != null) {
@ -682,18 +715,10 @@ public class SearchBuilder implements ISearchBuilder {
} else if (haveTargetTypesDefinedByParam) { } else if (haveTargetTypesDefinedByParam) {
q.setParameter("target_resource_types", param.getTargets()); q.setParameter("target_resource_types", param.getTargets());
} }
List<ResourceLink> results = q.getResultList(); List<Long> results = q.getResultList();
for (ResourceLink resourceLink : results) { for (Long resourceLink : results) {
if (theReverseMode) { if (resourceLink != null) {
Long pid = resourceLink.getSourceResourcePid(); pidsToInclude.add(new ResourcePersistentId(resourceLink));
if (pid != null) {
pidsToInclude.add(new ResourcePersistentId(pid));
}
} else {
Long pid = resourceLink.getTargetResourcePid();
if (pid != null) {
pidsToInclude.add(new ResourcePersistentId(pid));
}
} }
} }
} }
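The two hunks above switch the _include/_revinclude loading from selecting whole ResourceLink entities to selecting only the PID column that is actually read (mySourceResourcePid or myTargetResourcePid), so Hibernate returns plain Long values instead of hydrating entities. A minimal sketch of that projection pattern, assuming a ResourceLink entity with the field names shown in the diff and an EntityManager supplied by the caller:

import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;

public class ResourceLinkPidQuerySketch {

    /**
     * Loads only the target PIDs for a batch of source PIDs. Selecting the
     * Long column directly keeps Hibernate from materializing ResourceLink
     * entities that would immediately be discarded.
     */
    public static List<Long> loadTargetPids(EntityManager theEntityManager, List<Long> theSourcePids) {
        // Entity and field names are taken from the diff above; this helper
        // itself is illustrative, not part of SearchBuilder.
        String jpql = "SELECT r.myTargetResourcePid FROM ResourceLink r"
            + " WHERE r.mySourceResourcePid IN (:target_pids)";
        TypedQuery<Long> q = theEntityManager.createQuery(jpql, Long.class);
        q.setParameter("target_pids", theSourcePids);
        return q.getResultList();
    }
}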
@ -965,6 +990,7 @@ public class SearchBuilder implements ISearchBuilder {
private SortSpec mySort; private SortSpec mySort;
private boolean myStillNeedToFetchIncludes; private boolean myStillNeedToFetchIncludes;
private int mySkipCount = 0; private int mySkipCount = 0;
private int myNonSkipCount = 0;
private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) { private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) {
mySearchRuntimeDetails = theSearchRuntimeDetails; mySearchRuntimeDetails = theSearchRuntimeDetails;
@ -994,14 +1020,7 @@ public class SearchBuilder implements ISearchBuilder {
myMaxResultsToFetch = myDaoConfig.getFetchSizeDefaultMaximum(); myMaxResultsToFetch = myDaoConfig.getFetchSizeDefaultMaximum();
} }
final TypedQuery<Long> query = createQuery(mySort, myMaxResultsToFetch, false, myRequest); initializeIteratorQuery(myMaxResultsToFetch);
mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());
Query<Long> hibernateQuery = (Query<Long>) query;
hibernateQuery.setFetchSize(myFetchSize);
ScrollableResults scroll = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
myResultsIterator = new ScrollableResultsIterator<>(scroll);
// If the query resulted in extra results being requested // If the query resulted in extra results being requested
if (myAlsoIncludePids != null) { if (myAlsoIncludePids != null) {
@ -1036,11 +1055,32 @@ public class SearchBuilder implements ISearchBuilder {
ResourcePersistentId next = new ResourcePersistentId(nextLong); ResourcePersistentId next = new ResourcePersistentId(nextLong);
if (myPidSet.add(next)) { if (myPidSet.add(next)) {
myNext = next; myNext = next;
myNonSkipCount++;
break; break;
} else { } else {
mySkipCount++; mySkipCount++;
} }
} }
if (!myResultsIterator.hasNext()) {
if (myMaxResultsToFetch != null && (mySkipCount + myNonSkipCount == myMaxResultsToFetch)) {
if (mySkipCount > 0 && myNonSkipCount == 0) {
myMaxResultsToFetch += 1000;
StorageProcessingMessage message = new StorageProcessingMessage();
String msg = "Pass completed with no matching results. This indicates an inefficient query! Retrying with new max count of " + myMaxResultsToFetch;
ourLog.warn(msg);
message.setMessage(msg);
HookParams params = new HookParams()
.add(RequestDetails.class, myRequest)
.addIfMatchesType(ServletRequestDetails.class, myRequest)
.add(StorageProcessingMessage.class, message);
JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_WARNING, params);
initializeIteratorQuery(myMaxResultsToFetch);
}
}
}
} }
} }
@ -1100,6 +1140,20 @@ public class SearchBuilder implements ISearchBuilder {
} }
private void initializeIteratorQuery(Integer theMaxResultsToFetch) {
final TypedQuery<Long> query = createQuery(mySort, theMaxResultsToFetch, false, myRequest);
mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());
Query<Long> hibernateQuery = (Query<Long>) query;
hibernateQuery.setFetchSize(myFetchSize);
ScrollableResults scroll = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
myResultsIterator = new ScrollableResultsIterator<>(scroll);
mySkipCount = 0;
myNonSkipCount = 0;
}
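The new mySkipCount/myNonSkipCount bookkeeping, combined with initializeIteratorQuery(...), lets the iterator detect a pass in which every fetched row was a duplicate already present in myPidSet; in that case it raises the fetch limit by 1000 and re-runs the query instead of returning an empty page. A minimal, framework-free sketch of that control flow (only the counters and the +1000 retry come from the hunk above; the page source is abstracted as a function and everything else is illustrative):

import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.function.IntFunction;

/**
 * Illustrative only: widens the fetch limit and re-queries whenever a full
 * pass produced nothing but already-seen (skipped) ids.
 */
class AdaptiveFetchSketch {

    private final Set<Long> mySeenPids = new LinkedHashSet<>();
    private final IntFunction<Iterator<Long>> myQueryFactory; // limit -> result iterator
    private int myMaxResultsToFetch;

    AdaptiveFetchSketch(IntFunction<Iterator<Long>> theQueryFactory, int theInitialLimit) {
        myQueryFactory = theQueryFactory;
        myMaxResultsToFetch = theInitialLimit;
    }

    /** Returns the next PID not seen before, or null when the source is exhausted. */
    Long fetchNextNewPid() {
        Iterator<Long> results = myQueryFactory.apply(myMaxResultsToFetch);
        int skipCount = 0;
        int nonSkipCount = 0;
        while (results.hasNext()) {
            Long candidate = results.next();
            if (mySeenPids.add(candidate)) {
                nonSkipCount++;
                return candidate;
            }
            skipCount++;
            if (!results.hasNext() && skipCount + nonSkipCount == myMaxResultsToFetch
                  && skipCount > 0 && nonSkipCount == 0) {
                // Whole pass was duplicates: widen the window and query again,
                // mirroring the "+ 1000" retry in the hunk above.
                myMaxResultsToFetch += 1000;
                results = myQueryFactory.apply(myMaxResultsToFetch);
                skipCount = 0;
            }
        }
        return null;
    }
}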
@Override @Override
public boolean hasNext() { public boolean hasNext() {
if (myNext == null) { if (myNext == null) {
@ -1122,6 +1176,11 @@ public class SearchBuilder implements ISearchBuilder {
return mySkipCount; return mySkipCount;
} }
@Override
public int getNonSkippedCount() {
return myNonSkipCount;
}
@Override @Override
public void close() { public void close() {
if (myResultsIterator != null) { if (myResultsIterator != null) {

View File

@ -50,4 +50,8 @@ public interface ITermConceptDao extends JpaRepository<TermConcept, Long> {
@Query("SELECT t FROM TermConcept t WHERE t.myIndexStatus = null") @Query("SELECT t FROM TermConcept t WHERE t.myIndexStatus = null")
Page<TermConcept> findResourcesRequiringReindexing(Pageable thePageRequest); Page<TermConcept> findResourcesRequiringReindexing(Pageable thePageRequest);
@Modifying
@Query("DELETE FROM TermConcept t WHERE t.myId = :pid")
void deleteByPid(@Param("pid") Long theId);
} }

View File

@ -4,6 +4,7 @@ import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice; import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query; import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param; import org.springframework.data.repository.query.Param;
@ -39,4 +40,9 @@ public interface ITermConceptParentChildLinkDao extends JpaRepository<TermConcep
@Query("SELECT t.myPid FROM TermConceptParentChildLink t WHERE t.myCodeSystem.myId = :cs_pid") @Query("SELECT t.myPid FROM TermConceptParentChildLink t WHERE t.myCodeSystem.myId = :cs_pid")
Slice<Long> findIdsByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid); Slice<Long> findIdsByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid);
@Modifying
@Query("DELETE FROM TermConceptParentChildLink t WHERE t.myChildPid = :pid OR t.myParentPid = :pid")
void deleteByConceptPid(@Param("pid") Long theId);
} }
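Both repository additions above (deleteByPid on ITermConceptDao and deleteByConceptPid here) use the same Spring Data JPA idiom: a bulk JPQL DELETE annotated with @Modifying so it runs as a mutating statement inside the caller's transaction rather than being mapped as a result set. A minimal sketch of the pattern against a purely illustrative entity:

import javax.persistence.Entity;
import javax.persistence.Id;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

// Illustrative entity standing in for TermConcept / TermConceptParentChildLink.
@Entity
class ConceptSketch {
    @Id
    Long myId;
}

// @Modifying tells Spring Data the JPQL mutates rows rather than returning a
// result set; the surrounding service supplies the transaction.
interface ConceptSketchDao extends JpaRepository<ConceptSketch, Long> {

    @Modifying
    @Query("DELETE FROM ConceptSketch t WHERE t.myId = :pid")
    void deleteByPid(@Param("pid") Long thePid);
}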

View File

@ -24,10 +24,10 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IContextValidationSupport; import ca.uhn.fhir.context.support.IContextValidationSupport;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao; import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem; import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.entity.TermCodeSystem; import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
@ -35,7 +35,6 @@ import ca.uhn.fhir.jpa.util.LogicUtil;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.hapi.validation.ValidationSupportChain; import org.hl7.fhir.dstu3.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.dstu3.model.CodeSystem; import org.hl7.fhir.dstu3.model.CodeSystem;
import org.hl7.fhir.dstu3.model.CodeableConcept; import org.hl7.fhir.dstu3.model.CodeableConcept;
@ -53,6 +52,7 @@ import java.util.List;
import java.util.Set; import java.util.Set;
import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hl7.fhir.convertors.conv30_40.CodeSystem30_40.convertCodeSystem;
@Transactional @Transactional
public class FhirResourceDaoCodeSystemDstu3 extends BaseHapiFhirResourceDao<CodeSystem> implements IFhirResourceDaoCodeSystem<CodeSystem, Coding, CodeableConcept> { public class FhirResourceDaoCodeSystemDstu3 extends BaseHapiFhirResourceDao<CodeSystem> implements IFhirResourceDaoCodeSystem<CodeSystem, Coding, CodeableConcept> {
@ -147,7 +147,7 @@ public class FhirResourceDaoCodeSystemDstu3 extends BaseHapiFhirResourceDao<Code
CodeSystem csDstu3 = (CodeSystem) theResource; CodeSystem csDstu3 = (CodeSystem) theResource;
org.hl7.fhir.r4.model.CodeSystem cs = VersionConvertor_30_40.convertCodeSystem(csDstu3); org.hl7.fhir.r4.model.CodeSystem cs = convertCodeSystem(csDstu3);
addPidToResource(theEntity, cs); addPidToResource(theEntity, cs);
myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(cs, (ResourceTable) theEntity); myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(cs, (ResourceTable) theEntity);
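This DAO, like the other terminology DAOs in this change, drops the monolithic VersionConvertor_30_40 entry point in favour of the per-resource converter classes under org.hl7.fhir.convertors.conv30_40 (and conv40_50 for the R5 DAOs further down). A minimal sketch of the new call shape, wrapping the same static convertCodeSystem used in the hunk:

import org.hl7.fhir.convertors.conv30_40.CodeSystem30_40;
import org.hl7.fhir.exceptions.FHIRException;

public class CodeSystemConversionSketch {

    /**
     * Converts a DSTU3 CodeSystem to its R4 form via the per-resource
     * converter class shown in the diff above. Illustrative wrapper only.
     */
    public static org.hl7.fhir.r4.model.CodeSystem toR4(org.hl7.fhir.dstu3.model.CodeSystem theDstu3)
            throws FHIRException {
        return CodeSystem30_40.convertCodeSystem(theDstu3);
    }
}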

View File

@ -22,21 +22,24 @@ package ca.uhn.fhir.jpa.dao.dstu3;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao; import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoConceptMap; import ca.uhn.fhir.jpa.dao.IFhirResourceDaoConceptMap;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.term.TranslationMatch; import ca.uhn.fhir.jpa.term.TranslationMatch;
import ca.uhn.fhir.jpa.term.TranslationRequest; import ca.uhn.fhir.jpa.term.TranslationRequest;
import ca.uhn.fhir.jpa.term.TranslationResult; import ca.uhn.fhir.jpa.term.TranslationResult;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.model.ConceptMap; import org.hl7.fhir.dstu3.model.ConceptMap;
import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.*; import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.UriType;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import java.util.Date; import java.util.Date;
@ -44,6 +47,8 @@ import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import static org.hl7.fhir.convertors.conv30_40.ConceptMap30_40.convertConceptMap;
public class FhirResourceDaoConceptMapDstu3 extends BaseHapiFhirResourceDao<ConceptMap> implements IFhirResourceDaoConceptMap<ConceptMap> { public class FhirResourceDaoConceptMapDstu3 extends BaseHapiFhirResourceDao<ConceptMap> implements IFhirResourceDaoConceptMap<ConceptMap> {
@Autowired @Autowired
private ITermReadSvc myHapiTerminologySvc; private ITermReadSvc myHapiTerminologySvc;
@ -166,7 +171,7 @@ public class FhirResourceDaoConceptMapDstu3 extends BaseHapiFhirResourceDao<Conc
if (retVal.getDeleted() == null) { if (retVal.getDeleted() == null) {
try { try {
ConceptMap conceptMap = (ConceptMap) theResource; ConceptMap conceptMap = (ConceptMap) theResource;
org.hl7.fhir.r4.model.ConceptMap converted = VersionConvertor_30_40.convertConceptMap(conceptMap); org.hl7.fhir.r4.model.ConceptMap converted = convertConceptMap(conceptMap);
myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, converted); myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, converted);
} catch (FHIRException fe) { } catch (FHIRException fe) {
throw new InternalErrorException(fe); throw new InternalErrorException(fe);

View File

@ -33,12 +33,15 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.ElementUtil; import ca.uhn.fhir.util.ElementUtil;
import org.apache.commons.codec.binary.StringUtils; import org.apache.commons.codec.binary.StringUtils;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.hapi.ctx.DefaultProfileValidationSupport; import org.hl7.fhir.dstu3.hapi.ctx.DefaultProfileValidationSupport;
import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext; import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport; import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.model.*; import org.hl7.fhir.dstu3.model.CodeSystem;
import org.hl7.fhir.dstu3.model.CodeableConcept;
import org.hl7.fhir.dstu3.model.Coding;
import org.hl7.fhir.dstu3.model.Enumerations.PublicationStatus; import org.hl7.fhir.dstu3.model.Enumerations.PublicationStatus;
import org.hl7.fhir.dstu3.model.IntegerType;
import org.hl7.fhir.dstu3.model.ValueSet;
import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent; import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent;
import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetFilterComponent; import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetFilterComponent;
import org.hl7.fhir.dstu3.model.ValueSet.FilterOperator; import org.hl7.fhir.dstu3.model.ValueSet.FilterOperator;
@ -51,7 +54,6 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import java.util.Collections; import java.util.Collections;
import java.util.Date; import java.util.Date;
@ -59,6 +61,7 @@ import java.util.List;
import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hl7.fhir.convertors.conv30_40.ValueSet30_40.convertValueSet;
public class FhirResourceDaoValueSetDstu3 extends BaseHapiFhirResourceDao<ValueSet> implements IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> { public class FhirResourceDaoValueSetDstu3 extends BaseHapiFhirResourceDao<ValueSet> implements IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> {
private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoValueSetDstu3.class); private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoValueSetDstu3.class);
@ -420,7 +423,7 @@ public class FhirResourceDaoValueSetDstu3 extends BaseHapiFhirResourceDao<ValueS
if (retVal.getDeleted() == null) { if (retVal.getDeleted() == null) {
try { try {
ValueSet valueSet = (ValueSet) theResource; ValueSet valueSet = (ValueSet) theResource;
org.hl7.fhir.r4.model.ValueSet converted = VersionConvertor_30_40.convertValueSet(valueSet); org.hl7.fhir.r4.model.ValueSet converted = convertValueSet(valueSet);
myHapiTerminologySvc.storeTermValueSet(retVal, converted); myHapiTerminologySvc.storeTermValueSet(retVal, converted);
} catch (FHIRException fe) { } catch (FHIRException fe) {
throw new InternalErrorException(fe); throw new InternalErrorException(fe);

View File

@ -78,8 +78,6 @@ public class ExpungeEverythingService {
ourLog.info("BEGINNING GLOBAL $expunge"); ourLog.info("BEGINNING GLOBAL $expunge");
myTxTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); myTxTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
myTxTemplate.execute(t -> { myTxTemplate.execute(t -> {
counter.addAndGet(doExpungeEverythingQuery("UPDATE " + ResourceHistoryTable.class.getSimpleName() + " d SET d.myForcedId = null"));
counter.addAndGet(doExpungeEverythingQuery("UPDATE " + ResourceTable.class.getSimpleName() + " d SET d.myForcedId = null"));
counter.addAndGet(doExpungeEverythingQuery("UPDATE " + TermCodeSystem.class.getSimpleName() + " d SET d.myCurrentVersion = null")); counter.addAndGet(doExpungeEverythingQuery("UPDATE " + TermCodeSystem.class.getSimpleName() + " d SET d.myCurrentVersion = null"));
return null; return null;
}); });

View File

@ -39,13 +39,17 @@ import javax.persistence.criteria.Join;
import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Predicate;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Date; import java.util.Date;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map;
@Component @Component
@Scope("prototype") @Scope("prototype")
public class PredicateBuilderDate extends BasePredicateBuilder implements IPredicateBuilder { public class PredicateBuilderDate extends BasePredicateBuilder implements IPredicateBuilder {
private static final Logger ourLog = LoggerFactory.getLogger(PredicateBuilderDate.class); private static final Logger ourLog = LoggerFactory.getLogger(PredicateBuilderDate.class);
private Map<String, Join<ResourceTable, ResourceIndexedSearchParamDate>> myJoinMap;
PredicateBuilderDate(SearchBuilder theSearchBuilder) { PredicateBuilderDate(SearchBuilder theSearchBuilder) {
super(theSearchBuilder); super(theSearchBuilder);
} }
@ -56,7 +60,18 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
List<? extends IQueryParameterType> theList, List<? extends IQueryParameterType> theList,
SearchFilterParser.CompareOperation operation) { SearchFilterParser.CompareOperation operation) {
Join<ResourceTable, ResourceIndexedSearchParamDate> join = createJoin(SearchBuilderJoinEnum.DATE, theParamName); boolean newJoin = false;
if (myJoinMap == null) {
myJoinMap = new HashMap<>();
}
String key = theResourceName + " " + theParamName;
Join<ResourceTable, ResourceIndexedSearchParamDate> join = myJoinMap.get(key);
if (join == null) {
join = createJoin(SearchBuilderJoinEnum.DATE, theParamName);
myJoinMap.put(key, join);
newJoin = true;
}
if (theList.get(0).getMissing() != null) { if (theList.get(0).getMissing() != null) {
Boolean missing = theList.get(0).getMissing(); Boolean missing = theList.get(0).getMissing();
@ -77,7 +92,14 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
} }
Predicate orPredicates = myBuilder.or(toArray(codePredicates)); Predicate orPredicates = myBuilder.or(toArray(codePredicates));
if (newJoin) {
Predicate identityAndValuePredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, orPredicates);
myQueryRoot.addPredicate(identityAndValuePredicate);
} else {
myQueryRoot.addPredicate(orPredicates); myQueryRoot.addPredicate(orPredicates);
}
return orPredicates; return orPredicates;
} }
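The date predicate builder now keeps its joins in myJoinMap, keyed by resource name plus parameter name, so multiple date clauses on the same search parameter reuse a single SQL join; only the call that creates the join adds the parameter-identity predicate. A minimal, framework-free sketch of that reuse pattern (the key format and the newJoin flag mirror the hunk; the generic J stands in for javax.persistence.criteria.Join):

import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;

/** Illustrative join cache keyed by "resourceName paramName". */
class JoinCacheSketch<J> {

    /** Pairs a join with a flag saying whether this call created it. */
    static final class CachedJoin<J> {
        final J join;
        final boolean newJoin;

        CachedJoin(J theJoin, boolean theNewJoin) {
            join = theJoin;
            newJoin = theNewJoin;
        }
    }

    private final Map<String, J> myJoinMap = new HashMap<>();

    CachedJoin<J> getOrCreate(String theResourceName, String theParamName, Supplier<J> theJoinFactory) {
        String key = theResourceName + " " + theParamName;
        J existing = myJoinMap.get(key);
        if (existing != null) {
            // Reused join: the identity predicate was already added the first time.
            return new CachedJoin<>(existing, false);
        }
        J created = theJoinFactory.get(); // first use of this key creates the SQL join
        myJoinMap.put(key, created);
        return new CachedJoin<>(created, true); // caller adds identity + value predicate
    }
}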
@ -86,12 +108,13 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
String theParamName, String theParamName,
CriteriaBuilder theBuilder, CriteriaBuilder theBuilder,
From<?, ResourceIndexedSearchParamDate> theFrom) { From<?, ResourceIndexedSearchParamDate> theFrom) {
return createPredicateDate(theParam, Predicate predicateDate = createPredicateDate(theParam,
theResourceName, theResourceName,
theParamName, theParamName,
theBuilder, theBuilder,
theFrom, theFrom,
null); null);
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, predicateDate);
} }
private Predicate createPredicateDate(IQueryParameterType theParam, private Predicate createPredicateDate(IQueryParameterType theParam,
@ -99,7 +122,7 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
String theParamName, String theParamName,
CriteriaBuilder theBuilder, CriteriaBuilder theBuilder,
From<?, ResourceIndexedSearchParamDate> theFrom, From<?, ResourceIndexedSearchParamDate> theFrom,
SearchFilterParser.CompareOperation operation) { SearchFilterParser.CompareOperation theOperation) {
Predicate p; Predicate p;
if (theParam instanceof DateParam) { if (theParam instanceof DateParam) {
@ -109,7 +132,7 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
p = createPredicateDateFromRange(theBuilder, p = createPredicateDateFromRange(theBuilder,
theFrom, theFrom,
range, range,
operation); theOperation);
} else { } else {
// TODO: handle missing date param? // TODO: handle missing date param?
p = null; p = null;
@ -119,12 +142,12 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
p = createPredicateDateFromRange(theBuilder, p = createPredicateDateFromRange(theBuilder,
theFrom, theFrom,
range, range,
operation); theOperation);
} else { } else {
throw new IllegalArgumentException("Invalid token type: " + theParam.getClass()); throw new IllegalArgumentException("Invalid token type: " + theParam.getClass());
} }
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, p); return p;
} }
private Predicate createPredicateDateFromRange(CriteriaBuilder theBuilder, private Predicate createPredicateDateFromRange(CriteriaBuilder theBuilder,

View File

@ -430,7 +430,7 @@ class PredicateBuilderReference extends BasePredicateBuilder {
break; break;
case Constants.PARAM_HAS: case Constants.PARAM_HAS:
addPredicateHas(theAndOrParams, theRequest); addPredicateHas(theResourceName, theAndOrParams, theRequest);
break; break;
case Constants.PARAM_TAG: case Constants.PARAM_TAG:
@ -756,7 +756,7 @@ class PredicateBuilderReference extends BasePredicateBuilder {
return retVal; return retVal;
} }
private void addPredicateHas(List<List<IQueryParameterType>> theHasParameters, RequestDetails theRequest) { private void addPredicateHas(String theResourceType, List<List<IQueryParameterType>> theHasParameters, RequestDetails theRequest) {
for (List<? extends IQueryParameterType> nextOrList : theHasParameters) { for (List<? extends IQueryParameterType> nextOrList : theHasParameters) {
@ -811,8 +811,9 @@ class PredicateBuilderReference extends BasePredicateBuilder {
Join<ResourceTable, ResourceLink> join = myQueryRoot.join("myResourceLinksAsTarget", JoinType.LEFT); Join<ResourceTable, ResourceLink> join = myQueryRoot.join("myResourceLinksAsTarget", JoinType.LEFT);
Predicate pathPredicate = myPredicateBuilder.createResourceLinkPathPredicate(targetResourceType, paramReference, join); Predicate pathPredicate = myPredicateBuilder.createResourceLinkPathPredicate(targetResourceType, paramReference, join);
Predicate pidPredicate = join.get("mySourceResourcePid").in(subQ); Predicate sourceTypePredicate = myBuilder.equal(join.get("myTargetResourceType"), theResourceType);
Predicate andPredicate = myBuilder.and(pathPredicate, pidPredicate); Predicate sourcePidPredicate = join.get("mySourceResourcePid").in(subQ);
Predicate andPredicate = myBuilder.and(pathPredicate, sourcePidPredicate, sourceTypePredicate);
myQueryRoot.addPredicate(andPredicate); myQueryRoot.addPredicate(andPredicate);
} }
} }
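With the resource type now passed into addPredicateHas(...), the generated criteria also constrain myTargetResourceType, so _has only matches links that point at the resource type being searched. A small sketch of how the three predicates combine (the join and subquery are assumed to be built by the caller, as in the hunk; field names are the ones shown in the diff):

import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.Join;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Subquery;

class HasPredicateSketch {

    /**
     * Combines the link-path predicate with the source-pid IN (subquery)
     * check and the new target-type restriction. Illustrative only.
     */
    static Predicate buildHasPredicate(CriteriaBuilder theBuilder,
                                       Join<?, ?> theResourceLinkJoin,
                                       Predicate thePathPredicate,
                                       Subquery<Long> theSourcePidSubquery,
                                       String theTargetResourceType) {
        Predicate sourceTypePredicate = theBuilder.equal(theResourceLinkJoin.get("myTargetResourceType"), theTargetResourceType);
        Predicate sourcePidPredicate = theResourceLinkJoin.get("mySourceResourcePid").in(theSourcePidSubquery);
        return theBuilder.and(thePathPredicate, sourcePidPredicate, sourceTypePredicate);
    }
}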

View File

@ -148,7 +148,7 @@ public class FhirResourceDaoCodeSystemR5 extends BaseHapiFhirResourceDao<CodeSys
CodeSystem cs = (CodeSystem) theResource; CodeSystem cs = (CodeSystem) theResource;
addPidToResource(theEntity, theResource); addPidToResource(theEntity, theResource);
myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem(cs), (ResourceTable) theEntity); myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(org.hl7.fhir.convertors.conv40_50.CodeSystem40_50.convertCodeSystem(cs), (ResourceTable) theEntity);
} }
return retVal; return retVal;

View File

@ -166,7 +166,7 @@ public class FhirResourceDaoConceptMapR5 extends BaseHapiFhirResourceDao<Concept
if (retVal.getDeleted() == null) { if (retVal.getDeleted() == null) {
ConceptMap conceptMap = (ConceptMap) theResource; ConceptMap conceptMap = (ConceptMap) theResource;
myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, org.hl7.fhir.convertors.conv40_50.ConceptMap.convertConceptMap(conceptMap)); myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, org.hl7.fhir.convertors.conv40_50.ConceptMap40_50.convertConceptMap(conceptMap));
} else { } else {
myHapiTerminologySvc.deleteConceptMapAndChildren(retVal); myHapiTerminologySvc.deleteConceptMapAndChildren(retVal);
} }

View File

@ -415,7 +415,7 @@ public class FhirResourceDaoValueSetR5 extends BaseHapiFhirResourceDao<ValueSet>
if (myDaoConfig.isPreExpandValueSets() && !retVal.isUnchangedInCurrentOperation()) { if (myDaoConfig.isPreExpandValueSets() && !retVal.isUnchangedInCurrentOperation()) {
if (retVal.getDeleted() == null) { if (retVal.getDeleted() == null) {
ValueSet valueSet = (ValueSet) theResource; ValueSet valueSet = (ValueSet) theResource;
myHapiTerminologySvc.storeTermValueSet(retVal, org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSet)); myHapiTerminologySvc.storeTermValueSet(retVal, org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(valueSet));
} else { } else {
myHapiTerminologySvc.deleteValueSetAndChildren(retVal); myHapiTerminologySvc.deleteValueSetAndChildren(retVal);
} }

View File

@ -47,7 +47,7 @@ public class TermCodeSystem implements Serializable {
@Column(name = "CODE_SYSTEM_URI", nullable = false, length = MAX_URL_LENGTH) @Column(name = "CODE_SYSTEM_URI", nullable = false, length = MAX_URL_LENGTH)
private String myCodeSystemUri; private String myCodeSystemUri;
@OneToOne() @OneToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "CURRENT_VERSION_PID", referencedColumnName = "PID", nullable = true, foreignKey = @ForeignKey(name = "FK_TRMCODESYSTEM_CURVER")) @JoinColumn(name = "CURRENT_VERSION_PID", referencedColumnName = "PID", nullable = true, foreignKey = @ForeignKey(name = "FK_TRMCODESYSTEM_CURVER"))
private TermCodeSystemVersion myCurrentVersion; private TermCodeSystemVersion myCurrentVersion;
@Column(name = "CURRENT_VERSION_PID", nullable = true, insertable = false, updatable = false) @Column(name = "CURRENT_VERSION_PID", nullable = true, insertable = false, updatable = false)
@ -57,7 +57,7 @@ public class TermCodeSystem implements Serializable {
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CODESYSTEM_PID") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CODESYSTEM_PID")
@Column(name = "PID") @Column(name = "PID")
private Long myPid; private Long myPid;
@OneToOne() @OneToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_TRMCODESYSTEM_RES")) @JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_TRMCODESYSTEM_RES"))
private ResourceTable myResource; private ResourceTable myResource;
@Column(name = "RES_ID", insertable = false, updatable = false) @Column(name = "RES_ID", insertable = false, updatable = false)
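TermCodeSystem above, and the terminology entities in the following files, switch their @OneToOne/@ManyToOne associations to FetchType.LAZY, so loading one row no longer eagerly fetches the associated version and resource rows (JPA defaults to-one associations to EAGER). A minimal sketch of the mapping style with purely illustrative entity and column names:

import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToOne;

// Illustrative entities only; names do not match the real HAPI tables.
@Entity
public class LazyAssociationSketch {

    @Id
    @GeneratedValue
    private Long myPid;

    // LAZY: the associated row is loaded on first access instead of with this
    // entity; to-one associations are EAGER by default in JPA.
    @OneToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "CURRENT_VERSION_PID", foreignKey = @ForeignKey(name = "FK_SKETCH_CURVER"))
    private VersionSketch myCurrentVersion;

    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "PARENT_PID")
    private LazyAssociationSketch myParent;
}

@Entity
class VersionSketch {
    @Id
    @GeneratedValue
    private Long myPid;
}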

View File

@ -50,7 +50,7 @@ public class TermCodeSystemVersion implements Serializable {
@Column(name = "PID") @Column(name = "PID")
private Long myId; private Long myId;
@OneToOne() @OneToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_CODESYSVER_RES_ID")) @JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_CODESYSVER_RES_ID"))
private ResourceTable myResource; private ResourceTable myResource;
@ -64,7 +64,7 @@ public class TermCodeSystemVersion implements Serializable {
* This was added in HAPI FHIR 3.3.0 and is nullable just to avoid migration * This was added in HAPI FHIR 3.3.0 and is nullable just to avoid migration
* issued. It should be made non-nullable at some point. * issued. It should be made non-nullable at some point.
*/ */
@ManyToOne @ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "CODESYSTEM_PID", referencedColumnName = "PID", nullable = true, foreignKey = @ForeignKey(name = "FK_CODESYSVER_CS_ID")) @JoinColumn(name = "CODESYSTEM_PID", referencedColumnName = "PID", nullable = true, foreignKey = @ForeignKey(name = "FK_CODESYSVER_CS_ID"))
private TermCodeSystem myCodeSystem; private TermCodeSystem myCodeSystem;
@ -72,7 +72,7 @@ public class TermCodeSystemVersion implements Serializable {
private Long myCodeSystemPid; private Long myCodeSystemPid;
@SuppressWarnings("unused") @SuppressWarnings("unused")
@OneToOne(mappedBy = "myCurrentVersion", optional = true) @OneToOne(mappedBy = "myCurrentVersion", optional = true, fetch = FetchType.LAZY)
private TermCodeSystem myCodeSystemHavingThisVersionAsCurrentVersionIfAny; private TermCodeSystem myCodeSystemHavingThisVersionAsCurrentVersionIfAny;
@Column(name = "CS_DISPLAY", nullable = true, updatable = false, length = MAX_VERSION_LENGTH) @Column(name = "CS_DISPLAY", nullable = true, updatable = false, length = MAX_VERSION_LENGTH)

View File

@ -65,7 +65,7 @@ public class TermConcept implements Serializable {
@Temporal(TemporalType.TIMESTAMP) @Temporal(TemporalType.TIMESTAMP)
@Column(name = "CONCEPT_UPDATED", nullable = true) @Column(name = "CONCEPT_UPDATED", nullable = true)
private Date myUpdated; private Date myUpdated;
@ManyToOne() @ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "CODESYSTEM_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPT_PID_CS_PID")) @JoinColumn(name = "CODESYSTEM_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPT_PID_CS_PID"))
private TermCodeSystemVersion myCodeSystem; private TermCodeSystemVersion myCodeSystem;
@Column(name = "CODESYSTEM_PID", insertable = false, updatable = false) @Column(name = "CODESYSTEM_PID", insertable = false, updatable = false)
@ -79,11 +79,11 @@ public class TermConcept implements Serializable {
@Field(name = "myDisplayPhonetic", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompletePhoneticAnalyzer")) @Field(name = "myDisplayPhonetic", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompletePhoneticAnalyzer"))
}) })
private String myDisplay; private String myDisplay;
@OneToMany(mappedBy = "myConcept", orphanRemoval = false) @OneToMany(mappedBy = "myConcept", orphanRemoval = false, fetch = FetchType.LAZY)
@Field(name = "PROPmyProperties", analyzer = @Analyzer(definition = "termConceptPropertyAnalyzer")) @Field(name = "PROPmyProperties", analyzer = @Analyzer(definition = "termConceptPropertyAnalyzer"))
@FieldBridge(impl = TermConceptPropertyFieldBridge.class) @FieldBridge(impl = TermConceptPropertyFieldBridge.class)
private Collection<TermConceptProperty> myProperties; private Collection<TermConceptProperty> myProperties;
@OneToMany(mappedBy = "myConcept", orphanRemoval = false) @OneToMany(mappedBy = "myConcept", orphanRemoval = false, fetch = FetchType.LAZY)
private Collection<TermConceptDesignation> myDesignations; private Collection<TermConceptDesignation> myDesignations;
@Id() @Id()
@SequenceGenerator(name = "SEQ_CONCEPT_PID", sequenceName = "SEQ_CONCEPT_PID") @SequenceGenerator(name = "SEQ_CONCEPT_PID", sequenceName = "SEQ_CONCEPT_PID")

View File

@ -39,7 +39,7 @@ public class TermConceptDesignation implements Serializable {
public static final int MAX_LENGTH = 500; public static final int MAX_LENGTH = 500;
public static final int MAX_VAL_LENGTH = 2000; public static final int MAX_VAL_LENGTH = 2000;
@ManyToOne @ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "CONCEPT_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTDESIG_CONCEPT")) @JoinColumn(name = "CONCEPT_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTDESIG_CONCEPT"))
private TermConcept myConcept; private TermConcept myConcept;
@Id() @Id()
@ -62,7 +62,7 @@ public class TermConceptDesignation implements Serializable {
* *
* @since 3.5.0 * @since 3.5.0
*/ */
@ManyToOne @ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "CS_VER_PID", nullable = true, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTDESIG_CSV")) @JoinColumn(name = "CS_VER_PID", nullable = true, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTDESIG_CSV"))
private TermCodeSystemVersion myCodeSystemVersion; private TermCodeSystemVersion myCodeSystemVersion;

View File

@ -32,14 +32,14 @@ import java.io.Serializable;
public class TermConceptParentChildLink implements Serializable { public class TermConceptParentChildLink implements Serializable {
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
@ManyToOne() @ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "CHILD_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TERM_CONCEPTPC_CHILD")) @JoinColumn(name = "CHILD_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TERM_CONCEPTPC_CHILD"))
private TermConcept myChild; private TermConcept myChild;
@Column(name = "CHILD_PID", insertable = false, updatable = false) @Column(name = "CHILD_PID", insertable = false, updatable = false)
private Long myChildPid; private Long myChildPid;
@ManyToOne() @ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "CODESYSTEM_PID", nullable = false, foreignKey = @ForeignKey(name = "FK_TERM_CONCEPTPC_CS")) @JoinColumn(name = "CODESYSTEM_PID", nullable = false, foreignKey = @ForeignKey(name = "FK_TERM_CONCEPTPC_CS"))
private TermCodeSystemVersion myCodeSystem; private TermCodeSystemVersion myCodeSystem;
@ -47,7 +47,7 @@ public class TermConceptParentChildLink implements Serializable {
@Fields({@Field(name = "myCodeSystemVersionPid")}) @Fields({@Field(name = "myCodeSystemVersionPid")})
private long myCodeSystemVersionPid; private long myCodeSystemVersionPid;
@ManyToOne(cascade = {}) @ManyToOne(fetch = FetchType.LAZY, cascade = {})
@JoinColumn(name = "PARENT_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TERM_CONCEPTPC_PARENT")) @JoinColumn(name = "PARENT_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TERM_CONCEPTPC_PARENT"))
private TermConcept myParent; private TermConcept myParent;

View File

@ -44,7 +44,7 @@ public class TermConceptProperty implements Serializable {
private static final int MAX_LENGTH = 500; private static final int MAX_LENGTH = 500;
static final int MAX_PROPTYPE_ENUM_LENGTH = 6; static final int MAX_PROPTYPE_ENUM_LENGTH = 6;
@ManyToOne @ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "CONCEPT_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CONCEPT")) @JoinColumn(name = "CONCEPT_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CONCEPT"))
private TermConcept myConcept; private TermConcept myConcept;
/** /**
@ -52,7 +52,7 @@ public class TermConceptProperty implements Serializable {
* *
* @since 3.5.0 * @since 3.5.0
*/ */
@ManyToOne @ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "CS_VER_PID", nullable = true, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CSV")) @JoinColumn(name = "CS_VER_PID", nullable = true, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CSV"))
private TermCodeSystemVersion myCodeSystemVersion; private TermCodeSystemVersion myCodeSystemVersion;
@Id() @Id()

View File

@ -39,7 +39,6 @@ import ca.uhn.fhir.util.ValidateUtil;
import com.google.common.base.Charsets; import com.google.common.base.Charsets;
import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap; import com.google.common.collect.Multimap;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.ICompositeType; import org.hl7.fhir.instance.model.api.ICompositeType;
@ -61,6 +60,7 @@ import java.util.Map;
import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.trim; import static org.apache.commons.lang3.StringUtils.trim;
import static org.hl7.fhir.convertors.conv30_40.CodeSystem30_40.convertCodeSystem;
public class TerminologyUploaderProvider extends BaseJpaProvider { public class TerminologyUploaderProvider extends BaseJpaProvider {
@ -272,10 +272,10 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
CodeSystem nextCodeSystem; CodeSystem nextCodeSystem;
switch (getContext().getVersion().getVersion()) { switch (getContext().getVersion().getVersion()) {
case DSTU3: case DSTU3:
nextCodeSystem = VersionConvertor_30_40.convertCodeSystem((org.hl7.fhir.dstu3.model.CodeSystem) theCodeSystem); nextCodeSystem = convertCodeSystem((org.hl7.fhir.dstu3.model.CodeSystem) theCodeSystem);
break; break;
case R5: case R5:
nextCodeSystem = org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem((org.hl7.fhir.r5.model.CodeSystem) theCodeSystem); nextCodeSystem = org.hl7.fhir.convertors.conv40_50.CodeSystem40_50.convertCodeSystem((org.hl7.fhir.r5.model.CodeSystem) theCodeSystem);
break; break;
default: default:
nextCodeSystem = (CodeSystem) theCodeSystem; nextCodeSystem = (CodeSystem) theCodeSystem;

View File

@ -21,9 +21,9 @@ package ca.uhn.fhir.jpa.provider.dstu3;
*/ */
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoConceptMap; import ca.uhn.fhir.jpa.dao.IFhirResourceDaoConceptMap;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.term.TranslationRequest; import ca.uhn.fhir.jpa.term.TranslationRequest;
import ca.uhn.fhir.jpa.term.TranslationResult; import ca.uhn.fhir.jpa.term.TranslationResult;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam; import ca.uhn.fhir.rest.annotation.OperationParam;
@ -31,11 +31,21 @@ import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.convertors.VersionConvertor_30_40; import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.model.*; import org.hl7.fhir.dstu3.model.BooleanType;
import org.hl7.fhir.dstu3.model.CodeType;
import org.hl7.fhir.dstu3.model.CodeableConcept;
import org.hl7.fhir.dstu3.model.Coding;
import org.hl7.fhir.dstu3.model.ConceptMap;
import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.dstu3.model.StringType;
import org.hl7.fhir.dstu3.model.UriType;
import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.FHIRException;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import static org.hl7.fhir.convertors.conv30_40.Parameters30_40.convertParameters;
public class BaseJpaResourceProviderConceptMapDstu3 extends JpaResourceProviderDstu3<ConceptMap> { public class BaseJpaResourceProviderConceptMapDstu3 extends JpaResourceProviderDstu3<ConceptMap> {
@Operation(name = JpaConstants.OPERATION_TRANSLATE, idempotent = true, returnParameters = { @Operation(name = JpaConstants.OPERATION_TRANSLATE, idempotent = true, returnParameters = {
@OperationParam(name = "result", type = BooleanType.class, min = 1, max = 1), @OperationParam(name = "result", type = BooleanType.class, min = 1, max = 1),
@ -129,7 +139,7 @@ public class BaseJpaResourceProviderConceptMapDstu3 extends JpaResourceProviderD
TranslationResult result = dao.translate(translationRequest, theRequestDetails); TranslationResult result = dao.translate(translationRequest, theRequestDetails);
// Convert from R4 to DSTU3 // Convert from R4 to DSTU3
return VersionConvertor_30_40.convertParameters(result.toParameters()); return convertParameters(result.toParameters());
} catch (FHIRException fe) { } catch (FHIRException fe) {
throw new InternalErrorException(fe); throw new InternalErrorException(fe);
} finally { } finally {

View File

@ -21,17 +21,28 @@ package ca.uhn.fhir.jpa.provider.dstu3;
*/ */
import ca.uhn.fhir.jpa.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.provider.BaseJpaResourceProvider;
import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.annotation.*; import ca.uhn.fhir.jpa.provider.BaseJpaResourceProvider;
import ca.uhn.fhir.rest.annotation.ConditionalUrlParam;
import ca.uhn.fhir.rest.annotation.Create;
import ca.uhn.fhir.rest.annotation.Delete;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.annotation.ResourceParam;
import ca.uhn.fhir.rest.annotation.Update;
import ca.uhn.fhir.rest.annotation.Validate;
import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.MethodOutcome; import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.ValidationModeEnum; import ca.uhn.fhir.rest.api.ValidationModeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.convertors.VersionConvertor_30_40; import org.hl7.fhir.dstu3.model.BooleanType;
import org.hl7.fhir.dstu3.model.*; import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.dstu3.model.IntegerType;
import org.hl7.fhir.dstu3.model.Meta;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
@ -41,6 +52,7 @@ import javax.servlet.http.HttpServletRequest;
import static ca.uhn.fhir.jpa.model.util.JpaConstants.OPERATION_META; import static ca.uhn.fhir.jpa.model.util.JpaConstants.OPERATION_META;
import static ca.uhn.fhir.jpa.model.util.JpaConstants.OPERATION_META_ADD; import static ca.uhn.fhir.jpa.model.util.JpaConstants.OPERATION_META_ADD;
import static ca.uhn.fhir.jpa.model.util.JpaConstants.OPERATION_META_DELETE; import static ca.uhn.fhir.jpa.model.util.JpaConstants.OPERATION_META_DELETE;
import static org.hl7.fhir.convertors.conv30_40.Parameters30_40.convertParameters;
public class JpaResourceProviderDstu3<T extends IAnyResource> extends BaseJpaResourceProvider<T> { public class JpaResourceProviderDstu3<T extends IAnyResource> extends BaseJpaResourceProvider<T> {
@ -91,7 +103,7 @@ public class JpaResourceProviderDstu3<T extends IAnyResource> extends BaseJpaRes
RequestDetails theRequest) { RequestDetails theRequest) {
org.hl7.fhir.r4.model.Parameters retVal = super.doExpunge(theIdParam, theLimit, theExpungeDeletedResources, theExpungeOldVersions, null, theRequest); org.hl7.fhir.r4.model.Parameters retVal = super.doExpunge(theIdParam, theLimit, theExpungeDeletedResources, theExpungeOldVersions, null, theRequest);
try { try {
return VersionConvertor_30_40.convertParameters(retVal); return convertParameters(retVal);
} catch (FHIRException e) { } catch (FHIRException e) {
throw new InternalErrorException(e); throw new InternalErrorException(e);
} }
@ -107,7 +119,7 @@ public class JpaResourceProviderDstu3<T extends IAnyResource> extends BaseJpaRes
RequestDetails theRequest) { RequestDetails theRequest) {
org.hl7.fhir.r4.model.Parameters retVal = super.doExpunge(null, theLimit, theExpungeDeletedResources, theExpungeOldVersions, null, theRequest); org.hl7.fhir.r4.model.Parameters retVal = super.doExpunge(null, theLimit, theExpungeDeletedResources, theExpungeOldVersions, null, theRequest);
try { try {
return VersionConvertor_30_40.convertParameters(retVal); return convertParameters(retVal);
} catch (FHIRException e) { } catch (FHIRException e) {
throw new InternalErrorException(e); throw new InternalErrorException(e);
} }

View File

@ -3,17 +3,26 @@ package ca.uhn.fhir.jpa.provider.dstu3;
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion; import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion;
import ca.uhn.fhir.jpa.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.provider.BaseJpaSystemProviderDstu2Plus;
import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.provider.BaseJpaSystemProviderDstu2Plus;
import ca.uhn.fhir.model.api.annotation.Description; import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.rest.annotation.*; import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.annotation.Transaction;
import ca.uhn.fhir.rest.annotation.TransactionParam;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.hl7.fhir.convertors.VersionConvertor_30_40; import org.hl7.fhir.dstu3.model.BooleanType;
import org.hl7.fhir.dstu3.model.*; import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.DecimalType;
import org.hl7.fhir.dstu3.model.IntegerType;
import org.hl7.fhir.dstu3.model.Meta;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.dstu3.model.Parameters.ParametersParameterComponent; import org.hl7.fhir.dstu3.model.Parameters.ParametersParameterComponent;
import org.hl7.fhir.dstu3.model.StringType;
import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
@ -29,6 +38,7 @@ import java.util.TreeMap;
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.hl7.fhir.convertors.conv30_40.Parameters30_40.convertParameters;
/* /*
* #%L * #%L
@ -72,7 +82,7 @@ public class JpaSystemProviderDstu3 extends BaseJpaSystemProviderDstu2Plus<Bundl
) { ) {
org.hl7.fhir.r4.model.Parameters retVal = super.doExpunge(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything, theRequestDetails); org.hl7.fhir.r4.model.Parameters retVal = super.doExpunge(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything, theRequestDetails);
try { try {
return VersionConvertor_30_40.convertParameters(retVal); return convertParameters(retVal);
} catch (FHIRException e) { } catch (FHIRException e) {
throw new InternalErrorException(e); throw new InternalErrorException(e);
} }
@ -90,7 +100,7 @@ public class JpaSystemProviderDstu3 extends BaseJpaSystemProviderDstu2Plus<Bundl
) { ) {
org.hl7.fhir.r4.model.Parameters retVal = super.doExpunge(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything, theRequestDetails); org.hl7.fhir.r4.model.Parameters retVal = super.doExpunge(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything, theRequestDetails);
try { try {
return VersionConvertor_30_40.convertParameters(retVal); return convertParameters(retVal);
} catch (FHIRException e) { } catch (FHIRException e) {
throw new InternalErrorException(e); throw new InternalErrorException(e);
} }
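Note on the hunks above: calls through the VersionConvertor_30_40 facade are replaced by statically imported per-resource converters such as org.hl7.fhir.convertors.conv30_40.Parameters30_40. A minimal sketch of the new call shape, using only the signature visible in this diff (the wrapper class and method name below are illustrative):

import org.hl7.fhir.convertors.conv30_40.Parameters30_40;
import org.hl7.fhir.exceptions.FHIRException;

public class Dstu3ParametersConversionSketch {
   // Converts an R4 Parameters resource to its DSTU3 equivalent via the per-resource converter class
   public static org.hl7.fhir.dstu3.model.Parameters toDstu3(org.hl7.fhir.r4.model.Parameters theR4Parameters) throws FHIRException {
      return Parameters30_40.convertParameters(theR4Parameters);
   }
}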
@ -122,7 +122,7 @@ public class BaseJpaResourceProviderConceptMapR5 extends JpaResourceProviderR5<C
IFhirResourceDaoConceptMap<ConceptMap> dao = (IFhirResourceDaoConceptMap<ConceptMap>) getDao(); IFhirResourceDaoConceptMap<ConceptMap> dao = (IFhirResourceDaoConceptMap<ConceptMap>) getDao();
TranslationResult result = dao.translate(translationRequest, theRequestDetails); TranslationResult result = dao.translate(translationRequest, theRequestDetails);
org.hl7.fhir.r4.model.Parameters parameters = result.toParameters(); org.hl7.fhir.r4.model.Parameters parameters = result.toParameters();
return org.hl7.fhir.convertors.conv40_50.Parameters.convertParameters(parameters); return org.hl7.fhir.convertors.conv40_50.Parameters40_50.convertParameters(parameters);
} finally { } finally {
endRequest(theServletRequest); endRequest(theServletRequest);
} }
@ -86,7 +86,7 @@ public class JpaResourceProviderR5<T extends IAnyResource> extends BaseJpaResour
RequestDetails theRequest) { RequestDetails theRequest) {
org.hl7.fhir.r4.model.Parameters parameters = super.doExpunge(theIdParam, theLimit, theExpungeDeletedResources, theExpungeOldVersions, null, theRequest); org.hl7.fhir.r4.model.Parameters parameters = super.doExpunge(theIdParam, theLimit, theExpungeDeletedResources, theExpungeOldVersions, null, theRequest);
return org.hl7.fhir.convertors.conv40_50.Parameters.convertParameters(parameters); return org.hl7.fhir.convertors.conv40_50.Parameters40_50.convertParameters(parameters);
} }
@ -99,7 +99,7 @@ public class JpaResourceProviderR5<T extends IAnyResource> extends BaseJpaResour
@OperationParam(name = JpaConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS) BooleanType theExpungeOldVersions, @OperationParam(name = JpaConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS) BooleanType theExpungeOldVersions,
RequestDetails theRequest) { RequestDetails theRequest) {
org.hl7.fhir.r4.model.Parameters parameters = super.doExpunge(null, theLimit, theExpungeDeletedResources, theExpungeOldVersions, null, theRequest); org.hl7.fhir.r4.model.Parameters parameters = super.doExpunge(null, theLimit, theExpungeDeletedResources, theExpungeOldVersions, null, theRequest);
return org.hl7.fhir.convertors.conv40_50.Parameters.convertParameters(parameters); return org.hl7.fhir.convertors.conv40_50.Parameters40_50.convertParameters(parameters);
} }
@Operation(name = OPERATION_META, idempotent = true, returnParameters = { @Operation(name = OPERATION_META, idempotent = true, returnParameters = {
@ -68,7 +68,7 @@ public class JpaSystemProviderR5 extends BaseJpaSystemProviderDstu2Plus<Bundle,
RequestDetails theRequestDetails RequestDetails theRequestDetails
) { ) {
org.hl7.fhir.r4.model.Parameters parameters = super.doExpunge(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything, theRequestDetails); org.hl7.fhir.r4.model.Parameters parameters = super.doExpunge(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything, theRequestDetails);
return org.hl7.fhir.convertors.conv40_50.Parameters.convertParameters(parameters); return org.hl7.fhir.convertors.conv40_50.Parameters40_50.convertParameters(parameters);
} }
@Operation(name = JpaConstants.OPERATION_EXPUNGE, idempotent = false, returnParameters = { @Operation(name = JpaConstants.OPERATION_EXPUNGE, idempotent = false, returnParameters = {
@ -82,7 +82,7 @@ public class JpaSystemProviderR5 extends BaseJpaSystemProviderDstu2Plus<Bundle,
RequestDetails theRequestDetails RequestDetails theRequestDetails
) { ) {
org.hl7.fhir.r4.model.Parameters parameters = super.doExpunge(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything, theRequestDetails); org.hl7.fhir.r4.model.Parameters parameters = super.doExpunge(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything, theRequestDetails);
return org.hl7.fhir.convertors.conv40_50.Parameters.convertParameters(parameters); return org.hl7.fhir.convertors.conv40_50.Parameters40_50.convertParameters(parameters);
} }
// This is generated by hand: // This is generated by hand:
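The R5 providers above apply the same rename on the R4/R5 side: org.hl7.fhir.convertors.conv40_50.Parameters becomes Parameters40_50 (and likewise CodeSystem40_50, ConceptMap40_50 and ValueSet40_50 later in this commit). A hedged sketch of the renamed entry point, assuming it returns the R5 model type consumed by these providers (the wrapper class below is illustrative):

public class R5ParametersConversionSketch {
   // Converts an R4 Parameters resource to R5 via the renamed conv40_50 converter class
   public static org.hl7.fhir.r5.model.Parameters toR5(org.hl7.fhir.r4.model.Parameters theR4Parameters) throws org.hl7.fhir.exceptions.FHIRException {
      return org.hl7.fhir.convertors.conv40_50.Parameters40_50.convertParameters(theR4Parameters);
   }
}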
@ -227,10 +227,14 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
template.execute(new TransactionCallbackWithoutResult() { template.execute(new TransactionCallbackWithoutResult() {
@Override @Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) { protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
ensureSearchEntityLoaded(); boolean entityLoaded = ensureSearchEntityLoaded();
assert entityLoaded;
} }
}); });
assert mySearchEntity != null;
assert mySearchEntity.getSearchType() != null;
switch (mySearchEntity.getSearchType()) { switch (mySearchEntity.getSearchType()) {
case HISTORY: case HISTORY:
return template.execute(theStatus -> doHistoryInTransaction(theFromIndex, theToIndex)); return template.execute(theStatus -> doHistoryInTransaction(theFromIndex, theToIndex));
@ -56,6 +56,9 @@ import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.ICachedSearchDetails; import ca.uhn.fhir.rest.server.util.ICachedSearchDetails;
import ca.uhn.fhir.util.AsyncUtil; import ca.uhn.fhir.util.AsyncUtil;
import ca.uhn.fhir.util.StopWatch; import ca.uhn.fhir.util.StopWatch;
import co.elastic.apm.api.ElasticApm;
import co.elastic.apm.api.Span;
import co.elastic.apm.api.Transaction;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.commons.lang3.exception.ExceptionUtils;
@ -601,7 +604,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
private List<ResourcePersistentId> myPreviouslyAddedResourcePids; private List<ResourcePersistentId> myPreviouslyAddedResourcePids;
private Integer myMaxResultsToFetch; private Integer myMaxResultsToFetch;
private SearchRuntimeDetails mySearchRuntimeDetails; private SearchRuntimeDetails mySearchRuntimeDetails;
private Transaction myParentTransaction;
/** /**
* Constructor * Constructor
*/ */
@ -614,6 +617,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
mySearchRuntimeDetails = new SearchRuntimeDetails(theRequest, mySearch.getUuid()); mySearchRuntimeDetails = new SearchRuntimeDetails(theRequest, mySearch.getUuid());
mySearchRuntimeDetails.setQueryString(theParams.toNormalizedQueryString(theCallingDao.getContext())); mySearchRuntimeDetails.setQueryString(theParams.toNormalizedQueryString(theCallingDao.getContext()));
myRequest = theRequest; myRequest = theRequest;
myParentTransaction = ElasticApm.currentTransaction();
} }
/** /**
@ -774,10 +778,11 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
if (theResultIter.hasNext() == false) { if (theResultIter.hasNext() == false) {
int skippedCount = theResultIter.getSkippedCount(); int skippedCount = theResultIter.getSkippedCount();
int nonSkippedCount = theResultIter.getNonSkippedCount();
int totalFetched = skippedCount + myCountSavedThisPass + myCountBlockedThisPass; int totalFetched = skippedCount + myCountSavedThisPass + myCountBlockedThisPass;
ourLog.trace("MaxToFetch[{}] SkippedCount[{}] CountSavedThisPass[{}] CountSavedThisTotal[{}] AdditionalPrefetchRemaining[{}]", myMaxResultsToFetch, skippedCount, myCountSavedThisPass, myCountSavedTotal, myAdditionalPrefetchThresholdsRemaining); ourLog.trace("MaxToFetch[{}] SkippedCount[{}] CountSavedThisPass[{}] CountSavedThisTotal[{}] AdditionalPrefetchRemaining[{}]", myMaxResultsToFetch, skippedCount, myCountSavedThisPass, myCountSavedTotal, myAdditionalPrefetchThresholdsRemaining);
if (myMaxResultsToFetch != null && totalFetched < myMaxResultsToFetch) { if (nonSkippedCount == 0 || (myMaxResultsToFetch != null && totalFetched < myMaxResultsToFetch)) {
ourLog.trace("Setting search status to FINISHED"); ourLog.trace("Setting search status to FINISHED");
mySearch.setStatus(SearchStatusEnum.FINISHED); mySearch.setStatus(SearchStatusEnum.FINISHED);
mySearch.setTotalCount(myCountSavedTotal); mySearch.setTotalCount(myCountSavedTotal);
@ -840,7 +845,8 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
@Override @Override
public Void call() { public Void call() {
StopWatch sw = new StopWatch(); StopWatch sw = new StopWatch();
Span span = myParentTransaction.startSpan("db", "query", "search");
span.setName("FHIR Database Search");
try { try {
// Create an initial search in the DB and give it an ID // Create an initial search in the DB and give it an ID
saveSearch(); saveSearch();
@ -896,7 +902,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
ourLog.error("Failed during search loading after {}ms", sw.getMillis(), t); ourLog.error("Failed during search loading after {}ms", sw.getMillis(), t);
} }
myUnsyncedPids.clear(); myUnsyncedPids.clear();
Throwable rootCause = ExceptionUtils.getRootCause(t); Throwable rootCause = ExceptionUtils.getRootCause(t);
rootCause = defaultIfNull(rootCause, t); rootCause = defaultIfNull(rootCause, t);
@ -923,12 +928,13 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_FAILED, params); JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_FAILED, params);
saveSearch(); saveSearch();
span.captureException(t);
} finally { } finally {
myIdToSearchTask.remove(mySearch.getUuid()); myIdToSearchTask.remove(mySearch.getUuid());
myInitialCollectionLatch.countDown(); myInitialCollectionLatch.countDown();
markComplete(); markComplete();
span.end();
} }
return null; return null;
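The instrumentation added above links the asynchronous database search to the originating request in Elastic APM: the current transaction is captured on the request thread in the SearchTask constructor, the worker thread then opens a child span of type db/query/search, failures are recorded on the span, and the span is always ended in the finally block. A condensed sketch of that pattern, using only the co.elastic.apm.api calls that appear in this diff (the surrounding class and method are illustrative):

import co.elastic.apm.api.ElasticApm;
import co.elastic.apm.api.Span;
import co.elastic.apm.api.Transaction;

public class ApmSearchSpanSketch {
   // Captured on the calling (request) thread, as in the SearchTask constructor above
   private final Transaction myParentTransaction = ElasticApm.currentTransaction();

   public void runSearch(Runnable theDatabaseSearch) {
      // Child span opened on the worker thread so the async search is attributed to the request
      Span span = myParentTransaction.startSpan("db", "query", "search");
      span.setName("FHIR Database Search");
      try {
         theDatabaseSearch.run();
      } catch (RuntimeException e) {
         span.captureException(e);
         throw e;
      } finally {
         span.end();
      }
   }
}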
@ -66,6 +66,7 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.search.TermQuery;
import org.hibernate.ScrollMode; import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults; import org.hibernate.ScrollableResults;
import org.hibernate.search.jpa.FullTextEntityManager; import org.hibernate.search.jpa.FullTextEntityManager;
@ -600,11 +601,16 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc, ApplicationCo
.map(t -> new Term("myCode", t)) .map(t -> new Term("myCode", t))
.collect(Collectors.toList()); .collect(Collectors.toList());
if (codes.size() > 0) { if (codes.size() > 0) {
MultiPhraseQuery query = new MultiPhraseQuery();
query.add(codes.toArray(new Term[0])); BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setMinimumNumberShouldMatch(1);
for (Term nextCode : codes) {
builder.add(new TermQuery(nextCode), BooleanClause.Occur.SHOULD);
}
luceneQuery = new BooleanQuery.Builder() luceneQuery = new BooleanQuery.Builder()
.add(luceneQuery, BooleanClause.Occur.MUST) .add(luceneQuery, BooleanClause.Occur.MUST)
.add(query, BooleanClause.Occur.MUST) .add(builder.build(), BooleanClause.Occur.MUST)
.build(); .build();
} }
@ -1240,6 +1246,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc, ApplicationCo
return retVal; return retVal;
} }
@Transactional
@Override @Override
public List<VersionIndependentConcept> findCodesAbove(String theSystem, String theCode) { public List<VersionIndependentConcept> findCodesAbove(String theSystem, String theCode) {
TermCodeSystem cs = getCodeSystem(theSystem); TermCodeSystem cs = getCodeSystem(theSystem);
@ -1271,6 +1278,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc, ApplicationCo
return retVal; return retVal;
} }
@Transactional
@Override @Override
public List<VersionIndependentConcept> findCodesBelow(String theSystem, String theCode) { public List<VersionIndependentConcept> findCodesBelow(String theSystem, String theCode) {
TermCodeSystem cs = getCodeSystem(theSystem); TermCodeSystem cs = getCodeSystem(theSystem);
@ -45,7 +45,6 @@ import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc; import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet; import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet;
import ca.uhn.fhir.jpa.util.ScrollableResultsIterator;
import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
@ -53,11 +52,7 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.ObjectUtil; import ca.uhn.fhir.util.ObjectUtil;
import ca.uhn.fhir.util.StopWatch; import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.ValidateUtil; import ca.uhn.fhir.util.ValidateUtil;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.Validate;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CodeSystem; import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ConceptMap; import org.hl7.fhir.r4.model.ConceptMap;
@ -78,11 +73,6 @@ import javax.annotation.Nonnull;
import javax.persistence.EntityManager; import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext; import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType; import javax.persistence.PersistenceContextType;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import java.util.*; import java.util.*;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
@ -165,11 +155,12 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
IIdType codeSystemId = cs.getResource().getIdDt(); IIdType codeSystemId = cs.getResource().getIdDt();
UploadStatistics retVal = new UploadStatistics(codeSystemId); UploadStatistics retVal = new UploadStatistics(codeSystemId);
HashMap<String, TermConcept> codeToConcept = new HashMap<>();
// Add root concepts // Add root concepts
for (TermConcept nextRootConcept : theAdditions.getRootConcepts()) { for (TermConcept nextRootConcept : theAdditions.getRootConcepts()) {
List<String> parentCodes = Collections.emptyList(); List<String> parentCodes = Collections.emptyList();
addConcept(csv, parentCodes, nextRootConcept, retVal, true, 0); addConceptInHierarchy(csv, parentCodes, nextRootConcept, retVal, codeToConcept, 0);
} }
return retVal; return retVal;
@ -232,8 +223,16 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
* save parent concepts first (it's way too slow to do that) * save parent concepts first (it's way too slow to do that)
*/ */
if (theConcept.getId() == null) { if (theConcept.getId() == null) {
boolean needToSaveParents = false;
for (TermConceptParentChildLink next : theConcept.getParents()) {
if (next.getParent().getId() == null) {
needToSaveParents = true;
}
}
if (needToSaveParents) {
retVal += ensureParentsSaved(theConcept.getParents()); retVal += ensureParentsSaved(theConcept.getParents());
} }
}
if (theConcept.getId() == null || theConcept.getIndexStatus() == null) { if (theConcept.getId() == null || theConcept.getIndexStatus() == null) {
retVal++; retVal++;
@ -445,66 +444,68 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
Validate.isTrue(myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3), "Terminology operations only supported in DSTU3+ mode"); Validate.isTrue(myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3), "Terminology operations only supported in DSTU3+ mode");
} }
private void addConcept(TermCodeSystemVersion theCsv, Collection<String> theParentCodes, TermConcept theConceptToAdd, UploadStatistics theStatisticsTracker, boolean theRootConcept, int theSequence) { private void addConceptInHierarchy(TermCodeSystemVersion theCsv, Collection<String> theParentCodes, TermConcept theConceptToAdd, UploadStatistics theStatisticsTracker, Map<String, TermConcept> theCodeToConcept, int theSequence) {
TermConcept conceptToAdd = theConceptToAdd; TermConcept conceptToAdd = theConceptToAdd;
List<TermConceptParentChildLink> childrenToAdd = theConceptToAdd.getChildren(); List<TermConceptParentChildLink> childrenToAdd = theConceptToAdd.getChildren();
String nextCodeToAdd = conceptToAdd.getCode(); String nextCodeToAdd = conceptToAdd.getCode();
String parentDescription = "(root concept)"; String parentDescription = "(root concept)";
Set<TermConcept> parentConcepts = new HashSet<>();
if (!theParentCodes.isEmpty()) {
parentDescription = "[" + String.join(", ", theParentCodes) + "]";
for (String nextParentCode : theParentCodes) {
Optional<TermConcept> nextParentOpt = myConceptDao.findByCodeSystemAndCode(theCsv, nextParentCode);
if (nextParentOpt.isPresent() == false) {
throw new InvalidRequestException("Unable to add code \"" + nextCodeToAdd + "\" to unknown parent: " + nextParentCode);
}
parentConcepts.add(nextParentOpt.get());
}
}
ourLog.info("Saving concept {} with parent {}", theStatisticsTracker.getUpdatedConceptCount(), parentDescription); ourLog.info("Saving concept {} with parent {}", theStatisticsTracker.getUpdatedConceptCount(), parentDescription);
Optional<TermConcept> existingCodeOpt = myConceptDao.findByCodeSystemAndCode(theCsv, nextCodeToAdd); Optional<TermConcept> existingCodeOpt = myConceptDao.findByCodeSystemAndCode(theCsv, nextCodeToAdd);
List<TermConceptParentChildLink> existingParentLinks;
if (existingCodeOpt.isPresent()) { if (existingCodeOpt.isPresent()) {
TermConcept existingCode = existingCodeOpt.get(); TermConcept existingCode = existingCodeOpt.get();
existingCode.setIndexStatus(null); existingCode.setIndexStatus(null);
existingCode.setDisplay(conceptToAdd.getDisplay()); existingCode.setDisplay(conceptToAdd.getDisplay());
conceptToAdd = existingCode; conceptToAdd = existingCode;
existingParentLinks = conceptToAdd.getParents();
} else {
existingParentLinks = Collections.emptyList();
}
Set<TermConcept> parentConceptsWeShouldLinkTo = new HashSet<>();
for (String nextParentCode : theParentCodes) {
// Don't add parent links that already exist for the code
if (existingParentLinks.stream().anyMatch(t -> t.getParent().getCode().equals(nextParentCode))) {
continue;
}
TermConcept nextParentOpt = theCodeToConcept.get(nextParentCode);
if (nextParentOpt == null) {
nextParentOpt = myConceptDao.findByCodeSystemAndCode(theCsv, nextParentCode).orElse(null);
}
if (nextParentOpt == null) {
throw new InvalidRequestException("Unable to add code \"" + nextCodeToAdd + "\" to unknown parent: " + nextParentCode);
}
parentConceptsWeShouldLinkTo.add(nextParentOpt);
} }
if (conceptToAdd.getSequence() == null) { if (conceptToAdd.getSequence() == null) {
conceptToAdd.setSequence(theSequence); conceptToAdd.setSequence(theSequence);
} }
// Drop any old parent-child links if they aren't explicitly specified in the
// hierarchy being added
if (!theRootConcept) {
for (Iterator<TermConceptParentChildLink> iter = conceptToAdd.getParents().iterator(); iter.hasNext(); ) {
TermConceptParentChildLink nextLink = iter.next();
String parentCode = nextLink.getParent().getCode();
ourLog.info("Dropping existing parent/child link from {} -> {}", parentCode, nextCodeToAdd);
myConceptParentChildLinkDao.delete(nextLink);
iter.remove();
List<TermConceptParentChildLink> parentChildrenList = nextLink.getParent().getChildren();
parentChildrenList.remove(nextLink);
}
}
// Null out the hierarchy PIDs for this concept always. We do this because we're going to // Null out the hierarchy PIDs for this concept always. We do this because we're going to
// force a reindex, and it'll be regenerated then // force a reindex, and it'll be regenerated then
conceptToAdd.setParentPids(null); conceptToAdd.setParentPids(null);
conceptToAdd.setCodeSystemVersion(theCsv); conceptToAdd.setCodeSystemVersion(theCsv);
conceptToAdd = myConceptDao.save(conceptToAdd);
if (theStatisticsTracker.getUpdatedConceptCount() <= myDaoConfig.getDeferIndexingForCodesystemsOfSize()) {
saveConcept(conceptToAdd);
Long nextConceptPid = conceptToAdd.getId(); Long nextConceptPid = conceptToAdd.getId();
Validate.notNull(nextConceptPid); Validate.notNull(nextConceptPid);
} else {
myDeferredStorageSvc.addConceptToStorageQueue(conceptToAdd);
}
theCodeToConcept.put(conceptToAdd.getCode(), conceptToAdd);
theStatisticsTracker.incrementUpdatedConceptCount(); theStatisticsTracker.incrementUpdatedConceptCount();
// Add link to new child to the parent // Add link to new child to the parent
for (TermConcept nextParentConcept : parentConcepts) { for (TermConcept nextParentConcept : parentConceptsWeShouldLinkTo) {
TermConceptParentChildLink parentLink = new TermConceptParentChildLink(); TermConceptParentChildLink parentLink = new TermConceptParentChildLink();
parentLink.setParent(nextParentConcept); parentLink.setParent(nextParentConcept);
parentLink.setChild(conceptToAdd); parentLink.setChild(conceptToAdd);
@ -513,7 +514,13 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
nextParentConcept.getChildren().add(parentLink); nextParentConcept.getChildren().add(parentLink);
conceptToAdd.getParents().add(parentLink); conceptToAdd.getParents().add(parentLink);
ourLog.info("Saving parent/child link - Parent[{}] Child[{}]", parentLink.getParent().getCode(), parentLink.getChild().getCode()); ourLog.info("Saving parent/child link - Parent[{}] Child[{}]", parentLink.getParent().getCode(), parentLink.getChild().getCode());
if (theStatisticsTracker.getUpdatedConceptCount() <= myDaoConfig.getDeferIndexingForCodesystemsOfSize()) {
myConceptParentChildLinkDao.save(parentLink); myConceptParentChildLinkDao.save(parentLink);
} else {
myDeferredStorageSvc.addConceptLinkToStorageQueue(parentLink);
}
} }
ourLog.trace("About to save parent-child links"); ourLog.trace("About to save parent-child links");
@ -527,13 +534,20 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
for (int i = 0; i < nextChild.getParents().size(); i++) { for (int i = 0; i < nextChild.getParents().size(); i++) {
if (nextChild.getParents().get(i).getId() == null) { if (nextChild.getParents().get(i).getId() == null) {
String parentCode = nextChild.getParents().get(i).getParent().getCode(); String parentCode = nextChild.getParents().get(i).getParent().getCode();
TermConcept parentConcept = myConceptDao.findByCodeSystemAndCode(theCsv, parentCode).orElseThrow(() -> new IllegalArgumentException("Unknown parent code: " + parentCode)); TermConcept parentConcept = theCodeToConcept.get(parentCode);
if (parentConcept == null) {
parentConcept = myConceptDao.findByCodeSystemAndCode(theCsv, parentCode).orElse(null);
}
if (parentConcept == null) {
throw new IllegalArgumentException("Unknown parent code: " + parentCode);
}
nextChild.getParents().get(i).setParent(parentConcept); nextChild.getParents().get(i).setParent(parentConcept);
} }
} }
Collection<String> parentCodes = nextChild.getParents().stream().map(t -> t.getParent().getCode()).collect(Collectors.toList()); Collection<String> parentCodes = nextChild.getParents().stream().map(t -> t.getParent().getCode()).collect(Collectors.toList());
addConcept(theCsv, parentCodes, nextChild, theStatisticsTracker, false, childIndex); addConceptInHierarchy(theCsv, parentCodes, nextChild, theStatisticsTracker, theCodeToConcept, childIndex);
childIndex++; childIndex++;
} }
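addConceptInHierarchy now resolves parent codes through the code-to-concept map built up during the current upload and only falls back to myConceptDao for concepts persisted earlier, saving one database lookup per already-seen parent. A generic sketch of that two-level lookup; the names below are illustrative and not HAPI API:

import java.util.Map;
import java.util.Optional;
import java.util.function.Function;

public class ParentLookupSketch {
   // In-memory map first, database second; throws if the parent is unknown to both
   public static <T> T resolveParent(String theParentCode,
                                     Map<String, T> theCodeToConcept,
                                     Function<String, Optional<T>> theDatabaseLookup,
                                     String theChildCode) {
      T parent = theCodeToConcept.get(theParentCode);                  // added earlier in this upload
      if (parent == null) {
         parent = theDatabaseLookup.apply(theParentCode).orElse(null); // persisted by a previous upload
      }
      if (parent == null) {
         throw new IllegalArgumentException("Unable to add code \"" + theChildCode + "\" to unknown parent: " + theParentCode);
      }
      return parent;
   }
}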
@ -649,12 +663,15 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
private void deleteConceptChildrenAndConcept(TermConcept theConcept, AtomicInteger theRemoveCounter) { private void deleteConceptChildrenAndConcept(TermConcept theConcept, AtomicInteger theRemoveCounter) {
for (TermConceptParentChildLink nextChildLink : theConcept.getChildren()) { for (TermConceptParentChildLink nextChildLink : theConcept.getChildren()) {
deleteConceptChildrenAndConcept(nextChildLink.getChild(), theRemoveCounter); deleteConceptChildrenAndConcept(nextChildLink.getChild(), theRemoveCounter);
myConceptParentChildLinkDao.delete(nextChildLink);
} }
myConceptParentChildLinkDao.deleteByConceptPid(theConcept.getId());
myConceptDesignationDao.deleteAll(theConcept.getDesignations()); myConceptDesignationDao.deleteAll(theConcept.getDesignations());
myConceptPropertyDao.deleteAll(theConcept.getProperties()); myConceptPropertyDao.deleteAll(theConcept.getProperties());
myConceptDao.delete(theConcept);
ourLog.info("Deleting concept {} - Code {}", theConcept.getId(), theConcept.getCode());
myConceptDao.deleteByPid(theConcept.getId());
theRemoveCounter.incrementAndGet(); theRemoveCounter.incrementAndGet();
} }
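deleteConceptChildrenAndConcept now issues bulk deletes keyed on the concept PID (deleteByConceptPid, deleteByPid) instead of deleting each parent/child link entity one at a time. A rough sketch of what such a bulk-delete repository method generally looks like; the entity and repository below are placeholders, not HAPI's actual classes, and the real methods may be implemented differently:

import javax.persistence.Entity;
import javax.persistence.Id;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

@Entity
class ExampleLinkEntity {
   @Id
   Long myId;
   Long myConceptPid;
}

interface ExampleLinkRepository extends JpaRepository<ExampleLinkEntity, Long> {
   // Deletes every link row for a concept in one JPQL statement rather than one delete per managed entity
   @Modifying
   @Query("DELETE FROM ExampleLinkEntity e WHERE e.myConceptPid = :pid")
   void deleteByConceptPid(@Param("pid") Long thePid);
}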
@ -98,6 +98,13 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
myDeferredValueSets.addAll(theValueSets); myDeferredValueSets.addAll(theValueSets);
} }
@Override
public void saveAllDeferred() {
while (!isStorageQueueEmpty()) {
saveDeferred();
}
}
@Override @Override
public void setProcessDeferred(boolean theProcessDeferred) { public void setProcessDeferred(boolean theProcessDeferred) {
myProcessDeferred = theProcessDeferred; myProcessDeferred = theProcessDeferred;
@ -9,10 +9,13 @@ import ca.uhn.fhir.jpa.term.api.ITermReadSvcDstu3;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.CoverageIgnore; import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.ValidateUtil; import ca.uhn.fhir.util.ValidateUtil;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport; import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.model.*; import org.hl7.fhir.dstu3.model.CodeSystem;
import org.hl7.fhir.dstu3.model.CodeSystem.ConceptDefinitionComponent; import org.hl7.fhir.dstu3.model.CodeSystem.ConceptDefinitionComponent;
import org.hl7.fhir.dstu3.model.CodeableConcept;
import org.hl7.fhir.dstu3.model.Coding;
import org.hl7.fhir.dstu3.model.StructureDefinition;
import org.hl7.fhir.dstu3.model.ValueSet;
import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent; import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent;
import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionComponent; import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionComponent;
import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.FHIRException;
@ -31,6 +34,9 @@ import java.util.List;
import java.util.Optional; import java.util.Optional;
import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hl7.fhir.convertors.conv30_40.CodeSystem30_40.convertCodeSystem;
import static org.hl7.fhir.convertors.conv30_40.ValueSet30_40.convertValueSet;
import static org.hl7.fhir.convertors.conv30_40.ValueSet30_40.convertValueSetExpansionComponent;
/* /*
* #%L * #%L
@ -104,7 +110,7 @@ public class TermReadSvcDstu3 extends BaseTermReadSvcImpl implements IValidation
org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4; org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4;
valueSetToExpandR4 = toCanonicalValueSet(valueSetToExpand); valueSetToExpandR4 = toCanonicalValueSet(valueSetToExpand);
org.hl7.fhir.r4.model.ValueSet.ValueSetExpansionComponent expandedR4 = super.expandValueSetInMemory(valueSetToExpandR4, null).getExpansion(); org.hl7.fhir.r4.model.ValueSet.ValueSetExpansionComponent expandedR4 = super.expandValueSetInMemory(valueSetToExpandR4, null).getExpansion();
return VersionConvertor_30_40.convertValueSetExpansionComponent(expandedR4); return convertValueSetExpansionComponent(expandedR4);
} catch (FHIRException e) { } catch (FHIRException e) {
throw new InternalErrorException(e); throw new InternalErrorException(e);
} }
@ -118,7 +124,7 @@ public class TermReadSvcDstu3 extends BaseTermReadSvcImpl implements IValidation
org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4; org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4;
valueSetToExpandR4 = toCanonicalValueSet(valueSetToExpand); valueSetToExpandR4 = toCanonicalValueSet(valueSetToExpand);
org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSetInMemory(valueSetToExpandR4, null); org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSetInMemory(valueSetToExpandR4, null);
return VersionConvertor_30_40.convertValueSet(expandedR4); return convertValueSet(expandedR4);
} catch (FHIRException e) { } catch (FHIRException e) {
throw new InternalErrorException(e); throw new InternalErrorException(e);
} }
@ -127,7 +133,7 @@ public class TermReadSvcDstu3 extends BaseTermReadSvcImpl implements IValidation
@Override @Override
protected org.hl7.fhir.r4.model.ValueSet toCanonicalValueSet(IBaseResource theValueSet) throws FHIRException { protected org.hl7.fhir.r4.model.ValueSet toCanonicalValueSet(IBaseResource theValueSet) throws FHIRException {
org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4; org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4;
valueSetToExpandR4 = VersionConvertor_30_40.convertValueSet((ValueSet) theValueSet); valueSetToExpandR4 = convertValueSet((ValueSet) theValueSet);
return valueSetToExpandR4; return valueSetToExpandR4;
} }
@ -139,7 +145,7 @@ public class TermReadSvcDstu3 extends BaseTermReadSvcImpl implements IValidation
org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4; org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4;
valueSetToExpandR4 = toCanonicalValueSet(valueSetToExpand); valueSetToExpandR4 = toCanonicalValueSet(valueSetToExpand);
org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSet(valueSetToExpandR4, theOffset, theCount); org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSet(valueSetToExpandR4, theOffset, theCount);
return VersionConvertor_30_40.convertValueSet(expandedR4); return convertValueSet(expandedR4);
} catch (FHIRException e) { } catch (FHIRException e) {
throw new InternalErrorException(e); throw new InternalErrorException(e);
} }
@ -256,7 +262,7 @@ public class TermReadSvcDstu3 extends BaseTermReadSvcImpl implements IValidation
public org.hl7.fhir.r4.model.CodeSystem getCodeSystemFromContext(String theSystem) { public org.hl7.fhir.r4.model.CodeSystem getCodeSystemFromContext(String theSystem) {
CodeSystem codeSystem = myValidationSupport.fetchCodeSystem(myContext, theSystem); CodeSystem codeSystem = myValidationSupport.fetchCodeSystem(myContext, theSystem);
try { try {
return VersionConvertor_30_40.convertCodeSystem(codeSystem); return convertCodeSystem(codeSystem);
} catch (FHIRException e) { } catch (FHIRException e) {
throw new InternalErrorException(e); throw new InternalErrorException(e);
} }
@ -323,7 +329,7 @@ public class TermReadSvcDstu3 extends BaseTermReadSvcImpl implements IValidation
public ValidateCodeResult validateCodeIsInPreExpandedValueSet(IBaseResource theValueSet, String theSystem, String theCode, String theDisplay, IBaseDatatype theCoding, IBaseDatatype theCodeableConcept) { public ValidateCodeResult validateCodeIsInPreExpandedValueSet(IBaseResource theValueSet, String theSystem, String theCode, String theDisplay, IBaseDatatype theCoding, IBaseDatatype theCodeableConcept) {
ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSet, "ValueSet must not be null"); ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSet, "ValueSet must not be null");
ValueSet valueSet = (ValueSet) theValueSet; ValueSet valueSet = (ValueSet) theValueSet;
org.hl7.fhir.r4.model.ValueSet valueSetR4 = VersionConvertor_30_40.convertValueSet(valueSet); org.hl7.fhir.r4.model.ValueSet valueSetR4 = convertValueSet(valueSet);
Coding coding = (Coding) theCoding; Coding coding = (Coding) theCoding;
org.hl7.fhir.r4.model.Coding codingR4 = null; org.hl7.fhir.r4.model.Coding codingR4 = null;
@ -347,7 +353,7 @@ public class TermReadSvcDstu3 extends BaseTermReadSvcImpl implements IValidation
public boolean isValueSetPreExpandedForCodeValidation(IBaseResource theValueSet) { public boolean isValueSetPreExpandedForCodeValidation(IBaseResource theValueSet) {
ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSet, "ValueSet must not be null"); ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSet, "ValueSet must not be null");
ValueSet valueSet = (ValueSet) theValueSet; ValueSet valueSet = (ValueSet) theValueSet;
org.hl7.fhir.r4.model.ValueSet valueSetR4 = VersionConvertor_30_40.convertValueSet(valueSet); org.hl7.fhir.r4.model.ValueSet valueSetR4 = convertValueSet(valueSet);
return super.isValueSetPreExpandedForCodeValidation(valueSetR4); return super.isValueSetPreExpandedForCodeValidation(valueSetR4);
} }
} }
@ -90,21 +90,21 @@ public class TermReadSvcR5 extends BaseTermReadSvcImpl implements IValidationSup
super.throwInvalidValueSet(theValueSet); super.throwInvalidValueSet(theValueSet);
} }
return expandValueSetAndReturnVersionIndependentConcepts(org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSetR5), null); return expandValueSetAndReturnVersionIndependentConcepts(org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(valueSetR5), null);
} }
@Override @Override
public IBaseResource expandValueSet(IBaseResource theInput) { public IBaseResource expandValueSet(IBaseResource theInput) {
org.hl7.fhir.r4.model.ValueSet valueSetToExpand = toCanonicalValueSet(theInput); org.hl7.fhir.r4.model.ValueSet valueSetToExpand = toCanonicalValueSet(theInput);
org.hl7.fhir.r4.model.ValueSet valueSetR4 = super.expandValueSetInMemory(valueSetToExpand, null); org.hl7.fhir.r4.model.ValueSet valueSetR4 = super.expandValueSetInMemory(valueSetToExpand, null);
return org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSetR4); return org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(valueSetR4);
} }
@Override @Override
public IBaseResource expandValueSet(IBaseResource theInput, int theOffset, int theCount) { public IBaseResource expandValueSet(IBaseResource theInput, int theOffset, int theCount) {
org.hl7.fhir.r4.model.ValueSet valueSetToExpand = toCanonicalValueSet(theInput); org.hl7.fhir.r4.model.ValueSet valueSetToExpand = toCanonicalValueSet(theInput);
org.hl7.fhir.r4.model.ValueSet valueSetR4 = super.expandValueSet(valueSetToExpand, theOffset, theCount); org.hl7.fhir.r4.model.ValueSet valueSetR4 = super.expandValueSet(valueSetToExpand, theOffset, theCount);
return org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSetR4); return org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(valueSetR4);
} }
@Override @Override
@ -117,8 +117,8 @@ public class TermReadSvcR5 extends BaseTermReadSvcImpl implements IValidationSup
public ValueSetExpander.ValueSetExpansionOutcome expandValueSet(FhirContext theContext, ConceptSetComponent theInclude) { public ValueSetExpander.ValueSetExpansionOutcome expandValueSet(FhirContext theContext, ConceptSetComponent theInclude) {
ValueSet valueSetToExpand = new ValueSet(); ValueSet valueSetToExpand = new ValueSet();
valueSetToExpand.getCompose().addInclude(theInclude); valueSetToExpand.getCompose().addInclude(theInclude);
org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSetInMemory(org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSetToExpand), null); org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSetInMemory(org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(valueSetToExpand), null);
return new ValueSetExpander.ValueSetExpansionOutcome(org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(expandedR4)); return new ValueSetExpander.ValueSetExpansionOutcome(org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(expandedR4));
} }
@Override @Override
@ -199,13 +199,13 @@ public class TermReadSvcR5 extends BaseTermReadSvcImpl implements IValidationSup
@Override @Override
public org.hl7.fhir.r4.model.CodeSystem getCodeSystemFromContext(String theSystem) { public org.hl7.fhir.r4.model.CodeSystem getCodeSystemFromContext(String theSystem) {
CodeSystem codeSystemR5 = myValidationSupport.fetchCodeSystem(myContext, theSystem); CodeSystem codeSystemR5 = myValidationSupport.fetchCodeSystem(myContext, theSystem);
return org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem(codeSystemR5); return org.hl7.fhir.convertors.conv40_50.CodeSystem40_50.convertCodeSystem(codeSystemR5);
} }
@Override @Override
protected org.hl7.fhir.r4.model.ValueSet getValueSetFromResourceTable(ResourceTable theResourceTable) { protected org.hl7.fhir.r4.model.ValueSet getValueSetFromResourceTable(ResourceTable theResourceTable) {
ValueSet valueSetR5 = myValueSetResourceDao.toResource(ValueSet.class, theResourceTable, null, false); ValueSet valueSetR5 = myValueSetResourceDao.toResource(ValueSet.class, theResourceTable, null, false);
return org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSetR5); return org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(valueSetR5);
} }
@Override @Override
@ -277,7 +277,7 @@ public class TermReadSvcR5 extends BaseTermReadSvcImpl implements IValidationSup
@Override @Override
protected org.hl7.fhir.r4.model.ValueSet toCanonicalValueSet(IBaseResource theValueSet) throws org.hl7.fhir.exceptions.FHIRException { protected org.hl7.fhir.r4.model.ValueSet toCanonicalValueSet(IBaseResource theValueSet) throws org.hl7.fhir.exceptions.FHIRException {
return org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet((ValueSet) theValueSet); return org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet((ValueSet) theValueSet);
} }
@Override @Override
@ -37,6 +37,9 @@ import org.springframework.context.event.ContextStartedEvent;
import org.springframework.context.event.EventListener; import org.springframework.context.event.EventListener;
import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.hl7.fhir.convertors.conv30_40.CodeSystem30_40.convertCodeSystem;
import static org.hl7.fhir.convertors.conv30_40.ConceptMap30_40.convertConceptMap;
import static org.hl7.fhir.convertors.conv30_40.ValueSet30_40.convertValueSet;
public class TermVersionAdapterSvcDstu3 extends BaseTermVersionAdapterSvcImpl implements ITermVersionAdapterSvc { public class TermVersionAdapterSvcDstu3 extends BaseTermVersionAdapterSvcImpl implements ITermVersionAdapterSvc {
@ -72,7 +75,7 @@ public class TermVersionAdapterSvcDstu3 extends BaseTermVersionAdapterSvcImpl im
public IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource) { public IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource) {
CodeSystem resourceToStore; CodeSystem resourceToStore;
try { try {
resourceToStore = VersionConvertor_30_40.convertCodeSystem(theCodeSystemResource); resourceToStore = convertCodeSystem(theCodeSystemResource);
} catch (FHIRException e) { } catch (FHIRException e) {
throw new InternalErrorException(e); throw new InternalErrorException(e);
} }
@ -89,7 +92,7 @@ public class TermVersionAdapterSvcDstu3 extends BaseTermVersionAdapterSvcImpl im
public void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap) { public void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap) {
ConceptMap resourceToStore; ConceptMap resourceToStore;
try { try {
resourceToStore = VersionConvertor_30_40.convertConceptMap(theConceptMap); resourceToStore = convertConceptMap(theConceptMap);
} catch (FHIRException e) { } catch (FHIRException e) {
throw new InternalErrorException(e); throw new InternalErrorException(e);
} }
@ -105,7 +108,7 @@ public class TermVersionAdapterSvcDstu3 extends BaseTermVersionAdapterSvcImpl im
public void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet) { public void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet) {
ValueSet valueSetDstu3; ValueSet valueSetDstu3;
try { try {
valueSetDstu3 = VersionConvertor_30_40.convertValueSet(theValueSet); valueSetDstu3 = convertValueSet(theValueSet);
} catch (FHIRException e) { } catch (FHIRException e) {
throw new InternalErrorException(e); throw new InternalErrorException(e);
} }
@ -62,7 +62,7 @@ public class TermVersionAdapterSvcR5 extends BaseTermVersionAdapterSvcImpl imple
public IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource) { public IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource) {
validateCodeSystemForStorage(theCodeSystemResource); validateCodeSystemForStorage(theCodeSystemResource);
CodeSystem codeSystemR4 = org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem(theCodeSystemResource); CodeSystem codeSystemR4 = org.hl7.fhir.convertors.conv40_50.CodeSystem40_50.convertCodeSystem(theCodeSystemResource);
if (isBlank(theCodeSystemResource.getIdElement().getIdPart())) { if (isBlank(theCodeSystemResource.getIdElement().getIdPart())) {
String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl()); String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl());
return myCodeSystemResourceDao.update(codeSystemR4, matchUrl).getId(); return myCodeSystemResourceDao.update(codeSystemR4, matchUrl).getId();
@ -74,7 +74,7 @@ public class TermVersionAdapterSvcR5 extends BaseTermVersionAdapterSvcImpl imple
@Override @Override
public void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap) { public void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap) {
ConceptMap conceptMapR4 = org.hl7.fhir.convertors.conv40_50.ConceptMap.convertConceptMap(theConceptMap); ConceptMap conceptMapR4 = org.hl7.fhir.convertors.conv40_50.ConceptMap40_50.convertConceptMap(theConceptMap);
if (isBlank(theConceptMap.getIdElement().getIdPart())) { if (isBlank(theConceptMap.getIdElement().getIdPart())) {
String matchUrl = "ConceptMap?url=" + UrlUtil.escapeUrlParam(theConceptMap.getUrl()); String matchUrl = "ConceptMap?url=" + UrlUtil.escapeUrlParam(theConceptMap.getUrl());
@ -87,7 +87,7 @@ public class TermVersionAdapterSvcR5 extends BaseTermVersionAdapterSvcImpl imple
@Override @Override
public void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet) { public void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet) {
ValueSet valueSetR4 = org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(theValueSet); ValueSet valueSetR4 = org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(theValueSet);
if (isBlank(theValueSet.getIdElement().getIdPart())) { if (isBlank(theValueSet.getIdElement().getIdPart())) {
String matchUrl = "ValueSet?url=" + UrlUtil.escapeUrlParam(theValueSet.getUrl()); String matchUrl = "ValueSet?url=" + UrlUtil.escapeUrlParam(theValueSet.getUrl());
@ -50,4 +50,9 @@ public interface ITermDeferredStorageSvc {
void addConceptMapsToStorageQueue(List<ConceptMap> theConceptMaps); void addConceptMapsToStorageQueue(List<ConceptMap> theConceptMaps);
void addValueSetsToStorageQueue(List<ValueSet> theValueSets); void addValueSetsToStorageQueue(List<ValueSet> theValueSets);
/**
* This is mostly here for unit tests - Saves any and all deferred concepts and links
*/
void saveAllDeferred();
} }
@ -39,11 +39,10 @@ public class SearchBuilderTest {
TypedQuery mockQuery = mock(TypedQuery.class); TypedQuery mockQuery = mock(TypedQuery.class);
when(mockEntityManager.createQuery(any(), any())).thenReturn(mockQuery); when(mockEntityManager.createQuery(any(), any())).thenReturn(mockQuery);
List<ResourceLink> resultList = new ArrayList<>(); List<Long> resultList = new ArrayList<>();
ResourceLink link = new ResourceLink(); Long link = 1L;
ResourceTable target = new ResourceTable(); ResourceTable target = new ResourceTable();
target.setId(1L); target.setId(1L);
link.setTargetResource(target);
resultList.add(link); resultList.add(link);
when(mockQuery.getResultList()).thenReturn(resultList); when(mockQuery.getResultList()).thenReturn(resultList);
@ -1,9 +1,5 @@
package ca.uhn.fhir.jpa.dao.dstu2; package ca.uhn.fhir.jpa.dao.dstu2;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import ca.uhn.fhir.jpa.dao.DaoConfig; import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
@ -32,7 +28,11 @@ import org.junit.Before;
import org.junit.Test; import org.junit.Test;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays; import java.util.Collections;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
public class FhirResourceDaoDstu2ValidateTest extends BaseJpaDstu2Test { public class FhirResourceDaoDstu2ValidateTest extends BaseJpaDstu2Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2ValidateTest.class); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2ValidateTest.class);
@ -94,15 +94,14 @@ public class FhirResourceDaoDstu2ValidateTest extends BaseJpaDstu2Test {
myStructureDefinitionDao.create(sd, mySrd); myStructureDefinitionDao.create(sd, mySrd);
Observation input = new Observation(); Observation input = new Observation();
ResourceMetadataKeyEnum.PROFILES.put(input, Arrays.asList(new IdDt(sd.getUrl()))); ResourceMetadataKeyEnum.PROFILES.put(input, Collections.singletonList(new IdDt(sd.getUrl())));
input.addIdentifier().setSystem("http://acme").setValue("12345"); input.addIdentifier().setSystem("http://acme").setValue("12345");
input.getEncounter().setReference("http://foo.com/Encounter/9"); input.getEncounter().setReference("http://foo.com/Encounter/9");
input.setStatus(ObservationStatusEnum.FINAL); input.setStatus(ObservationStatusEnum.FINAL);
input.getCode().addCoding().setSystem("http://loinc.org").setCode("12345"); input.getCode().addCoding().setSystem("http://loinc.org").setCode("12345");
String encoded = null; String encoded;
MethodOutcome outcome = null;
ValidationModeEnum mode = ValidationModeEnum.CREATE; ValidationModeEnum mode = ValidationModeEnum.CREATE;
switch (enc) { switch (enc) {
case JSON: case JSON:
@ -130,12 +129,12 @@ public class FhirResourceDaoDstu2ValidateTest extends BaseJpaDstu2Test {
} }
@Test @Test
public void testValidateResourceContainingProfileDeclarationInvalid() throws Exception { public void testValidateResourceContainingProfileDeclarationInvalid() {
String methodName = "testValidateResourceContainingProfileDeclarationInvalid"; String methodName = "testValidateResourceContainingProfileDeclarationInvalid";
Observation input = new Observation(); Observation input = new Observation();
String profileUri = "http://example.com/StructureDefinition/" + methodName; String profileUri = "http://example.com/StructureDefinition/" + methodName;
ResourceMetadataKeyEnum.PROFILES.put(input, Arrays.asList(new IdDt(profileUri))); ResourceMetadataKeyEnum.PROFILES.put(input, Collections.singletonList(new IdDt(profileUri)));
input.addIdentifier().setSystem("http://acme").setValue("12345"); input.addIdentifier().setSystem("http://acme").setValue("12345");
input.getEncounter().setReference("http://foo.com/Encounter/9"); input.getEncounter().setReference("http://foo.com/Encounter/9");
@ -148,7 +147,7 @@ public class FhirResourceDaoDstu2ValidateTest extends BaseJpaDstu2Test {
String ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome.getOperationOutcome()); String ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome.getOperationOutcome());
ourLog.info(ooString); ourLog.info(ooString);
assertThat(ooString, containsString("StructureDefinition reference \\\"" + profileUri + "\\\" could not be resolved")); assertThat(ooString, containsString("Profile reference 'http://example.com/StructureDefinition/testValidateResourceContainingProfileDeclarationInvalid' could not be resolved, so has not been checked"));
} }
@ -35,7 +35,6 @@ import ca.uhn.fhir.util.UrlUtil;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
import org.hibernate.search.jpa.FullTextEntityManager; import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.Search; import org.hibernate.search.jpa.Search;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport; import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.model.*; import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.FHIRException;
@ -60,6 +59,7 @@ import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.util.Map; import java.util.Map;
import static org.hl7.fhir.convertors.conv30_40.ConceptMap30_40.convertConceptMap;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
@RunWith(SpringJUnit4ClassRunner.class) @RunWith(SpringJUnit4ClassRunner.class)
@ -386,7 +386,7 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
*/ */
public static ConceptMap createConceptMap() { public static ConceptMap createConceptMap() {
try { try {
return VersionConvertor_30_40.convertConceptMap(BaseJpaR4Test.createConceptMap()); return convertConceptMap(BaseJpaR4Test.createConceptMap());
} catch (FHIRException fe) { } catch (FHIRException fe) {
throw new InternalErrorException(fe); throw new InternalErrorException(fe);
} }
@ -9,6 +9,7 @@ import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl; import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl;
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.TokenParamModifier; import ca.uhn.fhir.rest.param.TokenParamModifier;
@ -134,8 +135,13 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
} }
myTermCodeSystemStorageSvc.storeNewCodeSystemVersion(new ResourcePersistentId(table.getId()), URL_MY_CODE_SYSTEM, "SYSTEM NAME", "SYSTEM VERSION" , cs, table); myTermCodeSystemStorageSvc.storeNewCodeSystemVersion(new ResourcePersistentId(table.getId()), URL_MY_CODE_SYSTEM, "SYSTEM NAME", "SYSTEM VERSION" , cs, table);
myTermDeferredStorageSvc.saveAllDeferred();
} }
@Autowired
private ITermDeferredStorageSvc myTermDeferredStorageSvc;
private void createExternalCsAndLocalVs() { private void createExternalCsAndLocalVs() {
CodeSystem codeSystem = createExternalCs(); CodeSystem codeSystem = createExternalCs();
@ -326,11 +326,11 @@ public class FhirResourceDaoDstu3ValidateTest extends BaseJpaDstu3Test {
myObservationDao.validate(input, null, encoded, EncodingEnum.JSON, mode, null, mySrd); myObservationDao.validate(input, null, encoded, EncodingEnum.JSON, mode, null, mySrd);
fail(); fail();
} catch (PreconditionFailedException e) { } catch (PreconditionFailedException e) {
String ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome()); OperationOutcome oo = (OperationOutcome) e.getOperationOutcome();
ourLog.info(ooString); String outputString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo);
assertThat(ooString, containsString("StructureDefinition reference \\\"" + profileUri + "\\\" could not be resolved")); ourLog.info(outputString);
assertThat(outputString, containsString("Profile reference 'http://example.com/StructureDefinition/testValidateResourceContainingProfileDeclarationInvalid' could not be resolved, so has not been checked"));
} }
} }
@Test @Test
@ -162,6 +162,9 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
@Qualifier("myConditionDaoR4") @Qualifier("myConditionDaoR4")
protected IFhirResourceDao<Condition> myConditionDao; protected IFhirResourceDao<Condition> myConditionDao;
@Autowired @Autowired
@Qualifier("myEpisodeOfCareDaoR4")
protected IFhirResourceDao<EpisodeOfCare> myEpisodeOfCareDao;
@Autowired
protected DaoConfig myDaoConfig; protected DaoConfig myDaoConfig;
@Autowired @Autowired
protected ModelConfig myModelConfig; protected ModelConfig myModelConfig;
@ -169,6 +172,9 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
@Qualifier("myDeviceDaoR4") @Qualifier("myDeviceDaoR4")
protected IFhirResourceDao<Device> myDeviceDao; protected IFhirResourceDao<Device> myDeviceDao;
@Autowired @Autowired
@Qualifier("myProvenanceDaoR4")
protected IFhirResourceDao<Provenance> myProvenanceDao;
@Autowired
@Qualifier("myDiagnosticReportDaoR4") @Qualifier("myDiagnosticReportDaoR4")
protected IFhirResourceDao<DiagnosticReport> myDiagnosticReportDao; protected IFhirResourceDao<DiagnosticReport> myDiagnosticReportDao;
@Autowired @Autowired
@ -133,6 +133,38 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size()); assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
} }
@Test
public void testCreateWithClientAssignedId() {
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
runInTransaction(() -> {
Patient p = new Patient();
p.getMaritalStatus().setText("123");
return myPatientDao.create(p).getId().toUnqualified();
});
myCaptureQueriesListener.clear();
runInTransaction(() -> {
Patient p = new Patient();
p.setId("AAA");
p.getMaritalStatus().setText("123");
return myPatientDao.update(p).getId().toUnqualified();
});
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(1, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(4, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
}
@AfterClass @AfterClass
public static void afterClassClearContext() { public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest(); TestUtil.clearAllStaticFieldsForUnitTest();
@ -3,7 +3,14 @@ package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.jpa.dao.DaoConfig; import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.entity.*; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamNumber;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap.EverythingModeEnum; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap.EverythingModeEnum;
@ -33,7 +40,11 @@ import org.hl7.fhir.r4.model.Enumerations.AdministrativeGender;
import org.hl7.fhir.r4.model.Observation.ObservationStatus; import org.hl7.fhir.r4.model.Observation.ObservationStatus;
import org.hl7.fhir.r4.model.Subscription.SubscriptionChannelType; import org.hl7.fhir.r4.model.Subscription.SubscriptionChannelType;
import org.hl7.fhir.r4.model.Subscription.SubscriptionStatus; import org.hl7.fhir.r4.model.Subscription.SubscriptionStatus;
import org.junit.*; import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback; import org.springframework.transaction.support.TransactionCallback;
@ -44,12 +55,30 @@ import javax.servlet.http.HttpServletRequest;
import java.io.IOException; import java.io.IOException;
import java.math.BigDecimal; import java.math.BigDecimal;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.*; import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.*; import static org.hamcrest.Matchers.contains;
import static org.junit.Assert.*; import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
@SuppressWarnings({"unchecked", "Duplicates"}) @SuppressWarnings({"unchecked", "Duplicates"})
@ -73,6 +102,28 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
myDaoConfig.setReuseCachedSearchResultsForMillis(null); myDaoConfig.setReuseCachedSearchResultsForMillis(null);
} }
@Test
public void testCanonicalReference() {
StructureDefinition sd = new StructureDefinition();
sd.getSnapshot().addElement().getBinding().setValueSet("http://foo");
String id = myStructureDefinitionDao.create(sd).getId().toUnqualifiedVersionless().getValue();
{
SearchParameterMap map = new SearchParameterMap();
map.setLoadSynchronous(true);
map.add(StructureDefinition.SP_VALUESET, new ReferenceParam("http://foo"));
List<String> ids = toUnqualifiedVersionlessIdValues(myStructureDefinitionDao.search(map));
assertThat(ids, contains(id));
}
{
SearchParameterMap map = new SearchParameterMap();
map.setLoadSynchronous(true);
map.add(StructureDefinition.SP_VALUESET, new ReferenceParam("http://foo2"));
List<String> ids = toUnqualifiedVersionlessIdValues(myStructureDefinitionDao.search(map));
assertThat(ids, empty());
}
}
@Test @Test
public void testHasConditionAgeCompare() { public void testHasConditionAgeCompare() {
Patient patient = new Patient(); Patient patient = new Patient();
@ -569,6 +620,41 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
} }
@Test
public void testHasLimitsByType() {
Patient patient = new Patient();
patient.setActive(true);
IIdType patientId = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
Encounter encounter = new Encounter();
encounter.setStatus(Encounter.EncounterStatus.ARRIVED);
IIdType encounterId = myEncounterDao.create(encounter).getId().toUnqualifiedVersionless();
Device device = new Device();
device.setManufacturer("Acme");
IIdType deviceId = myDeviceDao.create(device).getId().toUnqualifiedVersionless();
Provenance provenance = new Provenance();
provenance.addTarget().setReferenceElement(patientId);
provenance.addTarget().setReferenceElement(encounterId);
provenance.addAgent().setWho(new Reference(deviceId));
myProvenanceDao.create(provenance);
String criteria = "_has:Provenance:target:agent=" + deviceId.getValue();
SearchParameterMap map = myMatchUrlService.translateMatchUrl(criteria, myFhirCtx.getResourceDefinition(Encounter.class));
map.setLoadSynchronous(true);
myCaptureQueriesListener.clear();
IBundleProvider results = myEncounterDao.search(map);
myCaptureQueriesListener.logSelectQueriesForCurrentThread(0);
List<String> ids = toUnqualifiedVersionlessIdValues(results);
assertThat(ids, containsInAnyOrder(encounterId.getValue()));
}
@Test @Test
public void testHasParameter() { public void testHasParameter() {
IIdType pid0; IIdType pid0;
@ -3258,6 +3344,105 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
} }
} }
@Test
public void testSearchWithDateAndReusesExistingJoin() {
// Add a search parameter to Observation.issued, so that between that one
// and the existing one on Observation.effective, we have 2 date search parameters
// on the same resource
{
SearchParameter sp = new SearchParameter();
sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
sp.addBase("Observation");
sp.setType(Enumerations.SearchParamType.DATE);
sp.setCode("issued");
sp.setExpression("Observation.issued");
mySearchParameterDao.create(sp);
mySearchParamRegistry.forceRefresh();
}
// Dates are reversed on these two observations
IIdType obsId1;
{
Observation obs = new Observation();
obs.setIssuedElement(new InstantType("2020-06-06T12:00:00Z"));
obs.setEffective(new InstantType("2019-06-06T12:00:00Z"));
obsId1 = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
}
IIdType obsId2;
{
Observation obs = new Observation();
obs.setIssuedElement(new InstantType("2019-06-06T12:00:00Z"));
obs.setEffective(new InstantType("2020-06-06T12:00:00Z"));
obsId2 = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
}
// Add two with a period
IIdType obsId3;
{
Observation obs = new Observation();
obs.setEffective(new Period().setStartElement(new DateTimeType("2000-06-06T12:00:00Z")).setEndElement(new DateTimeType("2001-06-06T12:00:00Z")));
obsId3 = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
}
IIdType obsId4;
{
Observation obs = new Observation();
obs.setEffective(new Period().setStartElement(new DateTimeType("2001-01-01T12:00:00Z")).setEndElement(new DateTimeType("2002-01-01T12:00:00Z")));
obsId4 = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
}
// Two AND instances of 1 SP
{
myCaptureQueriesListener.clear();
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add("issued", new DateParam("ge2020-06-05"));
params.add("issued", new DateParam("lt2020-06-07"));
List<IIdType> patients = toUnqualifiedVersionlessIds(myObservationDao.search(params));
assertThat(patients.toString(), patients, contains(obsId1));
String searchQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search query:\n{}", searchQuery);
assertEquals(searchQuery, 1, StringUtils.countMatches(searchQuery.toLowerCase(), "join"));
assertEquals(searchQuery, 1, StringUtils.countMatches(searchQuery.toLowerCase(), "hash_identity"));
assertEquals(searchQuery, 2, StringUtils.countMatches(searchQuery.toLowerCase(), "sp_value_low"));
}
// Two AND instances of 1 SP and 1 instance of another
{
myCaptureQueriesListener.clear();
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add("issued", new DateParam("ge2020-06-05"));
params.add("issued", new DateParam("lt2020-06-07"));
params.add("date", new DateParam("gt2019-06-05"));
params.add("date", new DateParam("lt2019-06-07"));
List<IIdType> patients = toUnqualifiedVersionlessIds(myObservationDao.search(params));
assertThat(patients.toString(), patients, contains(obsId1));
String searchQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search query:\n{}", searchQuery);
assertEquals(searchQuery, 2, StringUtils.countMatches(searchQuery.toLowerCase(), "join"));
assertEquals(searchQuery, 2, StringUtils.countMatches(searchQuery.toLowerCase(), "hash_identity"));
assertEquals(searchQuery, 4, StringUtils.countMatches(searchQuery.toLowerCase(), "sp_value_low"));
}
// Period search
{
myCaptureQueriesListener.clear();
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add("date", new DateParam("lt2002-01-01T12:00:00Z"));
List<IIdType> patients = toUnqualifiedVersionlessIds(myObservationDao.search(params));
assertThat(patients.toString(), patients, containsInAnyOrder(obsId3, obsId4));
String searchQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search query:\n{}", searchQuery);
assertEquals(searchQuery, 1, StringUtils.countMatches(searchQuery.toLowerCase(), "join"));
assertEquals(searchQuery, 1, StringUtils.countMatches(searchQuery.toLowerCase(), "hash_identity"));
assertEquals(searchQuery, 1, StringUtils.countMatches(searchQuery.toLowerCase(), "sp_value_low"));
}
}
@Test @Test
public void testSearchWithFetchSizeDefaultMaximum() { public void testSearchWithFetchSizeDefaultMaximum() {
myDaoConfig.setFetchSizeDefaultMaximum(5); myDaoConfig.setFetchSizeDefaultMaximum(5);
@ -4228,6 +4413,44 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
} }
} }
@Test
public void testCircularReferencesDontBreakRevIncludes() {
Patient p = new Patient();
p.setActive(true);
IIdType patientId = myPatientDao.create(p).getId().toUnqualifiedVersionless();
Encounter enc = new Encounter();
enc.setStatus(Encounter.EncounterStatus.ARRIVED);
enc.getSubject().setReference(patientId.getValue());
IIdType encId = myEncounterDao.create(enc).getId().toUnqualifiedVersionless();
Condition cond = new Condition();
cond.addIdentifier().setSystem("http://foo").setValue("123");
IIdType conditionId = myConditionDao.create(cond).getId().toUnqualifiedVersionless();
EpisodeOfCare ep = new EpisodeOfCare();
ep.setStatus(EpisodeOfCare.EpisodeOfCareStatus.ACTIVE);
IIdType epId = myEpisodeOfCareDao.create(ep).getId().toUnqualifiedVersionless();
enc.getEpisodeOfCareFirstRep().setReference(ep.getId());
myEncounterDao.update(enc);
cond.getEncounter().setReference(enc.getId());
myConditionDao.update(cond);
ep.getDiagnosisFirstRep().getCondition().setReference(cond.getId());
myEpisodeOfCareDao.update(ep);
// Search time
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.addRevInclude(new Include("*").setRecurse(true));
IBundleProvider results = myPatientDao.search(params);
List<String> values = toUnqualifiedVersionlessIdValues(results);
assertThat(values.toString(), values, containsInAnyOrder(patientId.getValue(), encId.getValue(), conditionId.getValue(), epId.getValue()));
}
private String toStringMultiline(List<?> theResults) { private String toStringMultiline(List<?> theResults) {
StringBuilder b = new StringBuilder(); StringBuilder b = new StringBuilder();
for (Object next : theResults) { for (Object next : theResults) {


@ -30,7 +30,6 @@ import org.junit.Test;
import org.springframework.aop.framework.AopProxyUtils; import org.springframework.aop.framework.AopProxyUtils;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.concurrent.ThreadPoolExecutorFactoryBean; import org.springframework.scheduling.concurrent.ThreadPoolExecutorFactoryBean;
import org.springframework.test.context.TestPropertySource;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
@ -163,7 +162,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
} }
@Test @Test
public void testFetchTotalAccurateForSlowLoading() throws InterruptedException { public void testFetchTotalAccurateForSlowLoading() {
create200Patients(); create200Patients();
mySearchCoordinatorSvcImpl.setLoadingThrottleForUnitTests(25); mySearchCoordinatorSvcImpl.setLoadingThrottleForUnitTests(25);
@ -836,8 +835,6 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
// Because of the forced ID's bidirectional link HFJ_RESOURCE <-> HFJ_FORCED_ID
assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
runInTransaction(() -> { runInTransaction(() -> {
assertEquals(1, myResourceTableDao.count()); assertEquals(1, myResourceTableDao.count());
assertEquals(1, myResourceHistoryTableDao.count()); assertEquals(1, myResourceHistoryTableDao.count());


@ -23,6 +23,7 @@ import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.util.TestUtil; import ca.uhn.fhir.util.TestUtil;
import ca.uhn.fhir.validation.FhirValidator; import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.ValidationResult; import ca.uhn.fhir.validation.ValidationResult;
import org.hamcrest.Matchers;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.*; import org.hl7.fhir.r4.model.*;
import org.junit.AfterClass; import org.junit.AfterClass;
@ -37,8 +38,10 @@ import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.PlatformTransactionManager;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
@ -146,6 +149,53 @@ public class FhirResourceDaoR4SearchWithElasticSearchTest extends BaseJpaTest {
} }
@Test
public void testExpandVsWithMultiInclude_All() throws IOException {
CodeSystem cs = loadResource(myFhirCtx, CodeSystem.class, "/r4/expand-multi-cs.json");
myCodeSystemDao.update(cs);
ValueSet vs = loadResource(myFhirCtx, ValueSet.class, "/r4/expand-multi-vs-all.json");
ValueSet expanded = myValueSetDao.expand(vs, null);
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expanded));
// All codes
List<String> codes = expanded
.getExpansion()
.getContains()
.stream()
.map(t -> t.getCode())
.sorted()
.collect(Collectors.toList());
assertThat(codes.toString(), codes, Matchers.contains("advice", "message", "note", "notification"));
}
@Test
public void testExpandVsWithMultiInclude_Some() throws IOException {
CodeSystem cs = loadResource(myFhirCtx, CodeSystem.class, "/r4/expand-multi-cs.json");
myCodeSystemDao.update(cs);
ValueSet vs = loadResource(myFhirCtx, ValueSet.class, "/r4/expand-multi-vs-all.json");
vs.getCompose().getInclude().get(0).getConcept().remove(0);
vs.getCompose().getInclude().get(0).getConcept().remove(0);
ValueSet expanded = myValueSetDao.expand(vs, null);
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expanded));
// All codes
List<String> codes = expanded
.getExpansion()
.getContains()
.stream()
.map(t -> t.getCode())
.sorted()
.collect(Collectors.toList());
assertThat(codes.toString(), codes, Matchers.contains("advice", "note"));
}
private CodeSystem createExternalCs() { private CodeSystem createExternalCs() {
CodeSystem codeSystem = new CodeSystem(); CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl(URL_MY_CODE_SYSTEM); codeSystem.setUrl(URL_MY_CODE_SYSTEM);


@ -164,6 +164,8 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
} }
myTermCodeSystemStorageSvc.storeNewCodeSystemVersion(new ResourcePersistentId(table.getId()), URL_MY_CODE_SYSTEM, "SYSTEM NAME", "SYSTEM VERSION", cs, table); myTermCodeSystemStorageSvc.storeNewCodeSystemVersion(new ResourcePersistentId(table.getId()), URL_MY_CODE_SYSTEM, "SYSTEM NAME", "SYSTEM VERSION", cs, table);
myTerminologyDeferredStorageSvc.saveAllDeferred();
} }
private void createLocalCsAndVs() { private void createLocalCsAndVs() {


@ -147,6 +147,8 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
} }
myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd("http://loinc.org", codesToAdd); myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd("http://loinc.org", codesToAdd);
myTerminologyDeferredStorageSvc.saveAllDeferred();
// Create a valueset // Create a valueset
ValueSet vs = new ValueSet(); ValueSet vs = new ValueSet();
vs.setUrl("http://example.com/fhir/ValueSet/observation-vitalsignresult"); vs.setUrl("http://example.com/fhir/ValueSet/observation-vitalsignresult");
@ -399,12 +401,11 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
myObservationDao.validate(input, null, encoded, EncodingEnum.JSON, mode, null, mySrd); myObservationDao.validate(input, null, encoded, EncodingEnum.JSON, mode, null, mySrd);
fail(); fail();
} catch (PreconditionFailedException e) { } catch (PreconditionFailedException e) {
String ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome()); org.hl7.fhir.r4.model.OperationOutcome oo = (org.hl7.fhir.r4.model.OperationOutcome) e.getOperationOutcome();
ourLog.info(ooString); String outputString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo);
assertThat(ooString, containsString("StructureDefinition reference \\\"" + profileUri + "\\\" could not be resolved")); ourLog.info(outputString);
assertThat(outputString, containsString("Profile reference 'http://example.com/StructureDefinition/testValidateResourceContainingProfileDeclarationInvalid' could not be resolved, so has not been checked"));
} }
} }
@Test @Test
@ -618,6 +619,8 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
upload("/r4/uscore/ValueSet-omb-race-category.json"); upload("/r4/uscore/ValueSet-omb-race-category.json");
upload("/r4/uscore/ValueSet-us-core-usps-state.json"); upload("/r4/uscore/ValueSet-us-core-usps-state.json");
myTerminologyDeferredStorageSvc.saveAllDeferred();
{ {
String resource = loadResource("/r4/uscore/patient-resource-badcode.json"); String resource = loadResource("/r4/uscore/patient-resource-badcode.json");
IBaseResource parsedResource = myFhirCtx.newJsonParser().parseResource(resource); IBaseResource parsedResource = myFhirCtx.newJsonParser().parseResource(resource);


@ -9,20 +9,29 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.client.interceptor.CapturingInterceptor; import ca.uhn.fhir.rest.client.interceptor.CapturingInterceptor;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.r4.model.Bundle; import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Patient; import org.hl7.fhir.r4.model.Patient;
import org.junit.After; import org.junit.After;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.Test; import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import java.util.Date; import java.util.Date;
import static org.hamcrest.Matchers.*; import static org.hamcrest.Matchers.blankOrNullString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.core.IsNot.not; import static org.hamcrest.core.IsNot.not;
import static org.junit.Assert.*; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
public class ResourceProviderR4CacheTest extends BaseResourceProviderR4Test { public class ResourceProviderR4CacheTest extends BaseResourceProviderR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(ResourceProviderR4CacheTest.class);
private CapturingInterceptor myCapturingInterceptor; private CapturingInterceptor myCapturingInterceptor;
@Autowired @Autowired
private ISearchDao mySearchEntityDao; private ISearchDao mySearchEntityDao;
@ -184,6 +193,39 @@ public class ResourceProviderR4CacheTest extends BaseResourceProviderR4Test {
assertEquals(results1.getId(), results2.getId()); assertEquals(results1.getId(), results2.getId());
} }
@Test
public void testDeletedSearchResultsNotReturnedFromCache() {
Patient p = new Patient();
p.addName().setFamily("Foo");
String p1Id = myPatientDao.create(p).getId().toUnqualifiedVersionless().getValue();
p = new Patient();
p.addName().setFamily("Foo");
String p2Id = myPatientDao.create(p).getId().toUnqualifiedVersionless().getValue();
Bundle resp1 = ourClient
.search()
.forResource("Patient")
.where(Patient.NAME.matches().value("foo"))
.returnBundle(Bundle.class)
.execute();
assertEquals(2, resp1.getEntry().size());
ourClient.delete().resourceById(new IdType(p1Id)).execute();
Bundle resp2 = ourClient
.search()
.forResource("Patient")
.where(Patient.NAME.matches().value("foo"))
.returnBundle(Bundle.class)
.execute();
assertEquals(resp1.getId(), resp2.getId());
ourLog.info(myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(resp2));
assertEquals(1, resp2.getEntry().size());
}
@AfterClass @AfterClass
public static void afterClassClearContext() { public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest(); TestUtil.clearAllStaticFieldsForUnitTest();


@ -15,6 +15,7 @@ import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.StrictErrorHandler; import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.client.apache.ResourceEntity;
import ca.uhn.fhir.rest.client.api.IClientInterceptor; import ca.uhn.fhir.rest.client.api.IClientInterceptor;
import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.api.IHttpRequest; import ca.uhn.fhir.rest.client.api.IHttpRequest;
@ -26,6 +27,9 @@ import ca.uhn.fhir.rest.server.exceptions.*;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor; import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.util.StopWatch; import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil; import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import ca.uhn.fhir.validation.SingleValidationMessage;
import ca.uhn.fhir.validation.ValidationResult;
import com.google.common.base.Charsets; import com.google.common.base.Charsets;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
@ -52,6 +56,7 @@ import org.hl7.fhir.r4.model.Observation.ObservationStatus;
import org.hl7.fhir.r4.model.Questionnaire.QuestionnaireItemType; import org.hl7.fhir.r4.model.Questionnaire.QuestionnaireItemType;
import org.hl7.fhir.r4.model.Subscription.SubscriptionChannelType; import org.hl7.fhir.r4.model.Subscription.SubscriptionChannelType;
import org.hl7.fhir.r4.model.Subscription.SubscriptionStatus; import org.hl7.fhir.r4.model.Subscription.SubscriptionStatus;
import org.hl7.fhir.utilities.xhtml.XhtmlNode;
import org.junit.*; import org.junit.*;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.util.AopTestUtils; import org.springframework.test.util.AopTestUtils;
@ -2260,6 +2265,28 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
} }
} }
@Test
public void testValidateResourceContainingProfileDeclarationDoesntResolve() throws IOException {
Observation input = new Observation();
input.getText().setDiv(new XhtmlNode().setValue("<div>AA</div>")).setStatus(Narrative.NarrativeStatus.GENERATED);
input.getMeta().addProfile("http://foo/structuredefinition/myprofile");
input.getCode().setText("Hello");
input.setStatus(ObservationStatus.FINAL);
HttpPost post = new HttpPost(ourServerBase + "/Observation/$validate?_pretty=true");
post.setEntity(new ResourceEntity(myFhirCtx, input));
try (CloseableHttpResponse resp = ourHttpClient.execute(post)) {
String respString = IOUtils.toString(resp.getEntity().getContent(), Charsets.UTF_8);
ourLog.info(respString);
assertEquals(412, resp.getStatusLine().getStatusCode());
assertThat(respString, containsString("Profile reference 'http://foo/structuredefinition/myprofile' could not be resolved, so has not been checked"));
}
}
@SuppressWarnings("unused") @SuppressWarnings("unused")
@Test @Test
public void testFullTextSearch() throws Exception { public void testFullTextSearch() throws Exception {


@ -182,33 +182,11 @@ public class SearchCoordinatorSvcImplTest {
when(mySearchBuilder.createQuery(any(), any(), any())).thenReturn(iter); when(mySearchBuilder.createQuery(any(), any(), any())).thenReturn(iter);
doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any()); doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
when(mySearchResultCacheSvc.fetchResultPids(any(), anyInt(), anyInt())).thenAnswer(t -> {
List<ResourcePersistentId> returnedValues = iter.getReturnedValues();
int offset = t.getArgument(1, Integer.class);
int end = t.getArgument(2, Integer.class);
end = Math.min(end, returnedValues.size());
offset = Math.min(offset, returnedValues.size());
ourLog.info("findWithSearchUuid {} - {} out of {} values", offset, end, returnedValues.size());
return returnedValues.subList(offset, end);
});
when(mySearchResultCacheSvc.fetchAllResultPids(any())).thenReturn(allResults);
when(mySearchCacheSvc.tryToMarkSearchAsInProgress(any())).thenAnswer(t->{
Search search = t.getArgument(0, Search.class);
assertEquals(SearchStatusEnum.PASSCMPLET, search.getStatus());
search.setStatus(SearchStatusEnum.LOADING);
return Optional.of(search);
});
when(mySearchCacheSvc.save(any())).thenAnswer(t -> { when(mySearchCacheSvc.save(any())).thenAnswer(t -> {
Search search = t.getArgument(0, Search.class); Search search = t.getArgument(0, Search.class);
myCurrentSearch = search; myCurrentSearch = search;
return search; return search;
}); });
when(mySearchCacheSvc.fetchByUuid(any())).thenAnswer(t -> Optional.ofNullable(myCurrentSearch));
IFhirResourceDao dao = myCallingDao;
when(myDaoRegistry.getResourceDao(any(String.class))).thenReturn(dao);
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective(), null); IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective(), null);
assertNotNull(result.getUuid()); assertNotNull(result.getUuid());
@ -602,6 +580,11 @@ public class SearchCoordinatorSvcImplTest {
return myWrap.getSkippedCount(); return myWrap.getSkippedCount();
} }
@Override
public int getNonSkippedCount() {
return myCount;
}
@Override @Override
public void close() { public void close() {
// nothing // nothing
@ -611,6 +594,7 @@ public class SearchCoordinatorSvcImplTest {
public static class ResultIterator extends BaseIterator<ResourcePersistentId> implements IResultIterator { public static class ResultIterator extends BaseIterator<ResourcePersistentId> implements IResultIterator {
private final Iterator<ResourcePersistentId> myWrap; private final Iterator<ResourcePersistentId> myWrap;
private int myCount;
ResultIterator(Iterator<ResourcePersistentId> theWrap) { ResultIterator(Iterator<ResourcePersistentId> theWrap) {
myWrap = theWrap; myWrap = theWrap;
@ -623,6 +607,7 @@ public class SearchCoordinatorSvcImplTest {
@Override @Override
public ResourcePersistentId next() { public ResourcePersistentId next() {
myCount++;
return myWrap.next(); return myWrap.next();
} }
@ -631,6 +616,11 @@ public class SearchCoordinatorSvcImplTest {
return 0; return 0;
} }
@Override
public int getNonSkippedCount() {
return myCount;
}
@Override @Override
public void close() { public void close() {
// nothing // nothing
@ -697,6 +687,11 @@ public class SearchCoordinatorSvcImplTest {
} }
} }
@Override
public int getNonSkippedCount() {
return 0;
}
@Override @Override
public void close() { public void close() {
// nothing // nothing


@ -1,10 +1,12 @@
package ca.uhn.fhir.jpa.term; package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.context.support.IContextValidationSupport; import ca.uhn.fhir.context.support.IContextValidationSupport;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test; import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet; import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet;
import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.UriParam; import ca.uhn.fhir.rest.param.UriParam;
@ -16,25 +18,36 @@ import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Parameters; import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType; import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.ValueSet; import org.hl7.fhir.r4.model.ValueSet;
import org.junit.After;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.Ignore; import org.junit.Ignore;
import org.junit.Test; import org.junit.Test;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.countMatches;
import static org.apache.commons.lang3.StringUtils.leftPad; import static org.apache.commons.lang3.StringUtils.leftPad;
import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.matchesPattern;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat; import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
public class TerminologySvcDeltaR4Test extends BaseJpaR4Test { public class TerminologySvcDeltaR4Test extends BaseJpaR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(TerminologySvcDeltaR4Test.class); private static final Logger ourLog = LoggerFactory.getLogger(TerminologySvcDeltaR4Test.class);
@After
public void after() {
myDaoConfig.setDeferIndexingForCodesystemsOfSize(new DaoConfig().getDeferIndexingForCodesystemsOfSize());
}
@Test @Test
public void testAddRootConcepts() { public void testAddRootConcepts() {
@ -115,17 +128,23 @@ public class TerminologySvcDeltaR4Test extends BaseJpaR4Test {
ourLog.info("All concepts: {}", myTermConceptDao.findAll()); ourLog.info("All concepts: {}", myTermConceptDao.findAll());
}); });
myCaptureQueriesListener.clear();
delta = new CustomTerminologySet(); delta = new CustomTerminologySet();
TermConcept root = delta.addRootConcept("RootA", "Root A"); TermConcept root = delta.addRootConcept("RootA", "Root A");
root.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("ChildAA").setDisplay("Child AA"); root.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("ChildAA").setDisplay("Child AA");
root.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("ChildAB").setDisplay("Child AB"); root.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("ChildAB").setDisplay("Child AB");
myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd("http://foo/cs", delta); myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd("http://foo/cs", delta);
myCaptureQueriesListener.logAllQueriesForCurrentThread();
assertHierarchyContains( assertHierarchyContains(
"RootA seq=0", "RootA seq=0",
" ChildAA seq=0", " ChildAA seq=0",
" ChildAB seq=1", " ChildAB seq=1",
"RootB seq=0" "RootB seq=0"
); );
} }
@Test @Test
@ -156,12 +175,86 @@ public class TerminologySvcDeltaR4Test extends BaseJpaR4Test {
outcome = myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd("http://foo/cs", delta); outcome = myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd("http://foo/cs", delta);
assertHierarchyContains( assertHierarchyContains(
"RootA seq=0", "RootA seq=0",
" ChildAA seq=0",
" ChildAAA seq=0",
"RootB seq=0", "RootB seq=0",
" ChildAA seq=0", " ChildAA seq=0",
" ChildAAA seq=0" " ChildAAA seq=0"
); );
assertEquals(2, outcome.getUpdatedConceptCount()); assertEquals(2, outcome.getUpdatedConceptCount());
runInTransaction(() -> {
TermConcept concept = myTermSvc.findCode("http://foo/cs", "ChildAA").orElseThrow(() -> new IllegalStateException());
assertEquals(2, concept.getParents().size());
assertThat(concept.getParentPidsAsString(), matchesPattern("^[0-9]+ [0-9]+$"));
});
}
@Test
public void testReAddingConceptsDoesntRecreateExistingLinks() {
createNotPresentCodeSystem();
assertHierarchyContains();
UploadStatistics outcome;
CustomTerminologySet delta;
myCaptureQueriesListener.clear();
delta = new CustomTerminologySet();
delta.addRootConcept("RootA", "Root A")
.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("ChildAA").setDisplay("Child AA");
myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd("http://foo/cs", delta);
assertHierarchyContains(
"RootA seq=0",
" ChildAA seq=0"
);
myCaptureQueriesListener.logDeleteQueries();
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
myCaptureQueriesListener.logInsertQueries();
// 2 concepts, 1 link
assertEquals(3, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.clear();
delta = new CustomTerminologySet();
delta.addRootConcept("RootA", "Root A")
.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("ChildAA").setDisplay("Child AA")
.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("ChildAAA").setDisplay("Child AAA");
myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd("http://foo/cs", delta);
assertHierarchyContains(
"RootA seq=0",
" ChildAA seq=0",
" ChildAAA seq=0"
);
myCaptureQueriesListener.logDeleteQueries();
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
myCaptureQueriesListener.logInsertQueries();
// 1 concept, 1 link
assertEquals(2, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.clear();
delta = new CustomTerminologySet();
delta.addRootConcept("RootA", "Root A")
.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("ChildAA").setDisplay("Child AA")
.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("ChildAAA").setDisplay("Child AAA")
.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("ChildAAAA").setDisplay("Child AAAA");
myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd("http://foo/cs", delta);
assertHierarchyContains(
"RootA seq=0",
" ChildAA seq=0",
" ChildAAA seq=0",
" ChildAAAA seq=0"
);
myCaptureQueriesListener.logDeleteQueries();
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
myCaptureQueriesListener.logInsertQueries();
// 1 concept, 1 link
assertEquals(2, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.clear();
} }
@Test @Test
@ -281,6 +374,49 @@ public class TerminologySvcDeltaR4Test extends BaseJpaR4Test {
} }
@Test
public void testAddLargeHierarchy() {
myDaoConfig.setDeferIndexingForCodesystemsOfSize(5);
createNotPresentCodeSystem();
ValueSet vs;
vs = expandNotPresentCodeSystem();
assertEquals(0, vs.getExpansion().getContains().size());
CustomTerminologySet delta = new CustomTerminologySet();
// Create a nice deep hierarchy
TermConcept concept = delta.addRootConcept("Root", "Root");
int nestedDepth = 10;
for (int i = 0; i < nestedDepth; i++) {
String name = concept.getCode();
concept = concept.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode(name + "0").setDisplay(name + "0");
}
myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd("http://foo/cs", delta);
assertFalse(myTermDeferredStorageSvc.isStorageQueueEmpty());
while (!myTermDeferredStorageSvc.isStorageQueueEmpty()) {
myTermDeferredStorageSvc.saveDeferred();
}
List<String> expectedHierarchy = new ArrayList<>();
for (int i = 0; i < nestedDepth + 1; i++) {
String expected = leftPad("", i, " ") +
"Root" +
leftPad("", i, "0") +
" seq=0";
expectedHierarchy.add(expected);
}
assertHierarchyContains(expectedHierarchy.toArray(new String[0]));
}
@Autowired
private ITermDeferredStorageSvc myTermDeferredStorageSvc;
@Test @Test
public void testAddModifiesExistingCodesInPlace() { public void testAddModifiesExistingCodesInPlace() {
@ -307,54 +443,6 @@ public class TerminologySvcDeltaR4Test extends BaseJpaR4Test {
assertEquals("CODEA1", myTermSvc.lookupCode(myFhirCtx, "http://foo", "codea").getCodeDisplay()); assertEquals("CODEA1", myTermSvc.lookupCode(myFhirCtx, "http://foo", "codea").getCodeDisplay());
} }
@Test
public void testAddRelocateHierarchy() {
createNotPresentCodeSystem();
// Add code hierarchy
CustomTerminologySet delta = new CustomTerminologySet();
TermConcept codeA = delta.addRootConcept("CodeA", "Code A");
TermConcept codeAA = codeA.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("CodeAA").setDisplay("Code AA");
codeAA.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("CodeAAA").setDisplay("Code AAA");
codeAA.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("CodeAAB").setDisplay("Code AAB");
TermConcept codeB = delta.addRootConcept("CodeB", "Code B");
TermConcept codeBA = codeB.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("CodeBA").setDisplay("Code BA");
codeBA.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("CodeBAA").setDisplay("Code BAA");
codeBA.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("CodeBAB").setDisplay("Code BAB");
UploadStatistics outcome = myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd("http://foo/cs", delta);
assertEquals(8, outcome.getUpdatedConceptCount());
assertHierarchyContains(
"CodeA seq=0",
" CodeAA seq=0",
" CodeAAA seq=0",
" CodeAAB seq=1",
"CodeB seq=0",
" CodeBA seq=0",
" CodeBAA seq=0",
" CodeBAB seq=1"
);
// Move a single child code to a new spot and make sure the hierarchy comes along
// for the ride..
delta = new CustomTerminologySet();
delta
.addRootConcept("CodeB", "Code B")
.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA).setCode("CodeAA").setDisplay("Code AA");
outcome = myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd("http://foo/cs", delta);
assertEquals(2, outcome.getUpdatedConceptCount());
assertHierarchyContains(
"CodeA seq=0",
"CodeB seq=0",
" CodeBA seq=0",
" CodeBAA seq=0",
" CodeBAB seq=1",
" CodeAA seq=0", // <-- CodeAA got added here so it comes second
" CodeAAA seq=0",
" CodeAAB seq=1"
);
}
@Test @Test
@Ignore @Ignore
public void testAddWithPropertiesAndDesignations() { public void testAddWithPropertiesAndDesignations() {
@ -464,9 +552,15 @@ public class TerminologySvcDeltaR4Test extends BaseJpaR4Test {
assertEquals(true, runInTransaction(() -> myTermSvc.findCode("http://foo/cs", "codeAAA").isPresent())); assertEquals(true, runInTransaction(() -> myTermSvc.findCode("http://foo/cs", "codeAAA").isPresent()));
// Remove CodeA // Remove CodeA
delta = new CustomTerminologySet(); myCaptureQueriesListener.clear();
delta.addRootConcept("codeA"); runInTransaction(()->{
myTermCodeSystemStorageSvc.applyDeltaCodeSystemsRemove("http://foo/cs", delta); CustomTerminologySet delta2 = new CustomTerminologySet();
delta2.addRootConcept("codeA");
myTermCodeSystemStorageSvc.applyDeltaCodeSystemsRemove("http://foo/cs", delta2);
});
myCaptureQueriesListener.logAllQueriesForCurrentThread();
ourLog.info("*** Done removing");
assertEquals(false, runInTransaction(() -> myTermSvc.findCode("http://foo/cs", "codeB").isPresent())); assertEquals(false, runInTransaction(() -> myTermSvc.findCode("http://foo/cs", "codeB").isPresent()));
assertEquals(false, runInTransaction(() -> myTermSvc.findCode("http://foo/cs", "codeA").isPresent())); assertEquals(false, runInTransaction(() -> myTermSvc.findCode("http://foo/cs", "codeA").isPresent()));

Some files were not shown because too many files have changed in this diff.