Merge remote-tracking branch 'remotes/origin/master' into im_20200131_remove_circular_dependency_forcedid

# Conflicts:
#	hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
ianmarshall 2020-02-18 12:13:00 -05:00
commit f7db280099
179 changed files with 1851 additions and 1254 deletions

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -1,7 +1,7 @@
package example;
import org.hl7.fhir.converter.NullVersionConverterAdvisor30;
import org.hl7.fhir.convertors.*;
import org.hl7.fhir.convertors.conv10_30.Observation10_30;
import org.hl7.fhir.convertors.conv14_30.Questionnaire14_30;
import org.hl7.fhir.exceptions.FHIRException;
public class ConverterExamples {
@ -9,16 +9,12 @@ public class ConverterExamples {
@SuppressWarnings("unused")
public void c1020() throws FHIRException {
//START SNIPPET: 1020
// Create a converter
NullVersionConverterAdvisor30 advisor = new NullVersionConverterAdvisor30();
VersionConvertor_10_30 converter = new VersionConvertor_10_30(advisor);
// Create an input resource to convert
org.hl7.fhir.dstu2.model.Observation input = new org.hl7.fhir.dstu2.model.Observation();
input.setEncounter(new org.hl7.fhir.dstu2.model.Reference("Encounter/123"));
// Convert the resource
org.hl7.fhir.dstu3.model.Observation output = converter.convertObservation(input);
org.hl7.fhir.dstu3.model.Observation output = Observation10_30.convertObservation(input);
String context = output.getContext().getReference();
//END SNIPPET: 1020
}
@ -31,7 +27,7 @@ public class ConverterExamples {
input.setTitle("My title");
// Convert the resource
org.hl7.fhir.dstu3.model.Questionnaire output = VersionConvertor_14_30.convertQuestionnaire(input);
org.hl7.fhir.dstu3.model.Questionnaire output = Questionnaire14_30.convertQuestionnaire(input);
String context = output.getTitle();
//END SNIPPET: 1420
}

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -206,65 +206,4 @@
</resources>
</build>
<profiles>
<profile>
<id>MINI</id>
</profile>
<profile>
<id>SITE</id>
<reporting>
<plugins>
<!-- <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-checkstyle-plugin</artifactId>
<reportSets> <reportSet> <reports> <report>checkstyle</report> </reports>
</reportSet> </reportSets> <configuration> <linkXRef>false</linkXRef> <sourceDirectories>
<sourceDirectory>hapi-fhir-base/src/main/java</sourceDirectory> </sourceDirectories>
</configuration> </plugin> -->
<!--<plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>findbugs-maven-plugin</artifactId>
<version>3.0.0</version> <configuration> </configuration> </plugin> -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jxr-plugin</artifactId>
<reportSets>
<reportSet>
<id>normal</id>
<reports>
<report>jxr</report>
</reports>
</reportSet>
<!-- <reportSet> <id>restful-server-example</id> <reports> <report>jxr</report>
</reports> <configuration> <sourcePath>../restful-server-example/src/main/java</sourcePath>
<destDir>${project.reporting.outputDirectory}/rse-xref</destDir> <outputDirectory>tmp</outputDirectory>
<reportOutputDirectory>rse-xref</reportOutputDirectory> </configuration>
</reportSet> -->
</reportSets>
</plugin>
<!-- <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-linkcheck-plugin</artifactId>
<version>1.1</version> <configuration> <forceSite>false</forceSite> </configuration>
</plugin> -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<reportSets>
<reportSet>
<reports>
<report>checkstyle</report>
</reports>
</reportSet>
</reportSets>
<configuration>
<linkXRef>false</linkXRef>
<configLocation>${project.basedir}/../src/checkstyle/checkstyle.xml</configLocation>
<!--<sourceDirectories> <sourceDirectory> ${project.basedir}/../hapi-fhir-base/src/main/java
</sourceDirectory> </sourceDirectories> -->
</configuration>
</plugin>
</plugins>
</reporting>
</profile>
</profiles>
</project>

View File

@ -62,41 +62,41 @@ public class ParameterUtil {
*/
public static IQueryParameterAnd<?> parseQueryParams(FhirContext theContext, RestSearchParameterTypeEnum paramType,
String theUnqualifiedParamName, List<QualifiedParamList> theParameters) {
QueryParameterAndBinder binder = null;
QueryParameterAndBinder binder;
switch (paramType) {
case COMPOSITE:
throw new UnsupportedOperationException();
case DATE:
binder = new QueryParameterAndBinder(DateAndListParam.class,
Collections.<Class<? extends IQueryParameterType>>emptyList());
Collections.emptyList());
break;
case NUMBER:
binder = new QueryParameterAndBinder(NumberAndListParam.class,
Collections.<Class<? extends IQueryParameterType>>emptyList());
Collections.emptyList());
break;
case QUANTITY:
binder = new QueryParameterAndBinder(QuantityAndListParam.class,
Collections.<Class<? extends IQueryParameterType>>emptyList());
Collections.emptyList());
break;
case REFERENCE:
binder = new QueryParameterAndBinder(ReferenceAndListParam.class,
Collections.<Class<? extends IQueryParameterType>>emptyList());
Collections.emptyList());
break;
case STRING:
binder = new QueryParameterAndBinder(StringAndListParam.class,
Collections.<Class<? extends IQueryParameterType>>emptyList());
Collections.emptyList());
break;
case TOKEN:
binder = new QueryParameterAndBinder(TokenAndListParam.class,
Collections.<Class<? extends IQueryParameterType>>emptyList());
Collections.emptyList());
break;
case URI:
binder = new QueryParameterAndBinder(UriAndListParam.class,
Collections.<Class<? extends IQueryParameterType>>emptyList());
Collections.emptyList());
break;
case HAS:
binder = new QueryParameterAndBinder(HasAndListParam.class,
Collections.<Class<? extends IQueryParameterType>>emptyList());
Collections.emptyList());
break;
case SPECIAL:
binder = new QueryParameterAndBinder(SpecialAndListParam.class,
@ -106,7 +106,6 @@ public class ParameterUtil {
throw new IllegalArgumentException("Parameter '" + theUnqualifiedParamName + "' has type " + paramType + " which is currently not supported.");
}
// FIXME null access
return binder.parse(theContext, theUnqualifiedParamName, theParameters);
}

View File

@ -76,24 +76,6 @@ public class ElementUtil {
return true;
}
/*
public static <T> void validateAllElementsAreOfTypeOrThrowClassCastExceptionForModelSetter(List<T> theList, Class<T> theType) {
if (theList == null) {
return;
}
for (T next : theList) {
if (next != null && theType.isAssignableFrom(next.getClass()) == false) {
StringBuilder b = new StringBuilder();
b.append("Failed to set invalid value, found element in list of type ");
b.append(next.getClass().getSimpleName());
b.append(" but expected ");
b.append(theType.getName());
throw new ClassCastException(b.toString());
}
}
}
*/
public static boolean isEmpty(List<? extends IBase> theElements) {
if (theElements == null) {
return true;
@ -141,8 +123,7 @@ public class ElementUtil {
//@SuppressWarnings("unchecked")
private static <T extends IElement> void addElement(ArrayList<T> retVal, IElement next, Class<T> theType) {
//FIXME There seems to be an error on theType == null => if (theType != null|| theType.isAssignableFrom
if (theType == null|| theType.isAssignableFrom(next.getClass())) {
if (theType != null && theType.isAssignableFrom(next.getClass())) {
retVal.add(theType.cast(next));
}
if (next instanceof ICompositeElement) {

View File

@ -58,7 +58,8 @@ public enum VersionEnum {
V4_0_0,
V4_0_3,
V4_1_0,
V4_2_0;
V4_2_0,
V4_3_0;
public static VersionEnum latestVersion() {
VersionEnum[] values = VersionEnum.values();

View File

@ -39,12 +39,17 @@ import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
public class SchemaBaseValidator implements IValidatorModule {
public static final String RESOURCES_JAR_NOTE = "Note that as of HAPI FHIR 1.2, DSTU2 validation files are kept in a separate JAR (hapi-fhir-validation-resources-XXX.jar) which must be added to your classpath. See the HAPI FHIR download page for more information.";
@ -179,18 +184,20 @@ public class SchemaBaseValidator implements IValidatorModule {
input.setPublicId(thePublicId);
input.setSystemId(theSystemId);
input.setBaseURI(theBaseURI);
// String pathToBase = "ca/uhn/fhir/model/" + myVersion + "/schema/" + theSystemId;
String pathToBase = myCtx.getVersion().getPathToSchemaDefinitions() + '/' + theSystemId;
ourLog.debug("Loading referenced schema file: " + pathToBase);
InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase);
if (baseIs == null) {
throw new InternalErrorException("Schema file not found: " + pathToBase);
}
try (InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase)) {
if (baseIs == null) {
throw new InternalErrorException("Schema file not found: " + pathToBase);
}
input.setByteStream(baseIs);
//FIXME resource leak
byte[] bytes = IOUtils.toByteArray(baseIs);
input.setByteStream(new ByteArrayInputStream(bytes));
} catch (IOException e) {
throw new InternalErrorException(e);
}
return input;
}

View File

@ -3,14 +3,14 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -26,7 +26,6 @@ import org.apache.commons.cli.Options;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.QuoteMode;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.ConceptMap;
@ -43,6 +42,7 @@ import java.util.List;
import java.util.concurrent.ExecutionException;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.hl7.fhir.convertors.conv30_40.ConceptMap30_40.convertConceptMap;
public class ExportConceptMapToCsvCommand extends AbstractImportExportCsvConceptMapCommand {
// TODO: Don't use qualified names for loggers in HAPI CLI.
@ -114,7 +114,7 @@ public class ExportConceptMapToCsvCommand extends AbstractImportExportCsvConcept
private void convertConceptMapToCsv(org.hl7.fhir.dstu3.model.ConceptMap theConceptMap) throws ExecutionException {
try {
convertConceptMapToCsv(VersionConvertor_30_40.convertConceptMap(theConceptMap));
convertConceptMapToCsv(convertConceptMap(theConceptMap));
} catch (FHIRException fe) {
throw new ExecutionException(fe);
}

View File

@ -28,7 +28,6 @@ import org.apache.commons.cli.Options;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ConceptMap.ConceptMapGroupComponent;
@ -45,7 +44,10 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import static org.apache.commons.lang3.StringUtils.*;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hl7.fhir.convertors.conv30_40.ConceptMap30_40.convertConceptMap;
public class ImportCsvToConceptMapCommand extends AbstractImportExportCsvConceptMapCommand {
// TODO: Don't use qualified names for loggers in HAPI CLI.
@ -154,7 +156,7 @@ public class ImportCsvToConceptMapCommand extends AbstractImportExportCsvConcept
private org.hl7.fhir.dstu3.model.ConceptMap convertCsvToConceptMapDstu3() throws ExecutionException {
try {
return VersionConvertor_30_40.convertConceptMap(convertCsvToConceptMapR4());
return convertConceptMap(convertCsvToConceptMapR4());
} catch (FHIRException fe) {
throw new ExecutionException(fe);
}
@ -174,7 +176,7 @@ public class ImportCsvToConceptMapCommand extends AbstractImportExportCsvConcept
.withFirstRecordAsHeader()
.withIgnoreHeaderCase()
.withIgnoreEmptyLines()
.withTrim());
.withTrim())
) {
retVal.setUrl(conceptMapUrl);

View File

@ -118,7 +118,7 @@ public class UploadTerminologyCommandTest extends BaseTest {
assertEquals(1, listOfDescriptors.size());
assertEquals("concepts.csv", listOfDescriptors.get(0).getFilename());
String uploadFile = IOUtils.toString(listOfDescriptors.get(0).getInputStream(), Charsets.UTF_8);
assertThat(uploadFile, containsString("CODE,Display"));
assertThat(uploadFile, uploadFile, containsString("\"CODE\",\"Display\""));
}
@Test

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -85,13 +85,11 @@ public class ApacheRestfulClientFactory extends RestfulClientFactory {
public HttpClient getNativeHttpClient() {
if (myHttpClient == null) {
//FIXME potential resoource leak
PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000,
TimeUnit.MILLISECONDS);
connectionManager.setMaxTotal(getPoolMaxTotal());
connectionManager.setDefaultMaxPerRoute(getPoolMaxPerRoute());
// @formatter:off
//TODO: Use of a deprecated method should be resolved.
RequestConfig defaultRequestConfig =
RequestConfig.custom()
@ -114,7 +112,6 @@ public class ApacheRestfulClientFactory extends RestfulClientFactory {
}
myHttpClient = builder.build();
// @formatter:on
}
@ -128,7 +125,7 @@ public class ApacheRestfulClientFactory extends RestfulClientFactory {
/**
* Only allows to set an instance of type org.apache.http.client.HttpClient
* @see ca.uhn.fhir.rest.client.api.IRestfulClientFactory#setHttpClient(ca.uhn.fhir.rest.client.api.IHttpClient)
* @see ca.uhn.fhir.rest.client.api.IRestfulClientFactory#setHttpClient(Object)
*/
@Override
public synchronized void setHttpClient(Object theHttpClient) {

View File

@ -1796,11 +1796,8 @@ public class GenericClient extends BaseClient implements IGenericClient {
if (rootSs == null) {
rootSs = nextSortSpec;
} else {
// FIXME lastSs is null never set
// TODO unused assignment
lastSs.setChain(nextSortSpec);
}
// TODO unused assignment
lastSs = nextSortSpec;
}
if (rootSs != null) {

View File

@ -1,5 +1,18 @@
package ca.uhn.fhir.rest.client.method;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/*
@ -22,20 +35,6 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* #L%
*/
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.hl7.fhir.instance.model.api.IBaseResource;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
public abstract class BaseQueryParameter implements IParameter {
public abstract List<QualifiedParamList> encode(FhirContext theContext, Object theObject) throws InternalErrorException;
@ -44,12 +43,6 @@ public abstract class BaseQueryParameter implements IParameter {
public abstract RestSearchParameterTypeEnum getParamType();
/**
* Parameter should return true if {@link #parse(FhirContext, List)} should be called even if the query string
* contained no values for the given parameter
*/
public abstract boolean handlesMissing();
@Override
public void initializeTypes(Method theMethod, Class<? extends Collection<?>> theOuterCollectionType, Class<? extends Collection<?>> theInnerCollectionType, Class<?> theParameterType) {
// ignore for now
@ -57,8 +50,6 @@ public abstract class BaseQueryParameter implements IParameter {
public abstract boolean isRequired();
public abstract Object parse(FhirContext theContext, List<QualifiedParamList> theString) throws InternalErrorException, InvalidRequestException;
@Override
public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map<String, List<String>> theTargetQueryArguments, IBaseResource theTargetResource) throws InternalErrorException {
if (theSourceClientArgument == null) {
@ -79,11 +70,7 @@ public abstract class BaseQueryParameter implements IParameter {
String qualifier = nextParamEntry.getQualifier();
String paramName = isNotBlank(qualifier) ? getName() + qualifier : getName();
List<String> paramValues = theTargetQueryArguments.get(paramName);
if (paramValues == null) {
paramValues = new ArrayList<>(value.size());
theTargetQueryArguments.put(paramName, paramValues);
}
List<String> paramValues = theTargetQueryArguments.computeIfAbsent(paramName, k -> new ArrayList<>(value.size()));
paramValues.add(b.toString());
}

View File

@ -19,16 +19,23 @@ package ca.uhn.fhir.rest.client.method;
* limitations under the License.
* #L%
*/
import java.util.*;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.annotation.IncludeParam;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
class IncludeParameter extends BaseQueryParameter {
@ -37,11 +44,12 @@ class IncludeParameter extends BaseQueryParameter {
private Class<?> mySpecType;
private boolean myReverse;
public IncludeParameter(IncludeParam theAnnotation, Class<? extends Collection<Include>> theInstantiableCollectionType, Class<?> theSpecType) {
myInstantiableCollectionType = theInstantiableCollectionType;
myReverse = theAnnotation.reverse();
if (theAnnotation.allow().length > 0) {
myAllow = new HashSet<String>();
myAllow = new HashSet<>();
for (String next : theAnnotation.allow()) {
if (next != null) {
myAllow.add(next);
@ -61,7 +69,7 @@ class IncludeParameter extends BaseQueryParameter {
@SuppressWarnings("unchecked")
@Override
public List<QualifiedParamList> encode(FhirContext theContext, Object theObject) throws InternalErrorException {
ArrayList<QualifiedParamList> retVal = new ArrayList<QualifiedParamList>();
ArrayList<QualifiedParamList> retVal = new ArrayList<>();
if (myInstantiableCollectionType == null) {
if (mySpecType == Include.class) {
@ -105,58 +113,9 @@ class IncludeParameter extends BaseQueryParameter {
return null;
}
@Override
public boolean handlesMissing() {
return true;
}
@Override
public boolean isRequired() {
return false;
}
@Override
public Object parse(FhirContext theContext, List<QualifiedParamList> theString) throws InternalErrorException, InvalidRequestException {
Collection<Include> retValCollection = null;
if (myInstantiableCollectionType != null) {
try {
retValCollection = myInstantiableCollectionType.newInstance();
} catch (Exception e) {
throw new InternalErrorException("Failed to instantiate " + myInstantiableCollectionType.getName(), e);
}
}
for (QualifiedParamList nextParamList : theString) {
if (nextParamList.isEmpty()) {
continue;
}
if (nextParamList.size() > 1) {
throw new InvalidRequestException(theContext.getLocalizer().getMessage(IncludeParameter.class, "orIncludeInRequest"));
}
boolean recurse = Constants.PARAM_INCLUDE_QUALIFIER_RECURSE.equals(nextParamList.getQualifier());
String value = nextParamList.get(0);
if (myAllow != null && !myAllow.isEmpty()) {
if (!myAllow.contains(value)) {
if (!myAllow.contains("*")) {
String msg = theContext.getLocalizer().getMessage(IncludeParameter.class, "invalidIncludeNameInRequest", value, new TreeSet<String>(myAllow).toString(), getName());
throw new InvalidRequestException(msg);
}
}
}
if (myInstantiableCollectionType == null) {
if (mySpecType == String.class) {
return value;
}
return new Include(value, recurse);
}
//FIXME null access
retValCollection.add(new Include(value, recurse));
}
return retValCollection;
}
}

View File

@ -1,29 +1,51 @@
package ca.uhn.fhir.rest.client.method;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.io.*;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.*;
import java.util.Map.Entry;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.*;
import ca.uhn.fhir.context.*;
import ca.uhn.fhir.model.api.*;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.api.TagList;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.annotation.*;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.PatchTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.SummaryEnum;
import ca.uhn.fhir.rest.api.ValidationModeEnum;
import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.client.method.OperationParameter.IOperationParamConverter;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.param.binder.CollectionBinder;
import ca.uhn.fhir.util.*;
import ca.uhn.fhir.util.DateUtils;
import ca.uhn.fhir.util.ParametersUtil;
import ca.uhn.fhir.util.ReflectionUtil;
import ca.uhn.fhir.util.UrlUtil;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import java.io.IOException;
import java.io.InputStream;
import java.io.PushbackInputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/*
* #%L
@ -45,7 +67,6 @@ import ca.uhn.fhir.util.*;
* #L%
*/
@SuppressWarnings("deprecation")
public class MethodUtil {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(MethodUtil.class);
@ -288,33 +309,25 @@ public class MethodUtil {
specType = parameterType;
}
param = new IncludeParameter((IncludeParam) nextAnnotation, instantiableCollectionType,
specType);
param = new IncludeParameter((IncludeParam) nextAnnotation, instantiableCollectionType, specType);
} else if (nextAnnotation instanceof ResourceParam) {
if (IBaseResource.class.isAssignableFrom(parameterType)) {
// good
} else if (String.class.equals(parameterType)) {
// good
} else if (byte[].class.equals(parameterType)) {
// good
} else if (EncodingEnum.class.equals(parameterType)) {
// good
} else {
StringBuilder b = new StringBuilder();
b.append("Method '");
b.append(theMethod.getName());
b.append("' is annotated with @");
b.append(ResourceParam.class.getSimpleName());
b.append(" but has a type that is not an implemtation of ");
b.append(" but has a type that is not an implementation of ");
b.append(IBaseResource.class.getCanonicalName());
b.append(" or String or byte[]");
throw new ConfigurationException(b.toString());
}
param = new ResourceParameter(parameterType);
} else if (nextAnnotation instanceof IdParam) {
param = new NullParameter();
} else if (nextAnnotation instanceof ServerBase) {
param = new ServerBaseParamBinder();
} else if (nextAnnotation instanceof Elements) {
param = new ElementsParameter();
} else if (nextAnnotation instanceof Since) {
@ -345,19 +358,6 @@ public class MethodUtil {
}
param = new OperationParameter(theContext, Constants.EXTOP_VALIDATE,
Constants.EXTOP_VALIDATE_MODE, 0, 1).setConverter(new IOperationParamConverter() {
@Override
public Object incomingServer(Object theObject) {
if (isNotBlank(theObject.toString())) {
ValidationModeEnum retVal = ValidationModeEnum
.forCode(theObject.toString());
if (retVal == null) {
OperationParameter.throwInvalidMode(theObject.toString());
}
return retVal;
}
return null;
}
@Override
public Object outgoingClient(Object theObject) {
return ParametersUtil.createString(theContext,
@ -372,10 +372,6 @@ public class MethodUtil {
}
param = new OperationParameter(theContext, Constants.EXTOP_VALIDATE,
Constants.EXTOP_VALIDATE_PROFILE, 0, 1).setConverter(new IOperationParamConverter() {
@Override
public Object incomingServer(Object theObject) {
return theObject.toString();
}
@Override
public Object outgoingClient(Object theObject) {

View File

@ -191,8 +191,6 @@ public class OperationParameter implements IParameter {
interface IOperationParamConverter {
Object incomingServer(Object theObject);
Object outgoingClient(Object theObject);
}
@ -203,13 +201,6 @@ public class OperationParameter implements IParameter {
Validate.isTrue(mySearchParameterBinding != null);
}
@Override
public Object incomingServer(Object theObject) {
IPrimitiveType<?> obj = (IPrimitiveType<?>) theObject;
List<QualifiedParamList> paramList = Collections.singletonList(QualifiedParamList.splitQueryStringByCommasIgnoreEscape(null, obj.getValueAsString()));
return mySearchParameterBinding.parse(myContext, paramList);
}
@Override
public Object outgoingClient(Object theObject) {
IQueryParameterType obj = (IQueryParameterType) theObject;

View File

@ -156,26 +156,11 @@ public class SearchParameter extends BaseQueryParameter {
return myType;
}
@Override
public boolean handlesMissing() {
return false;
}
@Override
public boolean isRequired() {
return myRequired;
}
/*
* (non-Javadoc)
*
* @see ca.uhn.fhir.rest.param.IParameter#parse(java.util.List)
*/
@Override
public Object parse(FhirContext theContext, List<QualifiedParamList> theString) throws InternalErrorException, InvalidRequestException {
return myParamBinder.parse(theContext, getName(), theString);
}
public void setChainlists(String[] theChainWhitelist) {
myQualifierWhitelist = new HashSet<>(theChainWhitelist.length);
myQualifierWhitelist.add(QUALIFIER_ANY_TYPE);

View File

@ -1,47 +0,0 @@
package ca.uhn.fhir.rest.client.method;
/*
* #%L
* HAPI FHIR - Client Framework
* %%
* Copyright (C) 2014 - 2020 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.lang.reflect.Method;
import java.util.*;
import org.hl7.fhir.instance.model.api.IBaseResource;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
class ServerBaseParamBinder implements IParameter {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ServerBaseParamBinder.class);
@Override
public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map<String, List<String>> theTargetQueryArguments, IBaseResource theTargetResource) throws InternalErrorException {
/*
* Does nothing, since we just ignore serverbase arguments
*/
ourLog.trace("Ignoring server base argument: {}", theSourceClientArgument);
}
@Override
public void initializeTypes(Method theMethod, Class<? extends Collection<?>> theOuterCollectionType, Class<? extends Collection<?>> theInnerCollectionType, Class<?> theParameterType) {
// ignore for now
}
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -31,7 +31,11 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.interceptor.InterceptorAdapter;
import org.hl7.fhir.converter.NullVersionConverterAdvisor30;
import org.hl7.fhir.converter.NullVersionConverterAdvisor40;
import org.hl7.fhir.convertors.*;
import org.hl7.fhir.convertors.VersionConvertorAdvisor30;
import org.hl7.fhir.convertors.VersionConvertorAdvisor40;
import org.hl7.fhir.convertors.VersionConvertor_10_30;
import org.hl7.fhir.convertors.VersionConvertor_10_40;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.model.Resource;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBaseResource;
@ -40,7 +44,9 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.StringTokenizer;
import static org.apache.commons.lang3.StringUtils.*;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
* <b>This is an experimental interceptor! Use with caution as
@ -54,16 +60,12 @@ import static org.apache.commons.lang3.StringUtils.*;
public class VersionedApiConverterInterceptor extends InterceptorAdapter {
private final FhirContext myCtxDstu2;
private final FhirContext myCtxDstu2Hl7Org;
private VersionConvertor_30_40 myVersionConvertor_30_40;
private VersionConvertor_10_40 myVersionConvertor_10_40;
private VersionConvertor_10_30 myVersionConvertor_10_30;
private final NullVersionConverterAdvisor40 advisor40;
private final NullVersionConverterAdvisor30 advisor30;
public VersionedApiConverterInterceptor() {
myVersionConvertor_30_40 = new VersionConvertor_30_40();
VersionConvertorAdvisor40 advisor40 = new NullVersionConverterAdvisor40();
myVersionConvertor_10_40 = new VersionConvertor_10_40(advisor40);
VersionConvertorAdvisor30 advisor30 = new NullVersionConverterAdvisor30();
myVersionConvertor_10_30 = new VersionConvertor_10_30(advisor30);
advisor40 = new NullVersionConverterAdvisor40();
advisor30 = new NullVersionConverterAdvisor30();
myCtxDstu2 = FhirContext.forDstu2();
myCtxDstu2Hl7Org = FhirContext.forDstu2Hl7Org();
@ -104,17 +106,17 @@ public class VersionedApiConverterInterceptor extends InterceptorAdapter {
IBaseResource converted = null;
try {
if (wantVersion == FhirVersionEnum.R4 && haveVersion == FhirVersionEnum.DSTU3) {
converted = myVersionConvertor_30_40.convertResource(toDstu3(responseResource), true);
converted = VersionConvertor_30_40.convertResource(toDstu3(responseResource), true);
} else if (wantVersion == FhirVersionEnum.DSTU3 && haveVersion == FhirVersionEnum.R4) {
converted = myVersionConvertor_30_40.convertResource(toR4(responseResource), true);
converted = VersionConvertor_30_40.convertResource(toR4(responseResource), true);
} else if (wantVersion == FhirVersionEnum.DSTU2 && haveVersion == FhirVersionEnum.R4) {
converted = myVersionConvertor_10_40.convertResource(toR4(responseResource));
converted = VersionConvertor_10_40.convertResource(toR4(responseResource), advisor40);
} else if (wantVersion == FhirVersionEnum.R4 && haveVersion == FhirVersionEnum.DSTU2) {
converted = myVersionConvertor_10_40.convertResource(toDstu2(responseResource));
converted = VersionConvertor_10_40.convertResource(toDstu2(responseResource), advisor40);
} else if (wantVersion == FhirVersionEnum.DSTU2 && haveVersion == FhirVersionEnum.DSTU3) {
converted = myVersionConvertor_10_30.convertResource(toDstu3(responseResource));
converted = VersionConvertor_10_30.convertResource(toDstu3(responseResource), advisor30);
} else if (wantVersion == FhirVersionEnum.DSTU3 && haveVersion == FhirVersionEnum.DSTU2) {
converted = myVersionConvertor_10_30.convertResource(toDstu2(responseResource));
converted = VersionConvertor_10_30.convertResource(toDstu2(responseResource), advisor30);
}
} catch (FHIRException e) {
throw new InternalErrorException(e);

View File

@ -27,8 +27,12 @@ import org.hl7.fhir.r5.model.Bundle;
import org.hl7.fhir.r5.model.CodeSystem;
import org.hl7.fhir.r5.model.ValueSet;
import java.util.IdentityHashMap;
public class NullVersionConverterAdvisor50 implements VersionConvertorAdvisor50 {
private IdentityHashMap<ValueSet, CodeSystem> myCodeSystems = new IdentityHashMap<>();
@Override
public boolean ignoreEntry(Bundle.BundleEntryComponent src) {
return false;
@ -56,11 +60,11 @@ public class NullVersionConverterAdvisor50 implements VersionConvertorAdvisor50
@Override
public void handleCodeSystem(CodeSystem tgtcs, ValueSet source) throws FHIRException {
myCodeSystems.put(source, tgtcs);
}
@Override
public CodeSystem getCodeSystem(ValueSet src) throws FHIRException {
return null;
return myCodeSystems.get(src);
}
}

View File

@ -16,13 +16,10 @@ public class VersionConvertor_10_30Test {
@Test
public void testConvert() throws FHIRException {
NullVersionConverterAdvisor30 advisor = new NullVersionConverterAdvisor30();
VersionConvertor_10_30 converter = new VersionConvertor_10_30(advisor);
org.hl7.fhir.dstu2.model.Observation input = new org.hl7.fhir.dstu2.model.Observation();
input.setEncounter(new org.hl7.fhir.dstu2.model.Reference("Encounter/123"));
org.hl7.fhir.dstu3.model.Observation output = converter.convertObservation(input);
org.hl7.fhir.dstu3.model.Observation output = (Observation) VersionConvertor_10_30.convertResource(input);
String context = output.getContext().getReference();
assertEquals("Encounter/123", context);
@ -31,9 +28,6 @@ public class VersionConvertor_10_30Test {
@Test
public void testConvertSpecimen() throws FHIRException {
NullVersionConverterAdvisor30 advisor = new NullVersionConverterAdvisor30();
VersionConvertor_10_30 converter = new VersionConvertor_10_30(advisor);
Specimen spec = new Specimen();
CodeableConcept cc = new CodeableConcept();
Coding coding = new Coding();
@ -58,7 +52,7 @@ public class VersionConvertor_10_30Test {
Specimen.SpecimenContainerComponent specimenContainerComponent = new Specimen.SpecimenContainerComponent();
specimenContainerComponent.getExtension().add(new Extension().setUrl("some_url").setValue(new StringType("some_value")));
spec.setContainer(Collections.singletonList(specimenContainerComponent));
Resource resource = converter.convertResource(spec);
Resource resource = VersionConvertor_10_30.convertResource(spec);
}

View File

@ -3,6 +3,7 @@ package org.hl7.fhir.converter;
import static org.junit.Assert.assertEquals;
import org.hl7.fhir.convertors.VersionConvertor_14_30;
import org.hl7.fhir.dstu3.model.Questionnaire;
import org.hl7.fhir.exceptions.FHIRException;
import org.junit.Test;
@ -14,7 +15,7 @@ public class VersionConvertor_14_30Test {
org.hl7.fhir.dstu2016may.model.Questionnaire input = new org.hl7.fhir.dstu2016may.model.Questionnaire();
input.setTitle("My title");
org.hl7.fhir.dstu3.model.Questionnaire output = VersionConvertor_14_30.convertQuestionnaire(input);
org.hl7.fhir.dstu3.model.Questionnaire output = (Questionnaire) VersionConvertor_14_30.convertResource(input);
String context = output.getTitle();
assertEquals("My title", context);

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -73,13 +73,13 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-dstu2</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-subscription</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
@ -96,7 +96,7 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-testpage-overlay</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<classifier>classes</classifier>
</dependency>
<dependency>

View File

@ -20,9 +20,8 @@ package ca.uhn.hapi.fhir.docs;
* #L%
*/
import org.hl7.fhir.converter.NullVersionConverterAdvisor30;
import org.hl7.fhir.convertors.VersionConvertor_10_30;
import org.hl7.fhir.convertors.VersionConvertor_14_30;
import org.hl7.fhir.convertors.conv10_30.Observation10_30;
import org.hl7.fhir.convertors.conv14_30.Questionnaire14_30;
import org.hl7.fhir.exceptions.FHIRException;
public class ConverterExamples {
@ -30,16 +29,12 @@ public class ConverterExamples {
@SuppressWarnings("unused")
public void c1020() throws FHIRException {
//START SNIPPET: 1020
// Create a converter
NullVersionConverterAdvisor30 advisor = new NullVersionConverterAdvisor30();
VersionConvertor_10_30 converter = new VersionConvertor_10_30(advisor);
// Create an input resource to convert
org.hl7.fhir.dstu2.model.Observation input = new org.hl7.fhir.dstu2.model.Observation();
input.setEncounter(new org.hl7.fhir.dstu2.model.Reference("Encounter/123"));
// Convert the resource
org.hl7.fhir.dstu3.model.Observation output = converter.convertObservation(input);
org.hl7.fhir.dstu3.model.Observation output = Observation10_30.convertObservation(input);
String context = output.getContext().getReference();
//END SNIPPET: 1020
}
@ -52,7 +47,7 @@ public class ConverterExamples {
input.setTitle("My title");
// Convert the resource
org.hl7.fhir.dstu3.model.Questionnaire output = VersionConvertor_14_30.convertQuestionnaire(input);
org.hl7.fhir.dstu3.model.Questionnaire output = Questionnaire14_30.convertQuestionnaire(input);
String context = output.getTitle();
//END SNIPPET: 1420
}

View File

@ -225,17 +225,6 @@
"lon": -3.6840,
"added": "2019-09-29"
},
{
"title": "ResMed",
"description": "Digital Health Technologies: The largest tele-monitored device fleet in the world. Focus on various respiratory diseases from Sleep Disordered breathing (sleep apnea) to COPD.",
"link": null,
"contactName": null,
"contactEmail": null,
"city": "San Diego, California",
"lat": 32.8246,
"lon": -117.1971,
"added": "2019-09-29"
},
{
"title": "Highmark",
"description": "FHIR-based Provider Directory Message Model: Using R4 FHIR we created a standard provider directory message model that captures all practitioners, practitioner roles, vendors, vendor roles, locations, healthcare services, affiliated directory networks and tiered insurance plans. We supply this FHIR-based extract to a well-known transparency vendor whom processes our FHIR message model extract and loads the data to be consumed and accessed on a web-based provider search application. Members can then access this provider search app to search for practitioners whom participate in their products and networks.",

View File

@ -0,0 +1,6 @@
---
type: add
issue: 1694
title: It is now possible to specify resource provider method annotations on interface methods implemented by
resource provider classes, as opposed to needing to specify them directly on the concrete class. Thanks
to Tue Toft Nørgård for the pull request!
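As a rough illustration of this changelog entry (the interface, provider, and method names below are hypothetical and not taken from this commit), annotations declared on an interface method are now honoured by the concrete provider that implements it:

    import ca.uhn.fhir.rest.annotation.IdParam;
    import ca.uhn.fhir.rest.annotation.Read;
    import ca.uhn.fhir.rest.server.IResourceProvider;
    import org.hl7.fhir.instance.model.api.IBaseResource;
    import org.hl7.fhir.instance.model.api.IIdType;
    import org.hl7.fhir.r4.model.Patient;

    // Hypothetical interface: the HAPI FHIR annotations are declared once, here.
    interface IPatientRead extends IResourceProvider {
       @Read
       Patient readPatient(@IdParam IIdType theId);
    }

    // Concrete provider: with this change the @Read/@IdParam annotations on the
    // interface method are picked up, so they need not be repeated on the class.
    class PatientProvider implements IPatientRead {

       @Override
       public Patient readPatient(IIdType theId) {
          // Sketch only: return a stub resource with the requested id
          Patient patient = new Patient();
          patient.setId(theId.getIdPart());
          return patient;
       }

       @Override
       public Class<? extends IBaseResource> getResourceType() {
          return Patient.class;
       }
    }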

View File

@ -1,2 +1,3 @@
---
release-date: "TBD"
release-date: "2020-02-15"
codename: "Koala"

View File

@ -0,0 +1,6 @@
---
type: change
issue: 1715
title: The version converters for all versions except R4/R5 have been reworked to be split into individual
classes per resource type (the R4/R5 converters were already organized this way). Thanks to Mark Iantorno
for a huge effort to write a Java source parser/serializer to accomplish this task.
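A minimal standalone sketch of the new calling style, grounded in the ConverterExamples changes earlier in this diff (the wrapping class name here is illustrative, not part of the commit):

    import org.hl7.fhir.convertors.conv10_30.Observation10_30;
    import org.hl7.fhir.exceptions.FHIRException;

    public class ConverterUsageSketch {
       public static void main(String[] args) throws FHIRException {
          // DSTU2 source resource to convert
          org.hl7.fhir.dstu2.model.Observation input = new org.hl7.fhir.dstu2.model.Observation();
          input.setEncounter(new org.hl7.fhir.dstu2.model.Reference("Encounter/123"));

          // New style: static call on the per-resource converter class
          // (previously: new VersionConvertor_10_30(advisor).convertObservation(input))
          org.hl7.fhir.dstu3.model.Observation output = Observation10_30.convertObservation(input);
          System.out.println(output.getContext().getReference()); // Encounter/123
       }
    }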

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>hapi-deployable-pom</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

View File

@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -1,5 +1,25 @@
package ca.uhn.fhir.jaxrs.server.util;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.server.ParseAction;
import ca.uhn.fhir.rest.server.RestfulResponse;
import ca.uhn.fhir.rest.server.RestfulServerUtils;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseBinary;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.util.List;
import java.util.Map.Entry;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/*
@ -21,23 +41,6 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* limitations under the License.
* #L%
*/
import java.io.*;
import java.util.List;
import java.util.Map.Entry;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseBinary;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.api.server.ParseAction;
import ca.uhn.fhir.rest.server.RestfulResponse;
import ca.uhn.fhir.rest.server.RestfulServerUtils;
/**
* The JaxRsResponse is a jax-rs specific implementation of the RestfulResponse.
@ -60,8 +63,7 @@ public class JaxRsResponse extends RestfulResponse<JaxRsRequest> {
* by the server.
*/
@Override
public Writer getResponseWriter(int theStatusCode, String theStatusMessage, String theContentType, String theCharset, boolean theRespondGzip)
throws UnsupportedEncodingException, IOException {
public Writer getResponseWriter(int theStatusCode, String theStatusMessage, String theContentType, String theCharset, boolean theRespondGzip) {
return new StringWriter();
}
@ -78,7 +80,7 @@ public class JaxRsResponse extends RestfulResponse<JaxRsRequest> {
}
@Override
public Response sendAttachmentResponse(IBaseBinary bin, int statusCode, String contentType) throws IOException {
public Object sendAttachmentResponse(IBaseBinary bin, int statusCode, String contentType) {
ResponseBuilder response = buildResponse(statusCode);
if (bin.getContent() != null && bin.getContent().length > 0) {
response.header(Constants.HEADER_CONTENT_TYPE, contentType).entity(bin.getContent());

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -199,26 +199,7 @@ public class JaxRsPatientProviderDstu3Test {
//assertEquals(e.getStatusCode(), Constants.STATUS_HTTP_404_NOT_FOUND);
}
}
/** Transaction - Server */
@Ignore
@Test
public void testTransaction() {
Bundle bundle = new Bundle();
BundleEntryComponent entry = bundle.addEntry();
final Patient existing = new Patient();
existing.getName().get(0).setFamily("Created with bundle");
entry.setResource(existing);
// FIXME ?
// BoundCodeDt<BundleEntryTransactionMethodEnum> theTransactionOperation =
// new BoundCodeDt(
// BundleEntryTransactionMethodEnum.VALUESET_BINDER,
// BundleEntryTransactionMethodEnum.POST);
// entry.setTransactionMethod(theTransactionOperation);
Bundle response = client.transaction().withBundle(bundle).execute();
}
/** Conformance - Server */
@Test
@Ignore

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>4.2.0-SNAPSHOT</version>
<version>4.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -608,7 +608,7 @@
<scope>compile</scope>
</dependency>
</dependencies>
</dependencies>
<properties>

View File

@ -368,7 +368,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
Search search = new Search();
search.setDeleted(false);
search.setCreated(new Date());
search.setSearchLastReturned(new Date());
search.setLastUpdated(theSince, theUntil);
search.setUuid(UUID.randomUUID().toString());
search.setResourceType(resourceName);

View File

@ -1097,11 +1097,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Override
public Set<ResourcePersistentId> searchForIds(SearchParameterMap theParams, RequestDetails theRequest) {
theParams.setLoadSynchronousUpTo(10000);
ISearchBuilder builder = mySearchBuilderFactory.newSearchBuilder(this, getResourceName(), getResourceType());
// FIXME: fail if too many results
HashSet<ResourcePersistentId> retVal = new HashSet<>();
String uuid = UUID.randomUUID().toString();

View File

@ -21,12 +21,12 @@ package ca.uhn.fhir.jpa.dao;
*/
import ca.uhn.fhir.model.dstu2.resource.StructureDefinition;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
public class FhirResourceDaoStructureDefinitionDstu2 extends BaseHapiFhirResourceDao<StructureDefinition> implements IFhirResourceDaoStructureDefinition<StructureDefinition> {
@Override
public StructureDefinition generateSnapshot(StructureDefinition theInput, String theUrl, String theWebUrl, String theName) {
// FIXME: implement
return null;
throw new InvalidRequestException("Snapshot generation not supported for DSTU2");
}
}

View File

@ -29,4 +29,6 @@ public interface IResultIterator extends Iterator<ResourcePersistentId>, Closeab
int getSkippedCount();
int getNonSkippedCount();
}

View File

@ -65,6 +65,7 @@ import org.apache.commons.lang3.Validate;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.query.Query;
import org.hl7.fhir.dstu3.model.Location;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
@ -160,6 +161,10 @@ public class SearchBuilder implements ISearchBuilder {
// Remove any empty parameters
theParams.clean();
if (myResourceType == Location.class) {
theParams.setLocationDistance();
}
/*
* Check if there is a unique key associated with the set
* of parameters passed in
@ -181,7 +186,6 @@ public class SearchBuilder implements ISearchBuilder {
}
@Override
public Iterator<Long> createCountQuery(SearchParameterMap theParams, String theSearchUuid, RequestDetails theRequest) {
init(theParams, theSearchUuid);
@ -250,7 +254,8 @@ public class SearchBuilder implements ISearchBuilder {
outerQuery.multiselect(myBuilder.countDistinct(myQueryRoot.getRoot()));
} else {
outerQuery.multiselect(myQueryRoot.get("myId").as(Long.class));
outerQuery.distinct(true);
// KHS This distinct call is causing performance issues in large installations
// outerQuery.distinct(true);
}
}
@ -961,6 +966,7 @@ public class SearchBuilder implements ISearchBuilder {
private SortSpec mySort;
private boolean myStillNeedToFetchIncludes;
private int mySkipCount = 0;
private int myNonSkipCount = 0;
private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) {
mySearchRuntimeDetails = theSearchRuntimeDetails;
@ -990,14 +996,7 @@ public class SearchBuilder implements ISearchBuilder {
myMaxResultsToFetch = myDaoConfig.getFetchSizeDefaultMaximum();
}
final TypedQuery<Long> query = createQuery(mySort, myMaxResultsToFetch, false, myRequest);
mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());
Query<Long> hibernateQuery = (Query<Long>) query;
hibernateQuery.setFetchSize(myFetchSize);
ScrollableResults scroll = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
myResultsIterator = new ScrollableResultsIterator<>(scroll);
initializeIteratorQuery(myMaxResultsToFetch);
// If the query resulted in extra results being requested
if (myAlsoIncludePids != null) {
@ -1032,11 +1031,32 @@ public class SearchBuilder implements ISearchBuilder {
ResourcePersistentId next = new ResourcePersistentId(nextLong);
if (myPidSet.add(next)) {
myNext = next;
myNonSkipCount++;
break;
} else {
mySkipCount++;
}
}
if (!myResultsIterator.hasNext()) {
if (myMaxResultsToFetch != null && (mySkipCount + myNonSkipCount == myMaxResultsToFetch)) {
if (mySkipCount > 0 && myNonSkipCount == 0) {
myMaxResultsToFetch += 1000;
StorageProcessingMessage message = new StorageProcessingMessage();
String msg = "Pass completed with no matching results. This indicates an inefficient query! Retrying with new max count of " + myMaxResultsToFetch;
ourLog.warn(msg);
message.setMessage(msg);
HookParams params = new HookParams()
.add(RequestDetails.class, myRequest)
.addIfMatchesType(ServletRequestDetails.class, myRequest)
.add(StorageProcessingMessage.class, message);
JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_WARNING, params);
initializeIteratorQuery(myMaxResultsToFetch);
}
}
}
}
}
@ -1096,6 +1116,20 @@ public class SearchBuilder implements ISearchBuilder {
}
private void initializeIteratorQuery(Integer theMaxResultsToFetch) {
final TypedQuery<Long> query = createQuery(mySort, theMaxResultsToFetch, false, myRequest);
mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());
Query<Long> hibernateQuery = (Query<Long>) query;
hibernateQuery.setFetchSize(myFetchSize);
ScrollableResults scroll = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
myResultsIterator = new ScrollableResultsIterator<>(scroll);
mySkipCount = 0;
myNonSkipCount = 0;
}
@Override
public boolean hasNext() {
if (myNext == null) {
@ -1118,6 +1152,11 @@ public class SearchBuilder implements ISearchBuilder {
return mySkipCount;
}
@Override
public int getNonSkippedCount() {
return myNonSkipCount;
}
@Override
public void close() {
if (myResultsIterator != null) {

View File

@ -32,7 +32,6 @@ import ca.uhn.fhir.jpa.model.entity.ForcedId;
public interface IForcedIdDao extends JpaRepository<ForcedId, Long> {
// FIXME: JA We should log a performance warning if this is used since it's not indexed
@Query("SELECT f.myResourcePid FROM ForcedId f WHERE myForcedId IN (:forced_id)")
List<Long> findByForcedId(@Param("forced_id") Collection<String> theForcedId);

View File

@ -1,7 +1,6 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
@ -38,16 +37,12 @@ public interface ISearchDao extends JpaRepository<Search, Long> {
@Query("SELECT s FROM Search s LEFT OUTER JOIN FETCH s.myIncludes WHERE s.myUuid = :uuid")
Optional<Search> findByUuidAndFetchIncludes(@Param("uuid") String theUuid);
@Query("SELECT s.myId FROM Search s WHERE (s.mySearchLastReturned < :cutoff) AND (s.myExpiryOrNull IS NULL OR s.myExpiryOrNull < :now)")
Slice<Long> findWhereLastReturnedBefore(@Param("cutoff") Date theCutoff, @Param("now") Date theNow, Pageable thePage);
@Query("SELECT s.myId FROM Search s WHERE (s.myCreated < :cutoff) AND (s.myExpiryOrNull IS NULL OR s.myExpiryOrNull < :now)")
Slice<Long> findWhereCreatedBefore(@Param("cutoff") Date theCutoff, @Param("now") Date theNow, Pageable thePage);
@Query("SELECT s FROM Search s WHERE s.myResourceType = :type AND mySearchQueryStringHash = :hash AND (s.myCreated > :cutoff) AND s.myDeleted = false AND s.myStatus <> 'FAILED'")
Collection<Search> findWithCutoffOrExpiry(@Param("type") String theResourceType, @Param("hash") int theHashCode, @Param("cutoff") Date theCreatedCutoff);
@Modifying
@Query("UPDATE Search s SET s.mySearchLastReturned = :last WHERE s.myId = :pid")
void updateSearchLastReturned(@Param("pid") long thePid, @Param("last") Date theDate);
@Modifying
@Query("UPDATE Search s SET s.myDeleted = :deleted WHERE s.myId = :pid")
void updateDeleted(@Param("pid") Long thePid, @Param("deleted") boolean theDeleted);

View File

@ -21,8 +21,6 @@ package ca.uhn.fhir.jpa.dao.data;
*/
import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
@ -37,6 +35,4 @@ public interface ITermValueSetConceptDesignationDao extends JpaRepository<TermVa
@Modifying
void deleteByTermValueSetId(@Param("pid") Long theValueSetId);
@Query("SELECT vscd FROM TermValueSetConceptDesignation vscd WHERE vscd.myConcept.myId = :pid")
Slice<TermValueSetConceptDesignation> findByTermValueSetConceptId(Pageable thePage, @Param("pid") Long theValueSetConceptId);
}

View File

@ -24,10 +24,10 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IContextValidationSupport;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
@ -35,7 +35,6 @@ import ca.uhn.fhir.jpa.util.LogicUtil;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.dstu3.model.CodeSystem;
import org.hl7.fhir.dstu3.model.CodeableConcept;
@ -53,6 +52,7 @@ import java.util.List;
import java.util.Set;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hl7.fhir.convertors.conv30_40.CodeSystem30_40.convertCodeSystem;
@Transactional
public class FhirResourceDaoCodeSystemDstu3 extends BaseHapiFhirResourceDao<CodeSystem> implements IFhirResourceDaoCodeSystem<CodeSystem, Coding, CodeableConcept> {
@ -147,7 +147,7 @@ public class FhirResourceDaoCodeSystemDstu3 extends BaseHapiFhirResourceDao<Code
CodeSystem csDstu3 = (CodeSystem) theResource;
org.hl7.fhir.r4.model.CodeSystem cs = VersionConvertor_30_40.convertCodeSystem(csDstu3);
org.hl7.fhir.r4.model.CodeSystem cs = convertCodeSystem(csDstu3);
addPidToResource(theEntity, cs);
myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(cs, (ResourceTable) theEntity);

View File

@ -22,21 +22,24 @@ package ca.uhn.fhir.jpa.dao.dstu3;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoConceptMap;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.term.TranslationMatch;
import ca.uhn.fhir.jpa.term.TranslationRequest;
import ca.uhn.fhir.jpa.term.TranslationResult;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.model.ConceptMap;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.UriType;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Date;
@ -44,6 +47,8 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.hl7.fhir.convertors.conv30_40.ConceptMap30_40.convertConceptMap;
public class FhirResourceDaoConceptMapDstu3 extends BaseHapiFhirResourceDao<ConceptMap> implements IFhirResourceDaoConceptMap<ConceptMap> {
@Autowired
private ITermReadSvc myHapiTerminologySvc;
@ -166,7 +171,7 @@ public class FhirResourceDaoConceptMapDstu3 extends BaseHapiFhirResourceDao<Conc
if (retVal.getDeleted() == null) {
try {
ConceptMap conceptMap = (ConceptMap) theResource;
org.hl7.fhir.r4.model.ConceptMap converted = VersionConvertor_30_40.convertConceptMap(conceptMap);
org.hl7.fhir.r4.model.ConceptMap converted = convertConceptMap(conceptMap);
myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, converted);
} catch (FHIRException fe) {
throw new InternalErrorException(fe);

View File

@ -33,12 +33,15 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.ElementUtil;
import org.apache.commons.codec.binary.StringUtils;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.hapi.ctx.DefaultProfileValidationSupport;
import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.CodeSystem;
import org.hl7.fhir.dstu3.model.CodeableConcept;
import org.hl7.fhir.dstu3.model.Coding;
import org.hl7.fhir.dstu3.model.Enumerations.PublicationStatus;
import org.hl7.fhir.dstu3.model.IntegerType;
import org.hl7.fhir.dstu3.model.ValueSet;
import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent;
import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetFilterComponent;
import org.hl7.fhir.dstu3.model.ValueSet.FilterOperator;
@ -51,7 +54,6 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import java.util.Collections;
import java.util.Date;
@ -59,6 +61,7 @@ import java.util.List;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hl7.fhir.convertors.conv30_40.ValueSet30_40.convertValueSet;
public class FhirResourceDaoValueSetDstu3 extends BaseHapiFhirResourceDao<ValueSet> implements IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> {
private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoValueSetDstu3.class);
@ -420,7 +423,7 @@ public class FhirResourceDaoValueSetDstu3 extends BaseHapiFhirResourceDao<ValueS
if (retVal.getDeleted() == null) {
try {
ValueSet valueSet = (ValueSet) theResource;
org.hl7.fhir.r4.model.ValueSet converted = VersionConvertor_30_40.convertValueSet(valueSet);
org.hl7.fhir.r4.model.ValueSet converted = convertValueSet(valueSet);
myHapiTerminologySvc.storeTermValueSet(retVal, converted);
} catch (FHIRException fe) {
throw new InternalErrorException(fe);

View File

@ -148,7 +148,7 @@ public class FhirResourceDaoCodeSystemR5 extends BaseHapiFhirResourceDao<CodeSys
CodeSystem cs = (CodeSystem) theResource;
addPidToResource(theEntity, theResource);
myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem(cs), (ResourceTable) theEntity);
myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(org.hl7.fhir.convertors.conv40_50.CodeSystem40_50.convertCodeSystem(cs), (ResourceTable) theEntity);
}
return retVal;

View File

@ -166,7 +166,7 @@ public class FhirResourceDaoConceptMapR5 extends BaseHapiFhirResourceDao<Concept
if (retVal.getDeleted() == null) {
ConceptMap conceptMap = (ConceptMap) theResource;
myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, org.hl7.fhir.convertors.conv40_50.ConceptMap.convertConceptMap(conceptMap));
myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, org.hl7.fhir.convertors.conv40_50.ConceptMap40_50.convertConceptMap(conceptMap));
} else {
myHapiTerminologySvc.deleteConceptMapAndChildren(retVal);
}

View File

@ -415,7 +415,7 @@ public class FhirResourceDaoValueSetR5 extends BaseHapiFhirResourceDao<ValueSet>
if (myDaoConfig.isPreExpandValueSets() && !retVal.isUnchangedInCurrentOperation()) {
if (retVal.getDeleted() == null) {
ValueSet valueSet = (ValueSet) theResource;
myHapiTerminologySvc.storeTermValueSet(retVal, org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSet));
myHapiTerminologySvc.storeTermValueSet(retVal, org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(valueSet));
} else {
myHapiTerminologySvc.deleteValueSetAndChildren(retVal);
}

View File

@ -13,7 +13,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
import java.io.Serializable;
import java.util.*;
@ -43,8 +42,8 @@ import static org.apache.commons.lang3.StringUtils.left;
@Table(name = "HFJ_SEARCH", uniqueConstraints = {
@UniqueConstraint(name = "IDX_SEARCH_UUID", columnNames = "SEARCH_UUID")
}, indexes = {
@Index(name = "IDX_SEARCH_LASTRETURNED", columnList = "SEARCH_LAST_RETURNED"),
@Index(name = "IDX_SEARCH_RESTYPE_HASHS", columnList = "RESOURCE_TYPE,SEARCH_QUERY_STRING_HASH,CREATED")
@Index(name = "IDX_SEARCH_RESTYPE_HASHS", columnList = "RESOURCE_TYPE,SEARCH_QUERY_STRING_HASH,CREATED"),
@Index(name = "IDX_SEARCH_CREATED", columnList = "CREATED")
})
public class Search implements ICachedSearchDetails, Serializable {
@ -90,11 +89,6 @@ public class Search implements ICachedSearchDetails, Serializable {
private Long myResourceId;
@Column(name = "RESOURCE_TYPE", length = 200, nullable = true)
private String myResourceType;
@NotNull
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "SEARCH_LAST_RETURNED", nullable = false, updatable = false)
@OptimisticLock(excluded = true)
private Date mySearchLastReturned;
@Lob()
@Basic(fetch = FetchType.LAZY)
@Column(name = "SEARCH_QUERY_STRING", nullable = true, updatable = false, length = MAX_SEARCH_QUERY_STRING)
@ -261,14 +255,6 @@ public class Search implements ICachedSearchDetails, Serializable {
myResourceType = theResourceType;
}
public Date getSearchLastReturned() {
return mySearchLastReturned;
}
public void setSearchLastReturned(Date theDate) {
mySearchLastReturned = theDate;
}
public String getSearchQueryString() {
return mySearchQueryString;
}

View File

@ -33,9 +33,7 @@ import java.io.Serializable;
import static org.apache.commons.lang3.StringUtils.left;
import static org.apache.commons.lang3.StringUtils.length;
@Table(name = "TRM_VALUESET_C_DESIGNATION", indexes = {
@Index(name = "IDX_VALUESET_C_DSGNTN_VAL", columnList = "VAL")
})
@Table(name = "TRM_VALUESET_C_DESIGNATION")
@Entity()
public class TermValueSetConceptDesignation implements Serializable {
private static final long serialVersionUID = 1L;

View File

@ -57,7 +57,6 @@ public class JpaPreResourceAccessDetails implements IPreResourceAccessDetails {
public IBaseResource getResource(int theIndex) {
if (myResources == null) {
myResources = new ArrayList<>(myResourcePids.size());
// FIXME: JA don't call interceptors for this query
mySearchBuilderSupplier.call().loadResourcesByPid(myResourcePids, Collections.emptySet(), myResources, false, null);
}
return myResources.get(theIndex);

View File

@ -39,7 +39,6 @@ import ca.uhn.fhir.util.ValidateUtil;
import com.google.common.base.Charsets;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.ICompositeType;
@ -53,9 +52,15 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.*;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import static org.apache.commons.lang3.StringUtils.*;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.trim;
import static org.hl7.fhir.convertors.conv30_40.CodeSystem30_40.convertCodeSystem;
public class TerminologyUploaderProvider extends BaseJpaProvider {
@ -226,9 +231,9 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
b.append(ConceptHandler.DISPLAY);
b.append("\n");
for (Map.Entry<String, String> nextEntry : codes.entrySet()) {
b.append(nextEntry.getKey());
b.append(csvEscape(nextEntry.getKey()));
b.append(",");
b.append(defaultString(nextEntry.getValue()));
b.append(csvEscape(nextEntry.getValue()));
b.append("\n");
}
byte[] bytes = b.toString().getBytes(Charsets.UTF_8);
@ -245,9 +250,9 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
b.append(HierarchyHandler.PARENT);
b.append("\n");
for (Map.Entry<String, String> nextEntry : codeToParentCodes.entries()) {
b.append(nextEntry.getKey());
b.append(csvEscape(nextEntry.getKey()));
b.append(",");
b.append(defaultString(nextEntry.getValue()));
b.append(csvEscape(nextEntry.getValue()));
b.append("\n");
}
byte[] bytes = b.toString().getBytes(Charsets.UTF_8);
@ -267,10 +272,10 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
CodeSystem nextCodeSystem;
switch (getContext().getVersion().getVersion()) {
case DSTU3:
nextCodeSystem = VersionConvertor_30_40.convertCodeSystem((org.hl7.fhir.dstu3.model.CodeSystem) theCodeSystem);
nextCodeSystem = convertCodeSystem((org.hl7.fhir.dstu3.model.CodeSystem) theCodeSystem);
break;
case R5:
nextCodeSystem = org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem((org.hl7.fhir.r5.model.CodeSystem) theCodeSystem);
nextCodeSystem = org.hl7.fhir.convertors.conv40_50.CodeSystem40_50.convertCodeSystem((org.hl7.fhir.r5.model.CodeSystem) theCodeSystem);
break;
default:
nextCodeSystem = (CodeSystem) theCodeSystem;
@ -354,7 +359,6 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
return retVal;
}
private static class FileBackedFileDescriptor implements ITermLoaderSvc.FileDescriptor {
private final File myNextFile;
@ -376,4 +380,13 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
}
}
}
private static String csvEscape(String theValue) {
return '"' +
theValue
.replace("\"", "\"\"")
.replace("\n", "\\n")
.replace("\r", "") +
'"';
}
}
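
The new csvEscape helper above quotes every field, doubles embedded double quotes, rewrites newlines as a literal \n and drops carriage returns, so the generated concept and hierarchy CSVs stay parseable when display text contains commas or line breaks. A small stand-alone sketch of the expected output (the sample values are illustrative):

public class CsvEscapeSketch {
	public static void main(String[] args) {
		System.out.println(csvEscape("12345"));              // "12345"
		System.out.println(csvEscape("say \"hi\",\nbye"));   // "say ""hi"",\nbye"
	}

	// Same logic as the helper added in TerminologyUploaderProvider
	private static String csvEscape(String theValue) {
		return '"' +
			theValue
				.replace("\"", "\"\"")
				.replace("\n", "\\n")
				.replace("\r", "") +
			'"';
	}
}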

View File

@ -21,9 +21,9 @@ package ca.uhn.fhir.jpa.provider.dstu3;
*/
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoConceptMap;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.term.TranslationRequest;
import ca.uhn.fhir.jpa.term.TranslationResult;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
@ -31,11 +31,21 @@ import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.BooleanType;
import org.hl7.fhir.dstu3.model.CodeType;
import org.hl7.fhir.dstu3.model.CodeableConcept;
import org.hl7.fhir.dstu3.model.Coding;
import org.hl7.fhir.dstu3.model.ConceptMap;
import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.dstu3.model.StringType;
import org.hl7.fhir.dstu3.model.UriType;
import org.hl7.fhir.exceptions.FHIRException;
import javax.servlet.http.HttpServletRequest;
import static org.hl7.fhir.convertors.conv30_40.Parameters30_40.convertParameters;
public class BaseJpaResourceProviderConceptMapDstu3 extends JpaResourceProviderDstu3<ConceptMap> {
@Operation(name = JpaConstants.OPERATION_TRANSLATE, idempotent = true, returnParameters = {
@OperationParam(name = "result", type = BooleanType.class, min = 1, max = 1),
@ -129,7 +139,7 @@ public class BaseJpaResourceProviderConceptMapDstu3 extends JpaResourceProviderD
TranslationResult result = dao.translate(translationRequest, theRequestDetails);
// Convert from R4 to DSTU3
return VersionConvertor_30_40.convertParameters(result.toParameters());
return convertParameters(result.toParameters());
} catch (FHIRException fe) {
throw new InternalErrorException(fe);
} finally {

View File

@ -21,17 +21,28 @@ package ca.uhn.fhir.jpa.provider.dstu3;
*/
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.provider.BaseJpaResourceProvider;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.annotation.*;
import ca.uhn.fhir.jpa.provider.BaseJpaResourceProvider;
import ca.uhn.fhir.rest.annotation.ConditionalUrlParam;
import ca.uhn.fhir.rest.annotation.Create;
import ca.uhn.fhir.rest.annotation.Delete;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.annotation.ResourceParam;
import ca.uhn.fhir.rest.annotation.Update;
import ca.uhn.fhir.rest.annotation.Validate;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.ValidationModeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.BooleanType;
import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.dstu3.model.IntegerType;
import org.hl7.fhir.dstu3.model.Meta;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IIdType;
@ -41,6 +52,7 @@ import javax.servlet.http.HttpServletRequest;
import static ca.uhn.fhir.jpa.model.util.JpaConstants.OPERATION_META;
import static ca.uhn.fhir.jpa.model.util.JpaConstants.OPERATION_META_ADD;
import static ca.uhn.fhir.jpa.model.util.JpaConstants.OPERATION_META_DELETE;
import static org.hl7.fhir.convertors.conv30_40.Parameters30_40.convertParameters;
public class JpaResourceProviderDstu3<T extends IAnyResource> extends BaseJpaResourceProvider<T> {
@ -91,7 +103,7 @@ public class JpaResourceProviderDstu3<T extends IAnyResource> extends BaseJpaRes
RequestDetails theRequest) {
org.hl7.fhir.r4.model.Parameters retVal = super.doExpunge(theIdParam, theLimit, theExpungeDeletedResources, theExpungeOldVersions, null, theRequest);
try {
return VersionConvertor_30_40.convertParameters(retVal);
return convertParameters(retVal);
} catch (FHIRException e) {
throw new InternalErrorException(e);
}
@ -107,7 +119,7 @@ public class JpaResourceProviderDstu3<T extends IAnyResource> extends BaseJpaRes
RequestDetails theRequest) {
org.hl7.fhir.r4.model.Parameters retVal = super.doExpunge(null, theLimit, theExpungeDeletedResources, theExpungeOldVersions, null, theRequest);
try {
return VersionConvertor_30_40.convertParameters(retVal);
return convertParameters(retVal);
} catch (FHIRException e) {
throw new InternalErrorException(e);
}

View File

@ -3,17 +3,26 @@ package ca.uhn.fhir.jpa.provider.dstu3;
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion;
import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.provider.BaseJpaSystemProviderDstu2Plus;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.provider.BaseJpaSystemProviderDstu2Plus;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.rest.annotation.*;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.annotation.Transaction;
import ca.uhn.fhir.rest.annotation.TransactionParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.BooleanType;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.DecimalType;
import org.hl7.fhir.dstu3.model.IntegerType;
import org.hl7.fhir.dstu3.model.Meta;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.dstu3.model.Parameters.ParametersParameterComponent;
import org.hl7.fhir.dstu3.model.StringType;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IIdType;
@ -29,6 +38,7 @@ import java.util.TreeMap;
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.hl7.fhir.convertors.conv30_40.Parameters30_40.convertParameters;
/*
* #%L
@ -72,7 +82,7 @@ public class JpaSystemProviderDstu3 extends BaseJpaSystemProviderDstu2Plus<Bundl
) {
org.hl7.fhir.r4.model.Parameters retVal = super.doExpunge(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything, theRequestDetails);
try {
return VersionConvertor_30_40.convertParameters(retVal);
return convertParameters(retVal);
} catch (FHIRException e) {
throw new InternalErrorException(e);
}
@ -90,7 +100,7 @@ public class JpaSystemProviderDstu3 extends BaseJpaSystemProviderDstu2Plus<Bundl
) {
org.hl7.fhir.r4.model.Parameters retVal = super.doExpunge(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything, theRequestDetails);
try {
return VersionConvertor_30_40.convertParameters(retVal);
return convertParameters(retVal);
} catch (FHIRException e) {
throw new InternalErrorException(e);
}
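
The expunge operations above now route through the per-resource converter classes (Parameters30_40 and friends) instead of the monolithic VersionConvertor_30_40 facade. A minimal sketch of that call pattern, using illustrative parameter content:

import org.hl7.fhir.exceptions.FHIRException;

import static org.hl7.fhir.convertors.conv30_40.Parameters30_40.convertParameters;

public class ParametersConversionSketch {
	public static void main(String[] args) throws FHIRException {
		// Build an R4 Parameters resource with one illustrative parameter
		org.hl7.fhir.r4.model.Parameters r4 = new org.hl7.fhir.r4.model.Parameters();
		r4.addParameter().setName("count").setValue(new org.hl7.fhir.r4.model.IntegerType(5));

		// Convert down to DSTU3 using the new per-resource converter
		org.hl7.fhir.dstu3.model.Parameters dstu3 = convertParameters(r4);
		System.out.println(dstu3.getParameterFirstRep().getName()); // count
	}
}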

View File

@ -122,7 +122,7 @@ public class BaseJpaResourceProviderConceptMapR5 extends JpaResourceProviderR5<C
IFhirResourceDaoConceptMap<ConceptMap> dao = (IFhirResourceDaoConceptMap<ConceptMap>) getDao();
TranslationResult result = dao.translate(translationRequest, theRequestDetails);
org.hl7.fhir.r4.model.Parameters parameters = result.toParameters();
return org.hl7.fhir.convertors.conv40_50.Parameters.convertParameters(parameters);
return org.hl7.fhir.convertors.conv40_50.Parameters40_50.convertParameters(parameters);
} finally {
endRequest(theServletRequest);
}

View File

@ -86,7 +86,7 @@ public class JpaResourceProviderR5<T extends IAnyResource> extends BaseJpaResour
RequestDetails theRequest) {
org.hl7.fhir.r4.model.Parameters parameters = super.doExpunge(theIdParam, theLimit, theExpungeDeletedResources, theExpungeOldVersions, null, theRequest);
return org.hl7.fhir.convertors.conv40_50.Parameters.convertParameters(parameters);
return org.hl7.fhir.convertors.conv40_50.Parameters40_50.convertParameters(parameters);
}
@ -99,7 +99,7 @@ public class JpaResourceProviderR5<T extends IAnyResource> extends BaseJpaResour
@OperationParam(name = JpaConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS) BooleanType theExpungeOldVersions,
RequestDetails theRequest) {
org.hl7.fhir.r4.model.Parameters parameters = super.doExpunge(null, theLimit, theExpungeDeletedResources, theExpungeOldVersions, null, theRequest);
return org.hl7.fhir.convertors.conv40_50.Parameters.convertParameters(parameters);
return org.hl7.fhir.convertors.conv40_50.Parameters40_50.convertParameters(parameters);
}
@Operation(name = OPERATION_META, idempotent = true, returnParameters = {

View File

@ -68,7 +68,7 @@ public class JpaSystemProviderR5 extends BaseJpaSystemProviderDstu2Plus<Bundle,
RequestDetails theRequestDetails
) {
org.hl7.fhir.r4.model.Parameters parameters = super.doExpunge(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything, theRequestDetails);
return org.hl7.fhir.convertors.conv40_50.Parameters.convertParameters(parameters);
return org.hl7.fhir.convertors.conv40_50.Parameters40_50.convertParameters(parameters);
}
@Operation(name = JpaConstants.OPERATION_EXPUNGE, idempotent = false, returnParameters = {
@ -82,7 +82,7 @@ public class JpaSystemProviderR5 extends BaseJpaSystemProviderDstu2Plus<Bundle,
RequestDetails theRequestDetails
) {
org.hl7.fhir.r4.model.Parameters parameters = super.doExpunge(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything, theRequestDetails);
return org.hl7.fhir.convertors.conv40_50.Parameters.convertParameters(parameters);
return org.hl7.fhir.convertors.conv40_50.Parameters40_50.convertParameters(parameters);
}
// This is generated by hand:

View File

@ -406,8 +406,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_SEARCH_REUSING_CACHED, params);
mySearchCacheSvc.updateSearchLastReturned(searchToUse, new Date());
PersistedJpaBundleProvider retVal = new PersistedJpaBundleProvider(theRequestDetails, searchToUse.getUuid(), theCallingDao, mySearchBuilderFactory);
retVal.setCacheHit(true);
populateBundleProvider(retVal);
@ -776,10 +774,11 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
if (theResultIter.hasNext() == false) {
int skippedCount = theResultIter.getSkippedCount();
int nonSkippedCount = theResultIter.getNonSkippedCount();
int totalFetched = skippedCount + myCountSavedThisPass + myCountBlockedThisPass;
ourLog.trace("MaxToFetch[{}] SkippedCount[{}] CountSavedThisPass[{}] CountSavedThisTotal[{}] AdditionalPrefetchRemaining[{}]", myMaxResultsToFetch, skippedCount, myCountSavedThisPass, myCountSavedTotal, myAdditionalPrefetchThresholdsRemaining);
if (myMaxResultsToFetch != null && totalFetched < myMaxResultsToFetch) {
if (nonSkippedCount == 0 || (myMaxResultsToFetch != null && totalFetched < myMaxResultsToFetch)) {
ourLog.trace("Setting search status to FINISHED");
mySearch.setStatus(SearchStatusEnum.FINISHED);
mySearch.setTotalCount(myCountSavedTotal);
@ -1142,7 +1141,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
theSearch.setDeleted(false);
theSearch.setUuid(theSearchUuid);
theSearch.setCreated(new Date());
theSearch.setSearchLastReturned(new Date());
theSearch.setTotalCount(null);
theSearch.setNumFound(0);
theSearch.setPreferredPageSize(theParams.getCount());
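
The first hunk above loosens the completion check: a pass whose candidates were all skipped (nonSkippedCount == 0) now finishes the search instead of triggering another prefetch pass. A worked sketch of the decision with illustrative counts:

public class SearchCompletionSketch {
	public static void main(String[] args) {
		Integer myMaxResultsToFetch = 800;   // prefetch threshold for this pass (illustrative)
		int skippedCount = 50;               // candidates filtered out, e.g. deleted resources
		int nonSkippedCount = 0;             // nothing usable came back in this pass
		int countSavedThisPass = 0;
		int countBlockedThisPass = 0;

		int totalFetched = skippedCount + countSavedThisPass + countBlockedThisPass;

		// Old rule: only "totalFetched < myMaxResultsToFetch" could finish the search.
		// New rule: an all-skipped pass finishes it as well.
		boolean finished = nonSkippedCount == 0
			|| (myMaxResultsToFetch != null && totalFetched < myMaxResultsToFetch);
		System.out.println("finished = " + finished); // finished = true
	}
}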

View File

@ -32,7 +32,7 @@ import org.springframework.transaction.annotation.Transactional;
import javax.annotation.PostConstruct;
import static ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl.DEFAULT_CUTOFF_SLACK;
import static ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl.SEARCH_CLEANUP_JOB_INTERVAL_MILLIS;
/**
* Deletes old searches
@ -62,7 +62,7 @@ public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc {
ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
jobDetail.setId(getClass().getName());
jobDetail.setJobClass(Job.class);
mySchedulerService.scheduleClusteredJob(DEFAULT_CUTOFF_SLACK, jobDetail);
mySchedulerService.scheduleClusteredJob(SEARCH_CLEANUP_JOB_INTERVAL_MILLIS, jobDetail);
}
public static class Job implements HapiJob {

View File

@ -1,87 +0,0 @@
package ca.uhn.fhir.jpa.search.cache;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2020 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import org.apache.commons.lang3.time.DateUtils;
import org.quartz.JobExecutionContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.PostConstruct;
import java.util.Date;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public abstract class BaseSearchCacheSvcImpl implements ISearchCacheSvc {
@Autowired
private PlatformTransactionManager myTxManager;
@Autowired
private ISchedulerService mySchedulerService;
private ConcurrentHashMap<Long, Date> myUnsyncedLastUpdated = new ConcurrentHashMap<>();
@Override
public void updateSearchLastReturned(Search theSearch, Date theDate) {
myUnsyncedLastUpdated.put(theSearch.getId(), theDate);
}
@PostConstruct
public void scheduleJob() {
ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
jobDetail.setId(getClass().getName());
jobDetail.setJobClass(Job.class);
mySchedulerService.scheduleLocalJob(10 * DateUtils.MILLIS_PER_SECOND, jobDetail);
}
@Override
public void flushLastUpdated() {
TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
txTemplate.execute(t -> {
for (Iterator<Map.Entry<Long, Date>> iter = myUnsyncedLastUpdated.entrySet().iterator(); iter.hasNext(); ) {
Map.Entry<Long, Date> next = iter.next();
flushLastUpdated(next.getKey(), next.getValue());
iter.remove();
}
return null;
});
}
protected abstract void flushLastUpdated(Long theSearchId, Date theLastUpdated);
public static class Job implements HapiJob {
@Autowired
private ISearchCacheSvc myTarget;
@Override
public void execute(JobExecutionContext theContext) {
myTarget.flushLastUpdated();
}
}
}

View File

@ -47,7 +47,7 @@ import java.util.Date;
import java.util.List;
import java.util.Optional;
public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl {
public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc {
/*
* Be careful increasing this number! We use the number of params here in a
* DELETE FROM foo WHERE params IN (term,term,term...)
@ -55,7 +55,7 @@ public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl {
*/
public static final int DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_STMT = 500;
public static final int DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_PAS = 20000;
public static final long DEFAULT_CUTOFF_SLACK = 10 * DateUtils.MILLIS_PER_SECOND;
public static final long SEARCH_CLEANUP_JOB_INTERVAL_MILLIS = 10 * DateUtils.MILLIS_PER_SECOND;
private static final Logger ourLog = LoggerFactory.getLogger(DatabaseSearchCacheSvcImpl.class);
private static int ourMaximumResultsToDeleteInOneStatement = DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_STMT;
private static int ourMaximumResultsToDeleteInOnePass = DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_PAS;
@ -65,7 +65,7 @@ public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl {
* is being reused (because a new client request came in with the same params) right before
* the result is to be deleted
*/
private long myCutoffSlack = DEFAULT_CUTOFF_SLACK;
private long myCutoffSlack = SEARCH_CLEANUP_JOB_INTERVAL_MILLIS;
@Autowired
private ISearchDao mySearchDao;
@ -146,11 +146,6 @@ public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl {
}
@Override
protected void flushLastUpdated(Long theSearchId, Date theLastUpdated) {
mySearchDao.updateSearchLastReturned(theSearchId, theLastUpdated);
}
@Transactional(Transactional.TxType.NEVER)
@Override
public void pollForStaleSearchesAndDeleteThem() {
@ -160,7 +155,7 @@ public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl {
long cutoffMillis = myDaoConfig.getExpireSearchResultsAfterMillis();
if (myDaoConfig.getReuseCachedSearchResultsForMillis() != null) {
cutoffMillis = Math.max(cutoffMillis, myDaoConfig.getReuseCachedSearchResultsForMillis());
cutoffMillis = cutoffMillis + myDaoConfig.getReuseCachedSearchResultsForMillis();
}
final Date cutoff = new Date((now() - cutoffMillis) - myCutoffSlack);
@ -172,7 +167,7 @@ public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl {
TransactionTemplate tt = new TransactionTemplate(myTxManager);
final Slice<Long> toDelete = tt.execute(theStatus ->
mySearchDao.findWhereLastReturnedBefore(cutoff, new Date(), PageRequest.of(0, 2000))
mySearchDao.findWhereCreatedBefore(cutoff, new Date(), PageRequest.of(0, 2000))
);
assert toDelete != null;
@ -223,7 +218,7 @@ public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl {
// Only delete if we don't have results left in this search
if (resultPids.getNumberOfElements() < max) {
ourLog.debug("Deleting search {}/{} - Created[{}] -- Last returned[{}]", searchToDelete.getId(), searchToDelete.getUuid(), new InstantType(searchToDelete.getCreated()), new InstantType(searchToDelete.getSearchLastReturned()));
ourLog.debug("Deleting search {}/{} - Created[{}]", searchToDelete.getId(), searchToDelete.getUuid(), new InstantType(searchToDelete.getCreated()));
mySearchDao.deleteByPid(searchToDelete.getId());
} else {
ourLog.debug("Purged {} search results for deleted search {}/{}", resultPids.getSize(), searchToDelete.getId(), searchToDelete.getUuid());

View File

@ -78,20 +78,6 @@ public interface ISearchCacheSvc {
*/
Collection<Search> findCandidatesForReuse(String theResourceType, String theQueryString, int theQueryStringHash, Date theCreatedAfter);
/**
* Mark a search as having been "last used" at the given time. This method may (and probably should) be implemented
* to work asynchronously in order to avoid hammering the database if the search gets reused many times.
*
* @param theSearch The search
* @param theDate The "last returned" timestamp
*/
void updateSearchLastReturned(Search theSearch, Date theDate);
/**
* This is mostly public for unit tests
*/
void flushLastUpdated();
/**
* This method will be called periodically to delete stale searches. Implementations are not required to do anything
* if they have some other mechanism for expiring stale results other than manually looking for them

View File

@ -66,7 +66,13 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider.RESOURCE_ID;
@ -136,8 +142,8 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
SubscriptionTriggeringJobDetails jobDetails = new SubscriptionTriggeringJobDetails();
jobDetails.setJobId(UUID.randomUUID().toString());
jobDetails.setRemainingResourceIds(resourceIds.stream().map(t->t.getValue()).collect(Collectors.toList()));
jobDetails.setRemainingSearchUrls(searchUrls.stream().map(t->t.getValue()).collect(Collectors.toList()));
jobDetails.setRemainingResourceIds(resourceIds.stream().map(t -> t.getValue()).collect(Collectors.toList()));
jobDetails.setRemainingSearchUrls(searchUrls.stream().map(t -> t.getValue()).collect(Collectors.toList()));
if (theSubscriptionId != null) {
jobDetails.setSubscriptionId(theSubscriptionId.getIdPart());
}
@ -386,7 +392,13 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
jobDetail.setId(getClass().getName());
jobDetail.setJobClass(Job.class);
mySchedulerService.scheduleLocalJob(60 * DateUtils.MILLIS_PER_SECOND, jobDetail);
// Currently jobs are kept in a local ArrayList so this should be a local job, and
// it can fire frequently without adding load
mySchedulerService.scheduleLocalJob(5 * DateUtils.MILLIS_PER_SECOND, jobDetail);
}
public int getActiveJobCount() {
return myActiveJobs.size();
}
public static class Job implements HapiJob {

View File

@ -1414,7 +1414,6 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc, ApplicationCo
TermConceptMapGroupElement termConceptMapGroupElement;
for (ConceptMap.SourceElementComponent element : group.getElement()) {
if (isBlank(element.getCode())) {
// FIXME: JA - send this to an interceptor message so it can be output
continue;
}
termConceptMapGroupElement = new TermConceptMapGroupElement();

View File

@ -388,7 +388,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc {
}
}
// FIXME: DM 2019-09-13 - Manually add EXTERNAL_COPYRIGHT_NOTICE property until Regenstrief adds this to loinc.xml
// TODO: DM 2019-09-13 - Manually add EXTERNAL_COPYRIGHT_NOTICE property until Regenstrief adds this to loinc.xml
if (!propertyNamesToTypes.containsKey("EXTERNAL_COPYRIGHT_NOTICE")) {
String externalCopyRightNoticeCode = "EXTERNAL_COPYRIGHT_NOTICE";
CodeSystem.PropertyType externalCopyRightNoticeType = CodeSystem.PropertyType.STRING;

View File

@ -9,10 +9,13 @@ import ca.uhn.fhir.jpa.term.api.ITermReadSvcDstu3;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.ValidateUtil;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.CodeSystem;
import org.hl7.fhir.dstu3.model.CodeSystem.ConceptDefinitionComponent;
import org.hl7.fhir.dstu3.model.CodeableConcept;
import org.hl7.fhir.dstu3.model.Coding;
import org.hl7.fhir.dstu3.model.StructureDefinition;
import org.hl7.fhir.dstu3.model.ValueSet;
import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent;
import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionComponent;
import org.hl7.fhir.exceptions.FHIRException;
@ -31,6 +34,9 @@ import java.util.List;
import java.util.Optional;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hl7.fhir.convertors.conv30_40.CodeSystem30_40.convertCodeSystem;
import static org.hl7.fhir.convertors.conv30_40.ValueSet30_40.convertValueSet;
import static org.hl7.fhir.convertors.conv30_40.ValueSet30_40.convertValueSetExpansionComponent;
/*
* #%L
@ -104,7 +110,7 @@ public class TermReadSvcDstu3 extends BaseTermReadSvcImpl implements IValidation
org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4;
valueSetToExpandR4 = toCanonicalValueSet(valueSetToExpand);
org.hl7.fhir.r4.model.ValueSet.ValueSetExpansionComponent expandedR4 = super.expandValueSetInMemory(valueSetToExpandR4, null).getExpansion();
return VersionConvertor_30_40.convertValueSetExpansionComponent(expandedR4);
return convertValueSetExpansionComponent(expandedR4);
} catch (FHIRException e) {
throw new InternalErrorException(e);
}
@ -118,7 +124,7 @@ public class TermReadSvcDstu3 extends BaseTermReadSvcImpl implements IValidation
org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4;
valueSetToExpandR4 = toCanonicalValueSet(valueSetToExpand);
org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSetInMemory(valueSetToExpandR4, null);
return VersionConvertor_30_40.convertValueSet(expandedR4);
return convertValueSet(expandedR4);
} catch (FHIRException e) {
throw new InternalErrorException(e);
}
@ -127,7 +133,7 @@ public class TermReadSvcDstu3 extends BaseTermReadSvcImpl implements IValidation
@Override
protected org.hl7.fhir.r4.model.ValueSet toCanonicalValueSet(IBaseResource theValueSet) throws FHIRException {
org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4;
valueSetToExpandR4 = VersionConvertor_30_40.convertValueSet((ValueSet) theValueSet);
valueSetToExpandR4 = convertValueSet((ValueSet) theValueSet);
return valueSetToExpandR4;
}
@ -139,7 +145,7 @@ public class TermReadSvcDstu3 extends BaseTermReadSvcImpl implements IValidation
org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4;
valueSetToExpandR4 = toCanonicalValueSet(valueSetToExpand);
org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSet(valueSetToExpandR4, theOffset, theCount);
return VersionConvertor_30_40.convertValueSet(expandedR4);
return convertValueSet(expandedR4);
} catch (FHIRException e) {
throw new InternalErrorException(e);
}
@ -256,7 +262,7 @@ public class TermReadSvcDstu3 extends BaseTermReadSvcImpl implements IValidation
public org.hl7.fhir.r4.model.CodeSystem getCodeSystemFromContext(String theSystem) {
CodeSystem codeSystem = myValidationSupport.fetchCodeSystem(myContext, theSystem);
try {
return VersionConvertor_30_40.convertCodeSystem(codeSystem);
return convertCodeSystem(codeSystem);
} catch (FHIRException e) {
throw new InternalErrorException(e);
}
@ -323,7 +329,7 @@ public class TermReadSvcDstu3 extends BaseTermReadSvcImpl implements IValidation
public ValidateCodeResult validateCodeIsInPreExpandedValueSet(IBaseResource theValueSet, String theSystem, String theCode, String theDisplay, IBaseDatatype theCoding, IBaseDatatype theCodeableConcept) {
ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSet, "ValueSet must not be null");
ValueSet valueSet = (ValueSet) theValueSet;
org.hl7.fhir.r4.model.ValueSet valueSetR4 = VersionConvertor_30_40.convertValueSet(valueSet);
org.hl7.fhir.r4.model.ValueSet valueSetR4 = convertValueSet(valueSet);
Coding coding = (Coding) theCoding;
org.hl7.fhir.r4.model.Coding codingR4 = null;
@ -347,7 +353,7 @@ public class TermReadSvcDstu3 extends BaseTermReadSvcImpl implements IValidation
public boolean isValueSetPreExpandedForCodeValidation(IBaseResource theValueSet) {
ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSet, "ValueSet must not be null");
ValueSet valueSet = (ValueSet) theValueSet;
org.hl7.fhir.r4.model.ValueSet valueSetR4 = VersionConvertor_30_40.convertValueSet(valueSet);
org.hl7.fhir.r4.model.ValueSet valueSetR4 = convertValueSet(valueSet);
return super.isValueSetPreExpandedForCodeValidation(valueSetR4);
}
}

View File

@ -90,21 +90,21 @@ public class TermReadSvcR5 extends BaseTermReadSvcImpl implements IValidationSup
super.throwInvalidValueSet(theValueSet);
}
return expandValueSetAndReturnVersionIndependentConcepts(org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSetR5), null);
return expandValueSetAndReturnVersionIndependentConcepts(org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(valueSetR5), null);
}
@Override
public IBaseResource expandValueSet(IBaseResource theInput) {
org.hl7.fhir.r4.model.ValueSet valueSetToExpand = toCanonicalValueSet(theInput);
org.hl7.fhir.r4.model.ValueSet valueSetR4 = super.expandValueSetInMemory(valueSetToExpand, null);
return org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSetR4);
return org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(valueSetR4);
}
@Override
public IBaseResource expandValueSet(IBaseResource theInput, int theOffset, int theCount) {
org.hl7.fhir.r4.model.ValueSet valueSetToExpand = toCanonicalValueSet(theInput);
org.hl7.fhir.r4.model.ValueSet valueSetR4 = super.expandValueSet(valueSetToExpand, theOffset, theCount);
return org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSetR4);
return org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(valueSetR4);
}
@Override
@ -117,8 +117,8 @@ public class TermReadSvcR5 extends BaseTermReadSvcImpl implements IValidationSup
public ValueSetExpander.ValueSetExpansionOutcome expandValueSet(FhirContext theContext, ConceptSetComponent theInclude) {
ValueSet valueSetToExpand = new ValueSet();
valueSetToExpand.getCompose().addInclude(theInclude);
org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSetInMemory(org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSetToExpand), null);
return new ValueSetExpander.ValueSetExpansionOutcome(org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(expandedR4));
org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSetInMemory(org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(valueSetToExpand), null);
return new ValueSetExpander.ValueSetExpansionOutcome(org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(expandedR4));
}
@Override
@ -199,13 +199,13 @@ public class TermReadSvcR5 extends BaseTermReadSvcImpl implements IValidationSup
@Override
public org.hl7.fhir.r4.model.CodeSystem getCodeSystemFromContext(String theSystem) {
CodeSystem codeSystemR5 = myValidationSupport.fetchCodeSystem(myContext, theSystem);
return org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem(codeSystemR5);
return org.hl7.fhir.convertors.conv40_50.CodeSystem40_50.convertCodeSystem(codeSystemR5);
}
@Override
protected org.hl7.fhir.r4.model.ValueSet getValueSetFromResourceTable(ResourceTable theResourceTable) {
ValueSet valueSetR5 = myValueSetResourceDao.toResource(ValueSet.class, theResourceTable, null, false);
return org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSetR5);
return org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(valueSetR5);
}
@Override
@ -277,7 +277,7 @@ public class TermReadSvcR5 extends BaseTermReadSvcImpl implements IValidationSup
@Override
protected org.hl7.fhir.r4.model.ValueSet toCanonicalValueSet(IBaseResource theValueSet) throws org.hl7.fhir.exceptions.FHIRException {
return org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet((ValueSet) theValueSet);
return org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet((ValueSet) theValueSet);
}
@Override

View File

@ -37,6 +37,9 @@ import org.springframework.context.event.ContextStartedEvent;
import org.springframework.context.event.EventListener;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.hl7.fhir.convertors.conv30_40.CodeSystem30_40.convertCodeSystem;
import static org.hl7.fhir.convertors.conv30_40.ConceptMap30_40.convertConceptMap;
import static org.hl7.fhir.convertors.conv30_40.ValueSet30_40.convertValueSet;
public class TermVersionAdapterSvcDstu3 extends BaseTermVersionAdapterSvcImpl implements ITermVersionAdapterSvc {
@ -72,7 +75,7 @@ public class TermVersionAdapterSvcDstu3 extends BaseTermVersionAdapterSvcImpl im
public IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource) {
CodeSystem resourceToStore;
try {
resourceToStore = VersionConvertor_30_40.convertCodeSystem(theCodeSystemResource);
resourceToStore = convertCodeSystem(theCodeSystemResource);
} catch (FHIRException e) {
throw new InternalErrorException(e);
}
@ -89,7 +92,7 @@ public class TermVersionAdapterSvcDstu3 extends BaseTermVersionAdapterSvcImpl im
public void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap) {
ConceptMap resourceToStore;
try {
resourceToStore = VersionConvertor_30_40.convertConceptMap(theConceptMap);
resourceToStore = convertConceptMap(theConceptMap);
} catch (FHIRException e) {
throw new InternalErrorException(e);
}
@ -105,7 +108,7 @@ public class TermVersionAdapterSvcDstu3 extends BaseTermVersionAdapterSvcImpl im
public void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet) {
ValueSet valueSetDstu3;
try {
valueSetDstu3 = VersionConvertor_30_40.convertValueSet(theValueSet);
valueSetDstu3 = convertValueSet(theValueSet);
} catch (FHIRException e) {
throw new InternalErrorException(e);
}

View File

@ -62,7 +62,7 @@ public class TermVersionAdapterSvcR5 extends BaseTermVersionAdapterSvcImpl imple
public IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource) {
validateCodeSystemForStorage(theCodeSystemResource);
CodeSystem codeSystemR4 = org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem(theCodeSystemResource);
CodeSystem codeSystemR4 = org.hl7.fhir.convertors.conv40_50.CodeSystem40_50.convertCodeSystem(theCodeSystemResource);
if (isBlank(theCodeSystemResource.getIdElement().getIdPart())) {
String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl());
return myCodeSystemResourceDao.update(codeSystemR4, matchUrl).getId();
@ -74,7 +74,7 @@ public class TermVersionAdapterSvcR5 extends BaseTermVersionAdapterSvcImpl imple
@Override
public void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap) {
ConceptMap conceptMapR4 = org.hl7.fhir.convertors.conv40_50.ConceptMap.convertConceptMap(theConceptMap);
ConceptMap conceptMapR4 = org.hl7.fhir.convertors.conv40_50.ConceptMap40_50.convertConceptMap(theConceptMap);
if (isBlank(theConceptMap.getIdElement().getIdPart())) {
String matchUrl = "ConceptMap?url=" + UrlUtil.escapeUrlParam(theConceptMap.getUrl());
@ -87,7 +87,7 @@ public class TermVersionAdapterSvcR5 extends BaseTermVersionAdapterSvcImpl imple
@Override
public void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet) {
ValueSet valueSetR4 = org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(theValueSet);
ValueSet valueSetR4 = org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(theValueSet);
if (isBlank(theValueSet.getIdElement().getIdPart())) {
String matchUrl = "ValueSet?url=" + UrlUtil.escapeUrlParam(theValueSet.getUrl());

View File

@ -44,14 +44,11 @@ public interface ITermLoaderSvc {
UploadStatistics loadSnomedCt(List<FileDescriptor> theFiles, RequestDetails theRequestDetails);
// FIXME: remove the default implementation before 4.1.0
default UploadStatistics loadCustom(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails) { return null; };
UploadStatistics loadCustom(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails);
// FIXME: remove the default implementation before 4.1.0
default UploadStatistics loadDeltaAdd(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails) { return null; };
UploadStatistics loadDeltaAdd(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails);
// FIXME: remove the default implementation before 4.1.0
default UploadStatistics loadDeltaRemove(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails) { return null; };
UploadStatistics loadDeltaRemove(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails);
interface FileDescriptor {

View File

@ -86,7 +86,7 @@ public class LoincHandler implements IRecordHandler {
concept.addPropertyString(nextPropertyName, nextPropertyValue);
break;
case CODING:
// FIXME: handle "Ser/Plas^Donor"
// TODO: handle "Ser/Plas^Donor"
String propertyValue = nextPropertyValue;
if (nextPropertyName.equals("COMPONENT")) {
if (propertyValue.contains("^")) {

View File

@ -1,9 +1,5 @@
package ca.uhn.fhir.jpa.dao.dstu2;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
@ -32,7 +28,11 @@ import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
public class FhirResourceDaoDstu2ValidateTest extends BaseJpaDstu2Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2ValidateTest.class);
@ -94,15 +94,14 @@ public class FhirResourceDaoDstu2ValidateTest extends BaseJpaDstu2Test {
myStructureDefinitionDao.create(sd, mySrd);
Observation input = new Observation();
ResourceMetadataKeyEnum.PROFILES.put(input, Arrays.asList(new IdDt(sd.getUrl())));
ResourceMetadataKeyEnum.PROFILES.put(input, Collections.singletonList(new IdDt(sd.getUrl())));
input.addIdentifier().setSystem("http://acme").setValue("12345");
input.getEncounter().setReference("http://foo.com/Encounter/9");
input.setStatus(ObservationStatusEnum.FINAL);
input.getCode().addCoding().setSystem("http://loinc.org").setCode("12345");
String encoded = null;
MethodOutcome outcome = null;
String encoded;
ValidationModeEnum mode = ValidationModeEnum.CREATE;
switch (enc) {
case JSON:
@ -130,12 +129,12 @@ public class FhirResourceDaoDstu2ValidateTest extends BaseJpaDstu2Test {
}
@Test
public void testValidateResourceContainingProfileDeclarationInvalid() throws Exception {
public void testValidateResourceContainingProfileDeclarationInvalid() {
String methodName = "testValidateResourceContainingProfileDeclarationInvalid";
Observation input = new Observation();
String profileUri = "http://example.com/StructureDefinition/" + methodName;
ResourceMetadataKeyEnum.PROFILES.put(input, Arrays.asList(new IdDt(profileUri)));
ResourceMetadataKeyEnum.PROFILES.put(input, Collections.singletonList(new IdDt(profileUri)));
input.addIdentifier().setSystem("http://acme").setValue("12345");
input.getEncounter().setReference("http://foo.com/Encounter/9");
@ -148,7 +147,7 @@ public class FhirResourceDaoDstu2ValidateTest extends BaseJpaDstu2Test {
String ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome.getOperationOutcome());
ourLog.info(ooString);
assertThat(ooString, containsString("StructureDefinition reference \\\"" + profileUri + "\\\" could not be resolved"));
assertThat(ooString, containsString("Profile reference 'http://example.com/StructureDefinition/testValidateResourceContainingProfileDeclarationInvalid' could not be resolved, so has not been checked"));
}

View File

@ -35,7 +35,6 @@ import ca.uhn.fhir.util.UrlUtil;
import org.apache.commons.io.IOUtils;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.Search;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.exceptions.FHIRException;
@ -60,6 +59,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
import static org.hl7.fhir.convertors.conv30_40.ConceptMap30_40.convertConceptMap;
import static org.junit.Assert.fail;
@RunWith(SpringJUnit4ClassRunner.class)
@ -386,7 +386,7 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
*/
public static ConceptMap createConceptMap() {
try {
return VersionConvertor_30_40.convertConceptMap(BaseJpaR4Test.createConceptMap());
return convertConceptMap(BaseJpaR4Test.createConceptMap());
} catch (FHIRException fe) {
throw new InternalErrorException(fe);
}

View File

@ -15,6 +15,7 @@ import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.dstu3.model.Observation.ObservationStatus;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r5.utils.IResourceValidator;
@ -99,7 +100,7 @@ public class FhirResourceDaoDstu3ValidateTest extends BaseJpaDstu3Test {
myValueSetDao.create(vs);
ValueSet expansion = myValueSetDao.expandByIdentifier("http://ccim.on.ca/fhir/iar/ValueSet/iar-citizenship-status", null);
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expansion));
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expansion));
// Questionnaire q = loadResourceFromClasspath(Questionnaire.class,"/dstu3/iar/Questionnaire-iar-test.xml" );
// myQuestionnaireDao.create(q);
@ -322,15 +323,11 @@ public class FhirResourceDaoDstu3ValidateTest extends BaseJpaDstu3Test {
ValidationModeEnum mode = ValidationModeEnum.CREATE;
String encoded = myFhirCtx.newJsonParser().encodeResourceToString(input);
try {
myObservationDao.validate(input, null, encoded, EncodingEnum.JSON, mode, null, mySrd);
fail();
} catch (PreconditionFailedException e) {
String ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome());
ourLog.info(ooString);
assertThat(ooString, containsString("StructureDefinition reference \\\"" + profileUri + "\\\" could not be resolved"));
}
MethodOutcome output = myObservationDao.validate(input, null, encoded, EncodingEnum.JSON, mode, null, mySrd);
OperationOutcome oo = (OperationOutcome) output.getOperationOutcome();
String outputString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo);
ourLog.info(outputString);
assertThat(outputString, containsString("Profile reference 'http://example.com/StructureDefinition/testValidateResourceContainingProfileDeclarationInvalid' could not be resolved, so has not been checked"));
}
@Test

View File

@ -64,7 +64,6 @@ import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.util.AopTestUtils;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.EntityManager;
import java.io.IOException;
@ -409,7 +408,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
}
@Before
@Transactional()
public void beforePurgeDatabase() {
purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry, myBulkDataExportSvc);
}

View File

@ -32,7 +32,7 @@ import javax.annotation.Nullable;
import java.util.Date;
import java.util.concurrent.atomic.AtomicLong;
import static ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl.DEFAULT_CUTOFF_SLACK;
import static ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl.SEARCH_CLEANUP_JOB_INTERVAL_MILLIS;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.*;
@ -47,7 +47,7 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
@After()
public void after() {
DatabaseSearchCacheSvcImpl staleSearchDeletingSvc = AopTestUtils.getTargetObject(mySearchCacheSvc);
staleSearchDeletingSvc.setCutoffSlackForUnitTest(DEFAULT_CUTOFF_SLACK);
staleSearchDeletingSvc.setCutoffSlackForUnitTest(SEARCH_CLEANUP_JOB_INTERVAL_MILLIS);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(null);
}
@ -81,8 +81,11 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
}
Thread.sleep(10);
myDaoConfig.setExpireSearchResultsAfterMillis(1000L);
myDaoConfig.setReuseCachedSearchResultsForMillis(500L);
long reuseCachedSearchResultsForMillis = 500L;
myDaoConfig.setReuseCachedSearchResultsForMillis(reuseCachedSearchResultsForMillis);
long millisBetweenReuseAndExpire = 800L;
long expireSearchResultsAfterMillis = 1000L;
myDaoConfig.setExpireSearchResultsAfterMillis(expireSearchResultsAfterMillis);
long start = System.currentTimeMillis();
DatabaseSearchCacheSvcImpl.setNowForUnitTests(start);
@ -107,9 +110,9 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
}
assertEquals(searchUuid1, searchUuid2);
TestUtil.sleepAtLeast(501);
TestUtil.sleepAtLeast(reuseCachedSearchResultsForMillis + 1);
// We're now past 500ms so we shouldn't reuse the search
// We're now past reuseCachedSearchResultsForMillis so we shouldn't reuse the search
final String searchUuid3;
{
@ -124,35 +127,36 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
// Search just got used so it shouldn't be deleted
DatabaseSearchCacheSvcImpl.setNowForUnitTests(start + 500);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(start + reuseCachedSearchResultsForMillis);
final AtomicLong search1timestamp = new AtomicLong();
final AtomicLong search2timestamp = new AtomicLong();
final AtomicLong search3timestamp = new AtomicLong();
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
Search search3 = mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid3).orElseThrow(()->new InternalErrorException("Search doesn't exist"));
assertNotNull(search3);
Search search1 = mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid1).orElseThrow(()->new InternalErrorException("Search doesn't exist"));
assertNotNull(search1);
search1timestamp.set(search1.getCreated().getTime());
Search search2 = mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid2).orElseThrow(()->new InternalErrorException("Search doesn't exist"));
assertNotNull(search2);
search3timestamp.set(search2.getSearchLastReturned().getTime());
search2timestamp.set(search2.getCreated().getTime());
Search search3 = mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid3).orElseThrow(()->new InternalErrorException("Search doesn't exist"));
assertNotNull(search3);
search3timestamp.set(search3.getCreated().getTime());
}
});
DatabaseSearchCacheSvcImpl.setNowForUnitTests(search3timestamp.get() + 800);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(search1timestamp.get() + millisBetweenReuseAndExpire);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
assertNotNull(mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid3));
}
});
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
assertNotNull(mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid1));
assertTrue(mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid3).isPresent());
assertTrue(mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid1).isPresent());
}
});
DatabaseSearchCacheSvcImpl.setNowForUnitTests(search3timestamp.get() + 1100);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(search1timestamp.get() + reuseCachedSearchResultsForMillis + expireSearchResultsAfterMillis + 1);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@ -163,14 +167,12 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
}
});
DatabaseSearchCacheSvcImpl.setNowForUnitTests(search3timestamp.get() + 2100);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(search3timestamp.get() + reuseCachedSearchResultsForMillis + expireSearchResultsAfterMillis + 1);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
await().until(()-> newTxTemplate().execute(t -> !mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid1).isPresent()));
await().until(()-> newTxTemplate().execute(t -> !mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid3).isPresent()));
}
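A minimal sketch of the two DaoConfig settings these assertions exercise, using the same myDaoConfig fixture as above; the values here are illustrative, not the ones used in the test:

// Identical searches issued inside this window reuse the cached Search row
myDaoConfig.setReuseCachedSearchResultsForMillis(500L);
// Searches whose created timestamp is older than this become eligible for the stale-search sweep
myDaoConfig.setExpireSearchResultsAfterMillis(60_000L);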
@Test
@ -197,7 +199,6 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
params.add(Patient.SP_FAMILY, new StringParam("EXPIRE"));
final IBundleProvider bundleProvider = myPatientDao.search(params);
assertThat(toUnqualifiedVersionlessIds(bundleProvider), containsInAnyOrder(pid1, pid2));
assertThat(toUnqualifiedVersionlessIds(bundleProvider), containsInAnyOrder(pid1, pid2));
waitForSearchToSave(bundleProvider.getUuid());
final AtomicLong start = new AtomicLong();
@ -213,9 +214,11 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
}
});
myDaoConfig.setExpireSearchResultsAfterMillis(500);
myDaoConfig.setReuseCachedSearchResultsForMillis(500L);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(start.get() + 499);
int expireSearchResultsAfterMillis = 700;
myDaoConfig.setExpireSearchResultsAfterMillis(expireSearchResultsAfterMillis);
long reuseCachedSearchResultsForMillis = 400L;
myDaoConfig.setReuseCachedSearchResultsForMillis(reuseCachedSearchResultsForMillis);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(start.get() + expireSearchResultsAfterMillis - 1);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
txTemplate.execute(new TransactionCallbackWithoutResult() {
@Override
@ -224,7 +227,7 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
}
});
DatabaseSearchCacheSvcImpl.setNowForUnitTests(start.get() + 600);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(start.get() + expireSearchResultsAfterMillis + reuseCachedSearchResultsForMillis + 1);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
txTemplate.execute(new TransactionCallbackWithoutResult() {
@Override
@ -251,8 +254,12 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
}
Thread.sleep(10);
myDaoConfig.setExpireSearchResultsAfterMillis(1000L);
myDaoConfig.setReuseCachedSearchResultsForMillis(500L);
long expireSearchResultsAfterMillis = 1000L;
myDaoConfig.setExpireSearchResultsAfterMillis(expireSearchResultsAfterMillis);
long reuseCachedSearchResultsForMillis = 500L;
myDaoConfig.setReuseCachedSearchResultsForMillis(reuseCachedSearchResultsForMillis);
long start = System.currentTimeMillis();
final String searchUuid1;
@ -278,9 +285,10 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
}
assertEquals(searchUuid1, searchUuid2);
TestUtil.sleepAtLeast(501);
TestUtil.sleepAtLeast(reuseCachedSearchResultsForMillis + 1);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
// We're now past 500ms so we shouldn't reuse the search
// We're now past reuseCachedSearchResultsForMillis so we shouldn't reuse the search
final String searchUuid3;
{
@ -293,37 +301,35 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
}
assertNotEquals(searchUuid1, searchUuid3);
// Search just got used so it shouldn't be deleted
waitForSearchToSave(searchUuid3);
// Search hasn't expired yet so it shouldn't be deleted
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
final AtomicLong search1timestamp = new AtomicLong();
final AtomicLong search3timestamp = new AtomicLong();
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
Search search1 = mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid1).orElseThrow(()->new InternalErrorException("Search doesn't exist"));
Search search3 = mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid3).orElseThrow(()->new InternalErrorException("Search doesn't exist"));
assertNotNull(search3);
search1timestamp.set(search1.getCreated().getTime());
search3timestamp.set(search3.getCreated().getTime());
}
});
DatabaseSearchCacheSvcImpl.setNowForUnitTests(search3timestamp.get() + 800);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(search1timestamp.get() + expireSearchResultsAfterMillis + reuseCachedSearchResultsForMillis + 1);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
assertNotNull(mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid3));
}
});
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
assertFalse(mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid1).isPresent());
assertTrue(mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid3).isPresent());
}
});
DatabaseSearchCacheSvcImpl.setNowForUnitTests(search3timestamp.get() + 2100);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(search3timestamp.get() + expireSearchResultsAfterMillis + reuseCachedSearchResultsForMillis + 1);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override

View File

@ -47,7 +47,6 @@ import static org.junit.Assert.assertThat;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = {TestR4ConfigWithElasticSearch.class})
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS)
@Ignore // FIXME: remove
public class FhirResourceDaoR4SearchWithElasticSearchTest extends BaseJpaTest {
public static final String URL_MY_CODE_SYSTEM = "http://example.com/my_code_system";
public static final String URL_MY_VALUE_SET = "http://example.com/my_value_set";

View File

@ -395,15 +395,11 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
ValidationModeEnum mode = ValidationModeEnum.CREATE;
String encoded = myFhirCtx.newJsonParser().encodeResourceToString(input);
try {
myObservationDao.validate(input, null, encoded, EncodingEnum.JSON, mode, null, mySrd);
fail();
} catch (PreconditionFailedException e) {
String ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome());
ourLog.info(ooString);
assertThat(ooString, containsString("StructureDefinition reference \\\"" + profileUri + "\\\" could not be resolved"));
}
MethodOutcome output = myObservationDao.validate(input, null, encoded, EncodingEnum.JSON, mode, null, mySrd);
org.hl7.fhir.r4.model.OperationOutcome oo = (org.hl7.fhir.r4.model.OperationOutcome) output.getOperationOutcome();
String outputString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo);
ourLog.info(outputString);
assertThat(outputString, containsString("Profile reference 'http://example.com/StructureDefinition/testValidateResourceContainingProfileDeclarationInvalid' could not be resolved, so has not been checked"));
}
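A sketch of inspecting the returned issues under the new behaviour, reusing the dao, input, encoded, mode and mySrd fixtures from the test above; an unresolvable profile now surfaces as an OperationOutcome issue rather than a thrown PreconditionFailedException:

MethodOutcome result = myObservationDao.validate(input, null, encoded, EncodingEnum.JSON, mode, null, mySrd);
org.hl7.fhir.r4.model.OperationOutcome validationOutcome = (org.hl7.fhir.r4.model.OperationOutcome) result.getOperationOutcome();
for (org.hl7.fhir.r4.model.OperationOutcome.OperationOutcomeIssueComponent issue : validationOutcome.getIssue()) {
    // The "could not be resolved, so has not been checked" text arrives in the issue diagnostics
    ourLog.info("{}: {}", issue.getSeverity(), issue.getDiagnostics());
}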

View File

@ -5,6 +5,7 @@ import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.provider.r4.ResourceProviderR4Test;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.util.CoordCalculatorTest;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.model.primitive.UriDt;
@ -65,6 +66,7 @@ import java.math.BigDecimal;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.*;
@ -3003,7 +3005,6 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
.count(5)
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
final String uuid1 = toSearchUuidFromLinkNext(result1);
Search search1 = newTxTemplate().execute(new TransactionCallback<Search>() {
@ -3012,7 +3013,7 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
return mySearchEntityDao.findByUuidAndFetchIncludes(uuid1).orElseThrow(() -> new InternalErrorException(""));
}
});
Date lastReturned1 = search1.getSearchLastReturned();
Date created1 = search1.getCreated();
Bundle result2 = ourClient
.search()
@ -3021,7 +3022,6 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
.count(5)
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
final String uuid2 = toSearchUuidFromLinkNext(result2);
Search search2 = newTxTemplate().execute(new TransactionCallback<Search>() {
@ -3030,9 +3030,9 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
return mySearchEntityDao.findByUuidAndFetchIncludes(uuid2).orElseThrow(() -> new InternalErrorException(""));
}
});
Date lastReturned2 = search2.getSearchLastReturned();
Date created2 = search2.getCreated();
assertTrue(lastReturned2.getTime() > lastReturned1.getTime());
assertEquals(created2.getTime(), created1.getTime());
Thread.sleep(1500);
@ -3067,24 +3067,22 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
.forResource("Organization")
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
final String uuid1 = toSearchUuidFromLinkNext(result1);
Search search1 = newTxTemplate().execute(theStatus -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid1).orElseThrow(() -> new InternalErrorException("")));
Date lastReturned1 = search1.getSearchLastReturned();
Date created1 = search1.getCreated();
Bundle result2 = ourClient
.search()
.forResource("Organization")
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
final String uuid2 = toSearchUuidFromLinkNext(result2);
Search search2 = newTxTemplate().execute(theStatus -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid2).orElseThrow(() -> new InternalErrorException("")));
Date lastReturned2 = search2.getSearchLastReturned();
Date created2 = search2.getCreated();
assertTrue(lastReturned2.getTime() > lastReturned1.getTime());
assertEquals(created2.getTime(), created1.getTime());
assertEquals(uuid1, uuid2);
}
@ -4282,6 +4280,55 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
}
@Test
public void testNearSearchApproximate() {
Location loc = new Location();
double latitude = CoordCalculatorTest.LATITUDE_UHN;
double longitude = CoordCalculatorTest.LONGITUDE_UHN;
Location.LocationPositionComponent position = new Location.LocationPositionComponent().setLatitude(latitude).setLongitude(longitude);
loc.setPosition(position);
IIdType locId = ourClient.create().resource(loc).execute().getId().toUnqualifiedVersionless();
{ // In the box
double bigEnoughDistance = CoordCalculatorTest.DISTANCE_KM_CHIN_TO_UHN * 2;
String url = "/Location?" +
Location.SP_NEAR + "=" + CoordCalculatorTest.LATITUDE_CHIN + URLEncoder.encode(":") + CoordCalculatorTest.LONGITUDE_CHIN +
"&" +
Location.SP_NEAR_DISTANCE + "=" + bigEnoughDistance + URLEncoder.encode("|http://unitsofmeasure.org|km");
Bundle actual = ourClient
.search()
.byUrl(ourServerBase + "/" + url)
.encodedJson()
.prettyPrint()
.returnBundle(Bundle.class)
.execute();
assertEquals(1, actual.getEntry().size());
assertEquals(locId.getIdPart(), actual.getEntry().get(0).getResource().getIdElement().getIdPart());
}
{ // Outside the box
double tooSmallDistance = CoordCalculatorTest.DISTANCE_KM_CHIN_TO_UHN / 2;
String url = "/Location?" +
Location.SP_NEAR + "=" + CoordCalculatorTest.LATITUDE_CHIN + URLEncoder.encode(":") + CoordCalculatorTest.LONGITUDE_CHIN +
"&" +
Location.SP_NEAR_DISTANCE + "=" + tooSmallDistance + URLEncoder.encode("|http://unitsofmeasure.org|km");
myCaptureQueriesListener.clear();
Bundle actual = ourClient
.search()
.byUrl(ourServerBase + "/" + url)
.encodedJson()
.prettyPrint()
.returnBundle(Bundle.class)
.execute();
myCaptureQueriesListener.logSelectQueries();
assertEquals(0, actual.getEntry().size());
}
}
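A self-contained sketch of the near / near-distance query shape exercised above; the coordinates, radius and class name are made-up values:

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

public class NearQuerySketch {
    public static void main(String[] args) throws UnsupportedEncodingException {
        double lat = 43.65;   // hypothetical latitude
        double lon = -79.38;  // hypothetical longitude
        double km = 10.0;     // hypothetical search radius
        String url = "Location?" +
            "near=" + lat + URLEncoder.encode(":", "UTF-8") + lon +
            "&near-distance=" + km + URLEncoder.encode("|http://unitsofmeasure.org|km", "UTF-8");
        // Prints Location?near=43.65%3A-79.38&near-distance=10.0%7Chttp%3A%2F%2Funitsofmeasure.org%7Ckm
        System.out.println(url);
    }
}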
private String toStr(Date theDate) {
return new InstantDt(theDate).getValueAsString();
}

View File

@ -14,27 +14,15 @@ import ca.uhn.fhir.model.primitive.UriDt;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.PreferReturnEnum;
import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
import ca.uhn.fhir.rest.api.SummaryEnum;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.client.api.IClientInterceptor;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.client.api.IHttpResponse;
import ca.uhn.fhir.rest.client.interceptor.CapturingInterceptor;
import ca.uhn.fhir.rest.gclient.StringClientParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.NumberParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.StringOrListParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.rest.server.exceptions.*;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil;
@ -46,29 +34,16 @@ import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPatch;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.*;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.message.BasicNameValuePair;
import org.hamcrest.Matchers;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.r4.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.r4.model.Bundle.BundleLinkComponent;
import org.hl7.fhir.r4.model.Bundle.BundleType;
import org.hl7.fhir.r4.model.Bundle.HTTPVerb;
import org.hl7.fhir.r4.model.Bundle.SearchEntryMode;
import org.hl7.fhir.r4.model.Bundle.*;
import org.hl7.fhir.r4.model.Encounter.EncounterLocationComponent;
import org.hl7.fhir.r4.model.Encounter.EncounterStatus;
import org.hl7.fhir.r4.model.Enumerations.AdministrativeGender;
@ -77,11 +52,7 @@ import org.hl7.fhir.r4.model.Observation.ObservationStatus;
import org.hl7.fhir.r4.model.Questionnaire.QuestionnaireItemType;
import org.hl7.fhir.r4.model.Subscription.SubscriptionChannelType;
import org.hl7.fhir.r4.model.Subscription.SubscriptionStatus;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.util.AopTestUtils;
import org.springframework.transaction.TransactionStatus;
@ -96,28 +67,14 @@ import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.*;
import java.util.stream.Collectors;
import static ca.uhn.fhir.jpa.util.TestUtil.sleepAtLeast;
import static ca.uhn.fhir.jpa.util.TestUtil.sleepOneClick;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assert.*;
@SuppressWarnings("Duplicates")
public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
@ -4115,11 +4072,10 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
.count(5)
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
final String uuid1 = toSearchUuidFromLinkNext(result1);
Search search1 = newTxTemplate().execute(theStatus -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid1).orElseThrow(() -> new InternalErrorException("")));
Date lastReturned1 = search1.getSearchLastReturned();
Date created1 = search1.getCreated();
Bundle result2 = ourClient
.search()
@ -4128,13 +4084,12 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
.count(5)
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
final String uuid2 = toSearchUuidFromLinkNext(result2);
Search search2 = newTxTemplate().execute(theStatus -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid2).orElseThrow(() -> new InternalErrorException("")));
Date lastReturned2 = search2.getSearchLastReturned();
Date created2 = search2.getCreated();
assertTrue(lastReturned2.getTime() > lastReturned1.getTime());
assertEquals(created2.getTime(), created1.getTime());
Thread.sleep(1500);
@ -4145,7 +4100,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
.count(5)
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
String uuid3 = toSearchUuidFromLinkNext(result3);
@ -4170,11 +4124,10 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
.forResource("Organization")
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
final String uuid1 = toSearchUuidFromLinkNext(result1);
Search search1 = newTxTemplate().execute(theStatus -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid1).orElseThrow(() -> new InternalErrorException("")));
Date lastReturned1 = search1.getSearchLastReturned();
Date created1 = search1.getCreated();
sleepOneClick();
@ -4183,13 +4136,12 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
.forResource("Organization")
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
final String uuid2 = toSearchUuidFromLinkNext(result2);
Search search2 = newTxTemplate().execute(theStatus -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid2).orElseThrow(() -> new InternalErrorException("")));
Date lastReturned2 = search2.getSearchLastReturned();
Date created2 = search2.getCreated();
assertTrue(lastReturned2.getTime() > lastReturned1.getTime());
assertEquals(created2.getTime(), created1.getTime());
assertEquals(uuid1, uuid2);
}

View File

@ -9,12 +9,10 @@ import ca.uhn.fhir.jpa.entity.SearchResult;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.search.StaleSearchDeletingSvcImpl;
import ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl;
import ca.uhn.fhir.rest.gclient.IClientExecutable;
import ca.uhn.fhir.rest.gclient.IQuery;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor;
import ca.uhn.fhir.util.TestUtil;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.r4.model.Bundle;
@ -25,7 +23,6 @@ import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.util.AopTestUtils;
import java.util.Date;
@ -50,7 +47,7 @@ public class StaleSearchDeletingSvcR4Test extends BaseResourceProviderR4Test {
public void after() throws Exception {
super.after();
DatabaseSearchCacheSvcImpl staleSearchDeletingSvc = AopTestUtils.getTargetObject(mySearchCacheSvc);
staleSearchDeletingSvc.setCutoffSlackForUnitTest(DatabaseSearchCacheSvcImpl.DEFAULT_CUTOFF_SLACK);
staleSearchDeletingSvc.setCutoffSlackForUnitTest(DatabaseSearchCacheSvcImpl.SEARCH_CLEANUP_JOB_INTERVAL_MILLIS);
DatabaseSearchCacheSvcImpl.setMaximumResultsToDeleteForUnitTest(DatabaseSearchCacheSvcImpl.DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_STMT);
DatabaseSearchCacheSvcImpl.setMaximumResultsToDeleteInOnePassForUnitTest(DatabaseSearchCacheSvcImpl.DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_PAS);
}
@ -121,7 +118,6 @@ public class StaleSearchDeletingSvcR4Test extends BaseResourceProviderR4Test {
search.setCreated(DateUtils.addDays(new Date(), -10000));
search.setSearchType(SearchTypeEnum.SEARCH);
search.setResourceType("Patient");
search.setSearchLastReturned(DateUtils.addDays(new Date(), -10000));
search = mySearchEntityDao.save(search);
for (int i = 0; i < 15; i++) {
@ -163,7 +159,6 @@ public class StaleSearchDeletingSvcR4Test extends BaseResourceProviderR4Test {
search.setCreated(DateUtils.addDays(new Date(), -10000));
search.setSearchType(SearchTypeEnum.SEARCH);
search.setResourceType("Patient");
search.setSearchLastReturned(DateUtils.addDays(new Date(), -10000));
mySearchEntityDao.save(search);
});
@ -190,7 +185,6 @@ public class StaleSearchDeletingSvcR4Test extends BaseResourceProviderR4Test {
search.setCreated(DateUtils.addDays(new Date(), -10000));
search.setSearchType(SearchTypeEnum.SEARCH);
search.setResourceType("Patient");
search.setSearchLastReturned(DateUtils.addDays(new Date(), -10000));
search = mySearchEntityDao.save(search);
});

View File

@ -1,8 +1,12 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
import ca.uhn.fhir.model.api.annotation.SimpleSetter;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.TestUtil;
@ -21,6 +25,7 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
@ -253,6 +258,67 @@ public class TerminologyUploaderProviderR4Test extends BaseResourceProviderR4Tes
);
}
@Test
public void testApplyDeltaAdd_UsingCodeSystemWithComma() throws IOException {
// Create initial codesystem
{
CodeSystem codeSystem = new CodeSystem();
codeSystem.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
codeSystem.setUrl("https://good.health");
LoggingInterceptor interceptor = new LoggingInterceptor(true);
ourClient.registerInterceptor(interceptor);
ourClient
.create()
.resource(codeSystem)
.execute();
ourClient.unregisterInterceptor(interceptor);
}
// Add a child with a really long description
Parameters outcome;
{
Parameters inputBundle = loadResourceFromClasspath(Parameters.class, "/term-delta-json.json");
LoggingInterceptor interceptor = new LoggingInterceptor(true);
ourClient.registerInterceptor(interceptor);
outcome = ourClient
.operation()
.onType(CodeSystem.class)
.named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD)
.withParameters(inputBundle)
.execute();
ourClient.unregisterInterceptor(interceptor);
}
String encoded = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome);
ourLog.info(encoded);
assertThat(encoded, stringContainsInOrder(
"\"name\": \"conceptCount\"",
"\"valueInteger\": 2",
"\"name\": \"target\"",
"\"reference\": \"CodeSystem/"
));
assertHierarchyContains(
"1111222233 seq=0",
" 1111222234 seq=0"
);
runInTransaction(()->{
TermCodeSystem codeSystem = myTermCodeSystemDao.findByCodeSystemUri("https://good.health");
TermCodeSystemVersion version = codeSystem.getCurrentVersion();
TermConcept code = myTermConceptDao.findByCodeSystemAndCode(version, "1111222233").get();
assertEquals("Some label for the parent - with a dash too", code.getDisplay());
code = myTermConceptDao.findByCodeSystemAndCode(version, "1111222234").get();
assertEquals("Some very very very very very looooooong child label with a coma, another one, one more, more and final one", code.getDisplay());
});
}
@Test
public void testApplyDeltaAdd_UsingCodeSystemWithVeryLongDescription() {

View File

@ -182,41 +182,17 @@ public class SearchCoordinatorSvcImplTest {
when(mySearchBuilder.createQuery(any(), any(), any())).thenReturn(iter);
doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
when(mySearchResultCacheSvc.fetchResultPids(any(), anyInt(), anyInt())).thenAnswer(t -> {
List<ResourcePersistentId> returnedValues = iter.getReturnedValues();
int offset = t.getArgument(1, Integer.class);
int end = t.getArgument(2, Integer.class);
end = Math.min(end, returnedValues.size());
offset = Math.min(offset, returnedValues.size());
ourLog.info("findWithSearchUuid {} - {} out of {} values", offset, end, returnedValues.size());
return returnedValues.subList(offset, end);
});
when(mySearchResultCacheSvc.fetchAllResultPids(any())).thenReturn(allResults);
when(mySearchCacheSvc.tryToMarkSearchAsInProgress(any())).thenAnswer(t->{
when(mySearchCacheSvc.save(any())).thenAnswer(t -> {
Search search = t.getArgument(0, Search.class);
assertEquals(SearchStatusEnum.PASSCMPLET, search.getStatus());
search.setStatus(SearchStatusEnum.LOADING);
return Optional.of(search);
myCurrentSearch = search;
return search;
});
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective(), null);
assertNotNull(result.getUuid());
assertEquals(null, result.size());
List<IBaseResource> resources;
when(mySearchCacheSvc.save(any())).thenAnswer(t -> {
Search search = t.getArgument(0, Search.class);
myCurrentSearch = search;
return search;
});
when(mySearchCacheSvc.fetchByUuid(any())).thenAnswer(t -> Optional.ofNullable(myCurrentSearch));
IFhirResourceDao dao = myCallingDao;
when(myDaoRegistry.getResourceDao(any(String.class))).thenReturn(dao);
resources = result.getResources(0, 100000);
List<IBaseResource> resources = result.getResources(0, 100000);
assertEquals(790, resources.size());
assertEquals("10", resources.get(0).getIdElement().getValueAsString());
assertEquals("799", resources.get(789).getIdElement().getValueAsString());
@ -604,6 +580,11 @@ public class SearchCoordinatorSvcImplTest {
return myWrap.getSkippedCount();
}
@Override
public int getNonSkippedCount() {
return myCount;
}
@Override
public void close() {
// nothing
@ -613,6 +594,7 @@ public class SearchCoordinatorSvcImplTest {
public static class ResultIterator extends BaseIterator<ResourcePersistentId> implements IResultIterator {
private final Iterator<ResourcePersistentId> myWrap;
private int myCount;
ResultIterator(Iterator<ResourcePersistentId> theWrap) {
myWrap = theWrap;
@ -625,6 +607,7 @@ public class SearchCoordinatorSvcImplTest {
@Override
public ResourcePersistentId next() {
myCount++;
return myWrap.next();
}
@ -633,6 +616,11 @@ public class SearchCoordinatorSvcImplTest {
return 0;
}
@Override
public int getNonSkippedCount() {
return myCount;
}
@Override
public void close() {
// nothing
@ -699,6 +687,11 @@ public class SearchCoordinatorSvcImplTest {
}
}
@Override
public int getNonSkippedCount() {
return 0;
}
@Override
public void close() {
// nothing

View File

@ -58,6 +58,7 @@ public class MatchUrlServiceTest extends BaseJpaTest {
Location.SP_NEAR + "=1000.0:2000.0" +
"&" +
Location.SP_NEAR_DISTANCE + "=" + kmDistance + "|http://unitsofmeasure.org|km", ourCtx.getResourceDefinition("Location"));
map.setLocationDistance();
QuantityParam nearDistanceParam = map.getNearDistanceParam();
assertEquals(1, map.size());
@ -74,6 +75,8 @@ public class MatchUrlServiceTest extends BaseJpaTest {
"&" +
Location.SP_NEAR_DISTANCE + "=2|http://unitsofmeasure.org|km",
ourCtx.getResourceDefinition("Location"));
map.setLocationDistance();
fail();
} catch (IllegalArgumentException e) {
assertEquals("Only one " + Location.SP_NEAR_DISTANCE + " parameter may be present", e.getMessage());
@ -89,7 +92,8 @@ public class MatchUrlServiceTest extends BaseJpaTest {
"," +
"2|http://unitsofmeasure.org|km",
ourCtx.getResourceDefinition("Location"));
map.setLoadSynchronous(true);
map.setLocationDistance();
fail();
} catch (IllegalArgumentException e) {
assertEquals("Only one " + Location.SP_NEAR_DISTANCE + " parameter may be present", e.getMessage());

View File

@ -17,7 +17,6 @@ import ca.uhn.fhir.rest.server.IResourceProvider;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.test.utilities.JettyUtil;
import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor;
import com.google.common.collect.Lists;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
@ -39,14 +38,14 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.TestPropertySource;
import javax.servlet.http.HttpServletRequest;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
@ -56,10 +55,6 @@ import static org.junit.Assert.fail;
* Test the rest-hook subscriptions
*/
@SuppressWarnings("Duplicates")
@TestPropertySource(properties = {
UnregisterScheduledProcessor.SCHEDULING_DISABLED + "=false"
})
@DirtiesContext
public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SubscriptionTriggeringDstu3Test.class);
@ -194,6 +189,9 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te
assertThat(responseValue, containsString("Subscription triggering job submitted as JOB ID"));
waitForQueueToDrain();
mySubscriptionTriggeringSvc.runDeliveryPass();
waitForSize(0, ourCreatedObservations);
waitForSize(2, ourUpdatedObservations);
@ -248,6 +246,13 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te
responseValue = response.getParameter().get(0).getValue().primitiveValue();
assertThat(responseValue, containsString("Subscription triggering job submitted as JOB ID"));
mySubscriptionTriggeringSvc.runDeliveryPass();
waitForSize(33, ourUpdatedObservations);
mySubscriptionTriggeringSvc.runDeliveryPass();
mySubscriptionTriggeringSvc.runDeliveryPass();
mySubscriptionTriggeringSvc.runDeliveryPass();
mySubscriptionTriggeringSvc.runDeliveryPass();
waitForSize(51, ourUpdatedObservations);
waitForSize(0, ourCreatedObservations);
waitForSize(0, ourCreatedPatients);
@ -257,8 +262,6 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te
@Test
public void testTriggerUsingOrSeparatedList_MultipleStrings() throws Exception {
myDaoConfig.setSearchPreFetchThresholds(Lists.newArrayList(13, 22, 100));
String payload = "application/fhir+json";
IdType sub2id = createSubscription("Patient?", payload, ourListenerServerBase).getIdElement();
@ -284,8 +287,18 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te
String responseValue = response.getParameter().get(0).getValue().primitiveValue();
assertThat(responseValue, containsString("Subscription triggering job submitted as JOB ID"));
waitForSize(0, ourCreatedPatients);
waitForSize(3, ourUpdatedPatients);
mySubscriptionTriggeringSvc.runDeliveryPass();
mySubscriptionTriggeringSvc.runDeliveryPass();
mySubscriptionTriggeringSvc.runDeliveryPass();
mySubscriptionTriggeringSvc.runDeliveryPass();
mySubscriptionTriggeringSvc.runDeliveryPass();
mySubscriptionTriggeringSvc.runDeliveryPass();
mySubscriptionTriggeringSvc.runDeliveryPass();
mySubscriptionTriggeringSvc.runDeliveryPass();
assertEquals(0, mySubscriptionTriggeringSvc.getActiveJobCount());
assertEquals(0, ourCreatedPatients.size());
await().until(() -> ourUpdatedPatients.size() == 3);
}
@ -316,6 +329,8 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te
String responseValue = response.getParameter().get(0).getValue().primitiveValue();
assertThat(responseValue, containsString("Subscription triggering job submitted as JOB ID"));
mySubscriptionTriggeringSvc.runDeliveryPass();
waitForSize(0, ourCreatedPatients);
waitForSize(3, ourUpdatedPatients);
@ -367,6 +382,9 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te
responseValue = response.getParameter().get(0).getValue().primitiveValue();
assertThat(responseValue, containsString("Subscription triggering job submitted as JOB ID"));
mySubscriptionTriggeringSvc.runDeliveryPass();
mySubscriptionTriggeringSvc.runDeliveryPass();
waitForSize(10, ourUpdatedObservations);
waitForSize(0, ourCreatedObservations);
waitForSize(0, ourCreatedPatients);
@ -421,6 +439,8 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te
String responseValue = response.getParameter().get(0).getValue().primitiveValue();
assertThat(responseValue, containsString("Subscription triggering job submitted as JOB ID"));
mySubscriptionTriggeringSvc.runDeliveryPass();
waitForSize(20, ourUpdatedObservations);
waitForSize(0, ourCreatedObservations);
waitForSize(0, ourCreatedPatients);
@ -458,6 +478,8 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te
assertThat(responseValue, containsString("Subscription triggering job submitted as JOB ID"));
waitForQueueToDrain();
mySubscriptionTriggeringSvc.runDeliveryPass();
waitForSize(0, ourCreatedObservations);
waitForSize(1, ourUpdatedObservations);

View File

@ -54,8 +54,8 @@ public class ValidatorAcrossVersionsTest {
ourLog.info(ctxDstu2.newJsonParser().setPrettyPrint(true).encodeResourceToString(result.toOperationOutcome()));
assertEquals(2, result.getMessages().size());
assertEquals("No questionnaire is identified, so no validation can be performed against the base questionnaire", result.getMessages().get(0).getMessage());
assertEquals("Profile http://hl7.org/fhir/StructureDefinition/QuestionnaireResponse, Element 'QuestionnaireResponse.status': minimum required = 1, but only found 0", result.getMessages().get(1).getMessage());
assertEquals("Profile http://hl7.org/fhir/StructureDefinition/QuestionnaireResponse, Element 'QuestionnaireResponse.status': minimum required = 1, but only found 0", result.getMessages().get(0).getMessage());
assertEquals("No questionnaire is identified, so no validation can be performed against the base questionnaire", result.getMessages().get(1).getMessage());
}
}
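A sketch, reusing the result object from the test above, of collecting the validator messages and asserting on their text without depending on ordering; imports for List, Collectors, SingleValidationMessage and the Hamcrest hasItem matcher are assumed:

List<String> messageTexts = result.getMessages().stream()
    .map(SingleValidationMessage::getMessage)
    .collect(Collectors.toList());
// Order-independent check on one of the two expected messages
assertThat(messageTexts, hasItem("No questionnaire is identified, so no validation can be performed against the base questionnaire"));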

View File

@ -0,0 +1,30 @@
{
"resourceType": "Parameters",
"parameter": [
{
"name": "system",
"valueUri": "https://good.health"
},
{
"name": "codeSystem",
"resource": {
"resourceType": "CodeSystem",
"status": "active",
"content": "not-present",
"url": "https://good.health",
"concept": [
{
"code": "1111222233",
"display": "Some label for the parent - with a dash too",
"concept": [
{
"code": "1111222234",
"display": "Some very very very very very looooooong child label with a coma, another one, one more, more and final one"
}
]
}
]
}
}
]
}
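A sketch showing how a payload of this shape can be assembled in code and submitted through the generic client; the client variable is an assumed, already-initialized IGenericClient, and the display strings are illustrative:

Parameters delta = new Parameters();
delta.addParameter().setName("system").setValue(new UriType("https://good.health"));

CodeSystem cs = new CodeSystem();
cs.setUrl("https://good.health");
cs.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
CodeSystem.ConceptDefinitionComponent parent = cs.addConcept()
    .setCode("1111222233")
    .setDisplay("Parent concept");                              // illustrative display text
parent.addConcept()
    .setCode("1111222234")
    .setDisplay("Child concept, with a comma in its display");  // illustrative display text
delta.addParameter().setName("codeSystem").setResource(cs);

Parameters outcome = client
    .operation()
    .onType(CodeSystem.class)
    .named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD)
    .withParameters(delta)
    .execute();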

Some files were not shown because too many files have changed in this diff