Merge branch 'master' into master

James Agnew 2018-09-07 23:25:49 -04:00 committed by GitHub
commit 29f324f8de
371 changed files with 78805 additions and 15425 deletions

.github/ISSUE_TEMPLATE/bug_report.md
View File

@ -0,0 +1,32 @@
---
name: Bug report
about: Create a report to help us improve
---
NOTE: Before filing a ticket, please see the following URL:
https://github.com/jamesagnew/hapi-fhir/wiki/Getting-Help
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Environment (please complete the following information):**
- HAPI FHIR Version
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
**Additional context**
Add any other context about the problem here.

View File

@ -19,11 +19,14 @@ cache:
install: /bin/true
# This seems to be required to get travis to set Xmx4g, per https://github.com/travis-ci/travis-ci/issues/3893
before_script:
# This seems to be required to get travis to set Xmx4g, per https://github.com/travis-ci/travis-ci/issues/3893
- export MAVEN_SKIP_RC=true
# Sometimes things get restored from the cache with bad permissions. See https://github.com/travis-ci/travis-ci/issues/9630
- sudo chmod -R 777 "$HOME/.m2/repository";
- sudo chown -R travis:travis "$HOME/.m2/repository";
script:
# - mvn -e -B clean install && cd hapi-fhir-ra && mvn -e -B -DTRAVIS_JOB_ID=$TRAVIS_JOB_ID clean test jacoco:report coveralls:report
# - mvn -Dci=true -e -B -P ALLMODULES,NOPARALLEL,ERRORPRONE clean install && cd hapi-fhir-jacoco && mvn -e -B -DTRAVIS_JOB_ID=$TRAVIS_JOB_ID jacoco:report coveralls:report
- mvn -Dci=true -e -B -P ALLMODULES,REDUCED_JPA_TESTS,ERRORPRONE,JACOCO clean install && cd hapi-fhir-jacoco && mvn -e -B -DTRAVIS_JOB_ID=$TRAVIS_JOB_ID jacoco:report coveralls:report
- mvn -Dci=true -e -B -P ALLMODULES,REDUCED_JPA_TESTS,ERRORPRONE,JACOCO clean install && cd hapi-fhir-jacoco && mvn -e -B -DTRAVIS_JOB_ID=$TRAVIS_JOB_ID jacoco:report coveralls:report;

View File

@ -17,3 +17,5 @@ A demonstration of this project is available here:
http://hapi.fhir.org/
This project is Open Source, licensed under the Apache Software License 2.0.
Please see [this wiki page](https://github.com/jamesagnew/hapi-fhir/wiki/Getting-Help) for information on where to get help with HAPI FHIR. Please see [Smile CDR](https://smilecdr.com) for information on commercial support.

View File

@ -4,8 +4,8 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>2.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
<version>2.5-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>hapi-fhir-jaxrs-sse</artifactId>
<build>

View File

@ -31,12 +31,6 @@
<dependencies>
<dependency>
<groupId>org.opencds.cqf</groupId>
<artifactId>cqf-ruler</artifactId>
<version>0.1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty.websocket</groupId>
<artifactId>websocket-api</artifactId>
@ -90,6 +84,12 @@
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.opencds.cqf</groupId>
<artifactId>cqf-ruler</artifactId>
<version>0.1.2-SNAPSHOT</version>
</dependency>
<!-- HAPI-FHIR uses Logback for logging support. The logback library is included automatically by Maven as a part of the hapi-fhir-base dependency, but you also need to include a logging library. Logback
is used here, but log4j would also be fine. -->
<dependency>

View File

@ -8,7 +8,7 @@ public class CdsHooksServerExample extends CdsServicesServlet {
// protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// // Change how requests are handled
// }
//
// @Override
// protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// // Change discovery response

View File

@ -1,165 +1,19 @@
package ca.uhn.fhir.jpa.cds.example;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.provider.dstu3.JpaConformanceProviderDstu3;
import ca.uhn.fhir.jpa.provider.dstu3.JpaSystemProviderDstu3;
import ca.uhn.fhir.jpa.provider.dstu3.TerminologyUploaderProviderDstu3;
import ca.uhn.fhir.jpa.rp.dstu3.ActivityDefinitionResourceProvider;
import ca.uhn.fhir.jpa.rp.dstu3.MeasureResourceProvider;
import ca.uhn.fhir.jpa.rp.dstu3.PlanDefinitionResourceProvider;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.narrative.DefaultThymeleafNarrativeGenerator;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.server.ETagSupportEnum;
import ca.uhn.fhir.rest.server.IResourceProvider;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Meta;
import org.opencds.cqf.providers.FHIRActivityDefinitionResourceProvider;
import org.opencds.cqf.providers.FHIRMeasureResourceProvider;
import org.opencds.cqf.providers.FHIRPlanDefinitionResourceProvider;
import org.springframework.web.context.ContextLoaderListener;
import org.springframework.web.context.WebApplicationContext;
import org.opencds.cqf.servlet.BaseServlet;
import javax.servlet.ServletException;
import java.util.Collection;
import java.util.List;
public class CdsServerExample extends RestfulServer {
public class CdsServerExample extends BaseServlet {
@SuppressWarnings("unchecked")
@Override
protected void initialize() throws ServletException {
super.initialize();
// Default setup - STU3 support only
// Source project location: https://github.com/DBCG/cqf-ruler
FhirVersionEnum fhirVersion = FhirVersionEnum.DSTU3;
setFhirContext(new FhirContext(fhirVersion));
@SuppressWarnings("unchecked")
@Override
protected void initialize() throws ServletException {
super.initialize();
// Get the spring context from the web container (it's declared in web.xml)
WebApplicationContext myAppCtx = ContextLoaderListener.getCurrentWebApplicationContext();
if (myAppCtx == null) {
throw new ServletException("Error retrieving spring context from the web container");
}
String resourceProviderBeanName = "myResourceProvidersDstu3";
List<IResourceProvider> beans = myAppCtx.getBean(resourceProviderBeanName, List.class);
setResourceProviders(beans);
Object systemProvider = myAppCtx.getBean("mySystemProviderDstu3", JpaSystemProviderDstu3.class);
setPlainProviders(systemProvider);
/*
* The conformance provider exports the supported resources, search parameters, etc for
* this server. The JPA version adds resource counts to the exported statement, so it
* is a nice addition.
*/
IFhirSystemDao<Bundle, Meta> systemDao = myAppCtx.getBean("mySystemDaoDstu3", IFhirSystemDao.class);
JpaConformanceProviderDstu3 confProvider =
new JpaConformanceProviderDstu3(this, systemDao, myAppCtx.getBean(DaoConfig.class));
confProvider.setImplementationDescription("Example Server");
setServerConformanceProvider(confProvider);
/*
* Enable ETag Support (this is already the default)
*/
setETagSupport(ETagSupportEnum.ENABLED);
/*
* This server tries to dynamically generate narratives
*/
FhirContext ctx = getFhirContext();
ctx.setNarrativeGenerator(new DefaultThymeleafNarrativeGenerator());
/*
* Default to JSON and pretty printing
*/
setDefaultPrettyPrint(true);
setDefaultResponseEncoding(EncodingEnum.JSON);
/*
* -- New in HAPI FHIR 1.5 --
* This configures the server to page search results to and from
* the database, instead of only paging them to memory. This may mean
* a performance hit when performing searches that return lots of results,
* but makes the server much more scalable.
*/
setPagingProvider(myAppCtx.getBean(DatabaseBackedPagingProvider.class));
/*
* Load interceptors for the server from Spring (these are defined in FhirServerConfig.java)
*/
Collection<IServerInterceptor> interceptorBeans = myAppCtx.getBeansOfType(IServerInterceptor.class).values();
for (IServerInterceptor interceptor : interceptorBeans) {
this.registerInterceptor(interceptor);
}
/*
* Adding resource providers from the cqf-ruler
*/
// Measure processing
FHIRMeasureResourceProvider measureProvider = new FHIRMeasureResourceProvider(getResourceProviders());
MeasureResourceProvider jpaMeasureProvider = (MeasureResourceProvider) getProvider("Measure");
measureProvider.setDao(jpaMeasureProvider.getDao());
measureProvider.setContext(jpaMeasureProvider.getContext());
// PlanDefinition processing
FHIRPlanDefinitionResourceProvider planDefProvider = new FHIRPlanDefinitionResourceProvider(getResourceProviders());
PlanDefinitionResourceProvider jpaPlanDefProvider =
(PlanDefinitionResourceProvider) getProvider("PlanDefinition");
planDefProvider.setDao(jpaPlanDefProvider.getDao());
planDefProvider.setContext(jpaPlanDefProvider.getContext());
// ActivityDefinition processing
FHIRActivityDefinitionResourceProvider actDefProvider = new FHIRActivityDefinitionResourceProvider(getResourceProviders());
ActivityDefinitionResourceProvider jpaActDefProvider =
(ActivityDefinitionResourceProvider) getProvider("ActivityDefinition");
actDefProvider.setDao(jpaActDefProvider.getDao());
actDefProvider.setContext(jpaActDefProvider.getContext());
try {
unregisterProvider(jpaMeasureProvider);
unregisterProvider(jpaPlanDefProvider);
unregisterProvider(jpaActDefProvider);
} catch (Exception e) {
throw new ServletException("Unable to unregister provider: " + e.getMessage());
}
registerProvider(measureProvider);
registerProvider(planDefProvider);
registerProvider(actDefProvider);
/*
* If you are hosting this server at a specific DNS name, the server will try to
* figure out the FHIR base URL based on what the web container tells it, but
* this doesn't always work. If you are setting links in your search bundles that
* just refer to "localhost", you might want to use a server address strategy:
*/
//setServerAddressStrategy(new HardcodedServerAddressStrategy("http://mydomain.com/fhir/baseDstu2"));
/*
* If you are using DSTU3+, you may want to add a terminology uploader, which allows
* uploading of external terminologies such as Snomed CT. Note that this uploader
* does not have any security attached (any anonymous user may use it by default)
* so it is a potential security vulnerability. Consider using an AuthorizationInterceptor
* with this feature.
*/
registerProvider(myAppCtx.getBean(TerminologyUploaderProviderDstu3.class));
}
public IResourceProvider getProvider(String name) {
for (IResourceProvider res : getResourceProviders()) {
if (res.getResourceType().getSimpleName().equals(name)) {
return res;
}
}
throw new IllegalArgumentException("This should never happen!");
}
// Add additional config and/or resource providers
}
}
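The rewritten servlet above hands essentially all of its setup to cqf-ruler's BaseServlet and leaves only the placeholder comment "Add additional config and/or resource providers". As a rough, hypothetical sketch only (it assumes BaseServlet keeps the RestfulServer registration API that the replaced code relied on, which this diff does not show), extra configuration could be slotted into that hook roughly like this:

import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
import org.opencds.cqf.servlet.BaseServlet;

import javax.servlet.ServletException;

public class CustomCdsServerExample extends BaseServlet {
	@Override
	protected void initialize() throws ServletException {
		super.initialize(); // cqf-ruler wires up the JPA providers and CDS endpoints

		// Assumption: BaseServlet ultimately extends HAPI's RestfulServer (as the
		// replaced CdsServerExample did), so registerInterceptor(...) is available here.
		LoggingInterceptor accessLog = new LoggingInterceptor();
		accessLog.setLoggerName("fhirtest.access"); // same access-log name the removed FhirServerConfig used
		registerInterceptor(accessLog);
	}
}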

View File

@ -1,125 +0,0 @@
package ca.uhn.fhir.jpa.cds.example;
import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu3;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory;
import ca.uhn.fhir.jpa.util.DerbyTenSevenHapiFhirDialect;
import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorDstu3;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.lang3.time.DateUtils;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import java.util.Properties;
/**
* This is the primary configuration file for the example server
*/
@Configuration
@EnableTransactionManagement()
public class FhirServerConfig extends BaseJavaConfigDstu3 {
/**
* Configure FHIR properties around the JPA server via this bean
*/
@Bean()
public DaoConfig daoConfig() {
DaoConfig retVal = new DaoConfig();
retVal.setSubscriptionEnabled(true);
retVal.setSubscriptionPollDelay(5000);
retVal.setSubscriptionPurgeInactiveAfterMillis(DateUtils.MILLIS_PER_HOUR);
retVal.setAllowMultipleDelete(true);
return retVal;
}
/**
* The following bean configures the database connection. The 'url' property value of "jdbc:derby:directory:jpaserver_derby_files;create=true" indicates that the server should save resources in a
* directory called "jpaserver_derby_files".
*
* A URL to a remote database could also be placed here, along with login credentials and other properties supported by BasicDataSource.
*/
@Bean(destroyMethod = "close")
public DataSource dataSource() {
BasicDataSource retVal = new BasicDataSource();
retVal.setDriver(new org.apache.derby.jdbc.EmbeddedDriver());
retVal.setUrl("jdbc:derby:directory:target/jpaserver_derby_files;create=true");
retVal.setUsername("");
retVal.setPassword("");
return retVal;
}
@Override
@Bean()
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
retVal.setPersistenceUnitName("HAPI_PU");
retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties());
return retVal;
}
private Properties jpaProperties() {
Properties extraProperties = new Properties();
extraProperties.put("hibernate.dialect", DerbyTenSevenHapiFhirDialect.class.getName());
extraProperties.put("hibernate.format_sql", "true");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.jdbc.batch_size", "20");
extraProperties.put("hibernate.cache.use_query_cache", "false");
extraProperties.put("hibernate.cache.use_second_level_cache", "false");
extraProperties.put("hibernate.cache.use_structured_entries", "false");
extraProperties.put("hibernate.cache.use_minimal_puts", "false");
extraProperties.put("hibernate.search.model_mapping", LuceneSearchMappingFactory.class.getName());
extraProperties.put("hibernate.search.default.directory_provider", "filesystem");
extraProperties.put("hibernate.search.default.indexBase", "target/lucenefiles");
extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT");
// extraProperties.put("hibernate.search.default.worker.execution", "async");
return extraProperties;
}
/**
* Do some fancy logging to create a nice access log that has details about each incoming request.
*/
public IServerInterceptor loggingInterceptor() {
LoggingInterceptor retVal = new LoggingInterceptor();
retVal.setLoggerName("fhirtest.access");
retVal.setMessageFormat(
"Path[${servletPath}] Source[${requestHeader.x-forwarded-for}] Operation[${operationType} ${operationName} ${idOrResourceName}] UA[${requestHeader.user-agent}] Params[${requestParameters}] ResponseEncoding[${responseEncodingNoDefault}]");
retVal.setLogExceptions(true);
retVal.setErrorMessageFormat("ERROR - ${requestVerb} ${requestUrl}");
return retVal;
}
/**
* This interceptor adds some pretty syntax highlighting in responses when a browser is detected
*/
@Bean(autowire = Autowire.BY_TYPE)
public IServerInterceptor responseHighlighterInterceptor() {
ResponseHighlighterInterceptor retVal = new ResponseHighlighterInterceptor();
return retVal;
}
@Bean(autowire = Autowire.BY_TYPE)
public IServerInterceptor subscriptionSecurityInterceptor() {
SubscriptionsRequireManualActivationInterceptorDstu3 retVal = new SubscriptionsRequireManualActivationInterceptorDstu3();
return retVal;
}
@Bean()
public JpaTransactionManager transactionManager(EntityManagerFactory entityManagerFactory) {
JpaTransactionManager retVal = new JpaTransactionManager();
retVal.setEntityManagerFactory(entityManagerFactory);
return retVal;
}
}

View File

@ -1,56 +0,0 @@
package ca.uhn.fhir.jpa.cds.example;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.to.FhirTesterMvcConfig;
import ca.uhn.fhir.to.TesterConfig;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
//@formatter:off
/**
* This spring config file configures the web testing module. It serves two
* purposes:
* 1. It imports FhirTesterMvcConfig, which is the spring config for the
* tester itself
* 2. It tells the tester which server(s) to talk to, via the testerConfig()
* method below
*/
@Configuration
@Import(FhirTesterMvcConfig.class)
public class FhirTesterConfig {
/**
* This bean tells the testing webpage which servers it should configure itself
* to communicate with. In this example we configure it to talk to the local
* server, as well as one public server. If you are creating a project to
* deploy somewhere else, you might choose to only put your own server's
* address here.
*
* Note the use of the ${serverBase} variable below. This will be replaced with
* the base URL as reported by the server itself. Often for a simple Tomcat
* (or other container) installation, this will end up being something
* like "http://localhost:8080/hapi-fhir-jpaserver-example". If you are
* deploying your server to a place with a fully qualified domain name,
* you might want to use that instead of using the variable.
*/
@Bean
public TesterConfig testerConfig() {
TesterConfig retVal = new TesterConfig();
retVal
.addServer()
.withId("home")
.withFhirVersion(FhirVersionEnum.DSTU3)
.withBaseUrl("${serverBase}/baseDstu3")
.withName("Local Tester")
.addServer()
.withId("hapi")
.withFhirVersion(FhirVersionEnum.DSTU3)
.withBaseUrl("http://fhirtest.uhn.ca/baseDstu3")
.withName("Public HAPI Test Server");
return retVal;
}
}
//@formatter:on

View File

@ -1,5 +1,5 @@
<web-app xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="3.0"
xsi:schemaLocation="http://java.sun.com/xml/ns/javaee ./xsd/web-app_3_0.xsd">
xsi:schemaLocation="http://java.sun.com/xml/ns/javaee ./xsd/web-app_3_0.xsd" metadata-complete="true">
<listener>
<listener-class>org.springframework.web.context.ContextLoaderListener</listener-class>
@ -13,7 +13,7 @@
<context-param>
<param-name>contextConfigLocation</param-name>
<param-value>
ca.uhn.fhir.jpa.cds.example.FhirServerConfig
org.opencds.cqf.config.FhirServerConfigDstu3
</param-value>
</context-param>
@ -34,17 +34,17 @@
</init-param>
<init-param>
<param-name>contextConfigLocation</param-name>
<param-value>ca.uhn.fhir.jpa.cds.example.FhirTesterConfig</param-value>
<param-value>org.opencds.cqf.config.FhirTesterConfigDstu3</param-value>
</init-param>
<load-on-startup>2</load-on-startup>
</servlet>
<servlet>
<servlet-name>fhirServlet</servlet-name>
<servlet-name>cdsServerExample</servlet-name>
<servlet-class>ca.uhn.fhir.jpa.cds.example.CdsServerExample</servlet-class>
<init-param>
<param-name>ImplementationDescription</param-name>
<param-value>FHIR JPA Server</param-value>
<param-value>FHIR CQF Ruler-of-All-Knowledge JPA Server</param-value>
</init-param>
<init-param>
<param-name>FhirVersion</param-name>
@ -53,6 +53,16 @@
<load-on-startup>1</load-on-startup>
</servlet>
<servlet-mapping>
<servlet-name>cdsServerExample</servlet-name>
<url-pattern>/baseDstu3/*</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>spring</servlet-name>
<url-pattern>/tester/*</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>cdsServicesServlet</servlet-name>
<url-pattern>/cds-services</url-pattern>
@ -63,18 +73,6 @@
<url-pattern>/cds-services/*</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>fhirServlet</servlet-name>
<url-pattern>/baseDstu3/*</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>spring</servlet-name>
<url-pattern>/</url-pattern>
</servlet-mapping>
<!-- This filter provides support for Cross Origin Resource Sharing (CORS) -->
<filter>
<filter-name>CORS Filter</filter-name>
@ -92,7 +90,7 @@
<init-param>
<description>A comma separated list of allowed headers when making a non simple CORS request.</description>
<param-name>cors.allowed.headers</param-name>
<param-value>X-FHIR-Starter,Origin,Accept,X-Requested-With,Content-Type,Access-Control-Request-Method,Access-Control-Request-Headers,Prefer</param-value>
<param-value>X-FHIR-Starter,Origin,Accept,Authorization,X-Requested-With,Content-Type,Access-Control-Request-Method,Access-Control-Request-Headers</param-value>
</init-param>
<init-param>
<description>A comma separated list of non-standard response headers that will be exposed to XHR2 object.</description>
@ -120,5 +118,4 @@
<url-pattern>/*</url-pattern>
</filter-mapping>
</web-app>

View File

@ -40,6 +40,7 @@ public class CdsExampleTests {
@BeforeClass
public static void beforeClass() throws Exception {
// Configure and spin up server
String path = Paths.get("").toAbsolutePath().toString();
ourPort = RandomServerPortProvider.findFreePort();
@ -60,11 +61,10 @@ public class CdsExampleTests {
ourClient = ourCtx.newRestfulGenericClient(ourServerBase);
ourClient.registerInterceptor(new LoggingInterceptor(true));
// Load test data
// Normally, I would use a transaction bundle, but issues with the random ports prevent that...
// So, doing it the old-fashioned way =)
// Load terminology for measure tests (HEDIS measures)
putResource("measure-terminology-bundle.json", "");
// General
// load test data and conversion library for $apply operation tests
putResource("general-practitioner.json", "Practitioner-12208");
putResource("general-patient.json", "Patient-12214");
putResource("general-fhirhelpers-3.json", "FHIRHelpers");
@ -79,31 +79,129 @@ public class CdsExampleTests {
InputStream is = CdsExampleTests.class.getResourceAsStream(resourceFileName);
Scanner scanner = new Scanner(is).useDelimiter("\\A");
String json = scanner.hasNext() ? scanner.next() : "";
IBaseResource resource = ourCtx.newJsonParser().parseResource(json);
ourClient.update(id, resource);
boolean isJson = resourceFileName.endsWith("json");
IBaseResource resource = isJson ? ourCtx.newJsonParser().parseResource(json) : ourCtx.newXmlParser().parseResource(json);
if (resource instanceof Bundle) {
ourClient.transaction().withBundle((Bundle) resource).execute();
}
else {
ourClient.update().resource(resource).withId(id).execute();
}
}
/*
*
* Testing Individual Measure
* This test patient satisfies all the group population criteria for this measure.
*
* */
@Test
public void MeasureProcessingTest() {
putResource("measure-processing-library.json", "col-logic");
putResource("measure-processing-measure.json", "col");
putResource("measure-processing-procedure.json", "Procedure-9");
putResource("measure-processing-condition.json", "Condition-13");
putResource("measure-processing-valueset-1.json", "2.16.840.1.113883.3.464.1003.108.11.1001");
putResource("measure-processing-valueset-2.json", "2.16.840.1.113883.3.464.1003.198.12.1019");
putResource("measure-processing-valueset-3.json", "2.16.840.1.113883.3.464.1003.108.12.1020");
putResource("measure-processing-valueset-4.json", "2.16.840.1.113883.3.464.1003.198.12.1010");
putResource("measure-processing-valueset-5.json", "2.16.840.1.113883.3.464.1003.198.12.1011");
public void PatientMeasureTest() {
// load measure specific test data
putResource("patient-measure-test-bundle.json", "");
Parameters inParams = new Parameters();
inParams.addParameter().setName("patient").setValue(new StringType("Patient-12214"));
inParams.addParameter().setName("startPeriod").setValue(new DateType("2001-01-01"));
inParams.addParameter().setName("endPeriod").setValue(new DateType("2015-03-01"));
inParams.addParameter().setName("patient").setValue(new StringType("Patient/Patient-6529"));
inParams.addParameter().setName("periodStart").setValue(new DateType("2003-01-01"));
inParams.addParameter().setName("periodEnd").setValue(new DateType("2003-12-31"));
Parameters outParams = ourClient
.operation()
.onInstance(new IdDt("Measure", "col"))
.named("$evaluate")
.onInstance(new IdDt("Measure", "measure-asf"))
.named("$evaluate-measure")
.withParameters(inParams)
.useHttpGet()
.execute();
List<Parameters.ParametersParameterComponent> response = outParams.getParameter();
Assert.assertTrue(!response.isEmpty());
Parameters.ParametersParameterComponent component = response.get(0);
Assert.assertTrue(component.getResource() instanceof MeasureReport);
MeasureReport report = (MeasureReport) component.getResource();
for (MeasureReport.MeasureReportGroupComponent group : report.getGroup()) {
for (MeasureReport.MeasureReportGroupPopulationComponent population : group.getPopulation()) {
Assert.assertTrue(population.getCount() > 0);
}
}
}
/*
*
* Testing Patient List Measure
* This test is only testing for valid initial population membership.
* There are 2 patients that reference Practitioner-2520 as their general practitioner.
* However, only one meets the initial population criteria for the measure.
*
* */
@Test
public void PatientListMeasureTest() {
// load measure specific test data
putResource("patient-list-measure-test-bundle.json", "");
Parameters inParams = new Parameters();
inParams.addParameter().setName("reportType").setValue(new StringType("patient-list"));
inParams.addParameter().setName("practitioner").setValue(new StringType("Practitioner/Practitioner-2520"));
inParams.addParameter().setName("periodStart").setValue(new DateType("1997-01-01"));
inParams.addParameter().setName("periodEnd").setValue(new DateType("1997-12-31"));
Parameters outParams = ourClient
.operation()
.onInstance(new IdDt("Measure", "measure-ccs"))
.named("$evaluate-measure")
.withParameters(inParams)
.useHttpGet()
.execute();
List<Parameters.ParametersParameterComponent> response = outParams.getParameter();
Assert.assertTrue(!response.isEmpty());
Parameters.ParametersParameterComponent component = response.get(0);
Assert.assertTrue(component.getResource() instanceof MeasureReport);
MeasureReport report = (MeasureReport) component.getResource();
for (MeasureReport.MeasureReportGroupComponent group : report.getGroup()) {
for (MeasureReport.MeasureReportGroupPopulationComponent population : group.getPopulation()) {
if (population.getCode().getCodingFirstRep().getCode().equals("initial-population")) {
Assert.assertTrue(population.getCount() == 1);
}
}
}
}
/*
*
* Testing Population (or Summary) Measure
* This tests a population of 100 patients. 10 patients satisfy the initial population criteria.
* However, only 2 meet the numerator criteria.
*
* */
@Test
public void PopulationMeasureTest() {
// load measure specific test data
putResource("population-measure-network-bundle.json", "");
putResource("population-measure-patients-bundle.json", "");
putResource("population-measure-test-bundle.json", "");
Parameters inParams = new Parameters();
inParams.addParameter().setName("reportType").setValue(new StringType("population"));
inParams.addParameter().setName("periodStart").setValue(new DateType("1997-01-01"));
inParams.addParameter().setName("periodEnd").setValue(new DateType("1997-12-31"));
Parameters outParams = ourClient
.operation()
.onInstance(new IdDt("Measure", "measure-bcs"))
.named("$evaluate-measure")
.withParameters(inParams)
.useHttpGet()
.execute();
@ -121,16 +219,102 @@ public class CdsExampleTests {
Assert.assertTrue(report.getEvaluatedResources() != null);
for (MeasureReport.MeasureReportGroupComponent group : report.getGroup()) {
if (group.getIdentifier().getValue().equals("history-of-colorectal-cancer")) {
Assert.assertTrue(group.getPopulation().get(0).getCount() > 0);
}
if (group.getIdentifier().getValue().equals("history-of-total-colectomy")) {
Assert.assertTrue(group.getPopulation().get(0).getCount() > 0);
for (MeasureReport.MeasureReportGroupPopulationComponent population : group.getPopulation()) {
Assert.assertTrue(population.getCount() > 0);
}
}
}
/*
*
* Testing Patient View CDS Hook
* This tests whether a patient has had appropriate labs/orders for Breast Cancer detection.
* If not, a suggestion will be returned.
*
* */
@Test
public void PatientViewCdsHooksTest() throws IOException {
// load terminology and test data specific to hook
putResource("cds-codesystems.json", "");
putResource("cds-valuesets.json", "");
putResource("cds-bcs-bundle.json", "");
// Get the CDS Hooks request
InputStream is = this.getClass().getResourceAsStream("cds-bcs-request.json");
Scanner scanner = new Scanner(is).useDelimiter("\\A");
String cdsHooksRequest = scanner.hasNext() ? scanner.next() : "";
cdsHooksRequest = cdsHooksRequest.replace("XXXXX", ourServerBase);
byte[] data = cdsHooksRequest.getBytes("UTF-8");
URL url = new URL("http://localhost:" + ourPort + "/hapi-fhir-jpaserver-cds/cds-services/bcs-decision-support");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("POST");
conn.setRequestProperty("Content-Type", "application/json");
conn.setRequestProperty("Content-Length", String.valueOf(data.length));
conn.setDoOutput(true);
conn.getOutputStream().write(data);
StringBuilder response = new StringBuilder();
try(Reader in = new BufferedReader(new InputStreamReader(conn.getInputStream(), "UTF-8")))
{
for (int i; (i = in.read()) >= 0;) {
response.append((char) i);
}
}
String expected = "{\n" +
" \"cards\": [\n" +
" {\n" +
" \"summary\": \"A Mammogram procedure for the patient is recommended\",\n" +
" \"indicator\": \"warning\",\n" +
" \"detail\": \"The patient has not had a Mammogram procedure in the last 39 months\",\n" +
" \"source\": {},\n" +
" \"suggestions\": [\n" +
" {\n" +
" \"label\": \"Mammogram request\",\n" +
" \"actions\": [\n" +
" {\n" +
" \"type\": \"create\",\n" +
" \"description\": \"The patient has not had a Mammogram procedure in the last 39 months\",\n" +
" \"resource\": {\n" +
" \"resourceType\": \"ProcedureRequest\",\n" +
" \"status\": \"draft\",\n" +
" \"intent\": \"order\",\n" +
" \"code\": {\n" +
" \"coding\": [\n" +
" {\n" +
" \"system\": \"http://www.ama-assn.org/go/cpt\",\n" +
" \"code\": \"77056\",\n" +
" \"display\": \"Mammography; bilateral\"\n" +
" }\n" +
" ]\n" +
" },\n" +
" \"subject\": {\n" +
" \"reference\": \"Patient/Patient-6535\"\n" +
" }\n" +
" }\n" +
" }\n" +
" ]\n" +
" }\n" +
" ]\n" +
" }\n" +
" ]\n" +
"}\n";
String withoutID = response.toString().replaceAll("\"id\":.*\\s", "");
Assert.assertTrue(
withoutID.replaceAll("\\s+", "")
.equals(expected.replaceAll("\\s+", ""))
);
}
/*
*
* Testing $apply operation for a PlanDefinition resource
* This test applies a PlanDefinition and returns a CarePlan with a dynamic property populated.
*
* */
@Test
public void PlanDefinitionApplyTest() throws ClassNotFoundException {
putResource("plandefinition-apply-library.json", "plandefinitionApplyTest");
@ -160,6 +344,12 @@ public class CdsExampleTests {
Assert.assertTrue(carePlan.getTitle().equals("This is a dynamic definition!"));
}
/*
*
* Testing $apply operation for an ActivityDefinition resource
* This test applies an ActivityDefinition and returns a ProcedureRequest with a dynamic property populated.
*
* */
@Test
public void ActivityDefinitionApplyTest() {
putResource("activitydefinition-apply-library.json", "activityDefinitionApplyTest");
@ -188,53 +378,6 @@ public class CdsExampleTests {
Assert.assertTrue(procedureRequest.getDoNotPerform());
}
@Test
@Ignore
public void CdsHooksPatientViewTest() throws IOException {
putResource("cds-bcs-library.json", "patient-view");
putResource("cds-bcs-patient.json", "Patient-6532");
putResource("cds-bcs-plandefinition.json", "bcs-decision-support");
putResource("cds-bcs-activitydefinition.json", "mammogram-service-request");
// Get the CDS Hooks request
InputStream is = this.getClass().getResourceAsStream("cds-bcs-request.json");
Scanner scanner = new Scanner(is).useDelimiter("\\A");
String cdsHooksRequest = scanner.hasNext() ? scanner.next() : "";
byte[] data = cdsHooksRequest.getBytes("UTF-8");
URL url = new URL("http://localhost:" + ourPort + "/hapi-fhir-jpaserver-cds/cds-services/bcs-decision-support");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("POST");
conn.setRequestProperty("Content-Type", "application/json");
conn.setRequestProperty("Content-Length", String.valueOf(data.length));
conn.setDoOutput(true);
conn.getOutputStream().write(data);
StringBuilder response = new StringBuilder();
try(Reader in = new BufferedReader(new InputStreamReader(conn.getInputStream(), "UTF-8")))
{
for (int i; (i = in.read()) >= 0;) {
response.append((char) i);
}
}
String expected = "{\n" +
" \"cards\": [\n" +
" {\n" +
" \"summary\": \"High risk for opioid overdose - taper now\",\n" +
" \"indicator\": \"warning\",\n" +
" \"detail\": \"Total morphine milligram equivalent (MME) is 20200.700mg/d. Taper to less than 50.\"\n" +
" }\n" +
" ]\n" +
"}";
Assert.assertTrue(
response.toString().replaceAll("\\s+", "")
.equals(expected.replaceAll("\\s+", ""))
);
}
}
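The tests above take their server port from RandomServerPortProvider.findFreePort(); that helper class is declared just below, but its body falls outside the hunks shown in this diff. Purely as an illustrative sketch (an assumption, not the project's actual implementation), such a helper typically binds a ServerSocket to port 0 and returns whatever ephemeral port the OS assigns:

import java.io.IOException;
import java.net.ServerSocket;

// Hypothetical stand-in for the truncated helper: ask the OS for any free TCP port.
class FreePortSketch {
	static int findFreePort() throws IOException {
		try (ServerSocket socket = new ServerSocket(0)) {
			socket.setReuseAddress(true);
			return socket.getLocalPort();
		}
	}
}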
class RandomServerPortProvider {

View File

@ -1,10 +1,6 @@
{
"resourceType": "ActivityDefinition",
"id": "ad-apply-example",
"text": {
"status": "generated",
"div": "<div xmlns=\"http://www.w3.org/1999/xhtml\">ActivityDefinition $apply operation example.</div>"
},
"status": "draft",
"description": "This is a test.",
"library": [
@ -29,4 +25,4 @@
"expression": "activityDefinitionApplyTest.\"Dynamic doNotPerform Setting\""
}
]
}
}

View File

@ -1,37 +0,0 @@
{
"resourceType": "ActivityDefinition",
"id": "mammogram-service-request",
"text": {
"status": "generated",
"div": "<div xmlns=\"http://www.w3.org/1999/xhtml\">Create ServiceRequest for Mammogrm Procedure</div>"
},
"status": "draft",
"description": "Create ServiceRequest for Mammogram Procedure",
"kind": "ProcedureRequest",
"code": {
"coding": [
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "77056",
"display": "Mammography; bilateral"
}
]
},
"timingTiming": {
"_event": [
{
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/cqif-basic-cqlExpression",
"valueString": "Now()"
}
]
}
]
},
"participant": [
{
"type": "practitioner"
}
]
}

View File

@ -1,96 +0,0 @@
{
"resourceType": "Patient",
"id": "Patient-6532",
"extension": [
{
"url": "http://hl7.org/fhir/us/core/StructureDefinition/us-core-race",
"valueCodeableConcept": {
"coding": [
{
"system": "http://hl7.org/fhir/v3/Race",
"code": "2106-3",
"display": "White"
}
]
}
},
{
"url": "http://hl7.org/fhir/us/core/StructureDefinition/us-core-ethnicity",
"valueCodeableConcept": {
"coding": [
{
"system": "http://hl7.org/fhir/v3/Ethnicity",
"code": "2186-5",
"display": "Not Hispanic or Latino"
}
]
}
},
{
"url": "http://hl7.org/fhir/us/core/StructureDefinition/us-core-religion",
"valueCodeableConcept": {
"coding": [
{
"system": "http://hl7.org/fhir/v3/ReligiousAffiliation",
"code": "1041",
"display": "Roman Catholic Church"
}
]
}
}
],
"identifier": [
{
"use": "official",
"type": {
"coding": [
{
"system": "http://hl7.org/fhir/identifier-type",
"code": "SB",
"display": "Social Beneficiary Identifier"
}
],
"text": "US Social Security Number"
},
"system": "http://hl7.org/fhir/sid/us-ssn",
"value": "000006532"
}
],
"active": true,
"name": [
{
"family": "Brandt",
"given": [
"Edith",
"Elaine"
]
}
],
"telecom": [
{
"system": "phone",
"value": "616-555-1082",
"use": "home"
},
{
"system": "phone",
"value": "616-555-1211",
"use": "mobile"
}
],
"gender": "female",
"birthDate": "1987-07-16",
"address": [
{
"use": "home",
"type": "postal",
"line": [
"893 N Elm Drive"
],
"city": "Grand Rapids",
"district": "Kent County",
"state": "MI",
"postalCode": "49504"
}
]
}

View File

@ -1,33 +0,0 @@
{
"resourceType": "PlanDefinition",
"id": "bcs-decision-support",
"status": "draft",
"library": {
"reference": "Library/patient-view"
},
"action": [
{
"condition": [
{
"kind": "applicability",
"language": "text/cql",
"expression": "Does Patient Qualify?"
}
],
"action": [
{
"condition": [
{
"kind": "applicability",
"language": "text/cql",
"expression": "Needs Mammogram"
}
],
"definition": {
"reference": "ActivityDefinition/mammogram-service-request"
}
}
]
}
]
}

View File

@ -1,9 +1,9 @@
{
"hookInstance": "d1577c69-dfbe-44ad-ba6d-3e05e953b2ea",
"fhirServer": "https://sb-fhir-dstu2.smarthealthit.org/smartdstu2/open",
"fhirServer": "XXXXX",
"hook": "patient-view",
"user": "Practitioner/example",
"context": [],
"patient": "Patient/Patient-6535",
"prefetch": {}
}
"context": {
"patientId": "Patient/Patient-6535"
}
}

View File

@ -1,69 +1,6 @@
{
"resourceType": "Patient",
"id": "Patient-12214",
"meta": {
"versionId": "1",
"lastUpdated": "2017-07-17T16:34:10.814+00:00"
},
"text": {
"status": "generated",
"div": "<div xmlns=\"http://www.w3.org/1999/xhtml\"><div class=\"hapiHeaderText\">2 <b>N GERIATRIC </b>Jr</div><table class=\"hapiPropertyTable\"><tbody><tr><td>Identifier</td><td>7f3672feb3b54789953e012d8aef5246</td></tr><tr><td>Address</td><td><span>202 Burlington Rd. </span><br/><span>Bedford </span><span>MA </span></td></tr><tr><td>Date of birth</td><td><span>07 May 1946</span></td></tr></tbody></table></div>"
},
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/us-core-race",
"valueCodeableConcept": {
"coding": [
{
"system": "http://hl7.org/fhir/v3/Race",
"code": "2106-3",
"display": "White"
}
]
}
},
{
"url": "http://hl7.org/fhir/StructureDefinition/us-core-ethnicity",
"valueCodeableConcept": {
"coding": [
{
"system": "http://hl7.org/fhir/v3/Ethnicity",
"code": "2186-5",
"display": "Not Hispanic or Latino"
}
]
}
},
{
"url": "http://hl7.org/fhir/StructureDefinition/us-core-religion",
"valueCodeableConcept": {
"coding": [
{
"system": "http://hl7.org/fhir/v3/ReligiousAffiliation",
"code": "1007",
"display": "Atheism"
}
]
}
}
],
"identifier": [
{
"use": "official",
"type": {
"coding": [
{
"system": "http://hl7.org/fhir/identifier-type",
"code": "SB",
"display": "Social Beneficiary Identifier"
}
],
"text": "Michigan Common Key Service Identifier"
},
"system": "http://mihin.org/fhir/cks",
"value": "7f3672feb3b54789953e012d8aef5246"
}
],
"active": false,
"name": [
{
@ -86,17 +23,6 @@
"system": "phone",
"value": "586-555-0297",
"use": "work"
},
{
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/us-core-direct",
"valueBoolean": true
}
],
"system": "email",
"value": "2.N.Geriatric@direct.mihintest.org",
"use": "home"
}
],
"gender": "male",
@ -111,4 +37,4 @@
"postalCode": "01730"
}
]
}
}

View File

@ -1,55 +1,6 @@
{
"resourceType": "Practitioner",
"id": "Practitioner-12208",
"meta": {
"versionId": "1",
"lastUpdated": "2017-07-17T16:34:10.814+00:00"
},
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/us-core-race",
"valueCodeableConcept": {
"coding": [
{
"system": "http://hl7.org/fhir/v3/Race",
"code": "2056-0",
"display": "Black"
}
]
}
},
{
"url": "http://hl7.org/fhir/StructureDefinition/us-core-ethnicity",
"valueCodeableConcept": {
"coding": [
{
"system": "http://hl7.org/fhir/v3/Ethnicity",
"code": "2186-5",
"display": "Not Hispanic or Latino"
}
]
}
},
{
"url": "http://gov.onc.fhir.extension.taxonomy",
"valueCodeableConcept": {
"coding": [
{
"system": "http://org.nucc.taxonomy",
"code": "208D00000X",
"display": "General Practice"
}
]
}
},
{
"url": "http://org.mihin.fhir.extension.electronic-service",
"valueReference": {
"reference": "ElectronicService/ElectronicService-2415",
"display": "Jay.M.Sawyer@direct.mihintest.org"
}
}
],
"identifier": [
{
"use": "official",
@ -163,11 +114,7 @@
"display": "Medical Doctor"
}
]
},
"issuer": {
"reference": "Organization/Organization-2000",
"display": "Michigan Department of Licensing and Regulatory Affairs"
}
}
]
}
}

View File

@ -1,319 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<library xmlns="urn:hl7-org:elm:r1" xmlns:t="urn:hl7-org:elm-types:r1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:fhir="http://hl7.org/fhir" xmlns:a="urn:hl7-org:cql-annotations:r1">
<identifier id="COL" version="1"/>
<schemaIdentifier id="urn:hl7-org:elm" version="r1"/>
<usings>
<def localIdentifier="System" uri="urn:hl7-org:elm-types:r1"/>
<def localIdentifier="FHIR" uri="http://hl7.org/fhir" version="1.6"/>
</usings>
<parameters>
<def name="MeasurementPeriod" accessLevel="Public">
<parameterTypeSpecifier xsi:type="IntervalTypeSpecifier">
<pointType name="t:DateTime" xsi:type="NamedTypeSpecifier"/>
</parameterTypeSpecifier>
</def>
</parameters>
<codeSystems>
<def name="CPT" id="urn:oid:2.16.840.1.113883.6.12" accessLevel="Public"/>
<def name="SNOMED-CT" id="urn:oid:2.16.840.1.113883.6.96" accessLevel="Public"/>
<def name="LOINC" id="http://loinc.org" accessLevel="Public"/>
</codeSystems>
<valueSets>
<def name="Malignant Neoplasm of Colon" id="2.16.840.1.113883.3.464.1003.108.11.1001" accessLevel="Public"/>
<def name="Total Colectomy" id="2.16.840.1.113883.3.464.1003.198.12.1019" accessLevel="Public"/>
<def name="Colonoscopy" id="2.16.840.1.113883.3.464.1003.108.12.1020" accessLevel="Public"/>
<def name="Flexible Sigmoidoscopy" id="2.16.840.1.113883.3.464.1003.198.12.1010" accessLevel="Public"/>
<def name="Fecal Occult Blood Test (FOBT)" id="2.16.840.1.113883.3.464.1003.198.12.1011" accessLevel="Public"/>
</valueSets>
<statements>
<def name="Patient" context="Patient">
<expression xsi:type="SingletonFrom">
<operand dataType="fhir:Patient" xsi:type="Retrieve"/>
</expression>
</def>
<def name="Lookback Interval One Year" context="Patient" accessLevel="Public">
<expression lowClosed="true" highClosed="true" xsi:type="Interval">
<low xsi:type="Subtract">
<operand xsi:type="Start">
<operand name="MeasurementPeriod" xsi:type="ParameterRef"/>
</operand>
<operand value="1" unit="years" xsi:type="Quantity"/>
</low>
<high xsi:type="End">
<operand name="MeasurementPeriod" xsi:type="ParameterRef"/>
</high>
</expression>
</def>
<def name="Lookback Interval Five Years" context="Patient" accessLevel="Public">
<expression lowClosed="true" highClosed="true" xsi:type="Interval">
<low xsi:type="Subtract">
<operand xsi:type="Start">
<operand name="MeasurementPeriod" xsi:type="ParameterRef"/>
</operand>
<operand value="5" unit="years" xsi:type="Quantity"/>
</low>
<high xsi:type="End">
<operand name="MeasurementPeriod" xsi:type="ParameterRef"/>
</high>
</expression>
</def>
<def name="Lookback Interval Ten Years" context="Patient" accessLevel="Public">
<expression lowClosed="true" highClosed="true" xsi:type="Interval">
<low xsi:type="Subtract">
<operand xsi:type="Start">
<operand name="MeasurementPeriod" xsi:type="ParameterRef"/>
</operand>
<operand value="10" unit="years" xsi:type="Quantity"/>
</low>
<high xsi:type="End">
<operand name="MeasurementPeriod" xsi:type="ParameterRef"/>
</high>
</expression>
</def>
<def name="In Demographic" context="Patient" accessLevel="Public">
<expression xsi:type="GreaterOrEqual">
<operand precision="Year" xsi:type="CalculateAgeAt">
<operand path="birthDate.value" xsi:type="Property">
<source name="Patient" xsi:type="ExpressionRef"/>
</operand>
<operand xsi:type="Start">
<operand name="MeasurementPeriod" xsi:type="ParameterRef"/>
</operand>
</operand>
<operand valueType="t:Integer" value="50" xsi:type="Literal"/>
</expression>
</def>
<def name="Hx Colorectal Cancer" context="Patient" accessLevel="Public">
<expression xsi:type="Query">
<source alias="C">
<expression dataType="fhir:Condition" codeProperty="code" xsi:type="Retrieve">
<codes name="Malignant Neoplasm of Colon" xsi:type="ValueSetRef"/>
</expression>
</source>
<where xsi:type="And">
<operand xsi:type="Equal">
<operand path="value" xsi:type="Property">
<source path="clinicalStatus" scope="C" xsi:type="Property"/>
</operand>
<operand valueType="t:String" value="active" xsi:type="Literal"/>
</operand>
<operand xsi:type="Equal">
<operand path="value" xsi:type="Property">
<source path="verificationStatus" scope="C" xsi:type="Property"/>
</operand>
<operand valueType="t:String" value="confirmed" xsi:type="Literal"/>
</operand>
</where>
</expression>
</def>
<def name="Hx Total Colectomy" context="Patient" accessLevel="Public">
<expression xsi:type="Query">
<source alias="T">
<expression dataType="fhir:Procedure" codeProperty="code" xsi:type="Retrieve">
<codes name="Total Colectomy" xsi:type="ValueSetRef"/>
</expression>
</source>
<where xsi:type="Equal">
<operand path="value" xsi:type="Property">
<source path="status" scope="T" xsi:type="Property"/>
</operand>
<operand valueType="t:String" value="completed" xsi:type="Literal"/>
</where>
</expression>
</def>
<def name="Colonoscopy Performed" context="Patient" accessLevel="Public">
<expression xsi:type="Query">
<source alias="C">
<expression dataType="fhir:Procedure" codeProperty="code" xsi:type="Retrieve">
<codes name="Colonoscopy" xsi:type="ValueSetRef"/>
</expression>
</source>
<where xsi:type="And">
<operand xsi:type="Equal">
<operand path="value" xsi:type="Property">
<source path="status" scope="C" xsi:type="Property"/>
</operand>
<operand valueType="t:String" value="completed" xsi:type="Literal"/>
</operand>
<operand xsi:type="In">
<operand path="value" xsi:type="Property">
<source path="end" xsi:type="Property">
<source path="performedPeriod" scope="C" xsi:type="Property"/>
</source>
</operand>
<operand name="Lookback Interval Ten Years" xsi:type="ExpressionRef"/>
</operand>
</where>
</expression>
</def>
<def name="Colonoscopy Results" context="Patient" accessLevel="Public">
<expression xsi:type="Query">
<source alias="C">
<expression dataType="fhir:Observation" codeProperty="code" xsi:type="Retrieve">
<codes name="Colonoscopy" xsi:type="ValueSetRef"/>
</expression>
</source>
<where xsi:type="And">
<operand xsi:type="Equal">
<operand path="value" xsi:type="Property">
<source path="status" scope="C" xsi:type="Property"/>
</operand>
<operand valueType="t:String" value="final" xsi:type="Literal"/>
</operand>
<operand xsi:type="In">
<operand path="value" xsi:type="Property">
<source path="effectiveDateTime" scope="C" xsi:type="Property"/>
</operand>
<operand name="Lookback Interval Ten Years" xsi:type="ExpressionRef"/>
</operand>
</where>
</expression>
</def>
<def name="Sigmoidoscopy Procedure" context="Patient" accessLevel="Public">
<expression xsi:type="Query">
<source alias="S">
<expression dataType="fhir:Procedure" codeProperty="code" xsi:type="Retrieve">
<codes name="Flexible Sigmoidoscopy" xsi:type="ValueSetRef"/>
</expression>
</source>
<where xsi:type="And">
<operand xsi:type="Equal">
<operand path="value" xsi:type="Property">
<source path="status" scope="S" xsi:type="Property"/>
</operand>
<operand valueType="t:String" value="completed" xsi:type="Literal"/>
</operand>
<operand xsi:type="In">
<operand path="value" xsi:type="Property">
<source path="end" xsi:type="Property">
<source path="performedPeriod" scope="S" xsi:type="Property"/>
</source>
</operand>
<operand name="Lookback Interval Five Years" xsi:type="ExpressionRef"/>
</operand>
</where>
</expression>
</def>
<def name="Sigmoidoscopy Observation" context="Patient" accessLevel="Public">
<expression xsi:type="Query">
<source alias="O">
<expression dataType="fhir:Observation" codeProperty="code" xsi:type="Retrieve">
<codes name="Flexible Sigmoidoscopy" xsi:type="ValueSetRef"/>
</expression>
</source>
<where xsi:type="And">
<operand xsi:type="Equal">
<operand path="value" xsi:type="Property">
<source path="status" scope="O" xsi:type="Property"/>
</operand>
<operand valueType="t:String" value="final" xsi:type="Literal"/>
</operand>
<operand xsi:type="In">
<operand path="value" xsi:type="Property">
<source path="effectiveDateTime" scope="O" xsi:type="Property"/>
</operand>
<operand name="Lookback Interval Five Years" xsi:type="ExpressionRef"/>
</operand>
</where>
</expression>
</def>
<def name="FOBT Procedure" context="Patient" accessLevel="Public">
<expression xsi:type="Query">
<source alias="F">
<expression dataType="fhir:Procedure" codeProperty="code" xsi:type="Retrieve">
<codes name="Fecal Occult Blood Test (FOBT)" xsi:type="ValueSetRef"/>
</expression>
</source>
<where xsi:type="And">
<operand xsi:type="Equal">
<operand path="value" xsi:type="Property">
<source path="status" scope="F" xsi:type="Property"/>
</operand>
<operand valueType="t:String" value="completed" xsi:type="Literal"/>
</operand>
<operand xsi:type="In">
<operand path="value" xsi:type="Property">
<source path="end" xsi:type="Property">
<source path="performedPeriod" scope="F" xsi:type="Property"/>
</source>
</operand>
<operand name="Lookback Interval One Year" xsi:type="ExpressionRef"/>
</operand>
</where>
</expression>
</def>
<def name="FOBT Observation" context="Patient" accessLevel="Public">
<expression xsi:type="Query">
<source alias="O">
<expression dataType="fhir:Observation" codeProperty="code" xsi:type="Retrieve">
<codes name="Fecal Occult Blood Test (FOBT)" xsi:type="ValueSetRef"/>
</expression>
</source>
<where xsi:type="And">
<operand xsi:type="Equal">
<operand path="value" xsi:type="Property">
<source path="status" scope="O" xsi:type="Property"/>
</operand>
<operand valueType="t:String" value="final" xsi:type="Literal"/>
</operand>
<operand xsi:type="In">
<operand path="value" xsi:type="Property">
<source path="effectiveDateTime" scope="O" xsi:type="Property"/>
</operand>
<operand name="Lookback Interval One Year" xsi:type="ExpressionRef"/>
</operand>
</where>
</expression>
</def>
<def name="Colonoscopy Procedure" context="Patient" accessLevel="Public">
<expression xsi:type="Query">
<source alias="C">
<expression dataType="fhir:Procedure" codeProperty="code" xsi:type="Retrieve">
<codes name="Colonoscopy" xsi:type="ValueSetRef"/>
</expression>
</source>
<where xsi:type="And">
<operand xsi:type="Equal">
<operand path="value" xsi:type="Property">
<source path="status" scope="C" xsi:type="Property"/>
</operand>
<operand valueType="t:String" value="completed" xsi:type="Literal"/>
</operand>
<operand xsi:type="In">
<operand path="value" xsi:type="Property">
<source path="end" xsi:type="Property">
<source path="performedPeriod" scope="C" xsi:type="Property"/>
</source>
</operand>
<operand name="Lookback Interval Ten Years" xsi:type="ExpressionRef"/>
</operand>
</where>
</expression>
</def>
<def name="Colonoscopy Observation" context="Patient" accessLevel="Public">
<expression xsi:type="Query">
<source alias="O">
<expression dataType="fhir:Observation" codeProperty="code" xsi:type="Retrieve">
<codes name="Colonoscopy" xsi:type="ValueSetRef"/>
</expression>
</source>
<where xsi:type="And">
<operand xsi:type="Equal">
<operand path="value" xsi:type="Property">
<source path="status" scope="O" xsi:type="Property"/>
</operand>
<operand valueType="t:String" value="final" xsi:type="Literal"/>
</operand>
<operand xsi:type="In">
<operand path="value" xsi:type="Property">
<source path="effectiveDateTime" scope="O" xsi:type="Property"/>
</operand>
<operand name="Lookback Interval Ten Years" xsi:type="ExpressionRef"/>
</operand>
</where>
</expression>
</def>
</statements>
</library>

View File

@ -1,138 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Measure xmlns="http://hl7.org/fhir" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://hl7.org/fhir ../../schema/measure.xsd">
<id value="col"/>
<text>
<status value="additional"/>
<div xmlns="http://www.w3.org/1999/xhtml">
Cohort definition for Colorectal Cancer Screening.
</div>
</text>
<identifier>
<use value="official"/>
<system value="http://hl7.org/fhir/cqi/ecqm/Measure/Identifier/payer-extract"/>
<value value="COL"/>
</identifier>
<version value="1.0.0"/>
<title value="Colorectal Cancer Screening. Cohort Definition"/>
<status value="active"/>
<experimental value="true"/>
<description value="Colorectal Cancer Screening. Cohort Definition"/>
<topic>
<coding>
<system value="http://hl7.org/fhir/c80-doc-typecodes"/>
<code value="57024-2"/>
</coding>
</topic>
<library>
<reference value="Library/col-logic"/>
</library>
<scoring value="cohort"/>
<group>
<identifier>
<value value="in-demographic"/>
</identifier>
<population>
<type value="initial-population"/>
<identifier>
<value value="in-demographic"/>
</identifier>
<criteria value="In Demographic"/>
</population>
</group>
<group>
<identifier>
<value value="history-of-colorectal-cancer"/>
</identifier>
<population>
<type value="initial-population"/>
<identifier>
<value value="history-of-colorectal-cancer"/>
</identifier>
<criteria value="Hx Colorectal Cancer"/>
</population>
</group>
<group>
<identifier>
<value value="history-of-total-colectomy"/>
</identifier>
<population>
<type value="initial-population"/>
<identifier>
<value value="history-of-total-colectomy"/>
</identifier>
<criteria value="Hx Total Colectomy"/>
</population>
</group>
<group>
<identifier>
<value value="colonoscopy-performed"/>
</identifier>
<population>
<type value="initial-population"/>
<identifier>
<value value="colonoscopy-performed"/>
</identifier>
<criteria value="Colonoscopy Performed"/>
</population>
</group>
<group>
<identifier>
<value value="colonoscopy-results"/>
</identifier>
<population>
<type value="initial-population"/>
<identifier>
<value value="colonoscopy-results"/>
</identifier>
<criteria value="Colonoscopy Results"/>
</population>
</group>
<group>
<identifier>
<value value="sigmoidoscopy-procedure"/>
</identifier>
<population>
<type value="initial-population"/>
<identifier>
<value value="sigmoidoscopy-procedure"/>
</identifier>
<criteria value="Sigmoidoscopy Procedure"/>
</population>
</group>
<group>
<identifier>
<value value="sigmoidoscopy-observation"/>
</identifier>
<population>
<type value="initial-population"/>
<identifier>
<value value="sigmoidoscopy-observation"/>
</identifier>
<criteria value="Sigmoidoscopy Observation"/>
</population>
</group>
<group>
<identifier>
<value value="fobt-procedure"/>
</identifier>
<population>
<type value="initial-population"/>
<identifier>
<value value="fobt-procedure"/>
</identifier>
<criteria value="FOBT Procedure"/>
</population>
</group>
<group>
<identifier>
<value value="fobt-observation"/>
</identifier>
<population>
<type value="initial-population"/>
<identifier>
<value value="fobt-observation"/>
</identifier>
<criteria value="FOBT Observation"/>
</population>
</group>
</Measure>

View File

@ -1,49 +0,0 @@
{
"resourceType": "Condition",
"id": "Condition-13",
"meta": {
"versionId": "1",
"lastUpdated": "2017-09-09T21:52:17.035-06:00"
},
"extension": [
{
"url": "http://mihin.org/fhir/templateId",
"valueString": "2.16.840.1.113883.10.20.22.4.3"
},
{
"url": "http://mihin.org/fhir/templateId",
"valueString": "2.16.840.1.113883.10.20.24.3.137"
}
],
"clinicalStatus": "active",
"verificationStatus": "confirmed",
"category": [
{
"coding": [
{
"system": "http://hl7.org/fhir/condition-category",
"code": "diagnosis",
"display": "Diagnosis"
}
],
"text": "This is a judgment made by a healthcare provider that the patient has a particular disease or condition"
}
],
"code": {
"coding": [
{
"system": "http://snomed.info/sct",
"code": "363414004"
}
],
"text": "Diagnosis: Malignant Neoplasm Of Colon"
},
"subject": {
"reference": "Patient/Patient-12214",
"display": "2 N Geriatric Jr"
},
"asserter": {
"reference": "Practitioner/Practitioner-12208",
"display": "Jay McCann Sawyer MD"
}
}

View File

@ -1,158 +0,0 @@
{
"resourceType": "Measure",
"id": "col",
"meta": {
"versionId": "1",
"lastUpdated": "2017-09-09T21:26:03.890-06:00"
},
"text": {
"status": "additional",
"div": "<div xmlns=\"http://www.w3.org/1999/xhtml\">\n Cohort definition for Colorectal Cancer Screening.\n </div>"
},
"identifier": [
{
"use": "official",
"system": "http://hl7.org/fhir/cqi/ecqm/Measure/Identifier/payer-extract",
"value": "COL"
}
],
"version": "1.0.0",
"title": "Colorectal Cancer Screening. Cohort Definition",
"status": "active",
"experimental": true,
"description": "Colorectal Cancer Screening. Cohort Definition",
"topic": [
{
"coding": [
{
"system": "http://hl7.org/fhir/c80-doc-typecodes",
"code": "57024-2"
}
]
}
],
"library": [
{
"reference": "Library/col-logic"
}
],
"group": [
{
"identifier": {
"value": "in-demographic"
},
"population": [
{
"identifier": {
"value": "in-demographic"
},
"criteria": "In Demographic"
}
]
},
{
"identifier": {
"value": "history-of-colorectal-cancer"
},
"population": [
{
"identifier": {
"value": "history-of-colorectal-cancer"
},
"criteria": "Hx Colorectal Cancer"
}
]
},
{
"identifier": {
"value": "history-of-total-colectomy"
},
"population": [
{
"identifier": {
"value": "history-of-total-colectomy"
},
"criteria": "Hx Total Colectomy"
}
]
},
{
"identifier": {
"value": "colonoscopy-performed"
},
"population": [
{
"identifier": {
"value": "colonoscopy-performed"
},
"criteria": "Colonoscopy Performed"
}
]
},
{
"identifier": {
"value": "colonoscopy-results"
},
"population": [
{
"identifier": {
"value": "colonoscopy-results"
},
"criteria": "Colonoscopy Results"
}
]
},
{
"identifier": {
"value": "sigmoidoscopy-procedure"
},
"population": [
{
"identifier": {
"value": "sigmoidoscopy-procedure"
},
"criteria": "Sigmoidoscopy Procedure"
}
]
},
{
"identifier": {
"value": "sigmoidoscopy-observation"
},
"population": [
{
"identifier": {
"value": "sigmoidoscopy-observation"
},
"criteria": "Sigmoidoscopy Observation"
}
]
},
{
"identifier": {
"value": "fobt-procedure"
},
"population": [
{
"identifier": {
"value": "fobt-procedure"
},
"criteria": "FOBT Procedure"
}
]
},
{
"identifier": {
"value": "fobt-observation"
},
"population": [
{
"identifier": {
"value": "fobt-observation"
},
"criteria": "FOBT Observation"
}
]
}
]
}

View File

@ -1,68 +0,0 @@
{
"resourceType": "Procedure",
"id": "Procedure-9",
"meta": {
"versionId": "1",
"lastUpdated": "2017-09-09T21:52:35.933-06:00"
},
"extension": [
{
"url": "http://mihin.org/fhir/templateId",
"valueString": "2.16.840.1.113883.10.20.24.3.64"
},
{
"url": "http://mihin.org/fhir/templateId",
"valueString": "2.16.840.1.113883.10.20.22.4.14"
}
],
"identifier": [
{
"system": "http://hl7.org/fhir/identifier",
"value": "1.3.6.1.4.1.115:579f4eb5aeac500a550c5c7b"
}
],
"status": "completed",
"category": {
"coding": [
{
"system": "http://snomed.info/sct",
"code": "387713003",
"display": "Surgical Procedure"
}
]
},
"code": {
"coding": [
{
"system": "http://snomed.info/sct",
"code": "36192008"
}
],
"text": "Procedure, Performed: Total Colectomy"
},
"subject": {
"reference": "Patient/Patient-12214",
"display": "2 N Geriatric Jr"
},
"performedPeriod": {
"start": "2010-10-12T06:00:00-04:00",
"end": "2010-10-12T08:15:00-04:00"
},
"performer": [
{
"role": {
"coding": [
{
"system": "http://hl7.org/fhir/ValueSet/performer-role",
"code": "112247003",
"display": "Medical doctor (occupation)"
}
]
},
"actor": {
"reference": "Practitioner/Practitioner-12208",
"display": "Jay McCann Sawyer MD"
}
}
]
}

View File

@ -1,416 +0,0 @@
{
"resourceType": "ValueSet",
"id": "2.16.840.1.113883.3.464.1003.108.11.1001",
"meta": {
"versionId": "3",
"lastUpdated": "2017-07-25T09:54:33.579+00:00"
},
"url": "http://measure.eval.kanvix.com/cqf-ruler/baseDstu3/Valueset/2.16.840.1.113883.3.464.1003.108.11.1001",
"name": "Malignant Neoplasm of Colon (SNOMED CT) eCQM",
"status": "active",
"compose": {
"include": [
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"concept": [
{
"code": "187758006"
},
{
"code": "109838007"
},
{
"code": "1701000119104"
},
{
"code": "187757001"
},
{
"code": "269533000"
},
{
"code": "269544008"
},
{
"code": "285312008"
},
{
"code": "285611007"
},
{
"code": "301756000"
},
{
"code": "312111009"
},
{
"code": "312112002"
},
{
"code": "312113007"
},
{
"code": "312114001"
},
{
"code": "312115000"
},
{
"code": "314965007"
},
{
"code": "315058005"
},
{
"code": "363406005"
},
{
"code": "363407001"
},
{
"code": "363408006"
},
{
"code": "363409003"
},
{
"code": "363410008"
},
{
"code": "363412000"
},
{
"code": "363413005"
},
{
"code": "363414004"
},
{
"code": "363510005"
},
{
"code": "425178004"
},
{
"code": "449218003"
},
{
"code": "93683002"
},
{
"code": "93761005"
},
{
"code": "93771007"
},
{
"code": "93826009"
},
{
"code": "93980002"
},
{
"code": "94006002"
},
{
"code": "94072004"
},
{
"code": "94105000"
},
{
"code": "94179005"
},
{
"code": "94260004"
},
{
"code": "94271003"
},
{
"code": "94328005"
},
{
"code": "94509004"
},
{
"code": "94538001"
},
{
"code": "94604000"
},
{
"code": "94643001"
}
]
}
]
},
"expansion": {
"identifier": "http://open-api2.hspconsortium.org/payerextract/data/ValueSet/2.16.840.1.113883.3.464.1003.108.11.1001",
"timestamp": "2016-09-19T14:05:21.939-04:00",
"total": 43,
"offset": 0,
"contains": [
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "425178004",
"display": "Adenocarcinoma of rectosigmoid junction"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "301756000",
"display": "Adenocarcinoma of sigmoid colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "312111009",
"display": "Carcinoma of ascending colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "269533000",
"display": "Carcinoma of colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "312113007",
"display": "Carcinoma of descending colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "312114001",
"display": "Carcinoma of hepatic flexure"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "285312008",
"display": "Carcinoma of sigmoid colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "312115000",
"display": "Carcinoma of splenic flexure"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "269544008",
"display": "Carcinoma of the rectosigmoid junction"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "312112002",
"display": "Carcinoma of transverse colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "315058005",
"display": "Lynch syndrome"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "314965007",
"display": "Local recurrence of malignant tumor of colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "449218003",
"display": "Lymphoma of sigmoid colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "187758006",
"display": "Malignant neoplasm of other specified sites of colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "187757001",
"display": "Malignant neoplasm, overlapping lesion of colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "363412000",
"display": "Malignant tumor of ascending colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "363406005",
"display": "Malignant tumor of colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "363409003",
"display": "Malignant tumor of descending colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "363407001",
"display": "Malignant tumor of hepatic flexure"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "363510005",
"display": "Malignant tumor of large intestine"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "363414004",
"display": "Malignant tumor of rectosigmoid junction"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "363410008",
"display": "Malignant tumor of sigmoid colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "363413005",
"display": "Malignant tumor of splenic flexure"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "363408006",
"display": "Malignant tumor of transverse colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "285611007",
"display": "Metastasis to colon of unknown primary"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "109838007",
"display": "Overlapping malignant neoplasm of colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "1701000119104",
"display": "Primary adenocarcinoma of colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "93683002",
"display": "Primary malignant neoplasm of ascending colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "93761005",
"display": "Primary malignant neoplasm of colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "93771007",
"display": "Primary malignant neoplasm of descending colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "93826009",
"display": "Primary malignant neoplasm of hepatic flexure of colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "93980002",
"display": "Primary malignant neoplasm of rectosigmoid junction"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "94006002",
"display": "Primary malignant neoplasm of sigmoid colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "94072004",
"display": "Primary malignant neoplasm of splenic flexure of colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "94105000",
"display": "Primary malignant neoplasm of transverse colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "94179005",
"display": "Secondary malignant neoplasm of ascending colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "94260004",
"display": "Secondary malignant neoplasm of colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "94271003",
"display": "Secondary malignant neoplasm of descending colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "94328005",
"display": "Secondary malignant neoplasm of hepatic flexure of colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "94509004",
"display": "Secondary malignant neoplasm of rectosigmoid junction"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "94538001",
"display": "Secondary malignant neoplasm of sigmoid colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "94604000",
"display": "Secondary malignant neoplasm of splenic flexure of colon"
},
{
"system": "http://snomed.info/sct",
"version": "2015.03.14AB",
"code": "94643001",
"display": "Secondary malignant neoplasm of transverse colon"
}
]
}
}

View File

@ -1,181 +0,0 @@
{
"resourceType": "ValueSet",
"id": "2.16.840.1.113883.3.464.1003.198.12.1019",
"meta": {
"versionId": "3",
"lastUpdated": "2017-07-25T09:54:33.579+00:00"
},
"url": "http://measure.eval.kanvix.com/cql-measure-processor/baseDstu3/Valueset/2.16.840.1.113883.3.464.1003.198.12.1019 ",
"name": "Total Colectomy eMeasure",
"compose": {
"include": [
{
"system": "http://www.ama-assn.org/go/cpt",
"version": "2016.1.15AA",
"concept": [
{
"code": "44156"
},
{
"code": "44158"
},
{
"code": "44157"
},
{
"code": "44155"
},
{
"code": "44151"
},
{
"code": "44150"
},
{
"code": "44211"
},
{
"code": "44212"
},
{
"code": "44210"
},
{
"code": "44153"
},
{
"code": "44152"
}
]
},
{
"system": "http://snomed.info/sct",
"version": "2015.09.15AA",
"filter": [
{
"property": "concept",
"op": "is-a",
"value": "26390003"
}
]
}
]
},
"expansion": {
"timestamp": "2016-09-20T12:32:19.296-04:00",
"total": 22,
"offset": 0,
"contains": [
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44156",
"display": "Colectomy, total, abdominal, with proctectomy; with continent ileostomy"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44158",
"display": "Colectomy, total, abdominal, with proctectomy; with ileoanal anastomosis, creation of ileal reservoir (S or J), includes loop ileostomy, and rectal mucosectomy, when performed"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44157",
"display": "Colectomy, total, abdominal, with proctectomy; with ileoanal anastomosis, includes loop ileostomy, and rectal mucosectomy, when performed"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44155",
"display": "Colectomy, total, abdominal, with proctectomy; with ileostomy"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44151",
"display": "Colectomy, total, abdominal, without proctectomy; with continent ileostomy"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44150",
"display": "Colectomy, total, abdominal, without proctectomy; with ileostomy or ileoproctostomy"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44153",
"display": "Colectomy, total, abdominal, without proctectomy; with rectal mucosectomy, ileoanal anastomosis, creation of ileal reservoir (S or J), with or without loop ileostomy"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44152",
"display": "Colectomy, total, abdominal, without proctectomy; with rectal mucosectomy, ileoanal anastomosis, with or without loop ileostomy"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44211",
"display": "Laparoscopy, surgical; colectomy, total, abdominal, with proctectomy, with ileoanal anastomosis, creation of ileal reservoir (S or J), with loop ileostomy, includes rectal mucosectomy, when performed"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44212",
"display": "Laparoscopy, surgical; colectomy, total, abdominal, with proctectomy, with ileostomy"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44210",
"display": "Laparoscopy, surgical; colectomy, total, abdominal, without proctectomy, with ileostomy or ileoproctostomy"
},
{
"system": "http://snomed.info/sct",
"code": "303401008",
"display": "Parks panproctocolectomy, anastomosis of ileum to anus and creation of pouch"
},
{
"system": "http://snomed.info/sct",
"code": "235331003",
"display": "Restorative proctocolectomy"
},
{
"system": "http://snomed.info/sct",
"code": "36192008",
"display": "Total abdominal colectomy with ileoproctostomy"
},
{
"system": "http://snomed.info/sct",
"code": "456004",
"display": "Total abdominal colectomy with ileostomy"
},
{
"system": "http://snomed.info/sct",
"code": "44751009",
"display": "Total abdominal colectomy with proctectomy and continent ileostomy"
},
{
"system": "http://snomed.info/sct",
"code": "31130001",
"display": "Total abdominal colectomy with proctectomy and ileostomy"
},
{
"system": "http://snomed.info/sct",
"code": "80294005",
"display": "Total abdominal colectomy with rectal mucosectomy and ileoanal anastomosis"
},
{
"system": "http://snomed.info/sct",
"code": "26390003",
"display": "Total colectomy"
},
{
"system": "http://snomed.info/sct",
"code": "307666008",
"display": "Total colectomy and ileostomy"
},
{
"system": "http://snomed.info/sct",
"code": "307669001",
"display": "Total colectomy, ileostomy and closure of rectal stump"
},
{
"system": "http://snomed.info/sct",
"code": "307667004",
"display": "Total colectomy, ileostomy and rectal mucous fistula"
}
]
}
}

View File

@ -1,421 +0,0 @@
{
"resourceType": "ValueSet",
"id": "2.16.840.1.113883.3.464.1003.108.12.1020",
"meta": {
"versionId": "3",
"lastUpdated": "2017-07-25T09:54:33.579+00:00"
},
"url": "http://measure.eval.kanvix.com/cql-measure-processor/baseDstu3/Valueset/2.16.840.1.113883.3.464.1003.108.12.1020",
"name": "Colonoscopy eMeasure",
"compose": {
"include": [
{
"system": "http://www.ama-assn.org/go/cpt",
"version": "2015.1.14AB",
"concept": [
{
"code": "44388"
},
{
"code": "44393"
},
{
"code": "44389"
},
{
"code": "44391"
},
{
"code": "44390"
},
{
"code": "44392"
},
{
"code": "44394"
},
{
"code": "44397"
},
{
"code": "45378"
},
{
"code": "45383"
},
{
"code": "45380"
},
{
"code": "45382"
},
{
"code": "45386"
},
{
"code": "45381"
},
{
"code": "45391"
},
{
"code": "45379"
},
{
"code": "45384"
},
{
"code": "45385"
},
{
"code": "45387"
},
{
"code": "45392"
},
{
"code": "45355"
},
{
"code": "44401"
},
{
"code": "44402"
},
{
"code": "44403"
},
{
"code": "44404"
},
{
"code": "44405"
},
{
"code": "44406"
},
{
"code": "44407"
},
{
"code": "44408"
},
{
"code": "45388"
},
{
"code": "45389"
},
{
"code": "45390"
},
{
"code": "45393"
},
{
"code": "45398"
}
]
},
{
"system": "http://snomed.info/sct",
"version": "2014.07.14AA",
"filter": [
{
"property": "concept",
"op": "is-a",
"value": "73761001"
}
]
},
{
"system": "http://snomed.info/sct",
"version": "2014.07.14AA",
"filter": [
{
"property": "concept",
"op": "is-a",
"value": "174184006"
}
]
}
]
},
"expansion": {
"timestamp": "2016-09-20T13:07:55.271-04:00",
"total": 54,
"offset": 0,
"contains": [
{
"system": "http://snomed.info/sct",
"code": "310634005",
"display": "Check colonoscopy"
},
{
"system": "http://snomed.info/sct",
"code": "73761001",
"display": "Colonoscopy"
},
{
"system": "http://snomed.info/sct",
"code": "446745002",
"display": "Colonoscopy and biopsy of colon"
},
{
"system": "http://snomed.info/sct",
"code": "446521004",
"display": "Colonoscopy and excision of mucosa of colon"
},
{
"system": "http://snomed.info/sct",
"code": "447021001",
"display": "Colonoscopy and tattooing"
},
{
"system": "http://snomed.info/sct",
"code": "443998000",
"display": "Colonoscopy through colostomy with endoscopic biopsy of colon"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44388",
"display": "Colonoscopy through stoma; diagnostic, including collection of specimen(s) by brushing or washing, when performed (separate procedure)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44401",
"display": "Colonoscopy through stoma; with ablation of tumor(s), polyp(s), or other lesion(s) (includes pre-and post-dilation and guide wire passage, when performed)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44393",
"display": "Colonoscopy through stoma; with ablation of tumor(s), polyp(s), or other lesion(s) not amenable to removal by hot biopsy forceps, bipolar cautery or snare technique"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44389",
"display": "Colonoscopy through stoma; with biopsy, single or multiple"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44391",
"display": "Colonoscopy through stoma; with control of bleeding, any method"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44408",
"display": "Colonoscopy through stoma; with decompression (for pathologic distention) (eg, volvulus, megacolon), including placement of decompression tube, when performed"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44404",
"display": "Colonoscopy through stoma; with directed submucosal injection(s), any substance"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44403",
"display": "Colonoscopy through stoma; with endoscopic mucosal resection"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44402",
"display": "Colonoscopy through stoma; with endoscopic stent placement (including pre- and post-dilation and guide wire passage, when performed)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44406",
"display": "Colonoscopy through stoma; with endoscopic ultrasound examination, limited to the sigmoid, descending, transverse, or ascending colon and cecum and adjacent structures"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44390",
"display": "Colonoscopy through stoma; with removal of foreign body(s)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44392",
"display": "Colonoscopy through stoma; with removal of tumor(s), polyp(s), or other lesion(s) by hot biopsy forceps"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44394",
"display": "Colonoscopy through stoma; with removal of tumor(s), polyp(s), or other lesion(s) by snare technique"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44405",
"display": "Colonoscopy through stoma; with transendoscopic balloon dilation"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44397",
"display": "Colonoscopy through stoma; with transendoscopic stent placement (includes predilation)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "44407",
"display": "Colonoscopy through stoma; with transendoscopic ultrasound guided intramural or transmural fine needle aspiration/biopsy(s), includes endoscopic ultrasound examination limited to the sigmoid, descending, transverse, or ascending colon and cecum and adjacent structures"
},
{
"system": "http://snomed.info/sct",
"code": "12350003",
"display": "Colonoscopy with rigid sigmoidoscope through colotomy"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45383",
"display": "Colonoscopy, flexible, proximal to splenic flexure; with ablation of tumor(s), polyp(s), or other lesion(s) not amenable to removal by hot biopsy forceps, bipolar cautery or snare technique"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45387",
"display": "Colonoscopy, flexible, proximal to splenic flexure; with transendoscopic stent placement (includes predilation)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45378",
"display": "Colonoscopy, flexible; diagnostic, including collection of specimen(s) by brushing or washing, when performed (separate procedure)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45388",
"display": "Colonoscopy, flexible; with ablation of tumor(s), polyp(s), or other lesion(s) (includes pre- and post-dilation and guide wire passage, when performed)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45398",
"display": "Colonoscopy, flexible; with band ligation(s) (eg, hemorrhoids)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45380",
"display": "Colonoscopy, flexible; with biopsy, single or multiple"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45382",
"display": "Colonoscopy, flexible; with control of bleeding, any method"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45393",
"display": "Colonoscopy, flexible; with decompression (for pathologic distention) (eg, volvulus, megacolon), including placement of decompression tube, when performed"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45381",
"display": "Colonoscopy, flexible; with directed submucosal injection(s), any substance"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45390",
"display": "Colonoscopy, flexible; with endoscopic mucosal resection"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45389",
"display": "Colonoscopy, flexible; with endoscopic stent placement (includes pre- and post-dilation and guide wire passage, when performed)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45391",
"display": "Colonoscopy, flexible; with endoscopic ultrasound examination limited to the rectum, sigmoid, descending, transverse, or ascending colon and cecum, and adjacent structures"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45379",
"display": "Colonoscopy, flexible; with removal of foreign body(s)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45384",
"display": "Colonoscopy, flexible; with removal of tumor(s), polyp(s), or other lesion(s) by hot biopsy forceps"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45385",
"display": "Colonoscopy, flexible; with removal of tumor(s), polyp(s), or other lesion(s) by snare technique"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45386",
"display": "Colonoscopy, flexible; with transendoscopic balloon dilation"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45392",
"display": "Colonoscopy, flexible; with transendoscopic ultrasound guided intramural or transmural fine needle aspiration/biopsy(s), includes endoscopic ultrasound examination limited to the rectum, sigmoid, descending, transverse, or ascending colon and cecum, and adjacent structures"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45355",
"display": "Colonoscopy, rigid or flexible, transabdominal via colotomy, single or multiple"
},
{
"system": "https://www.cms.gov/Medicare/Coding/MedHCPCSGenInfo/index.html",
"code": "G0105",
"display": "Colorectal cancer screening; colonoscopy on individual at high risk"
},
{
"system": "https://www.cms.gov/Medicare/Coding/MedHCPCSGenInfo/index.html",
"code": "G0121",
"display": "Colorectal cancer screening; colonoscopy on individual not meeting criteria for high risk"
},
{
"system": "http://snomed.info/sct",
"code": "427459009",
"display": "Diagnostic endoscopic examination of colonic pouch and biopsy of colonic pouch using colonoscope"
},
{
"system": "http://snomed.info/sct",
"code": "174184006",
"display": "Diagnostic endoscopic examination on colon"
},
{
"system": "http://snomed.info/sct",
"code": "367535003",
"display": "Fiberoptic colonoscopy"
},
{
"system": "http://snomed.info/sct",
"code": "8180007",
"display": "Fiberoptic colonoscopy through colostomy"
},
{
"system": "http://snomed.info/sct",
"code": "25732003",
"display": "Fiberoptic colonoscopy with biopsy"
},
{
"system": "http://snomed.info/sct",
"code": "34264006",
"display": "Intraoperative colonoscopy"
},
{
"system": "http://snomed.info/sct",
"code": "235151005",
"display": "Limited colonoscopy"
},
{
"system": "http://snomed.info/sct",
"code": "174158000",
"display": "Open colonoscopy"
},
{
"system": "http://snomed.info/sct",
"code": "444783004",
"display": "Screening colonoscopy"
},
{
"system": "http://snomed.info/sct",
"code": "303587008",
"display": "Therapeutic colonoscopy"
},
{
"system": "http://snomed.info/sct",
"code": "235150006",
"display": "Total colonoscopy"
}
]
}
}

View File

@ -1,208 +0,0 @@
{
"resourceType": "ValueSet",
"id": "2.16.840.1.113883.3.464.1003.198.12.1010",
"meta": {
"versionId": "6",
"lastUpdated": "2017-07-25T09:54:33.579+00:00"
},
"url": "http://measure.eval.kanvix.com/cql-measure-processor/baseDstu3/Valueset/2.16.840.1.113883.3.464.1003.198.12.1010",
"name": "Flexible Sigmoidoscopy eMeasure",
"compose": {
"include": [
{
"system": "http://www.ama-assn.org/go/cpt",
"version": "2015.1.14AB",
"concept": [
{
"code": "45330"
},
{
"code": "45339"
},
{
"code": "45331"
},
{
"code": "45334"
},
{
"code": "45337"
},
{
"code": "45340"
},
{
"code": "45335"
},
{
"code": "45341"
},
{
"code": "45332"
},
{
"code": "45333"
},
{
"code": "45338"
},
{
"code": "45345"
},
{
"code": "45342"
},
{
"code": "45346"
},
{
"code": "45347"
},
{
"code": "45349"
},
{
"code": "45350"
}
]
},
{
"system": "https://www.cms.gov/Medicare/Coding/MedHCPCSGenInfo/index.html",
"version": "2016.1.15AB",
"concept": [
{
"code": "G0104"
}
]
},
{
"system": "http://snomed.info/sct",
"version": "2014.07.14AA",
"filter": [
{
"property": "concept",
"op": "is-a",
"value": "44441009"
}
]
}
]
},
"expansion": {
"timestamp": "2016-09-20T13:20:03.237-04:00",
"total": 22,
"offset": 0,
"contains": [
{
"system": "https://www.cms.gov/Medicare/Coding/MedHCPCSGenInfo/index.html",
"code": "G0104",
"display": "Colorectal cancer screening; flexible sigmoidoscopy"
},
{
"system": "http://snomed.info/sct",
"code": "425634007",
"display": "Diagnostic endoscopic examination of lower bowel and sampling for bacterial overgrowth using fiberoptic sigmoidoscope"
},
{
"system": "http://snomed.info/sct",
"code": "44441009",
"display": "Flexible fiberoptic sigmoidoscopy"
},
{
"system": "http://snomed.info/sct",
"code": "112870002",
"display": "Flexible fiberoptic sigmoidoscopy for removal of foreign body"
},
{
"system": "http://snomed.info/sct",
"code": "396226005",
"display": "Flexible fiberoptic sigmoidoscopy with biopsy"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45330",
"display": "Sigmoidoscopy, flexible; diagnostic, including collection of specimen(s) by brushing or washing, when performed (separate procedure)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45346",
"display": "Sigmoidoscopy, flexible; with ablation of tumor(s), polyp(s), or other lesion(s) (includes pre- and post-dilation and guide wire passage, when performed)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45339",
"display": "Sigmoidoscopy, flexible; with ablation of tumor(s), polyp(s), or other lesion(s) not amenable to removal by hot biopsy forceps, bipolar cautery or snare technique"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45350",
"display": "Sigmoidoscopy, flexible; with band ligation(s) (eg, hemorrhoids)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45331",
"display": "Sigmoidoscopy, flexible; with biopsy, single or multiple"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45334",
"display": "Sigmoidoscopy, flexible; with control of bleeding, any method"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45337",
"display": "Sigmoidoscopy, flexible; with decompression (for pathologic distention) (eg, volvulus, megacolon), including placement of decompression tube, when performed"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45335",
"display": "Sigmoidoscopy, flexible; with directed submucosal injection(s), any substance"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45349",
"display": "Sigmoidoscopy, flexible; with endoscopic mucosal resection"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45341",
"display": "Sigmoidoscopy, flexible; with endoscopic ultrasound examination"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45347",
"display": "Sigmoidoscopy, flexible; with placement of endoscopic stent (includes pre- and post-dilation and guide wire passage, when performed)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45332",
"display": "Sigmoidoscopy, flexible; with removal of foreign body(s)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45333",
"display": "Sigmoidoscopy, flexible; with removal of tumor(s), polyp(s), or other lesion(s) by hot biopsy forceps"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45338",
"display": "Sigmoidoscopy, flexible; with removal of tumor(s), polyp(s), or other lesion(s) by snare technique"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45340",
"display": "Sigmoidoscopy, flexible; with transendoscopic balloon dilation"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45345",
"display": "Sigmoidoscopy, flexible; with transendoscopic stent placement (includes predilation)"
},
{
"system": "http://www.ama-assn.org/go/cpt",
"code": "45342",
"display": "Sigmoidoscopy, flexible; with transendoscopic ultrasound guided intramural or transmural fine needle aspiration/biopsy(s)"
}
]
}
}

View File

@ -1,147 +0,0 @@
{
"resourceType": "ValueSet",
"id": "2.16.840.1.113883.3.464.1003.198.12.1011",
"meta": {
"versionId": "3",
"lastUpdated": "2017-07-25T09:54:33.579+00:00"
},
"url": "http://measure.eval.kanvix.com/cql-measure-processor/baseDstu3/Valueset/2.16.840.1.113883.3.464.1003.198.12.1011",
"name": "Fecal Occult Blood Test (FOBT) eMeasure",
"compose": {
"include": [
{
"system": "http://loinc.org",
"version": "2.44.13AA",
"concept": [
{
"code": "27396-1"
},
{
"code": "58453-2"
},
{
"code": "2335-8"
},
{
"code": "14563-1"
},
{
"code": "14564-9"
},
{
"code": "14565-6"
},
{
"code": "12503-9"
},
{
"code": "12504-7"
},
{
"code": "27401-9"
},
{
"code": "27925-7"
},
{
"code": "27926-5"
},
{
"code": "29771-3"
},
{
"code": "57905-2"
},
{
"code": "56490-6"
},
{
"code": "56491-4"
}
]
}
]
},
"expansion": {
"timestamp": "2016-09-20T13:32:34.390-04:00",
"total": 15,
"offset": 0,
"contains": [
{
"system": "http://loinc.org",
"code": "27396-1",
"display": "Hemoglobin.gastrointestinal [Mass/mass] in Stool"
},
{
"system": "http://loinc.org",
"code": "58453-2",
"display": "Hemoglobin.gastrointestinal [Mass/volume] in Stool by Immunologic method"
},
{
"system": "http://loinc.org",
"code": "2335-8",
"display": "Hemoglobin.gastrointestinal [Presence] in Stool"
},
{
"system": "http://loinc.org",
"code": "14563-1",
"display": "Hemoglobin.gastrointestinal [Presence] in Stool --1st specimen"
},
{
"system": "http://loinc.org",
"code": "14564-9",
"display": "Hemoglobin.gastrointestinal [Presence] in Stool --2nd specimen"
},
{
"system": "http://loinc.org",
"code": "14565-6",
"display": "Hemoglobin.gastrointestinal [Presence] in Stool --3rd specimen"
},
{
"system": "http://loinc.org",
"code": "12503-9",
"display": "Hemoglobin.gastrointestinal [Presence] in Stool --4th specimen"
},
{
"system": "http://loinc.org",
"code": "12504-7",
"display": "Hemoglobin.gastrointestinal [Presence] in Stool --5th specimen"
},
{
"system": "http://loinc.org",
"code": "27401-9",
"display": "Hemoglobin.gastrointestinal [Presence] in Stool --6th specimen"
},
{
"system": "http://loinc.org",
"code": "27925-7",
"display": "Hemoglobin.gastrointestinal [Presence] in Stool --7th specimen"
},
{
"system": "http://loinc.org",
"code": "27926-5",
"display": "Hemoglobin.gastrointestinal [Presence] in Stool --8th specimen"
},
{
"system": "http://loinc.org",
"code": "29771-3",
"display": "Hemoglobin.gastrointestinal [Presence] in Stool by Immunologic method"
},
{
"system": "http://loinc.org",
"code": "57905-2",
"display": "Hemoglobin.gastrointestinal [Presence] in Stool by Immunologic method --1st specimen"
},
{
"system": "http://loinc.org",
"code": "56490-6",
"display": "Hemoglobin.gastrointestinal [Presence] in Stool by Immunologic method --2nd specimen"
},
{
"system": "http://loinc.org",
"code": "56491-4",
"display": "Hemoglobin.gastrointestinal [Presence] in Stool by Immunologic method --3rd specimen"
}
]
}
}
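The value sets above (malignant neoplasm of colon, total colectomy, colonoscopy, flexible sigmoidoscopy, and FOBT) supply the terminology that the measure criteria test against. As an illustration only, here is a minimal sketch of reading one of them back with the HAPI generic client, assuming a DSTU3 server; the base URL is a placeholder and the id is the FOBT value set shown directly above:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.dstu3.model.ValueSet;

public class ReadFobtValueSet {
   public static void main(String[] args) {
      FhirContext ctx = FhirContext.forDstu3();
      // Placeholder base URL; point this at a real DSTU3 endpoint.
      IGenericClient client = ctx.newRestfulGenericClient("http://example.com/baseDstu3");
      ValueSet valueSet = client.read()
         .resource(ValueSet.class)
         .withId("2.16.840.1.113883.3.464.1003.198.12.1011")
         .execute();
      // Print the pre-expanded codes carried in the expansion element.
      valueSet.getExpansion().getContains().forEach(contains ->
         System.out.println(contains.getSystem() + "|" + contains.getCode() + " " + contains.getDisplay()));
   }
}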

View File

@ -1,10 +1,6 @@
{
"resourceType": "PlanDefinition",
"id": "apply-example",
"text": {
"status": "generated",
"div": "<div xmlns=\"http://www.w3.org/1999/xhtml\">General PlanDefinition $apply example resource</div>"
},
"identifier": [
{
"use": "official",
@ -56,4 +52,4 @@
]
}
]
}
}

View File

@ -4,6 +4,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.util.List;
import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.instance.model.api.IBaseResource;
import ca.uhn.fhir.model.dstu2.resource.Patient;
@ -140,5 +141,21 @@ public class AuthorizationInterceptors {
}
};
//END SNIPPET: authorizeTenantAction
//START SNIPPET: patchAll
new AuthorizationInterceptor(PolicyEnum.DENY) {
@Override
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
return new RuleBuilder()
// Authorize patch requests
.allow().patch().allRequests().andThen()
// Authorize actual writes that patch may perform
.allow().write().allResources().inCompartment("Patient", new IdType("Patient/123")).andThen()
.build();
}
};
//END SNIPPET: patchAll
}
}
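The patchAll snippet above allows PATCH requests globally and then separately allows the writes that an applied patch may perform, scoped here to a single patient compartment, because the interceptor cannot know in advance which resources a patch will touch. A minimal sketch of registering such an interceptor on a server, assuming the ca.uhn.fhir.rest.server.interceptor.auth package layout of HAPI FHIR 3.x and an illustrative RestfulServer subclass:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor;
import ca.uhn.fhir.rest.server.interceptor.auth.IAuthRule;
import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum;
import ca.uhn.fhir.rest.server.interceptor.auth.RuleBuilder;
import org.hl7.fhir.dstu3.model.IdType;

import java.util.List;

public class PatchAwareRestfulServer extends RestfulServer {

   public PatchAwareRestfulServer() {
      super(FhirContext.forDstu3());
   }

   @Override
   protected void initialize() {
      // Deny by default, then allow PATCH plus the writes an applied patch may perform.
      registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
         @Override
         public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
            return new RuleBuilder()
               .allow().patch().allRequests().andThen()
               .allow().write().allResources().inCompartment("Patient", new IdType("Patient/123")).andThen()
               .build();
         }
      });
   }
}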

View File

@ -171,6 +171,7 @@
<link>https://docs.oracle.com/javaee/7/api/</link>
</links>
<additionalparam>-Xdoclint:none</additionalparam>
<additionalJOption>-Xdoclint:none</additionalJOption>
</configuration>
</reportSet>
</reportSets>
@ -194,6 +195,7 @@
<verbose>false</verbose>
<debug>false</debug>
<additionalparam>-Xdoclint:none</additionalparam>
<additionalJOption>-Xdoclint:none</additionalJOption>
</configuration>
<executions>
<execution>

View File

@ -1,7 +1,34 @@
package ca.uhn.fhir.context;
import ca.uhn.fhir.context.api.AddProfileTagEnum;
import ca.uhn.fhir.context.support.IContextValidationSupport;
import ca.uhn.fhir.fluentpath.IFluentPath;
import ca.uhn.fhir.i18n.HapiLocalizer;
import ca.uhn.fhir.model.api.IElement;
import ca.uhn.fhir.model.api.IFhirVersion;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.view.ViewGenerator;
import ca.uhn.fhir.narrative.INarrativeGenerator;
import ca.uhn.fhir.parser.*;
import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory;
import ca.uhn.fhir.rest.client.api.IBasicClient;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.api.IRestfulClient;
import ca.uhn.fhir.rest.client.api.IRestfulClientFactory;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.ReflectionUtil;
import ca.uhn.fhir.util.VersionUtil;
import ca.uhn.fhir.validation.FhirValidator;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.io.IOException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.Map.Entry;
/*
* #%L
@ -23,30 +50,10 @@ import java.lang.reflect.Method;
* #L%
*/
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.Map.Entry;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.*;
import ca.uhn.fhir.context.api.AddProfileTagEnum;
import ca.uhn.fhir.context.support.IContextValidationSupport;
import ca.uhn.fhir.fluentpath.IFluentPath;
import ca.uhn.fhir.i18n.HapiLocalizer;
import ca.uhn.fhir.model.api.*;
import ca.uhn.fhir.model.view.ViewGenerator;
import ca.uhn.fhir.narrative.INarrativeGenerator;
import ca.uhn.fhir.parser.*;
import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory;
import ca.uhn.fhir.rest.client.api.*;
import ca.uhn.fhir.util.*;
import ca.uhn.fhir.validation.FhirValidator;
/**
* The FHIR context is the central starting point for the use of the HAPI FHIR API. It should be created once, and then
* used as a factory for various other types of objects (parsers, clients, etc.).
*
*
* <p>
* Important usage notes:
* </p>
@ -68,6 +75,7 @@ public class FhirContext {
private static final List<Class<? extends IBaseResource>> EMPTY_LIST = Collections.emptyList();
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirContext.class);
private final IFhirVersion myVersion;
private AddProfileTagEnum myAddProfileTagWhenEncoding = AddProfileTagEnum.ONLY_FOR_CUSTOM;
private volatile Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> myClassToElementDefinition = Collections.emptyMap();
private ArrayList<Class<? extends IBase>> myCustomTypes;
@ -87,14 +95,11 @@ public class FhirContext {
private volatile IRestfulClientFactory myRestfulClientFactory;
private volatile RuntimeChildUndeclaredExtensionDefinition myRuntimeChildUndeclaredExtensionDefinition;
private IContextValidationSupport<?, ?, ?, ?, ?, ?> myValidationSupport;
private final IFhirVersion myVersion;
private Map<FhirVersionEnum, Map<String, Class<? extends IBaseResource>>> myVersionToNameToResourceType = Collections.emptyMap();
/**
* @deprecated It is recommended that you use one of the static initializer methods instead
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
*/
@Deprecated
public FhirContext() {
@ -103,7 +108,7 @@ public class FhirContext {
/**
* @deprecated It is recommended that you use one of the static initializer methods instead
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
*/
@Deprecated
public FhirContext(Class<? extends IBaseResource> theResourceType) {
@ -112,7 +117,7 @@ public class FhirContext {
/**
* @deprecated It is recommended that you use one of the static initializer methods instead
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
*/
@Deprecated
public FhirContext(Class<?>... theResourceTypes) {
@ -121,7 +126,7 @@ public class FhirContext {
/**
* @deprecated It is recommended that you use one of the static initializer methods instead
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
*/
@Deprecated
public FhirContext(Collection<Class<? extends IBaseResource>> theResourceTypes) {
@ -161,7 +166,7 @@ public class FhirContext {
if (theVersion == null) {
ourLog.info("Creating new FhirContext with auto-detected version [{}]. It is recommended to explicitly select a version for future compatibility by invoking FhirContext.forDstuX()",
myVersion.getVersion().name());
myVersion.getVersion().name());
} else {
ourLog.info("Creating new FHIR context for FHIR version [{}]", myVersion.getVersion().name());
}
@ -201,13 +206,37 @@ public class FhirContext {
* When encoding resources, this setting configures the parser to include
* an entry in the resource's metadata section which indicates which profile(s) the
* resource claims to conform to. The default is {@link AddProfileTagEnum#ONLY_FOR_CUSTOM}.
*
*
* @see #setAddProfileTagWhenEncoding(AddProfileTagEnum) for more information
*/
public AddProfileTagEnum getAddProfileTagWhenEncoding() {
return myAddProfileTagWhenEncoding;
}
/**
* When encoding resources, this setting configures the parser to include
* an entry in the resource's metadata section which indicates which profile(s) the
* resource claims to conform to. The default is {@link AddProfileTagEnum#ONLY_FOR_CUSTOM}.
* <p>
* This feature is intended for situations where custom resource types are being used,
* avoiding the need to manually add profile declarations for these custom types.
* </p>
* <p>
* See <a href="http://jamesagnew.gihhub.io/hapi-fhir/doc_extensions.html">Profiling and Extensions</a>
* for more information on using custom types.
* </p>
* <p>
* Note that this feature automatically adds the profile, but leaves any profile tags
* which have been manually added in place as well.
* </p>
*
* @param theAddProfileTagWhenEncoding The add profile mode (must not be <code>null</code>)
*/
public void setAddProfileTagWhenEncoding(AddProfileTagEnum theAddProfileTagWhenEncoding) {
Validate.notNull(theAddProfileTagWhenEncoding, "theAddProfileTagWhenEncoding must not be null");
myAddProfileTagWhenEncoding = theAddProfileTagWhenEncoding;
}
Collection<RuntimeResourceDefinition> getAllResourceDefinitions() {
validateInitialized();
return myNameToResourceDefinition.values();
@ -215,7 +244,7 @@ public class FhirContext {
/**
* Returns the default resource type for the given profile
*
*
* @see #setDefaultTypeForProfile(String, Class)
*/
public Class<? extends IBaseResource> getDefaultTypeForProfile(String theProfile) {
@ -249,7 +278,9 @@ public class FhirContext {
return myNameToElementDefinition.get(theElementName.toLowerCase());
}
/** For unit tests only */
/**
* For unit tests only
*/
int getElementDefinitionCount() {
validateInitialized();
return myClassToElementDefinition.size();
@ -274,20 +305,43 @@ public class FhirContext {
return myLocalizer;
}
/**
* This feature is not yet in its final state and should be considered an internal part of HAPI for now - use with
* caution
*/
public void setLocalizer(HapiLocalizer theMessages) {
myLocalizer = theMessages;
}
public INarrativeGenerator getNarrativeGenerator() {
return myNarrativeGenerator;
}
public void setNarrativeGenerator(INarrativeGenerator theNarrativeGenerator) {
myNarrativeGenerator = theNarrativeGenerator;
}
/**
* Returns the parser options object which will be used to supply default
* options to newly created parsers
*
*
* @return The parser options - Will not return <code>null</code>
*/
public ParserOptions getParserOptions() {
return myParserOptions;
}
/**
* Sets the parser options object which will be used to supply default
* options to newly created parsers
*
* @param theParserOptions The parser options object - Must not be <code>null</code>
*/
public void setParserOptions(ParserOptions theParserOptions) {
Validate.notNull(theParserOptions, "theParserOptions must not be null");
myParserOptions = theParserOptions;
}
/**
* Get the configured performance options
*/
@ -295,6 +349,32 @@ public class FhirContext {
return myPerformanceOptions;
}
// /**
// * Return an unmodifiable collection containing all known resource definitions
// */
// public Collection<RuntimeResourceDefinition> getResourceDefinitions() {
//
// Set<Class<? extends IBase>> datatypes = Collections.emptySet();
// Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> existing = Collections.emptyMap();
// HashMap<String, Class<? extends IBaseResource>> types = new HashMap<String, Class<? extends IBaseResource>>();
// ModelScanner.scanVersionPropertyFile(datatypes, types, myVersion.getVersion(), existing);
// for (int next : types.)
//
// return Collections.unmodifiableCollection(myIdToResourceDefinition.values());
// }
/**
* Sets the configured performance options
*
* @see PerformanceOptionsEnum for a list of available options
*/
public void setPerformanceOptions(Collection<PerformanceOptionsEnum> theOptions) {
myPerformanceOptions.clear();
if (theOptions != null) {
myPerformanceOptions.addAll(theOptions);
}
}
/**
* Returns the scanned runtime model for the given type. This is an advanced feature which is generally only needed
* for extending the core library.
@ -359,8 +439,12 @@ public class FhirContext {
* <p>
* Note that this method is case insensitive!
* </p>
*
* @throws DataFormatException If the resource name is not known
*/
public RuntimeResourceDefinition getResourceDefinition(String theResourceName) {
// Multiple spots in HAPI FHIR and Smile CDR depend on DataFormatException being
// thrown by this method, don't change that.
public RuntimeResourceDefinition getResourceDefinition(String theResourceName) throws DataFormatException {
validateInitialized();
Validate.notBlank(theResourceName, "theResourceName must not be blank");
@ -380,20 +464,6 @@ public class FhirContext {
return retVal;
}
// /**
// * Return an unmodifiable collection containing all known resource definitions
// */
// public Collection<RuntimeResourceDefinition> getResourceDefinitions() {
//
// Set<Class<? extends IBase>> datatypes = Collections.emptySet();
// Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> existing = Collections.emptyMap();
// HashMap<String, Class<? extends IBaseResource>> types = new HashMap<String, Class<? extends IBaseResource>>();
// ModelScanner.scanVersionPropertyFile(datatypes, types, myVersion.getVersion(), existing);
// for (int next : types.)
//
// return Collections.unmodifiableCollection(myIdToResourceDefinition.values());
// }
/**
* Returns the scanned runtime model for the given type. This is an advanced feature which is generally only needed
* for extending the core library.
@ -412,10 +482,40 @@ public class FhirContext {
return myIdToResourceDefinition.values();
}
/**
* Returns an unmodifiable set containing all resource names known to this
* context
*/
public Set<String> getResourceNames() {
Set<String> resourceNames = new HashSet<>();
if (myNameToResourceDefinition.isEmpty()) {
Properties props = new Properties();
try {
props.load(myVersion.getFhirVersionPropertiesFile());
} catch (IOException theE) {
throw new ConfigurationException("Failed to load version properties file");
}
Enumeration<?> propNames = props.propertyNames();
while (propNames.hasMoreElements()) {
String next = (String) propNames.nextElement();
if (next.startsWith("resource.")) {
resourceNames.add(next.substring("resource.".length()).trim());
}
}
}
for (RuntimeResourceDefinition next : myNameToResourceDefinition.values()) {
resourceNames.add(next.getName());
}
return Collections.unmodifiableSet(resourceNames);
}
/**
* Get the restful client factory. If no factory has been set, this will be initialized with
* a new ApacheRestfulClientFactory.
*
*
* @return the factory used to create the restful clients
*/
public IRestfulClientFactory getRestfulClientFactory() {
@ -429,6 +529,16 @@ public class FhirContext {
return myRestfulClientFactory;
}
/**
* Set the restful client factory
*
* @param theRestfulClientFactory
*/
public void setRestfulClientFactory(IRestfulClientFactory theRestfulClientFactory) {
Validate.notNull(theRestfulClientFactory, "theRestfulClientFactory must not be null");
this.myRestfulClientFactory = theRestfulClientFactory;
}
public RuntimeChildUndeclaredExtensionDefinition getRuntimeChildUndeclaredExtensionDefinition() {
validateInitialized();
return myRuntimeChildUndeclaredExtensionDefinition;
@ -438,7 +548,7 @@ public class FhirContext {
* Returns the validation support module configured for this context, creating a default
* implementation if no module has been passed in via the {@link #setValidationSupport(IContextValidationSupport)}
* method
*
*
* @see #setValidationSupport(IContextValidationSupport)
*/
public IContextValidationSupport<?, ?, ?, ?, ?, ?> getValidationSupport() {
@ -448,6 +558,15 @@ public class FhirContext {
return myValidationSupport;
}
/**
* Sets the validation support module to use for this context. The validation support module
* is used to supply underlying infrastructure such as conformance resources (StructureDefinition, ValueSet, etc)
* as well as to provide terminology services to modules such as the validator and FluentPath executor
*/
public void setValidationSupport(IContextValidationSupport<?, ?, ?, ?, ?, ?> theValidationSupport) {
myValidationSupport = theValidationSupport;
}
public IFhirVersion getVersion() {
return myVersion;
}
@ -455,7 +574,7 @@ public class FhirContext {
/**
* Returns <code>true</code> if any default types for specific profiles have been defined
* within this context.
*
*
* @see #setDefaultTypeForProfile(String, Class)
* @see #getDefaultTypeForProfile(String)
*/
@ -483,7 +602,7 @@ public class FhirContext {
* on a context for a previous version of fhir will result in an
* {@link UnsupportedOperationException}
* </p>
*
*
* @since 2.2
*/
public IFluentPath newFluentPath() {
@ -492,7 +611,7 @@ public class FhirContext {
/**
* Create and return a new JSON parser.
*
*
* <p>
* Thread safety: <b>Parsers are not guaranteed to be thread safe</b>. Create a new parser instance for every thread
* or every message being parsed/encoded.
@ -513,19 +632,16 @@ public class FhirContext {
* sub-interface {@link IBasicClient}). See the <a
* href="http://jamesagnew.github.io/hapi-fhir/doc_rest_client.html">RESTful Client</a> documentation for more
* information on how to define this interface.
*
*
* <p>
* Performance Note: <b>This method is cheap</b> to call, and may be called once for every operation invocation
* without incurring any performance penalty
* </p>
*
* @param theClientType
* The client type, which is an interface type to be instantiated
* @param theServerBase
* The URL of the base for the restful FHIR server to connect to
*
* @param theClientType The client type, which is an interface type to be instantiated
* @param theServerBase The URL of the base for the restful FHIR server to connect to
* @return A newly created client
* @throws ConfigurationException
* If the interface type is not an interface
* @throws ConfigurationException If the interface type is not an interface
*/
public <T extends IRestfulClient> T newRestfulClient(Class<T> theClientType, String theServerBase) {
return getRestfulClientFactory().newClient(theClientType, theServerBase);
@ -535,14 +651,13 @@ public class FhirContext {
* Instantiates a new generic client. A generic client is able to perform any of the FHIR RESTful operations against
* a compliant server, but does not have methods defining the specific functionality required (as is the case with
* {@link #newRestfulClient(Class, String) non-generic clients}).
*
*
* <p>
* Performance Note: <b>This method is cheap</b> to call, and may be called once for every operation invocation
* without incurring any performance penalty
* </p>
*
* @param theServerBase
* The URL of the base for the restful FHIR server to connect to
*
* @param theServerBase The URL of the base for the restful FHIR server to connect to
*/
public IGenericClient newRestfulGenericClient(String theServerBase) {
return getRestfulClientFactory().newGenericClient(theServerBase);
@ -569,7 +684,7 @@ public class FhirContext {
/**
* Create and return a new XML parser.
*
*
* <p>
* Thread safety: <b>Parsers are not guaranteed to be thread safe</b>. Create a new parser instance for every thread
* or every message being parsed/encoded.
@ -592,9 +707,8 @@ public class FhirContext {
* <b>THREAD SAFETY WARNING:</b> This method is not thread safe. It should be called before any
* threads are able to call any methods on this context.
* </p>
*
* @param theType
* The custom type to add (must not be <code>null</code>)
*
* @param theType The custom type to add (must not be <code>null</code>)
*/
public void registerCustomType(Class<? extends IBase> theType) {
Validate.notNull(theType, "theType must not be null");
@ -612,9 +726,8 @@ public class FhirContext {
* <b>THREAD SAFETY WARNING:</b> This method is not thread safe. It should be called before any
* threads are able to call any methods on this context.
* </p>
*
* @param theTypes
* The custom types to add (must not be <code>null</code> or contain null elements in the collection)
*
* @param theTypes The custom types to add (must not be <code>null</code> or contain null elements in the collection)
*/
public void registerCustomTypes(Collection<Class<? extends IBase>> theTypes) {
Validate.notNull(theTypes, "theTypes must not be null");
@ -698,31 +811,6 @@ public class FhirContext {
return classToElementDefinition;
}
/**
* When encoding resources, this setting configures the parser to include
* an entry in the resource's metadata section which indicates which profile(s) the
* resource claims to conform to. The default is {@link AddProfileTagEnum#ONLY_FOR_CUSTOM}.
* <p>
* This feature is intended for situations where custom resource types are being used,
* avoiding the need to manually add profile declarations for these custom types.
* </p>
* <p>
* See <a href="http://jamesagnew.gihhub.io/hapi-fhir/doc_extensions.html">Profiling and Extensions</a>
* for more information on using custom types.
* </p>
* <p>
* Note that this feature automatically adds the profile, but leaves any profile tags
* which have been manually added in place as well.
* </p>
*
* @param theAddProfileTagWhenEncoding
* The add profile mode (must not be <code>null</code>)
*/
public void setAddProfileTagWhenEncoding(AddProfileTagEnum theAddProfileTagWhenEncoding) {
Validate.notNull(theAddProfileTagWhenEncoding, "theAddProfileTagWhenEncoding must not be null");
myAddProfileTagWhenEncoding = theAddProfileTagWhenEncoding;
}
/**
* Sets the default type which will be used when parsing a resource that is found to be
* of the given profile.
@ -732,12 +820,10 @@ public class FhirContext {
* if the parser is parsing a resource and finds that it declares that it conforms to that profile,
* the <code>MyPatient</code> type will be used unless otherwise specified.
* </p>
*
* @param theProfile
* The profile string, e.g. <code>"http://example.com/some_patient_profile"</code>. Must not be
* <code>null</code> or empty.
* @param theClass
* The resource type, or <code>null</code> to clear any existing type
*
* @param theProfile The profile string, e.g. <code>"http://example.com/some_patient_profile"</code>. Must not be
* <code>null</code> or empty.
* @param theClass The resource type, or <code>null</code> to clear any existing type
*/
public void setDefaultTypeForProfile(String theProfile, Class<? extends IBaseResource> theClass) {
Validate.notBlank(theProfile, "theProfile must not be null or empty");
@ -748,56 +834,19 @@ public class FhirContext {
}
}
/**
* This feature is not yet in its final state and should be considered an internal part of HAPI for now - use with
* caution
*/
public void setLocalizer(HapiLocalizer theMessages) {
myLocalizer = theMessages;
}
public void setNarrativeGenerator(INarrativeGenerator theNarrativeGenerator) {
myNarrativeGenerator = theNarrativeGenerator;
}
/**
* Sets a parser error handler to use by default on all parsers
*
* @param theParserErrorHandler
* The error handler
*
* @param theParserErrorHandler The error handler
*/
public void setParserErrorHandler(IParserErrorHandler theParserErrorHandler) {
Validate.notNull(theParserErrorHandler, "theParserErrorHandler must not be null");
myParserErrorHandler = theParserErrorHandler;
}
/**
* Sets the parser options object which will be used to supply default
* options to newly created parsers
*
* @param theParserOptions
* The parser options object - Must not be <code>null</code>
*/
public void setParserOptions(ParserOptions theParserOptions) {
Validate.notNull(theParserOptions, "theParserOptions must not be null");
myParserOptions = theParserOptions;
}
/**
* Sets the configured performance options
*
* @see PerformanceOptionsEnum for a list of available options
*/
public void setPerformanceOptions(Collection<PerformanceOptionsEnum> theOptions) {
myPerformanceOptions.clear();
if (theOptions != null) {
myPerformanceOptions.addAll(theOptions);
}
}
/**
* Sets the configured performance options
*
*
* @see PerformanceOptionsEnum for a list of available options
*/
public void setPerformanceOptions(PerformanceOptionsEnum... thePerformanceOptions) {
@ -808,26 +857,7 @@ public class FhirContext {
setPerformanceOptions(asList);
}
/**
* Set the restful client factory
*
* @param theRestfulClientFactory
*/
public void setRestfulClientFactory(IRestfulClientFactory theRestfulClientFactory) {
Validate.notNull(theRestfulClientFactory, "theRestfulClientFactory must not be null");
this.myRestfulClientFactory = theRestfulClientFactory;
}
/**
* Sets the validation support module to use for this context. The validation support module
* is used to supply underlying infrastructure such as conformance resources (StructureDefinition, ValueSet, etc)
* as well as to provide terminology services to modules such as the validator and FluentPath executor
*/
public void setValidationSupport(IContextValidationSupport<?, ?, ?, ?, ?, ?> theValidationSupport) {
myValidationSupport = theValidationSupport;
}
@SuppressWarnings({ "cast" })
@SuppressWarnings({"cast"})
private List<Class<? extends IElement>> toElementList(Collection<Class<? extends IBaseResource>> theResourceTypes) {
if (theResourceTypes == null) {
return null;
@ -858,13 +888,6 @@ public class FhirContext {
return new FhirContext(FhirVersionEnum.DSTU2);
}
/**
* Creates and returns a new FhirContext with version {@link FhirVersionEnum#DSTU2 DSTU2} (2016 May DSTU3 Snapshot)
*/
public static FhirContext forDstu2_1() {
return new FhirContext(FhirVersionEnum.DSTU2_1);
}
/**
* Creates and returns a new FhirContext with version {@link FhirVersionEnum#DSTU2_HL7ORG DSTU2} (using the Reference
* Implementation Structures)
@ -873,9 +896,16 @@ public class FhirContext {
return new FhirContext(FhirVersionEnum.DSTU2_HL7ORG);
}
/**
* Creates and returns a new FhirContext with version {@link FhirVersionEnum#DSTU2 DSTU2} (2016 May DSTU3 Snapshot)
*/
public static FhirContext forDstu2_1() {
return new FhirContext(FhirVersionEnum.DSTU2_1);
}
/**
* Creates and returns a new FhirContext with version {@link FhirVersionEnum#DSTU3 DSTU3}
*
*
* @since 1.4
*/
public static FhirContext forDstu3() {
@ -884,14 +914,13 @@ public class FhirContext {
/**
* Creates and returns a new FhirContext with version {@link FhirVersionEnum#DSTU3 DSTU3}
*
*
* @since 3.0.0
*/
public static FhirContext forR4() {
return new FhirContext(FhirVersionEnum.R4);
}
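As a usage note for the factory methods above: FhirContext creation is expensive, so the usual pattern is one shared instance per FHIR version. A minimal sketch:

import ca.uhn.fhir.context.FhirContext;

public class ContextCreationExample {
    // Create once and reuse: FhirContext is thread-safe and costly to construct
    private static final FhirContext CTX_DSTU3 = FhirContext.forDstu3();
    private static final FhirContext CTX_R4 = FhirContext.forR4();

    public static void main(String[] args) {
        System.out.println(CTX_DSTU3.getVersion().getVersion());
        System.out.println(CTX_R4.getVersion().getVersion());
    }
}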
private static Collection<Class<? extends IBaseResource>> toCollection(Class<? extends IBaseResource> theResourceType) {
ArrayList<Class<? extends IBaseResource>> retVal = new ArrayList<Class<? extends IBaseResource>>(1);
retVal.add(theResourceType);
@ -909,34 +938,4 @@ public class FhirContext {
}
return retVal;
}
/**
* Returns an unmodifiable set containing all resource names known to this
* context
*/
public Set<String> getResourceNames() {
Set<String> resourceNames= new HashSet<>();
if (myNameToResourceDefinition.isEmpty()) {
Properties props = new Properties();
try {
props.load(myVersion.getFhirVersionPropertiesFile());
} catch (IOException theE) {
throw new ConfigurationException("Failed to load version properties file");
}
Enumeration<?> propNames = props.propertyNames();
while (propNames.hasMoreElements()){
String next = (String) propNames.nextElement();
if (next.startsWith("resource.")) {
resourceNames.add(next.substring("resource.".length()).trim());
}
}
}
for (RuntimeResourceDefinition next : myNameToResourceDefinition.values()) {
resourceNames.add(next.getName());
}
return Collections.unmodifiableSet(resourceNames);
}
}
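A small sketch of iterating the names returned by getResourceNames() above:

import ca.uhn.fhir.context.FhirContext;

public class ResourceNamesExample {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forDstu3();
        // Prints every resource name known to this context, e.g. "Patient", "Observation"
        for (String name : ctx.getResourceNames()) {
            System.out.println(name);
        }
    }
}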

View File

@ -20,31 +20,21 @@ package ca.uhn.fhir.model.view;
* #L%
*/
import ca.uhn.fhir.context.*;
import org.hl7.fhir.instance.model.api.*;
import java.util.List;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseExtension;
import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeChildDeclaredExtensionDefinition;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.model.api.BaseElement;
import ca.uhn.fhir.model.api.ExtensionDt;
import ca.uhn.fhir.model.api.IResource;
public class ViewGenerator {
private FhirContext myCtx;
public ViewGenerator(FhirContext theFhirContext) {
myCtx=theFhirContext;
myCtx = theFhirContext;
}
public <T extends IResource> T newView(IResource theResource, Class<T> theTargetType) {
Class<? extends IResource> sourceType = theResource.getClass();
public <T extends IBaseResource> T newView(IBaseResource theResource, Class<T> theTargetType) {
Class<? extends IBaseResource> sourceType = theResource.getClass();
RuntimeResourceDefinition sourceDef = myCtx.getResourceDefinition(theResource);
RuntimeResourceDefinition targetDef = myCtx.getResourceDefinition(theTargetType);
@ -57,18 +47,16 @@ public class ViewGenerator {
T retVal;
try {
retVal = theTargetType.newInstance();
} catch (InstantiationException e) {
throw new ConfigurationException("Failed to instantiate " + theTargetType, e);
} catch (IllegalAccessException e) {
} catch (Exception e) {
throw new ConfigurationException("Failed to instantiate " + theTargetType, e);
}
copyChildren(sourceDef, (BaseElement) theResource, targetDef, (BaseElement) retVal);
copyChildren(sourceDef, (IBase) theResource, targetDef, (IBase) retVal);
return retVal;
}
private void copyChildren(BaseRuntimeElementCompositeDefinition<?> theSourceDef, BaseElement theSource, BaseRuntimeElementCompositeDefinition<?> theTargetDef, BaseElement theTarget) {
private void copyChildren(BaseRuntimeElementCompositeDefinition<?> theSourceDef, IBase theSource, BaseRuntimeElementCompositeDefinition<?> theTargetDef, IBase theTarget) {
if (!theSource.isEmpty()) {
List<BaseRuntimeChildDefinition> targetChildren = theTargetDef.getChildren();
List<RuntimeChildDeclaredExtensionDefinition> targetExts = theTargetDef.getExtensions();
@ -79,7 +67,7 @@ public class ViewGenerator {
if (nextChild.getValidChildNames().size() > 1) {
elementName = nextChild.getValidChildNames().iterator().next();
}
BaseRuntimeChildDefinition sourceChildEquivalent = theSourceDef.getChildByNameOrThrowDataFormatException(elementName);
if (sourceChildEquivalent == null) {
continue;
@ -89,7 +77,7 @@ public class ViewGenerator {
for (IBase nextElement : sourceValues) {
boolean handled = false;
if (nextElement instanceof IBaseExtension) {
String url = ((IBaseExtension<?,?>) nextElement).getUrl();
String url = ((IBaseExtension<?, ?>) nextElement).getUrl();
for (RuntimeChildDeclaredExtensionDefinition nextExt : targetExts) {
String nextTargetUrl = nextExt.getExtensionUrl();
if (!nextTargetUrl.equals(url)) {
@ -97,40 +85,49 @@ public class ViewGenerator {
}
addExtension(theSourceDef, theSource, theTarget, nextExt, url);
handled = true;
}
}
}
}
if (!handled) {
nextChild.getMutator().addValue(theTarget, nextElement);
}
}
}
for (RuntimeChildDeclaredExtensionDefinition nextExt : targetExts) {
String url = nextExt.getExtensionUrl();
addExtension(theSourceDef, theSource, theTarget, nextExt, url);
}
}
}
private void addExtension(BaseRuntimeElementCompositeDefinition<?> theSourceDef, BaseElement theSource, BaseElement theTarget, RuntimeChildDeclaredExtensionDefinition nextExt, String url) {
private void addExtension(BaseRuntimeElementCompositeDefinition<?> theSourceDef, IBase theSource, IBase theTarget, RuntimeChildDeclaredExtensionDefinition nextExt, String url) {
RuntimeChildDeclaredExtensionDefinition sourceDeclaredExt = theSourceDef.getDeclaredExtension(url, "");
if (sourceDeclaredExt == null) {
for (ExtensionDt next : theSource.getAllUndeclaredExtensions()) {
if (next.getUrlAsString().equals(url)) {
nextExt.getMutator().addValue(theTarget, next.getValue());
if (theSource instanceof IBaseHasExtensions) {
for (IBaseExtension<?, ?> next : ((IBaseHasExtensions) theSource).getExtension()) {
if (next.getUrl().equals(url)) {
nextExt.getMutator().addValue(theTarget, next.getValue());
}
}
}
if (theSource instanceof IBaseHasModifierExtensions) {
for (IBaseExtension<?, ?> next : ((IBaseHasModifierExtensions) theSource).getModifierExtension()) {
if (next.getUrl().equals(url)) {
nextExt.getMutator().addValue(theTarget, next.getValue());
}
}
}
} else {
List<? extends IBase> values = sourceDeclaredExt.getAccessor().getValues(theSource);
for (IBase nextElement : values) {
nextExt.getMutator().addValue(theTarget, nextElement);
}
}
}
}
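A hedged sketch of the refactored generator above, now that it accepts any IBaseResource. PatientView and the extension URL are hypothetical; the ViewGenerator constructor and newView() call are the ones shown in this file.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.annotation.Child;
import ca.uhn.fhir.model.api.annotation.Extension;
import ca.uhn.fhir.model.api.annotation.ResourceDef;
import ca.uhn.fhir.model.view.ViewGenerator;
import org.hl7.fhir.dstu3.model.Patient;
import org.hl7.fhir.dstu3.model.StringType;

public class ViewGeneratorExample {

    // Hypothetical "view" of Patient exposing one extension as a typed field
    @ResourceDef(name = "Patient")
    public static class PatientView extends Patient {
        private static final long serialVersionUID = 1L;

        @Child(name = "eyeColour")
        @Extension(url = "http://example.com/ext/eye-colour", definedLocally = false, isModifier = false)
        private StringType myEyeColour;

        public StringType getEyeColour() {
            return myEyeColour;
        }
    }

    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forDstu3();

        Patient source = new Patient();
        org.hl7.fhir.dstu3.model.Extension ext = new org.hl7.fhir.dstu3.model.Extension();
        ext.setUrl("http://example.com/ext/eye-colour");
        ext.setValue(new StringType("blue"));
        source.addExtension(ext);

        // Copies children and matching extensions from the source into the view type
        PatientView view = new ViewGenerator(ctx).newView(source, PatientView.class);
        System.out.println(view.getEyeColour() != null ? view.getEyeColour().getValue() : "not populated");
    }
}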

View File

@ -78,11 +78,6 @@ public @interface Search {
// NB: Read, Search (maybe others) share this annotation method, so update the javadocs everywhere
Class<? extends IBaseResource> type() default IBaseResource.class;
/**
* This is an experimental option - Use with caution
*/
boolean dynamic() default false;
/**
* In a REST server, should this method be invoked even if it does not have method parameters
* which correspond to all of the URL parameters passed in by the client (default is <code>false</code>).

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.rest.api;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -109,6 +109,7 @@ public class Constants {
public static final String HEADER_PREFER_RETURN = "return";
public static final String HEADER_PREFER_RETURN_MINIMAL = "minimal";
public static final String HEADER_PREFER_RETURN_REPRESENTATION = "representation";
public static final String HEADER_PREFER_RETURN_OPERATION_OUTCOME = "OperationOutcome";
public static final String HEADER_SUFFIX_CT_UTF_8 = "; charset=UTF-8";
public static final String HEADERVALUE_CORS_ALLOW_METHODS_ALL = "GET, POST, PUT, DELETE, OPTIONS";
public static final Map<Integer, String> HTTP_STATUS_NAMES;
@ -124,7 +125,7 @@ public class Constants {
/**
* Used in paging links
*/
public static final Object PARAM_BUNDLETYPE = "_bundletype";
public static final String PARAM_BUNDLETYPE = "_bundletype";
public static final String PARAM_CONTENT = "_content";
public static final String PARAM_COUNT = "_count";
public static final String PARAM_DELETE = "_delete";
@ -134,7 +135,7 @@ public class Constants {
public static final String PARAM_HISTORY = "_history";
public static final String PARAM_INCLUDE = "_include";
public static final String PARAM_INCLUDE_QUALIFIER_RECURSE = ":recurse";
public static final String PARAM_INCLUDE_RECURSE = "_include"+PARAM_INCLUDE_QUALIFIER_RECURSE;
public static final String PARAM_INCLUDE_RECURSE = "_include" + PARAM_INCLUDE_QUALIFIER_RECURSE;
public static final String PARAM_LASTUPDATED = "_lastUpdated";
public static final String PARAM_NARRATIVE = "_narrative";
public static final String PARAM_PAGINGACTION = "_getpages";
@ -146,7 +147,7 @@ public class Constants {
public static final String PARAM_QUERY = "_query";
public static final String PARAM_RESPONSE_URL = "response-url"; //Used in messaging
public static final String PARAM_REVINCLUDE = "_revinclude";
public static final String PARAM_REVINCLUDE_RECURSE = PARAM_REVINCLUDE+PARAM_INCLUDE_QUALIFIER_RECURSE;
public static final String PARAM_REVINCLUDE_RECURSE = PARAM_REVINCLUDE + PARAM_INCLUDE_QUALIFIER_RECURSE;
public static final String PARAM_SEARCH = "_search";
public static final String PARAM_SECURITY = "_security";
public static final String PARAM_SINCE = "_since";
@ -154,9 +155,9 @@ public class Constants {
public static final String PARAM_SORT_ASC = "_sort:asc";
public static final String PARAM_SORT_DESC = "_sort:desc";
public static final String PARAM_SUMMARY = "_summary";
public static final String PARAM_TAG = "_tag";
public static final String PARAM_TAGS = "_tags";
public static final String PARAM_TEXT = "_text";
public static final String PARAM_TAG = "_tag";
public static final String PARAM_TAGS = "_tags";
public static final String PARAM_TEXT = "_text";
public static final String PARAM_VALIDATE = "_validate";
public static final String PARAMQUALIFIER_MISSING = ":missing";
public static final String PARAMQUALIFIER_MISSING_FALSE = "false";
@ -171,7 +172,7 @@ public class Constants {
public static final int STATUS_HTTP_400_BAD_REQUEST = 400;
public static final int STATUS_HTTP_401_CLIENT_UNAUTHORIZED = 401;
public static final int STATUS_HTTP_403_FORBIDDEN = 403;
public static final int STATUS_HTTP_404_NOT_FOUND = 404;
public static final int STATUS_HTTP_405_METHOD_NOT_ALLOWED = 405;
public static final int STATUS_HTTP_409_CONFLICT = 409;
@ -189,9 +190,16 @@ public class Constants {
public static final String HEADER_X_CACHE = "X-Cache";
public static final String HEADER_X_SECURITY_CONTEXT = "X-Security-Context";
public static final String POWERED_BY_HEADER = "X-Powered-By";
public static final Charset CHARSET_US_ASCII;
public static final String PARAM_PAGEID = "_pageId";
/**
* This is provided for testing only! Use with caution as this property may change.
*/
public static final String TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS = "TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS";
static {
CHARSET_UTF8 = Charset.forName(CHARSET_NAME_UTF8);
CHARSET_US_ASCII = Charset.forName("ISO-8859-1");
HashMap<Integer, String> statusNames = new HashMap<>();
statusNames.put(200, "OK");
@ -257,7 +265,7 @@ public class Constants {
statusNames.put(510, "Not Extended");
statusNames.put(511, "Network Authentication Required");
HTTP_STATUS_NAMES = Collections.unmodifiableMap(statusNames);
Set<String> formatsHtml = new HashSet<>();
formatsHtml.add(CT_HTML);
formatsHtml.add(FORMAT_HTML);

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.rest.api;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

View File

@ -27,7 +27,7 @@ import java.util.HashMap;
*/
public enum PreferReturnEnum {
REPRESENTATION("representation"), MINIMAL("minimal");
REPRESENTATION("representation"), MINIMAL("minimal"), OPERATION_OUTCOME("OperationOutcome");
private String myHeaderValue;
private static HashMap<String, PreferReturnEnum> ourValues;
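A sketch of requesting the new OperationOutcome preference from a generic client; the server URL is illustrative.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.PreferReturnEnum;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.dstu3.model.Patient;

public class PreferReturnExample {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forDstu3();
        IGenericClient client = ctx.newRestfulGenericClient("http://example.com/fhir");

        Patient patient = new Patient();
        patient.addName().setFamily("Smith");

        // Sends "Prefer: return=OperationOutcome" so the server answers with an
        // OperationOutcome instead of echoing the created resource back
        MethodOutcome outcome = client.create()
            .resource(patient)
            .prefer(PreferReturnEnum.OPERATION_OUTCOME)
            .execute();
        System.out.println(outcome.getOperationOutcome() != null);
    }
}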

View File

@ -34,36 +34,35 @@ public interface IHttpRequest {
* @param theName the header name
* @param theValue the header value
*/
public void addHeader(String theName, String theValue);
void addHeader(String theName, String theValue);
/**
* Execute the request
* @return the response
* @throws IOException
*/
public IHttpResponse execute() throws IOException;
IHttpResponse execute() throws IOException;
/**
* @return all request headers in lower case
* @return all request headers in lower case. Note that this method
* returns an <b>immutable</b> Map
*/
public Map<String, List<String>> getAllHeaders();
Map<String, List<String>> getAllHeaders();
/**
* Return the requestbody as a string.
* Return the request body as a string.
* If this is not supported by the underlying technology, null is returned
* @return a string representation of the request or null if not supported or empty.
* @throws IOException
*/
public String getRequestBodyFromStream() throws IOException;
String getRequestBodyFromStream() throws IOException;
/**
* Return the request URI, or null
*/
public String getUri();
String getUri();
/**
* Return the HTTP verb (e.g. "GET")
*/
public String getHttpVerbName();
String getHttpVerbName();
}

View File

@ -1,9 +1,10 @@
package ca.uhn.fhir.rest.gclient;
import java.util.*;
import ca.uhn.fhir.model.api.IQueryParameterType;
import java.util.List;
import java.util.Map;
/*
* #%L
* HAPI FHIR - Core Library
@ -26,10 +27,32 @@ import ca.uhn.fhir.model.api.IQueryParameterType;
public interface IBaseQuery<T extends IBaseQuery<?>> {
T where(ICriterion<?> theCriterion);
/**
* Add a search parameter to the query.
* <p>
* Note that this method is a synonym for {@link #where(ICriterion)}, and is only
* here to make fluent queries read more naturally.
* </p>
*/
T and(ICriterion<?> theCriterion);
T where(Map<String, List<IQueryParameterType>> theCriterion);
/**
* Add a set of search parameters to the query.
*/
T where(Map<String, List<IQueryParameterType>> theCriterion);
T and(ICriterion<?> theCriterion);
/**
* Add a search parameter to the query.
*/
T where(ICriterion<?> theCriterion);
/**
* Add a set of search parameters to the query.
* <p>
* Values will be treated semi-literally. No FHIR escaping will be performed
* on the values, but regular URL escaping will be.
* </p>
*/
T whereMap(Map<String, List<String>> theRawMap);
}
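A sketch of the criteria methods above, including the new whereMap() for raw, pre-built parameters; the server URL is illustrative.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Patient;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class QueryCriteriaExample {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forDstu3();
        IGenericClient client = ctx.newRestfulGenericClient("http://example.com/fhir");

        // where() and and() are synonyms; and() simply reads more naturally
        Bundle typed = client.search()
            .forResource(Patient.class)
            .where(Patient.FAMILY.matches().value("smith"))
            .and(Patient.GENDER.exactly().code("female"))
            .returnBundle(Bundle.class)
            .execute();

        // whereMap() passes raw parameter values, URL-escaped but not FHIR-escaped
        Map<String, List<String>> raw = new HashMap<>();
        raw.put("name", Collections.singletonList("smith"));
        Bundle rawResult = client.search()
            .forResource("Patient")
            .whereMap(raw)
            .returnBundle(Bundle.class)
            .execute();

        System.out.println(typed.getTotal() + " / " + rawResult.getTotal());
    }
}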

View File

@ -20,16 +20,23 @@ package ca.uhn.fhir.rest.gclient;
* #L%
*/
import java.util.Date;
import ca.uhn.fhir.rest.param.DateRangeParam;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
public interface IHistoryTyped<T> extends IClientExecutable<IHistoryTyped<T>, T> {
import java.util.Date;
public interface IHistoryTyped<T> extends IClientExecutable<IHistoryTyped<T>, T> {
/**
* Request that the server return only resource versions that were created at or after the given time (inclusive)
 * Request that the server return only the history entries that fall within the
 * specified date range
*/
IHistoryTyped<T> since(Date theCutoff);
IHistoryTyped<T> at(DateRangeParam theDateRangeParam);
/**
* Request that the server return only up to <code>theCount</code> number of resources
*/
IHistoryTyped<T> count(Integer theCount);
/**
* Request that the server return only resource versions that were created at or after the given time (inclusive)
@ -41,9 +48,9 @@ public interface IHistoryTyped<T> extends IClientExecutable<IHistoryTyped<T>, T>
IHistoryTyped<T> since(IPrimitiveType<Date> theCutoff);
/**
* Request that the server return only up to <code>theCount</code> number of resources
* Request that the server return only resource versions that were created at or after the given time (inclusive)
*/
IHistoryTyped<T> count(Integer theCount);
IHistoryTyped<T> since(Date theCutoff);
}
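A sketch of a type-level history call combining the options above, including the new at() range; the server URL is illustrative.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.param.DateRangeParam;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Patient;

public class HistoryExample {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forDstu3();
        IGenericClient client = ctx.newRestfulGenericClient("http://example.com/fhir");

        // Type-level history, limited to versions created in 2018 and capped at 100 entries
        Bundle history = client.history()
            .onType(Patient.class)
            .andReturnBundle(Bundle.class)
            .at(new DateRangeParam("2018-01-01", "2018-12-31"))
            .count(100)
            .execute();
        System.out.println(history.getEntry().size());
    }
}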

View File

@ -1,6 +1,14 @@
package ca.uhn.fhir.rest.gclient;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.api.SearchStyleEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.param.DateRangeParam;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import java.util.Collection;
import java.util.List;
import java.util.Map;
/*
* #%L
@ -22,14 +30,23 @@ import java.util.Collection;
* #L%
*/
import org.hl7.fhir.instance.model.api.IBaseBundle;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.api.SearchStyleEnum;
import ca.uhn.fhir.rest.param.DateRangeParam;
public interface IQuery<Y> extends IBaseQuery<IQuery<Y>>, IClientExecutable<IQuery<Y>, Y> {
/**
* {@inheritDoc}
*/
// This is here as an overridden method to allow mocking clients with Mockito to work
@Override
IQuery<Y> and(ICriterion<?> theCriterion);
/**
* Specifies the <code>_count</code> parameter, which indicates to the server how many resources should be returned
* on a single page.
*
* @since 1.4
*/
IQuery<Y> count(int theCount);
/**
* Add an "_include" specification or an "_include:recurse" specification. If you are using
* a constant from one of the built-in structures you can select whether you want recursive
@ -41,88 +58,60 @@ public interface IQuery<Y> extends IBaseQuery<IQuery<Y>>, IClientExecutable<IQue
*/
IQuery<Y> include(Include theInclude);
ISort<Y> sort();
/**
* Add a "_lastUpdated" specification
*
* @since HAPI FHIR 1.1 - Note that option was added to FHIR itself in DSTU2
*/
IQuery<Y> lastUpdated(DateRangeParam theLastUpdated);
/**
* Specifies the <code>_count</code> parameter, which indicates to the server how many resources should be returned
* on a single page.
*
*
* @deprecated This parameter is badly named, since FHIR calls this parameter "_count" and not "_limit". Use {@link #count(int)} instead (it also sets the _count parameter)
*/
@Deprecated
IQuery<Y> limitTo(int theLimitTo);
/**
* Specifies the <code>_count</code> parameter, which indicates to the server how many resources should be returned
* on a single page.
*
* @since 1.4
* Request that the client return the specified bundle type, e.g. <code>org.hl7.fhir.instance.model.Bundle.class</code>
* or <code>ca.uhn.fhir.model.dstu2.resource.Bundle.class</code>
*/
IQuery<Y> count(int theCount);
/**
* Match only resources where the resource has the given tag. This parameter corresponds to
* the <code>_tag</code> URL parameter.
* @param theSystem The tag code system, or <code>null</code> to match any code system (this may not be supported on all servers)
* @param theCode The tag code. Must not be <code>null</code> or empty.
*/
IQuery<Y> withTag(String theSystem, String theCode);
/**
* Match only resources where the resource has the given security tag. This parameter corresponds to
* the <code>_security</code> URL parameter.
* @param theSystem The tag code system, or <code>null</code> to match any code system (this may not be supported on all servers)
* @param theCode The tag code. Must not be <code>null</code> or empty.
*/
IQuery<Y> withSecurity(String theSystem, String theCode);
/**
* Match only resources where the resource has the given profile declaration. This parameter corresponds to
* the <code>_profile</code> URL parameter.
* @param theProfileUri The URI of a given profile to search for resources which match
*/
IQuery<Y> withProfile(String theProfileUri);
/**
* Matches any of the profiles given as argument. This would result in an OR search for resources matching one or more profiles.
* To do an AND search, make multiple calls to {@link #withProfile(String)}.
* @param theProfileUris The URIs of a given profile to search for resources which match.
*/
IQuery<Y> withAnyProfile(Collection<String> theProfileUris);
/**
* Forces the query to perform the search using the given method (allowable methods are described in the
* <a href="http://www.hl7.org/fhir/search.html">FHIR Search Specification</a>)
* <p>
* This can be used to force the use of an HTTP POST instead of an HTTP GET
* </p>
*
* @see SearchStyleEnum
* @since 0.6
*/
IQuery<Y> usingStyle(SearchStyleEnum theStyle);
IQuery<Y> withIdAndCompartment(String theResourceId, String theCompartmentName);
<B extends IBaseBundle> IQuery<B> returnBundle(Class<B> theClass);
/**
* Add a "_revinclude" specification
*
*
* @since HAPI FHIR 1.0 - Note that option was added to FHIR itself in DSTU2
*/
IQuery<Y> revInclude(Include theIncludeTarget);
/**
* Add a "_lastUpdated" specification
*
* @since HAPI FHIR 1.1 - Note that option was added to FHIR itself in DSTU2
 * Adds a sort criterion
 *
 * @see #sort(SortSpec) for an alternate way of specifying sorts
*/
IQuery<Y> lastUpdated(DateRangeParam theLastUpdated);
ISort<Y> sort();
/**
* Request that the client return the specified bundle type, e.g. <code>org.hl7.fhir.instance.model.Bundle.class</code>
* or <code>ca.uhn.fhir.model.dstu2.resource.Bundle.class</code>
* Adds a sort using a {@link SortSpec} object
*
 * @see #sort() for an alternate way of specifying sorts
*/
<B extends IBaseBundle> IQuery<B> returnBundle(Class<B> theClass);
IQuery<Y> sort(SortSpec theSortSpec);
/**
* Forces the query to perform the search using the given method (allowable methods are described in the
* <a href="http://www.hl7.org/fhir/search.html">FHIR Search Specification</a>)
* <p>
* This can be used to force the use of an HTTP POST instead of an HTTP GET
* </p>
*
* @see SearchStyleEnum
* @since 0.6
*/
IQuery<Y> usingStyle(SearchStyleEnum theStyle);
/**
* {@inheritDoc}
@ -132,11 +121,40 @@ public interface IQuery<Y> extends IBaseQuery<IQuery<Y>>, IClientExecutable<IQue
IQuery<Y> where(ICriterion<?> theCriterion);
/**
* {@inheritDoc}
* Matches any of the profiles given as argument. This would result in an OR search for resources matching one or more profiles.
* To do an AND search, make multiple calls to {@link #withProfile(String)}.
*
* @param theProfileUris The URIs of a given profile to search for resources which match.
*/
// This is here as an overridden method to allow mocking clients with Mockito to work
@Override
IQuery<Y> and(ICriterion<?> theCriterion);
IQuery<Y> withAnyProfile(Collection<String> theProfileUris);
IQuery<Y> withIdAndCompartment(String theResourceId, String theCompartmentName);
/**
* Match only resources where the resource has the given profile declaration. This parameter corresponds to
* the <code>_profile</code> URL parameter.
*
* @param theProfileUri The URI of a given profile to search for resources which match
*/
IQuery<Y> withProfile(String theProfileUri);
/**
* Match only resources where the resource has the given security tag. This parameter corresponds to
* the <code>_security</code> URL parameter.
*
* @param theSystem The tag code system, or <code>null</code> to match any code system (this may not be supported on all servers)
* @param theCode The tag code. Must not be <code>null</code> or empty.
*/
IQuery<Y> withSecurity(String theSystem, String theCode);
/**
* Match only resources where the resource has the given tag. This parameter corresponds to
* the <code>_tag</code> URL parameter.
*
* @param theSystem The tag code system, or <code>null</code> to match any code system (this may not be supported on all servers)
* @param theCode The tag code. Must not be <code>null</code> or empty.
*/
IQuery<Y> withTag(String theSystem, String theCode);
// Y execute();
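A sketch of the _tag, _security and _profile filters documented above; all systems and URLs are illustrative.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Patient;

public class TagAndProfileSearchExample {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forDstu3();
        IGenericClient client = ctx.newRestfulGenericClient("http://example.com/fhir");

        // _tag, _security and _profile parameters expressed through the fluent API
        Bundle results = client.search()
            .forResource(Patient.class)
            .withTag("http://example.com/tags", "sample-tag")
            .withSecurity("http://example.com/security-labels", "restricted")
            .withProfile("http://example.com/StructureDefinition/my-patient")
            .returnBundle(Bundle.class)
            .execute();
        System.out.println(results.getTotal());
    }
}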

View File

@ -23,13 +23,13 @@ package ca.uhn.fhir.rest.gclient;
public interface ISort<T> {
/**
* Sort ascending
* Sort ascending
*/
IQuery<T> ascending(IParam theParam);
/**
* Sort ascending
*
* Sort ascending
*
* @param theParam The param name, e.g. "address"
*/
IQuery<T> ascending(String theParam);
@ -37,22 +37,30 @@ public interface ISort<T> {
/**
* Sort by the default order. Note that as of STU3, there is no longer
* a concept of default order, only ascending and descending. This method
* technically implies "ascending" but it makes more sense to use
* technically implies "ascending" but it makes more sense to use
* {@link #ascending(IParam)}
*/
IQuery<T> defaultOrder(IParam theParam);
/**
* Sort by the default order. Note that as of STU3, there is no longer
* a concept of default order, only ascending and descending. This method
* technically implies "ascending" but it makes more sense to use
* {@link #ascending(IParam)}
*/
IQuery<T> defaultOrder(String theParam);
/**
* Sort descending
*
* @param theParam A query param - Could be a constant such as <code>Patient.ADDRESS</code> or a custom
* param such as <code>new StringClientParam("foo")</code>
*
* @param theParam A query param - Could be a constant such as <code>Patient.ADDRESS</code> or a custom
* param such as <code>new StringClientParam("foo")</code>
*/
IQuery<T> descending(IParam theParam);
/**
 * Sort descending
 *
 * Sort descending
*
* @param theParam The param name, e.g. "address"
*/
IQuery<T> descending(String theParam);
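A short sketch showing both sorting styles referenced above, the fluent sort() chain and the new sort(SortSpec) overload; the server URL is illustrative.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Patient;

public class SortExample {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forDstu3();
        IGenericClient client = ctx.newRestfulGenericClient("http://example.com/fhir");

        // Fluent style: produces _sort=family
        Bundle byFamily = client.search()
            .forResource(Patient.class)
            .sort().ascending(Patient.FAMILY)
            .returnBundle(Bundle.class)
            .execute();

        // SortSpec style: produces _sort=-birthdate
        Bundle byBirthDateDesc = client.search()
            .forResource(Patient.class)
            .sort(new SortSpec("birthdate", SortOrderEnum.DESC))
            .returnBundle(Bundle.class)
            .execute();

        System.out.println(byFamily.getTotal() + " / " + byBirthDateDesc.getTotal());
    }
}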

View File

@ -20,12 +20,12 @@ package ca.uhn.fhir.rest.gclient;
* #L%
*/
import ca.uhn.fhir.rest.api.Constants;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import java.util.Arrays;
import java.util.List;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.api.Constants;
/**
*
* @author james
@ -59,6 +59,13 @@ public class StringClientParam extends BaseClientParam implements IParam {
return new StringExactly();
}
/**
* The string contains given value
*/
public IStringMatch contains() {
return new StringContains();
}
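A sketch of the new :contains modifier exposed above; the server URL is illustrative.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Patient;

public class StringContainsExample {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forDstu3();
        IGenericClient client = ctx.newRestfulGenericClient("http://example.com/fhir");

        // Produces address:contains=ontario, matching anywhere within the string
        Bundle results = client.search()
            .forResource(Patient.class)
            .where(Patient.ADDRESS.contains().value("ontario"))
            .returnBundle(Bundle.class)
            .execute();
        System.out.println(results.getTotal());
    }
}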
public interface IStringMatch {
/**
@ -76,7 +83,7 @@ public class StringClientParam extends BaseClientParam implements IParam {
/**
* Requests that resources be returned which match the given value
*/
ICriterion<StringClientParam> value(StringDt theValue);
ICriterion<StringClientParam> value(IPrimitiveType<String> theValue);
/**
* Requests that resources be returned which match ANY of the given values (this is an OR search, not an AND search). Note that to
@ -90,17 +97,17 @@ public class StringClientParam extends BaseClientParam implements IParam {
private class StringExactly implements IStringMatch {
@Override
public ICriterion<StringClientParam> value(String theValue) {
return new StringCriterion<StringClientParam>(getParamName() + Constants.PARAMQUALIFIER_STRING_EXACT, theValue);
return new StringCriterion<>(getParamName() + Constants.PARAMQUALIFIER_STRING_EXACT, theValue);
}
@Override
public ICriterion<StringClientParam> value(StringDt theValue) {
return new StringCriterion<StringClientParam>(getParamName() + Constants.PARAMQUALIFIER_STRING_EXACT, theValue.getValue());
public ICriterion<StringClientParam> value(IPrimitiveType<String> theValue) {
return new StringCriterion<>(getParamName() + Constants.PARAMQUALIFIER_STRING_EXACT, theValue.getValue());
}
@Override
public ICriterion<StringClientParam> values(List<String> theValue) {
return new StringCriterion<StringClientParam>(getParamName() + Constants.PARAMQUALIFIER_STRING_EXACT, theValue);
return new StringCriterion<>(getParamName() + Constants.PARAMQUALIFIER_STRING_EXACT, theValue);
}
@Override
@ -109,20 +116,42 @@ public class StringClientParam extends BaseClientParam implements IParam {
}
}
private class StringMatches implements IStringMatch {
private class StringContains implements IStringMatch {
@Override
public ICriterion<StringClientParam> value(String theValue) {
return new StringCriterion<StringClientParam>(getParamName(), theValue);
return new StringCriterion<>(getParamName() + Constants.PARAMQUALIFIER_STRING_CONTAINS, theValue);
}
@Override
public ICriterion<StringClientParam> value(StringDt theValue) {
return new StringCriterion<StringClientParam>(getParamName(), theValue.getValue());
public ICriterion<StringClientParam> value(IPrimitiveType<String> theValue) {
return new StringCriterion<>(getParamName() + Constants.PARAMQUALIFIER_STRING_CONTAINS, theValue.getValue());
}
@Override
public ICriterion<StringClientParam> values(List<String> theValue) {
return new StringCriterion<StringClientParam>(getParamName(), theValue);
return new StringCriterion<>(getParamName() + Constants.PARAMQUALIFIER_STRING_CONTAINS, theValue);
}
@Override
public ICriterion<?> values(String... theValues) {
return new StringCriterion<StringClientParam>(getParamName() + Constants.PARAMQUALIFIER_STRING_CONTAINS, Arrays.asList(theValues));
}
}
private class StringMatches implements IStringMatch {
@Override
public ICriterion<StringClientParam> value(String theValue) {
return new StringCriterion<>(getParamName(), theValue);
}
@Override
public ICriterion<StringClientParam> value(IPrimitiveType<String> theValue) {
return new StringCriterion<>(getParamName(), theValue.getValue());
}
@Override
public ICriterion<StringClientParam> values(List<String> theValue) {
return new StringCriterion<>(getParamName(), theValue);
}
@Override

View File

@ -1,8 +1,18 @@
package ca.uhn.fhir.rest.param;
import static ca.uhn.fhir.rest.param.ParamPrefixEnum.EQUAL;
import static ca.uhn.fhir.rest.param.ParamPrefixEnum.GREATERTHAN_OR_EQUALS;
import static ca.uhn.fhir.rest.param.ParamPrefixEnum.LESSTHAN_OR_EQUALS;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.IQueryParameterAnd;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import static ca.uhn.fhir.rest.param.ParamPrefixEnum.*;
import static java.lang.String.format;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@ -25,20 +35,12 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* limitations under the License.
* #L%
*/
import java.util.*;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.IQueryParameterAnd;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
@SuppressWarnings("UnusedReturnValue")
public class DateRangeParam implements IQueryParameterAnd<DateParam> {
private static final long serialVersionUID = 1L;
private DateParam myLowerBound;
private DateParam myUpperBound;
@ -52,15 +54,13 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
/**
* Constructor which takes two Dates representing the lower and upper bounds of the range (inclusive on both ends)
*
* @param theLowerBound
* A qualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
* @param theUpperBound
* A qualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
*
* @param theLowerBound A qualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
* @param theUpperBound A qualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
*/
public DateRangeParam(Date theLowerBound, Date theUpperBound) {
this();
@ -84,37 +84,35 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
setRangeFromDatesInclusive(theDateParam.getValueAsString(), theDateParam.getValueAsString());
} else {
switch (theDateParam.getPrefix()) {
case EQUAL:
setRangeFromDatesInclusive(theDateParam.getValueAsString(), theDateParam.getValueAsString());
break;
case STARTS_AFTER:
case GREATERTHAN:
case GREATERTHAN_OR_EQUALS:
validateAndSet(theDateParam, null);
break;
case ENDS_BEFORE:
case LESSTHAN:
case LESSTHAN_OR_EQUALS:
validateAndSet(null, theDateParam);
break;
default:
// Should not happen
throw new InvalidRequestException("Invalid comparator for date range parameter:" + theDateParam.getPrefix() + ". This is a bug.");
case EQUAL:
setRangeFromDatesInclusive(theDateParam.getValueAsString(), theDateParam.getValueAsString());
break;
case STARTS_AFTER:
case GREATERTHAN:
case GREATERTHAN_OR_EQUALS:
validateAndSet(theDateParam, null);
break;
case ENDS_BEFORE:
case LESSTHAN:
case LESSTHAN_OR_EQUALS:
validateAndSet(null, theDateParam);
break;
default:
// Should not happen
throw new InvalidRequestException("Invalid comparator for date range parameter:" + theDateParam.getPrefix() + ". This is a bug.");
}
}
}
/**
* Constructor which takes two Dates representing the lower and upper bounds of the range (inclusive on both ends)
*
* @param theLowerBound
* A qualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
* @param theUpperBound
* A qualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
*
* @param theLowerBound A qualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
* @param theUpperBound A qualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
*/
public DateRangeParam(DateParam theLowerBound, DateParam theUpperBound) {
this();
@ -123,15 +121,13 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
/**
* Constructor which takes two Dates representing the lower and upper bounds of the range (inclusive on both ends)
*
* @param theLowerBound
* A qualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
* @param theUpperBound
* A qualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
*
* @param theLowerBound A qualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
* @param theUpperBound A qualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
*/
public DateRangeParam(IPrimitiveType<Date> theLowerBound, IPrimitiveType<Date> theUpperBound) {
this();
@ -140,15 +136,13 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
/**
* Constructor which takes two strings representing the lower and upper bounds of the range (inclusive on both ends)
*
* @param theLowerBound
* An unqualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Either theLowerBound or theUpperBound may both be populated, or
* one may be null, but it is not valid for both to be null.
* @param theUpperBound
* An unqualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Either theLowerBound or theUpperBound may both be populated, or
* one may be null, but it is not valid for both to be null.
*
* @param theLowerBound An unqualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Either theLowerBound or theUpperBound may both be populated, or
* one may be null, but it is not valid for both to be null.
* @param theUpperBound An unqualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Either theLowerBound or theUpperBound may both be populated, or
* one may be null, but it is not valid for both to be null.
*/
public DateRangeParam(String theLowerBound, String theUpperBound) {
this();
@ -168,35 +162,99 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
myLowerBound = new DateParam(EQUAL, theParsed.getValueAsString());
myUpperBound = new DateParam(EQUAL, theParsed.getValueAsString());
}
} else {
switch (theParsed.getPrefix()) {
case GREATERTHAN:
case GREATERTHAN_OR_EQUALS:
if (myLowerBound != null) {
throw new InvalidRequestException("Can not have multiple date range parameters for the same param that specify a lower bound");
}
myLowerBound = theParsed;
break;
case LESSTHAN:
case LESSTHAN_OR_EQUALS:
if (myUpperBound != null) {
throw new InvalidRequestException("Can not have multiple date range parameters for the same param that specify an upper bound");
}
myUpperBound = theParsed;
break;
default:
throw new InvalidRequestException("Unknown comparator: " + theParsed.getPrefix());
case GREATERTHAN:
case GREATERTHAN_OR_EQUALS:
if (myLowerBound != null) {
throw new InvalidRequestException("Can not have multiple date range parameters for the same param that specify a lower bound");
}
myLowerBound = theParsed;
break;
case LESSTHAN:
case LESSTHAN_OR_EQUALS:
if (myUpperBound != null) {
throw new InvalidRequestException("Can not have multiple date range parameters for the same param that specify an upper bound");
}
myUpperBound = theParsed;
break;
default:
throw new InvalidRequestException("Unknown comparator: " + theParsed.getPrefix());
}
}
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof DateRangeParam)) {
return false;
}
DateRangeParam other = (DateRangeParam) obj;
return Objects.equals(myLowerBound, other.myLowerBound) &&
Objects.equals(myUpperBound, other.myUpperBound);
}
public DateParam getLowerBound() {
return myLowerBound;
}
public DateRangeParam setLowerBound(DateParam theLowerBound) {
validateAndSet(theLowerBound, myUpperBound);
return this;
}
/**
* Sets the lower bound using a string that is compliant with
* FHIR dateTime format (ISO-8601).
* <p>
* This lower bound is assumed to have a <code>ge</code>
* (greater than or equals) modifier.
* </p>
*/
public DateRangeParam setLowerBound(String theLowerBound) {
setLowerBound(new DateParam(GREATERTHAN_OR_EQUALS, theLowerBound));
return this;
}
/**
 * Sets the lower bound to be greater than or equal to the given date
*/
public DateRangeParam setLowerBoundInclusive(Date theLowerBound) {
validateAndSet(new DateParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, theLowerBound), myUpperBound);
return this;
}
/**
 * Sets the upper bound to be less than or equal to the given date
*/
public DateRangeParam setUpperBoundInclusive(Date theUpperBound) {
validateAndSet(myLowerBound, new DateParam(ParamPrefixEnum.LESSTHAN_OR_EQUALS, theUpperBound));
return this;
}
/**
 * Sets the lower bound to be greater than the given date
*/
public DateRangeParam setLowerBoundExclusive(Date theLowerBound) {
validateAndSet(new DateParam(ParamPrefixEnum.GREATERTHAN, theLowerBound), myUpperBound);
return this;
}
/**
 * Sets the upper bound to be less than the given date
*/
public DateRangeParam setUpperBoundExclusive(Date theUpperBound) {
validateAndSet(myLowerBound, new DateParam(ParamPrefixEnum.LESSTHAN, theUpperBound));
return this;
}
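A sketch exercising the new bound setters above together with a _lastUpdated search; the server URL is illustrative.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.param.DateRangeParam;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Observation;

import java.util.Calendar;
import java.util.Date;

public class DateRangeExample {
    public static void main(String[] args) {
        // Last 30 days: inclusive lower bound, exclusive upper bound
        Calendar cal = Calendar.getInstance();
        Date now = cal.getTime();
        cal.add(Calendar.DAY_OF_YEAR, -30);
        Date thirtyDaysAgo = cal.getTime();

        DateRangeParam range = new DateRangeParam();
        range.setLowerBoundInclusive(thirtyDaysAgo);   // ge<date>
        range.setUpperBoundExclusive(now);             // lt<date>

        FhirContext ctx = FhirContext.forDstu3();
        IGenericClient client = ctx.newRestfulGenericClient("http://example.com/fhir");
        Bundle results = client.search()
            .forResource(Observation.class)
            .lastUpdated(range)
            .returnBundle(Bundle.class)
            .execute();
        System.out.println(results.getTotal());
    }
}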
public Date getLowerBoundAsInstant() {
if (myLowerBound == null) {
return null;
@ -204,19 +262,19 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
Date retVal = myLowerBound.getValue();
if (myLowerBound.getPrefix() != null) {
switch (myLowerBound.getPrefix()) {
case GREATERTHAN:
case STARTS_AFTER:
retVal = myLowerBound.getPrecision().add(retVal, 1);
break;
case EQUAL:
case GREATERTHAN_OR_EQUALS:
break;
case LESSTHAN:
case APPROXIMATE:
case LESSTHAN_OR_EQUALS:
case ENDS_BEFORE:
case NOT_EQUAL:
throw new IllegalStateException("Unvalid lower bound comparator: " + myLowerBound.getPrefix());
case GREATERTHAN:
case STARTS_AFTER:
retVal = myLowerBound.getPrecision().add(retVal, 1);
break;
case EQUAL:
case GREATERTHAN_OR_EQUALS:
break;
case LESSTHAN:
case APPROXIMATE:
case LESSTHAN_OR_EQUALS:
case ENDS_BEFORE:
case NOT_EQUAL:
throw new IllegalStateException("Unvalid lower bound comparator: " + myLowerBound.getPrefix());
}
}
return retVal;
@ -226,6 +284,24 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
return myUpperBound;
}
/**
* Sets the upper bound using a string that is compliant with
* FHIR dateTime format (ISO-8601).
* <p>
* This upper bound is assumed to have a <code>le</code>
* (less than or equals) modifier.
* </p>
*/
public DateRangeParam setUpperBound(String theUpperBound) {
setUpperBound(new DateParam(LESSTHAN_OR_EQUALS, theUpperBound));
return this;
}
public DateRangeParam setUpperBound(DateParam theUpperBound) {
validateAndSet(myLowerBound, theUpperBound);
return this;
}
public Date getUpperBoundAsInstant() {
if (myUpperBound == null) {
return null;
@ -233,21 +309,21 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
Date retVal = myUpperBound.getValue();
if (myUpperBound.getPrefix() != null) {
switch (myUpperBound.getPrefix()) {
case LESSTHAN:
case ENDS_BEFORE:
retVal = new Date(retVal.getTime() - 1L);
break;
case EQUAL:
case LESSTHAN_OR_EQUALS:
retVal = myUpperBound.getPrecision().add(retVal, 1);
retVal = new Date(retVal.getTime() - 1L);
break;
case GREATERTHAN_OR_EQUALS:
case GREATERTHAN:
case APPROXIMATE:
case NOT_EQUAL:
case STARTS_AFTER:
throw new IllegalStateException("Unvalid upper bound comparator: " + myUpperBound.getPrefix());
case LESSTHAN:
case ENDS_BEFORE:
retVal = new Date(retVal.getTime() - 1L);
break;
case EQUAL:
case LESSTHAN_OR_EQUALS:
retVal = myUpperBound.getPrecision().add(retVal, 1);
retVal = new Date(retVal.getTime() - 1L);
break;
case GREATERTHAN_OR_EQUALS:
case GREATERTHAN:
case APPROXIMATE:
case NOT_EQUAL:
case STARTS_AFTER:
throw new IllegalStateException("Unvalid upper bound comparator: " + myUpperBound.getPrefix());
}
}
return retVal;
@ -273,46 +349,42 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
return bound != null && !bound.isEmpty();
}
@Override
public int hashCode() {
return Objects.hash(myLowerBound, myUpperBound);
}
public boolean isEmpty() {
return (getLowerBoundAsInstant() == null) && (getUpperBoundAsInstant() == null);
}
public DateRangeParam setLowerBound(DateParam theLowerBound) {
validateAndSet(theLowerBound, myUpperBound);
return this;
}
/**
* Sets the range from a pair of dates, inclusive on both ends
*
* @param theLowerBound
* A qualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
* @param theUpperBound
* A qualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
*
* @param theLowerBound A qualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
* @param theUpperBound A qualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
*/
public void setRangeFromDatesInclusive(Date theLowerBound, Date theUpperBound) {
DateParam lowerBound = theLowerBound != null
? new DateParam(GREATERTHAN_OR_EQUALS, theLowerBound) : null;
? new DateParam(GREATERTHAN_OR_EQUALS, theLowerBound) : null;
DateParam upperBound = theUpperBound != null
? new DateParam(LESSTHAN_OR_EQUALS, theUpperBound) : null;
? new DateParam(LESSTHAN_OR_EQUALS, theUpperBound) : null;
validateAndSet(lowerBound, upperBound);
}
/**
* Sets the range from a pair of dates, inclusive on both ends
*
* @param theLowerBound
* A qualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
* @param theUpperBound
* A qualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
*
* @param theLowerBound A qualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
* @param theUpperBound A qualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
*/
public void setRangeFromDatesInclusive(DateParam theLowerBound, DateParam theUpperBound) {
validateAndSet(theLowerBound, theUpperBound);
@ -322,15 +394,13 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
* Sets the range from a pair of dates, inclusive on both ends. Note that if
 * theLowerBound is after theUpperBound, this method will automatically reverse
* the order of the arguments in order to create an inclusive range.
*
* @param theLowerBound
* A qualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
* @param theUpperBound
* A qualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
*
* @param theLowerBound A qualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
* @param theUpperBound A qualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
*/
public void setRangeFromDatesInclusive(IPrimitiveType<Date> theLowerBound, IPrimitiveType<Date> theUpperBound) {
IPrimitiveType<Date> lowerBound = theLowerBound;
@ -349,23 +419,21 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
/**
* Sets the range from a pair of dates, inclusive on both ends
*
* @param theLowerBound
* A qualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
* @param theUpperBound
* A qualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
*
* @param theLowerBound A qualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
* @param theUpperBound A qualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or
* theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
*/
public void setRangeFromDatesInclusive(String theLowerBound, String theUpperBound) {
DateParam lowerBound = theLowerBound != null
? new DateParam(GREATERTHAN_OR_EQUALS, theLowerBound)
: null;
? new DateParam(GREATERTHAN_OR_EQUALS, theLowerBound)
: null;
DateParam upperBound = theUpperBound != null
? new DateParam(LESSTHAN_OR_EQUALS, theUpperBound)
: null;
? new DateParam(LESSTHAN_OR_EQUALS, theUpperBound)
: null;
if (isNotBlank(theLowerBound) && isNotBlank(theUpperBound) && theLowerBound.equals(theUpperBound)) {
lowerBound.setPrefix(EQUAL);
upperBound.setPrefix(EQUAL);
@ -373,14 +441,9 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
validateAndSet(lowerBound, upperBound);
}
public DateRangeParam setUpperBound(DateParam theUpperBound) {
validateAndSet(myLowerBound, theUpperBound);
return this;
}
@Override
public void setValuesAsQueryTokens(FhirContext theContext, String theParamName, List<QualifiedParamList> theParameters)
throws InvalidRequestException {
throws InvalidRequestException {
boolean haveHadUnqualifiedParameter = false;
for (QualifiedParamList paramList : theParameters) {
@ -391,13 +454,13 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
throw new InvalidRequestException("DateRange parameter does not suppport OR queries");
}
String param = paramList.get(0);
/*
 * Since ' ' is escaped as '+' we'll be nice to anyone who might have accidentally not
* escaped theirs
*/
param = param.replace(' ', '+');
DateParam parsed = new DateParam();
parsed.setValueAsQueryToken(theContext, theParamName, paramList.getQualifier(), param);
addParam(parsed);
@ -413,24 +476,6 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof DateRangeParam)) {
return false;
}
DateRangeParam other = (DateRangeParam) obj;
return Objects.equals(myLowerBound, other.myLowerBound) &&
Objects.equals(myUpperBound, other.myUpperBound);
}
@Override
public int hashCode() {
return Objects.hash(myLowerBound, myUpperBound);
}
@Override
public String toString() {
StringBuilder b = new StringBuilder();
@ -463,8 +508,8 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
if (hasBound(lowerBound) && hasBound(upperBound)) {
if (lowerBound.getValue().getTime() > upperBound.getValue().getTime()) {
throw new DataFormatException(format(
"Lower bound of %s is after upper bound of %s",
lowerBound.getValueAsString(), upperBound.getValueAsString()));
"Lower bound of %s is after upper bound of %s",
lowerBound.getValueAsString(), upperBound.getValueAsString()));
}
}
@ -473,13 +518,13 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
lowerBound.setPrefix(GREATERTHAN_OR_EQUALS);
}
switch (lowerBound.getPrefix()) {
case GREATERTHAN:
case GREATERTHAN_OR_EQUALS:
default:
break;
case LESSTHAN:
case LESSTHAN_OR_EQUALS:
throw new DataFormatException("Lower bound comparator must be > or >=, can not be " + lowerBound.getPrefix().getValue());
case GREATERTHAN:
case GREATERTHAN_OR_EQUALS:
default:
break;
case LESSTHAN:
case LESSTHAN_OR_EQUALS:
throw new DataFormatException("Lower bound comparator must be > or >=, can not be " + lowerBound.getPrefix().getValue());
}
}
@ -488,17 +533,18 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
upperBound.setPrefix(LESSTHAN_OR_EQUALS);
}
switch (upperBound.getPrefix()) {
case LESSTHAN:
case LESSTHAN_OR_EQUALS:
default:
break;
case GREATERTHAN:
case GREATERTHAN_OR_EQUALS:
throw new DataFormatException("Upper bound comparator must be < or <=, can not be " + upperBound.getPrefix().getValue());
case LESSTHAN:
case LESSTHAN_OR_EQUALS:
default:
break;
case GREATERTHAN:
case GREATERTHAN_OR_EQUALS:
throw new DataFormatException("Upper bound comparator must be < or <=, can not be " + upperBound.getPrefix().getValue());
}
}
myLowerBound = lowerBound;
myUpperBound = upperBound;
}
}

View File

@ -19,11 +19,13 @@ package ca.uhn.fhir.rest.server.exceptions;
* limitations under the License.
* #L%
*/
import org.hl7.fhir.instance.model.api.*;
import ca.uhn.fhir.model.base.composite.BaseIdentifierDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.util.CoverageIgnore;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
/**
 * Represents an <b>HTTP 410 Resource Gone</b> response, which generally
@ -33,12 +35,12 @@ import ca.uhn.fhir.util.CoverageIgnore;
public class ResourceGoneException extends BaseServerResponseException {
public static final int STATUS_CODE = Constants.STATUS_HTTP_410_GONE;
private static final long serialVersionUID = 1L;
/**
* Constructor which creates an error message based on a given resource ID
*
* @param theResourceId
* The ID of the resource that could not be found
*
* @param theResourceId The ID of the resource that could not be found
*/
public ResourceGoneException(IIdType theResourceId) {
super(STATUS_CODE, "Resource " + (theResourceId != null ? theResourceId.getValue() : "") + " is gone/deleted");
@ -46,7 +48,7 @@ public class ResourceGoneException extends BaseServerResponseException {
/**
* @deprecated This constructor has a dependency on a specific model version and will be removed. Deprecated in HAPI
* 1.6 - 2016-07-02
* 1.6 - 2016-07-02
*/
@Deprecated
public ResourceGoneException(Class<? extends IBaseResource> theClass, BaseIdentifierDt thePatientId) {
@ -55,11 +57,9 @@ public class ResourceGoneException extends BaseServerResponseException {
/**
* Constructor which creates an error message based on a given resource ID
*
* @param theClass
* The type of resource that could not be found
* @param theResourceId
* The ID of the resource that could not be found
*
* @param theClass The type of resource that could not be found
* @param theResourceId The ID of the resource that could not be found
*/
public ResourceGoneException(Class<? extends IBaseResource> theClass, IIdType theResourceId) {
super(STATUS_CODE, "Resource of type " + theClass.getSimpleName() + " with ID " + theResourceId + " is gone/deleted");
@ -67,20 +67,21 @@ public class ResourceGoneException extends BaseServerResponseException {
/**
* Constructor
*
* @param theMessage
* The message
* @param theOperationOutcome
* The OperationOutcome resource to return to the client
*
* @param theMessage The message
* @param theOperationOutcome The OperationOutcome resource to return to the client
*/
public ResourceGoneException(String theMessage, IBaseOperationOutcome theOperationOutcome) {
super(STATUS_CODE, theMessage, theOperationOutcome);
}
/**
* Constructor
*
* @param theMessage The message
*/
public ResourceGoneException(String theMessage) {
super(STATUS_CODE, theMessage);
}
private static final long serialVersionUID = 1L;
}
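A short sketch of the (Class, IIdType) constructor in use, e.g. when a read hits a deleted resource; the DSTU3 Patient model used here is purely illustrative:

import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.dstu3.model.Patient;

public class ResourceGoneSketch {
    // Signals an HTTP 410 for a resource that existed but has been deleted
    static void failIfDeleted(boolean deleted, IdType theId) {
        if (deleted) {
            throw new ResourceGoneException(Patient.class, theId);
        }
    }

    public static void main(String[] args) {
        failIfDeleted(false, new IdType("Patient/123"));
    }
}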

View File

@ -20,44 +20,42 @@ package ca.uhn.fhir.rest.server.exceptions;
* #L%
*/
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
/**
* Represents an <b>HTTP 422 Unprocessable Entity</b> response, which means that a resource was rejected by the server because it "violated applicable FHIR profiles or server business rules".
*
*
* <p>
* This exception will generally contain an {@link IBaseOperationOutcome} instance which details the failure.
* </p>
*
*
* @see InvalidRequestException Which corresponds to an <b>HTTP 400 Bad Request</b> failure
*/
@CoverageIgnore
public class UnprocessableEntityException extends BaseServerResponseException {
public static final int STATUS_CODE = Constants.STATUS_HTTP_422_UNPROCESSABLE_ENTITY;
private static final String DEFAULT_MESSAGE = "Unprocessable Entity";
private static final long serialVersionUID = 1L;
public static final int STATUS_CODE = Constants.STATUS_HTTP_422_UNPROCESSABLE_ENTITY;
/**
* Constructor
*
* @param theMessage
* The message to add to the status line
* @param theOperationOutcome The {@link IBaseOperationOutcome} resource to return to the client
*
* @param theMessage The message to add to the status line
* @param theOperationOutcome The {@link IBaseOperationOutcome} resource to return to the client
*/
public UnprocessableEntityException(String theMessage, IBaseOperationOutcome theOperationOutcome) {
super(STATUS_CODE, theMessage, theOperationOutcome);
}
/**
* Constructor which accepts an {@link IBaseOperationOutcome} resource which will be supplied in the response
*
*
* @deprecated Use constructor with FhirContext argument
*/
@Deprecated
@ -79,6 +77,13 @@ public class UnprocessableEntityException extends BaseServerResponseException {
super(STATUS_CODE, theMessage);
}
/**
* Constructor which accepts a String describing the issue along with a root cause. The string will be translated into an {@link IBaseOperationOutcome} resource which will be supplied in the response.
*/
public UnprocessableEntityException(String theMessage, Throwable theCause) {
super(STATUS_CODE, theMessage, theCause);
}
/**
* Constructor which accepts an array of Strings describing the issue. These strings will be translated into an {@link IBaseOperationOutcome} resource which will be supplied in the response.
*/
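A minimal sketch of the newly added (String, Throwable) constructor, preserving the original failure as the root cause of the 422 response; the triggering exception shown is hypothetical:

import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;

public class UnprocessableEntitySketch {
    public static void main(String[] args) {
        try {
            throw new IllegalArgumentException("identifier system is missing"); // stand-in for a real validation failure
        } catch (IllegalArgumentException e) {
            // Keep the message for the OperationOutcome and the exception as the root cause
            throw new UnprocessableEntityException("Resource violated server business rules: " + e.getMessage(), e);
        }
    }
}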

View File

@ -154,11 +154,11 @@ public class BundleUtil {
BaseRuntimeChildDefinition urlChild = requestElem.getChildByName("url");
BaseRuntimeChildDefinition methodChild = requestElem.getChildByName("method");
IBaseResource resource = null;
String url = null;
RequestTypeEnum requestType = null;
for (IBase nextEntry : entries) {
IBaseResource resource = null;
String url = null;
RequestTypeEnum requestType = null;
for (IBase next : resourceChild.getAccessor().getValues(nextEntry)) {
resource = (IBaseResource) next;
}

View File

@ -86,7 +86,7 @@ public class ReflectionUtil {
public static Class<?> getGenericCollectionTypeOfMethodParameter(Method theMethod, int theParamIndex) {
Class<?> type;
Type genericParameterType = theMethod.getGenericParameterTypes()[theParamIndex];
if (Class.class.equals(genericParameterType)) {
if (Class.class.equals(genericParameterType) || Class.class.equals(genericParameterType.getClass())) {
return null;
}
ParameterizedType collectionType = (ParameterizedType) genericParameterType;

View File

@ -32,6 +32,7 @@ import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.Locale;
import java.util.TimeZone;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicInteger;
import static org.apache.commons.lang3.StringUtils.defaultString;
@ -144,6 +145,24 @@ public class TestUtil {
}
}
/**
* <b>THIS IS FOR UNIT TESTS ONLY - DO NOT CALL THIS METHOD FROM USER CODE</b>
* <p>
* Wait for the value returned by the given source to reach a given target size and fail if it never does
*/
public static void waitForSize(int theTarget, Callable<Integer> theSource) throws Exception {
long start = System.currentTimeMillis();
while (theSource.call() != theTarget && (System.currentTimeMillis() - start) <= 15000) {
try {
Thread.sleep(50);
} catch (InterruptedException theE) {
throw new Error(theE);
}
}
if ((System.currentTimeMillis() - start) >= 15000) {
throw new IllegalStateException("Size " + theSource.call() + " is != target " + theTarget);
}
}
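A sketch of how a test might call the new polling helper, assuming it lives in ca.uhn.fhir.util.TestUtil (the file path is not visible in this diff):

import ca.uhn.fhir.util.TestUtil;

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

public class WaitForSizeSketch {
    public static void main(String[] args) throws Exception {
        List<String> received = new CopyOnWriteArrayList<>();

        // Something asynchronous fills the list...
        new Thread(() -> received.add("subscription delivery")).start();

        // ...and the test polls every 50ms until the expected size is reached,
        // failing with an IllegalStateException after roughly 15 seconds
        TestUtil.waitForSize(1, () -> received.size());
    }
}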
/**
* <b>THIS IS FOR UNIT TESTS ONLY - DO NOT CALL THIS METHOD FROM USER CODE</b>

View File

@ -34,8 +34,17 @@ public class UrlPathTokenizer {
return myTok.hasMoreTokens();
}
public String nextToken() {
return UrlUtil.unescape(myTok.nextToken());
/**
* Returns the next portion. Any URL-encoding is undone, but we will
* HTML encode the &lt; and &quot; marks since they are both
* not useful in URL paths in FHIR and potentially represent injection
* attacks.
*
* @see UrlUtil#sanitizeUrlPart(String)
* @see UrlUtil#unescape(String)
*/
public String nextTokenUnescapedAndSanitized() {
return UrlUtil.sanitizeUrlPart(UrlUtil.unescape(myTok.nextToken()));
}
}
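A small sketch of the renamed method, assuming UrlPathTokenizer still tokenizes on '/' as before:

import ca.uhn.fhir.util.UrlPathTokenizer;

public class TokenizerSketch {
    public static void main(String[] args) {
        UrlPathTokenizer tok = new UrlPathTokenizer("Patient/123%3C/_history");
        while (tok.hasMoreTokens()) {
            // %3C is URL-decoded to '<' and then HTML-encoded to "&lt;",
            // so the middle token prints as 123&lt;
            System.out.println(tok.nextTokenUnescapedAndSanitized());
        }
    }
}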

View File

@ -70,7 +70,7 @@ public class UrlUtil {
return theExtensionUrl;
}
if (theExtensionUrl == null) {
return theExtensionUrl;
return null;
}
int parentLastSlashIdx = theParentExtensionUrl.lastIndexOf('/');
@ -119,6 +119,18 @@ public class UrlUtil {
return value.startsWith("http://") || value.startsWith("https://");
}
public static boolean isNeedsSanitization(String theString) {
if (theString != null) {
for (int i = 0; i < theString.length(); i++) {
char nextChar = theString.charAt(i);
if (nextChar == '<' || nextChar == '"') {
return true;
}
}
}
return false;
}
public static boolean isValid(String theUrl) {
if (theUrl == null || theUrl.length() < 8) {
return false;
@ -164,7 +176,7 @@ public class UrlUtil {
}
public static Map<String, String[]> parseQueryString(String theQueryString) {
HashMap<String, List<String>> map = new HashMap<String, List<String>>();
HashMap<String, List<String>> map = new HashMap<>();
parseQueryString(theQueryString, map);
return toQueryStringMap(map);
}
@ -197,17 +209,13 @@ public class UrlUtil {
nextKey = unescape(nextKey);
nextValue = unescape(nextValue);
List<String> list = map.get(nextKey);
if (list == null) {
list = new ArrayList<>();
map.put(nextKey, list);
}
List<String> list = map.computeIfAbsent(nextKey, k -> new ArrayList<>());
list.add(nextValue);
}
}
public static Map<String, String[]> parseQueryStrings(String... theQueryString) {
HashMap<String, List<String>> map = new HashMap<String, List<String>>();
HashMap<String, List<String>> map = new HashMap<>();
for (String next : theQueryString) {
parseQueryString(next, map);
}
@ -222,7 +230,6 @@ public class UrlUtil {
* <li>[Resource Type]/[Resource ID]/_history/[Version ID]
* </ul>
*/
//@formatter:on
public static UrlParts parseUrl(String theUrl) {
String url = theUrl;
UrlParts retVal = new UrlParts();
@ -243,7 +250,7 @@ public class UrlUtil {
retVal.setVersionId(id.getVersionIdPart());
return retVal;
}
if (url.matches("\\/[a-zA-Z]+\\?.*")) {
if (url.matches("/[a-zA-Z]+\\?.*")) {
url = url.substring(1);
}
int nextStart = 0;
@ -282,12 +289,47 @@ public class UrlUtil {
}
//@formatter:off
/**
* This method specifically HTML-encodes the &quot; and
* &lt; characters in order to prevent injection attacks
*/
public static String sanitizeUrlPart(String theString) {
if (theString == null) {
return null;
}
boolean needsSanitization = isNeedsSanitization(theString);
if (needsSanitization) {
// Ok, we're sanitizing
StringBuilder buffer = new StringBuilder(theString.length() + 10);
for (int j = 0; j < theString.length(); j++) {
char nextChar = theString.charAt(j);
switch (nextChar) {
case '"':
buffer.append("&quot;");
break;
case '<':
buffer.append("&lt;");
break;
default:
buffer.append(nextChar);
break;
}
} // end of character-escaping loop
return buffer.toString();
}
return theString;
}
private static Map<String, String[]> toQueryStringMap(HashMap<String, List<String>> map) {
HashMap<String, String[]> retVal = new HashMap<String, String[]>();
HashMap<String, String[]> retVal = new HashMap<>();
for (Entry<String, List<String>> nextEntry : map.entrySet()) {
retVal.put(nextEntry.getKey(), nextEntry.getValue().toArray(new String[nextEntry.getValue().size()]));
retVal.put(nextEntry.getKey(), nextEntry.getValue().toArray(new String[0]));
}
return retVal;
}
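A quick illustration of the new sanitizer: only the '"' and '<' characters are escaped, everything else passes through unchanged:

import ca.uhn.fhir.util.UrlUtil;

public class SanitizeSketch {
    public static void main(String[] args) {
        System.out.println(UrlUtil.sanitizeUrlPart("name=\"<script>\""));
        // prints: name=&quot;&lt;script>&quot;

        System.out.println(UrlUtil.sanitizeUrlPart("Patient?name=smith"));
        // prints: Patient?name=smith (nothing needed sanitizing)
    }
}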

View File

@ -57,6 +57,7 @@ ca.uhn.fhir.validation.ValidationResult.noIssuesDetected=No issues detected duri
# JPA Messages
ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFailure=The operation has failed with a version constraint failure. This generally means that two clients/threads were trying to update the same resource at the same time, and this request was chosen as the failing request.
ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index.
@ -90,9 +91,13 @@ ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.successfulUpdate=Successfully update
ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.successfulDeletes=Successfully deleted {0} resource(s) in {1}ms
ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.invalidSearchParameter=Unknown search parameter "{0}". Valid search parameters for this search are: {1}
ca.uhn.fhir.jpa.dao.BaseSearchParamExtractor.failedToExtractPaths=Failed to extract values from resource using FHIRPath "{0}": {1}
ca.uhn.fhir.jpa.dao.SearchBuilder.invalidQuantityPrefix=Unable to handle quantity prefix "{0}" for value: {1}
ca.uhn.fhir.jpa.dao.SearchBuilder.invalidNumberPrefix=Unable to handle number prefix "{0}" for value: {1}
ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoSearchParameterR4.invalidSearchParamExpression=The expression "{0}" can not be evaluated and may be invalid: {1}
ca.uhn.fhir.jpa.provider.BaseJpaProvider.cantCombintAtAndSince=Unable to combine _at and _since parameters for history operation
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateConceptMapUrl=Can not create multiple ConceptMap resources with ConceptMap.url "{0}", already have one with resource ID: {1}

View File

@ -43,15 +43,13 @@ import org.slf4j.LoggerFactory;
import org.springframework.util.Base64Utils;
import java.io.*;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
import java.util.zip.GZIPInputStream;
import static org.apache.commons.lang3.StringUtils.*;
import static org.fusesource.jansi.Ansi.ansi;
public abstract class BaseCommand implements Comparable<BaseCommand> {
protected static final String BASE_URL_PARAM = "t";
@ -61,10 +59,10 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
protected static final String BASIC_AUTH_PARAM = "b";
protected static final String BASIC_AUTH_PARAM_LONGOPT = "basic-auth";
protected static final String BASIC_AUTH_PARAM_NAME = "basic-auth";
protected static final String BASIC_AUTH_PARAM_DESC = "If specified, this parameter supplies a username and password (in the format \"username:password\") to include in an HTTP Basic Auth header.";
protected static final String BASIC_AUTH_PARAM_DESC = "If specified, this parameter supplies a username and password (in the format \"username:password\") to include in an HTTP Basic Auth header. The value \"PROMPT\" may also be used to specify that an interactive prompt should request credentials from the user.";
protected static final String BEARER_TOKEN_PARAM_LONGOPT = "bearer-token";
protected static final String BEARER_TOKEN_PARAM_NAME = "bearer-token";
protected static final String BEARER_TOKEN_PARAM_DESC = "If specified, this parameter supplies a Bearer Token to supply with the request.";
protected static final String BEARER_TOKEN_PARAM_DESC = "If specified, this parameter supplies a Bearer Token to supply with the request. The value \"PROMPT\" may also be used to specify that an interactive prompt should request a Bearer Token from the user.";
protected static final String FHIR_VERSION_PARAM = "v";
protected static final String FHIR_VERSION_PARAM_LONGOPT = "fhir-version";
protected static final String FHIR_VERSION_PARAM_NAME = "version";
@ -74,6 +72,7 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
protected static final String VERBOSE_LOGGING_PARAM_DESC = "If specified, verbose logging will be used.";
// TODO: Don't use qualified names for loggers in HAPI CLI.
private static final Logger ourLog = LoggerFactory.getLogger(BaseCommand.class);
public static final String PROMPT = "PROMPT";
protected FhirContext myFhirCtx;
public BaseCommand() {
@ -89,6 +88,25 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
addOptionalOption(theOptions, null, BEARER_TOKEN_PARAM_LONGOPT, BEARER_TOKEN_PARAM_NAME, BEARER_TOKEN_PARAM_DESC);
}
protected String promptUser(String thePrompt) throws ParseException {
System.out.print(ansi().bold().fgBrightDefault());
System.out.print(thePrompt);
System.out.print(ansi().bold().fgBrightGreen());
System.out.flush();
BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
String retVal;
try {
retVal = reader.readLine();
} catch (IOException e) {
throw new ParseException("Failed to read input from user: "+ e.toString());
}
System.out.print(ansi().boldOff().fgDefault());
return retVal;
}
protected void addFhirVersionOption(Options theOptions) {
String versions = Arrays.stream(FhirVersionEnum.values())
.filter(t -> t != FhirVersionEnum.DSTU2_1 && t != FhirVersionEnum.DSTU2_HL7ORG)
@ -98,6 +116,8 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
addRequiredOption(theOptions, FHIR_VERSION_PARAM, FHIR_VERSION_PARAM_LONGOPT, FHIR_VERSION_PARAM_NAME, FHIR_VERSION_PARAM_DESC + versions);
}
private void addOption(Options theOptions, OptionGroup theOptionGroup, boolean theRequired, String theOpt, String theLongOpt, boolean theHasArgument, String theArgumentName, String theDescription) {
Option option = createOption(theRequired, theOpt, theLongOpt, theHasArgument, theDescription);
if (theHasArgument && isNotBlank(theArgumentName)) {
@ -108,7 +128,7 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
if (theOptions.getOption(theOpt) != null) {
throw new IllegalStateException("Duplicate option: " + theOpt);
}
if (theOptionGroup != null && theOptionGroup.getOptions().stream().anyMatch(t-> theOpt.equals(t.getOpt()))) {
if (theOptionGroup != null && theOptionGroup.getOptions().stream().anyMatch(t -> theOpt.equals(t.getOpt()))) {
throw new IllegalStateException("Duplicate option: " + theOpt);
}
}
@ -116,7 +136,7 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
if (theOptions.getOption(theLongOpt) != null) {
throw new IllegalStateException("Duplicate option: " + theLongOpt);
}
if (theOptionGroup != null && theOptionGroup.getOptions().stream().anyMatch(t-> theLongOpt.equals(t.getLongOpt()))) {
if (theOptionGroup != null && theOptionGroup.getOptions().stream().anyMatch(t -> theLongOpt.equals(t.getLongOpt()))) {
throw new IllegalStateException("Duplicate option: " + theOpt);
}
}
@ -195,7 +215,7 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
buffer.write(data, 0, nRead);
long fileSize = FileUtils.sizeOf(localFile);
if (fileSize > nextLog) {
System.err.print("\r" + Ansi.ansi().eraseLine());
System.err.print("\r" + ansi().eraseLine());
System.err.print(FileUtils.byteCountToDisplaySize(fileSize));
if (maxLength > 0) {
System.err.print(" [");
@ -224,17 +244,22 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
/**
* @return Returns the complete authorization header value using the "-b" option
*/
protected String getAndParseOptionBasicAuthHeader(CommandLine theCommandLine) {
protected String getAndParseOptionBasicAuthHeader(CommandLine theCommandLine) throws ParseException {
return getAndParseOptionBasicAuthHeader(theCommandLine, BASIC_AUTH_PARAM);
}
/**
* @return Returns the complete authorization header value using an arbitrary option
*/
protected String getAndParseOptionBasicAuthHeader(CommandLine theCommandLine, String theOptionName) {
protected String getAndParseOptionBasicAuthHeader(CommandLine theCommandLine, String theOptionName) throws ParseException {
String basicAuthHeaderValue = null;
if (theCommandLine.hasOption(theOptionName)) {
byte[] basicAuth = theCommandLine.getOptionValue(theOptionName).getBytes();
String optionValue = theCommandLine.getOptionValue(theOptionName);
if (PROMPT.equals(optionValue)) {
promptUser("Enter Basic Auth Credentials (format is \"username:password\"): ");
}
byte[] basicAuth = optionValue.getBytes();
String base64EncodedBasicAuth = Base64Utils.encodeToString(basicAuth);
basicAuthHeaderValue = Constants.HEADER_AUTHORIZATION_VALPREFIX_BASIC + base64EncodedBasicAuth;
} else {
@ -362,8 +387,12 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
throw new ParseException("Invalid target server specified, must begin with 'http' or 'file'.");
}
return newClientWithBaseUrl(theCommandLine, baseUrl, theBasicAuthOptionName, theBearerTokenOptionName);
}
protected IGenericClient newClientWithBaseUrl(CommandLine theCommandLine, String theBaseUrl, String theBasicAuthOptionName, String theBearerTokenOptionName) throws ParseException {
myFhirCtx.getRestfulClientFactory().setSocketTimeout(10 * 60 * 1000);
IGenericClient retVal = myFhirCtx.newRestfulGenericClient(baseUrl);
IGenericClient retVal = myFhirCtx.newRestfulGenericClient(theBaseUrl);
String basicAuthHeaderValue = getAndParseOptionBasicAuthHeader(theCommandLine, theBasicAuthOptionName);
if (isNotBlank(basicAuthHeaderValue)) {
@ -371,7 +400,7 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
}
if (isNotBlank(theBearerTokenOptionName)) {
String bearerToken = theCommandLine.getOptionValue(theBearerTokenOptionName);
String bearerToken = getAndParseBearerTokenAuthHeader(theCommandLine, theBearerTokenOptionName);
if (isNotBlank(bearerToken)) {
retVal.registerInterceptor(new SimpleRequestHeaderInterceptor(Constants.HEADER_AUTHORIZATION, Constants.HEADER_AUTHORIZATION_VALPREFIX_BEARER + bearerToken));
}
@ -380,6 +409,14 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
return retVal;
}
private String getAndParseBearerTokenAuthHeader(CommandLine theCommandLine, String theBearerTokenOptionName) throws ParseException {
String value = theCommandLine.getOptionValue(theBearerTokenOptionName);
if (PROMPT.equals(value)) {
return promptUser("Enter Bearer Token: ");
}
return value;
}
protected void parseFhirContext(CommandLine theCommandLine) throws ParseException {
String version = theCommandLine.getOptionValue(FHIR_VERSION_PARAM);
if (isBlank(version)) {

View File

@ -28,9 +28,6 @@ import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.dstu3.model.StringType;
import org.hl7.fhir.dstu3.model.UriType;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import static org.apache.commons.lang3.StringUtils.isBlank;
@ -82,10 +79,17 @@ public class UploadTerminologyCommand extends BaseCommand {
IGenericClient client = super.newClient(theCommandLine);
IBaseParameters inputParameters;
if (ctx.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
Parameters p = new Parameters();
p.addParameter().setName("url").setValue(new UriType(termUrl));
org.hl7.fhir.dstu3.model.Parameters p = new org.hl7.fhir.dstu3.model.Parameters();
p.addParameter().setName("url").setValue(new org.hl7.fhir.dstu3.model.UriType(termUrl));
for (String next : datafile) {
p.addParameter().setName("localfile").setValue(new StringType(next));
p.addParameter().setName("localfile").setValue(new org.hl7.fhir.dstu3.model.StringType(next));
}
inputParameters = p;
} else if (ctx.getVersion().getVersion() == FhirVersionEnum.R4) {
org.hl7.fhir.r4.model.Parameters p = new org.hl7.fhir.r4.model.Parameters();
p.addParameter().setName("url").setValue(new org.hl7.fhir.r4.model.UriType(termUrl));
for (String next : datafile) {
p.addParameter().setName("localfile").setValue(new org.hl7.fhir.r4.model.StringType(next));
}
inputParameters = p;
} else {

View File

@ -31,6 +31,13 @@
<appender-ref ref="STDOUT" />
</logger>
<!--
It's useful to have this log when uploading big terminologies
-->
<logger name="ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl" additivity="false" level="info">
<appender-ref ref="STDOUT" />
</logger>
<root level="warn">
<appender-ref ref="STDOUT" />

View File

@ -0,0 +1,60 @@
package ca.uhn.fhir.cli;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.junit.Test;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import static org.junit.Assert.assertEquals;
public class BaseCommandTest {
@Test
public void testUserPrompt() throws ParseException {
InputStream stdin = System.in;
try {
System.setIn(new ByteArrayInputStream("A VALUE".getBytes()));
String value = new MyBaseCommand().read();
assertEquals("A VALUE", value);
} finally {
System.setIn(stdin);
}
}
private static class MyBaseCommand extends BaseCommand {
@Override
public String getCommandDescription() {
return null;
}
String read() throws ParseException {
return promptUser("Enter a String: ");
}
@Override
public String getCommandName() {
return null;
}
@Override
public Options getOptions() {
return null;
}
@Override
public void run(CommandLine theCommandLine) {
}
}
public static void main(String[] theValue) throws ParseException {
new BaseCommandTest().testUserPrompt();
}
}

View File

@ -25,7 +25,7 @@ import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.rest.annotation.*;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.provider.AbstractHashMapResourceProvider;
import ca.uhn.fhir.rest.server.provider.HashMapResourceProvider;
import com.google.common.base.Charsets;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
@ -42,7 +42,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
* This is a subclass to implement FHIR operations specific to DSTU3 ConceptMap
* resources. Its superclass, {@link AbstractHashMapResourceProvider}, is a simple
* resources. Its superclass, {@link HashMapResourceProvider}, is a simple
* implementation of the resource provider interface that uses a HashMap to
* store all resources in memory.
* <p>
@ -53,7 +53,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* <li>Conditional update for DSTU3 ConceptMap resources by ConceptMap.url</li>
* </ul>
*/
public class HashMapResourceProviderConceptMapDstu3 extends AbstractHashMapResourceProvider<ConceptMap> {
public class HashMapResourceProviderConceptMapDstu3 extends HashMapResourceProvider<ConceptMap> {
@SuppressWarnings("unchecked")
public HashMapResourceProviderConceptMapDstu3(FhirContext theFhirContext) {
super(theFhirContext, ConceptMap.class);
@ -84,10 +84,10 @@ public class HashMapResourceProviderConceptMapDstu3 extends AbstractHashMapResou
return retVal;
}
@Override
@Update
public MethodOutcome updateConceptMapConditional(
public MethodOutcome update(
@ResourceParam ConceptMap theConceptMap,
@IdParam IdType theId,
@ConditionalUrlParam String theConditional) {
MethodOutcome methodOutcome = new MethodOutcome();
@ -112,14 +112,14 @@ public class HashMapResourceProviderConceptMapDstu3 extends AbstractHashMapResou
List<ConceptMap> conceptMaps = searchByUrl(url);
if (!conceptMaps.isEmpty()) {
methodOutcome = update(conceptMaps.get(0));
methodOutcome = super.update(conceptMaps.get(0), null);
} else {
methodOutcome = create(theConceptMap);
}
}
} else {
methodOutcome = update(theConceptMap);
methodOutcome = super.update(theConceptMap, null);
}
return methodOutcome;

View File

@ -25,7 +25,7 @@ import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.rest.annotation.*;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.provider.AbstractHashMapResourceProvider;
import ca.uhn.fhir.rest.server.provider.HashMapResourceProvider;
import com.google.common.base.Charsets;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
@ -42,7 +42,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
* This is a subclass to implement FHIR operations specific to R4 ConceptMap
* resources. Its superclass, {@link AbstractHashMapResourceProvider}, is a simple
* resources. Its superclass, {@link HashMapResourceProvider}, is a simple
* implementation of the resource provider interface that uses a HashMap to
* store all resources in memory.
* <p>
@ -53,7 +53,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* <li>Conditional update for R4 ConceptMap resources by ConceptMap.url</li>
* </ul>
*/
public class HashMapResourceProviderConceptMapR4 extends AbstractHashMapResourceProvider<ConceptMap> {
public class HashMapResourceProviderConceptMapR4 extends HashMapResourceProvider<ConceptMap> {
@SuppressWarnings("unchecked")
public HashMapResourceProviderConceptMapR4(FhirContext theFhirContext) {
super(theFhirContext, ConceptMap.class);
@ -84,16 +84,15 @@ public class HashMapResourceProviderConceptMapR4 extends AbstractHashMapResource
return retVal;
}
@Override
@Update
public MethodOutcome updateConceptMapConditional(
public MethodOutcome update(
@ResourceParam ConceptMap theConceptMap,
@IdParam IdType theId,
@ConditionalUrlParam String theConditional) {
MethodOutcome methodOutcome = new MethodOutcome();
if (theConditional != null) {
String url = null;
try {
@ -112,14 +111,14 @@ public class HashMapResourceProviderConceptMapR4 extends AbstractHashMapResource
List<ConceptMap> conceptMaps = searchByUrl(url);
if (!conceptMaps.isEmpty()) {
methodOutcome = update(conceptMaps.get(0));
methodOutcome = super.update(conceptMaps.get(0), null);
} else {
methodOutcome = create(theConceptMap);
}
}
} else {
methodOutcome = update(theConceptMap);
methodOutcome = super.update(theConceptMap, null);
}
return methodOutcome;

View File

@ -1,6 +1,7 @@
package ca.uhn.fhir.okhttp.client;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@ -74,11 +75,11 @@ public class OkHttpRestfulRequest implements IHttpRequest {
@Override
public Map<String, List<String>> getAllHeaders() {
return myRequestBuilder.build().headers().toMultimap();
return Collections.unmodifiableMap(myRequestBuilder.build().headers().toMultimap());
}
@Override
public String getRequestBodyFromStream() throws IOException {
public String getRequestBodyFromStream() {
// returning null to indicate this is not supported, as documented in IHttpRequest's contract
return null;
}

View File

@ -22,10 +22,7 @@ package ca.uhn.fhir.rest.client.apache;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.*;
import ca.uhn.fhir.util.StopWatch;
import org.apache.commons.io.IOUtils;
@ -70,14 +67,14 @@ public class ApacheHttpRequest implements IHttpRequest {
@Override
public Map<String, List<String>> getAllHeaders() {
Map<String, List<String>> result = new HashMap<String, List<String>>();
Map<String, List<String>> result = new HashMap<>();
for (Header header : myRequest.getAllHeaders()) {
if (!result.containsKey(header.getName())) {
result.put(header.getName(), new LinkedList<String>());
result.put(header.getName(), new LinkedList<>());
}
result.get(header.getName()).add(header.getValue());
}
return result;
return Collections.unmodifiableMap(result);
}
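Since the OkHttp, Apache and JAX-RS requests now return unmodifiable maps, callers can still read headers but any mutation fails; a brief sketch against the IHttpRequest interface:

import ca.uhn.fhir.rest.client.api.IHttpRequest;

import java.util.Collections;

public class ReadOnlyHeadersSketch {
    static void inspect(IHttpRequest theRequest) {
        // Reading is unchanged
        theRequest.getAllHeaders().forEach((name, values) -> System.out.println(name + ": " + values));

        try {
            // Mutation now fails regardless of the underlying client implementation
            theRequest.getAllHeaders().put("X-Custom", Collections.singletonList("value"));
        } catch (UnsupportedOperationException e) {
            System.out.println("Header map is read-only");
        }
    }
}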
/**

View File

@ -316,7 +316,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
private static void addParam(Map<String, List<String>> params, String parameterName, String parameterValue) {
if (!params.containsKey(parameterName)) {
params.put(parameterName, new ArrayList<String>());
params.put(parameterName, new ArrayList<>());
}
params.get(parameterName).add(parameterValue);
}
@ -516,6 +516,19 @@ public class GenericClient extends BaseClient implements IGenericClient {
return (QUERY) this;
}
@Override
public QUERY whereMap(Map<String, List<String>> theRawMap) {
if (theRawMap != null) {
for (String nextKey : theRawMap.keySet()) {
for (String nextValue : theRawMap.get(nextKey)) {
addParam(myParams, nextKey, nextValue);
}
}
}
return (QUERY) this;
}
@SuppressWarnings("unchecked")
@Override
public QUERY where(Map<String, List<IQueryParameterType>> theCriterion) {
@ -743,6 +756,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
private Class<? extends IBaseBundle> myReturnType;
private IPrimitiveType mySince;
private Class<? extends IBaseResource> myType;
private DateRangeParam myAt;
@SuppressWarnings("unchecked")
@Override
@ -752,6 +766,12 @@ public class GenericClient extends BaseClient implements IGenericClient {
return this;
}
@Override
public IHistoryTyped at(DateRangeParam theDateRangeParam) {
myAt = theDateRangeParam;
return this;
}
@Override
public IHistoryTyped count(Integer theCount) {
myCount = theCount;
@ -774,7 +794,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
id = null;
}
HttpGetClientInvocation invocation = HistoryMethodBinding.createHistoryInvocation(myContext, resourceName, id, mySince, myCount);
HttpGetClientInvocation invocation = HistoryMethodBinding.createHistoryInvocation(myContext, resourceName, id, mySince, myCount, myAt);
IClientResponseHandler handler;
handler = new ResourceResponseHandler(myReturnType, getPreferResponseTypes(myType));
@ -1103,7 +1123,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
@SuppressWarnings("unchecked")
@Override
public Object execute() {
if (myOperationName != null && myOperationName.equals(Constants.EXTOP_PROCESS_MESSAGE)) {
if (myOperationName != null && myOperationName.equals(Constants.EXTOP_PROCESS_MESSAGE) && myMsgBundle != null) {
Map<String, List<String>> urlParams = new LinkedHashMap<String, List<String>>();
// Set Url parameter Async and Response-Url
if (myIsAsync != null) {
@ -1847,6 +1867,16 @@ public class GenericClient extends BaseClient implements IGenericClient {
return retVal;
}
@Override
public IQuery sort(SortSpec theSortSpec) {
SortSpec sortSpec = theSortSpec;
while (sortSpec != null) {
mySort.add(new SortInternal(sortSpec));
sortSpec = sortSpec.getChain();
}
return this;
}
@Override
public IQuery usingStyle(SearchStyleEnum theStyle) {
mySearchStyle = theStyle;
@ -2133,6 +2163,18 @@ public class GenericClient extends BaseClient implements IGenericClient {
myFor = theFor;
}
public SortInternal(SortSpec theSortSpec) {
if (theSortSpec.getOrder() == null) {
myParamName = Constants.PARAM_SORT;
} else if (theSortSpec.getOrder() == SortOrderEnum.ASC) {
myParamName = Constants.PARAM_SORT_ASC;
} else if (theSortSpec.getOrder() == SortOrderEnum.DESC) {
myParamName = Constants.PARAM_SORT_DESC;
}
myDirection = theSortSpec.getOrder();
myParamValue = theSortSpec.getParamName();
}
@Override
public IQuery ascending(IParam theParam) {
myParamName = Constants.PARAM_SORT_ASC;
@ -2157,6 +2199,14 @@ public class GenericClient extends BaseClient implements IGenericClient {
return myFor;
}
@Override
public IQuery defaultOrder(String theParam) {
myParamName = Constants.PARAM_SORT;
myDirection = null;
myParamValue = theParam;
return myFor;
}
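A hedged sketch pulling together the fluent additions above (whereMap, sort(SortSpec) and history().at(...)); the server URL, DSTU3 model classes and the SortSpec(String, SortOrderEnum) constructor are illustrative assumptions, not part of this change:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.param.DateRangeParam;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Patient;

import java.util.Collections;
import java.util.List;
import java.util.Map;

public class FluentClientSketch {
    public static void main(String[] args) {
        IGenericClient client = FhirContext.forDstu3()
            .newRestfulGenericClient("http://hapi.fhir.org/baseDstu3");

        // whereMap: feed raw name/value pairs (e.g. parsed from an incoming query string)
        // into the fluent search, alongside or instead of the typed criteria
        Map<String, List<String>> raw = Collections.singletonMap("name", Collections.singletonList("smith"));
        Bundle searchResult = client.search()
            .forResource(Patient.class)
            .whereMap(raw)
            .sort(new SortSpec("family", SortOrderEnum.ASC)) // sort(SortSpec) mirrors the server-side SortSpec
            .returnBundle(Bundle.class)
            .execute();

        // at(): adds _at parameters to a history query, one per DateParam in the range
        client.history()
            .onType(Patient.class)
            .andReturnBundle(Bundle.class)
            .at(new DateRangeParam("2018-01-01", "2018-02-01"))
            .execute();
    }
}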
@Override
public IQuery descending(IParam theParam) {
myParamName = Constants.PARAM_SORT_DESC;

View File

@ -29,6 +29,7 @@ import org.apache.commons.lang3.StringUtils;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.client.api.*;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.apache.commons.lang3.Validate;
/**
* HTTP interceptor to be used for adding HTTP basic auth username/password tokens
@ -42,23 +43,29 @@ public class BasicAuthInterceptor implements IClientInterceptor {
private String myUsername;
private String myPassword;
private String myHeaderValue;
public BasicAuthInterceptor(String theUsername, String thePassword) {
super();
myUsername = theUsername;
myPassword = thePassword;
/**
* @param theUsername The username
* @param thePassword The password
*/
public BasicAuthInterceptor(String theUsername, String thePassword) {
this(StringUtils.defaultString(theUsername) + ":" + StringUtils.defaultString(thePassword));
}
/**
* @param theCredentialString A credential string in the format <code>username:password</code>
*/
public BasicAuthInterceptor(String theCredentialString) {
Validate.notBlank(theCredentialString, "theCredentialString must not be null or blank");
Validate.isTrue(theCredentialString.contains(":"), "theCredentialString must be in the format 'username:password'");
String encoded = Base64.encodeBase64String(theCredentialString.getBytes(Constants.CHARSET_US_ASCII));
myHeaderValue = "Basic " + encoded;
}
@Override
public void interceptRequest(IHttpRequest theRequest) {
String authorizationUnescaped = StringUtils.defaultString(myUsername) + ":" + StringUtils.defaultString(myPassword);
String encoded;
try {
encoded = Base64.encodeBase64String(authorizationUnescaped.getBytes("ISO-8859-1"));
} catch (UnsupportedEncodingException e) {
throw new InternalErrorException("Could not find US-ASCII encoding. This shouldn't happen!");
}
theRequest.addHeader(Constants.HEADER_AUTHORIZATION, ("Basic " + encoded));
theRequest.addHeader(Constants.HEADER_AUTHORIZATION, myHeaderValue);
}
@Override
@ -66,6 +73,4 @@ public class BasicAuthInterceptor implements IClientInterceptor {
// nothing
}
}
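With the Authorization header value now precomputed in the constructors, both forms are equivalent; a minimal registration sketch (the server URL is illustrative):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.interceptor.BasicAuthInterceptor;

public class BasicAuthSketch {
    public static void main(String[] args) {
        IGenericClient client = FhirContext.forDstu3()
            .newRestfulGenericClient("http://example.com/fhir");

        // Two-argument and single "username:password" forms produce the same Authorization header
        client.registerInterceptor(new BasicAuthInterceptor("someuser", "somepassword"));
        // client.registerInterceptor(new BasicAuthInterceptor("someuser:somepassword"));
    }
}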

View File

@ -20,19 +20,20 @@ package ca.uhn.fhir.rest.client.interceptor;
* #L%
*/
import java.io.IOException;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import ca.uhn.fhir.rest.client.api.IClientInterceptor;
import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.client.api.IHttpResponse;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import java.io.IOException;
/**
* Client interceptor which simply captures request and response objects and stores them so that they can be inspected after a client
* call has returned
*
* @see ThreadLocalCapturingInterceptor for an interceptor that uses a ThreadLocal in order to work in multithreaded environments
*/
public class CapturingInterceptor implements IClientInterceptor {
@ -63,10 +64,16 @@ public class CapturingInterceptor implements IClientInterceptor {
@Override
public void interceptResponse(IHttpResponse theResponse) {
// Buffer the response to avoid errors when the content has already been read and the entity is not repeatable
bufferResponse(theResponse);
myLastResponse = theResponse;
}
static void bufferResponse(IHttpResponse theResponse) {
try {
if(theResponse.getResponse() instanceof HttpResponse) {
if (theResponse.getResponse() instanceof HttpResponse) {
HttpEntity entity = ((HttpResponse) theResponse.getResponse()).getEntity();
if( entity != null && !entity.isRepeatable()){
if (entity != null && !entity.isRepeatable()) {
theResponse.bufferEntity();
}
} else {
@ -75,9 +82,6 @@ public class CapturingInterceptor implements IClientInterceptor {
} catch (IOException e) {
throw new InternalErrorException("Unable to buffer the entity for capturing", e);
}
myLastResponse = theResponse;
}
}

View File

@ -0,0 +1,99 @@
package ca.uhn.fhir.rest.client.interceptor;
/*-
* #%L
* HAPI FHIR - Client Framework
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.rest.client.api.IClientInterceptor;
import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.client.api.IHttpResponse;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import java.io.IOException;
/**
* This is a client interceptor that captures the current request and response
* in a ThreadLocal variable, meaning that it can work in multithreaded
* environments without mixing up requests.
* <p>
* Use this with caution, since <b>this interceptor does not automatically clean up</b>
* the ThreadLocal after setting it. You must make sure to call
* {@link #clearThreadLocals()} after a given request has been completed,
* or you will end up leaving stale request/response objects associated
* with threads that no longer need them.
* </p>
*
* @see CapturingInterceptor for an equivalent interceptor that does not use a ThreadLocal
* @since 3.5.0
*/
public class ThreadLocalCapturingInterceptor implements IClientInterceptor {
private final ThreadLocal<IHttpRequest> myRequestThreadLocal = new ThreadLocal<>();
private final ThreadLocal<IHttpResponse> myResponseThreadLocal = new ThreadLocal<>();
private boolean myBufferResponse;
/**
* This method should be called at the end of any request process, in
* order to clear the last request and response from the current thread.
*/
public void clearThreadLocals() {
myRequestThreadLocal.remove();
myResponseThreadLocal.remove();
}
public IHttpRequest getRequestForCurrentThread() {
return myRequestThreadLocal.get();
}
public IHttpResponse getResponseForCurrentThread() {
return myResponseThreadLocal.get();
}
@Override
public void interceptRequest(IHttpRequest theRequest) {
myRequestThreadLocal.set(theRequest);
}
@Override
public void interceptResponse(IHttpResponse theResponse) {
if (isBufferResponse()) {
CapturingInterceptor.bufferResponse(theResponse);
}
myResponseThreadLocal.set(theResponse);
}
/**
* Should we buffer (capture) the response body? This defaults to
* <code>false</code>. Set to <code>true</code> if you are planning on
* examining response bodies after the response processing is complete.
*/
public boolean isBufferResponse() {
return myBufferResponse;
}
/**
* Should we buffer (capture) the response body? This defaults to
* <code>false</code>. Set to <code>true</code> if you are planning on
* examining response bodies after the response processing is complete.
*/
public ThreadLocalCapturingInterceptor setBufferResponse(boolean theBufferResponse) {
myBufferResponse = theBufferResponse;
return this;
}
}
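A usage sketch matching the javadoc above, including the mandatory cleanup; the target server and resource type are illustrative:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.interceptor.ThreadLocalCapturingInterceptor;
import org.hl7.fhir.dstu3.model.CapabilityStatement;

public class ThreadLocalCaptureSketch {
    public static void main(String[] args) {
        ThreadLocalCapturingInterceptor capture = new ThreadLocalCapturingInterceptor()
            .setBufferResponse(true);

        IGenericClient client = FhirContext.forDstu3()
            .newRestfulGenericClient("http://hapi.fhir.org/baseDstu3");
        client.registerInterceptor(capture);

        try {
            client.capabilities().ofType(CapabilityStatement.class).execute();
            System.out.println("HTTP status seen on this thread: "
                + capture.getResponseForCurrentThread().getStatus());
        } finally {
            // Per the javadoc: always clear the ThreadLocals when the request is done
            capture.clearThreadLocals();
        }
    }
}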

View File

@ -57,17 +57,6 @@ abstract class BaseHttpClientInvocationWithContents extends BaseHttpClientInvoca
private IIdType myForceResourceId;
public BaseHttpClientInvocationWithContents(FhirContext theContext, IBaseResource theResource, Map<String, List<String>> theParams, String... theUrlPath) {
super(theContext);
myResource = theResource;
myUrlPath = StringUtils.join(theUrlPath, '/');
myResources = null;
myContents = null;
myParams = theParams;
myBundleType = null;
}
public BaseHttpClientInvocationWithContents(FhirContext theContext, IBaseResource theResource, String theUrlPath) {
super(theContext);
myResource = theResource;
@ -105,17 +94,6 @@ abstract class BaseHttpClientInvocationWithContents extends BaseHttpClientInvoca
myBundleType = null;
}
public BaseHttpClientInvocationWithContents(FhirContext theContext, String theContents, Map<String, List<String>> theParams, String... theUrlPath) {
super(theContext);
myResource = null;
myUrlPath = StringUtils.join(theUrlPath, '/');
myResources = null;
myContents = theContents;
myParams = theParams;
myBundleType = null;
}
@Override
public IHttpRequest asHttpRequest(String theUrlBase, Map<String, List<String>> theExtraParams, EncodingEnum theEncoding, Boolean thePrettyPrint) throws DataFormatException {
StringBuilder url = new StringBuilder();

View File

@ -26,6 +26,8 @@ import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Date;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import org.hl7.fhir.instance.model.api.*;
import ca.uhn.fhir.context.FhirContext;
@ -96,7 +98,7 @@ public class HistoryMethodBinding extends BaseResourceReturningMethodBinding {
}
String historyId = id != null ? id.getIdPart() : null;
HttpGetClientInvocation retVal = createHistoryInvocation(getContext(), resourceName, historyId, null, null);
HttpGetClientInvocation retVal = createHistoryInvocation(getContext(), resourceName, historyId, null, null, null);
if (theArgs != null) {
for (int idx = 0; idx < theArgs.length; idx++) {
@ -108,7 +110,7 @@ public class HistoryMethodBinding extends BaseResourceReturningMethodBinding {
return retVal;
}
public static HttpGetClientInvocation createHistoryInvocation(FhirContext theContext, String theResourceName, String theId, IPrimitiveType<Date> theSince, Integer theLimit) {
public static HttpGetClientInvocation createHistoryInvocation(FhirContext theContext, String theResourceName, String theId, IPrimitiveType<Date> theSince, Integer theLimit, DateRangeParam theAt) {
StringBuilder b = new StringBuilder();
if (theResourceName != null) {
b.append(theResourceName);
@ -129,8 +131,18 @@ public class HistoryMethodBinding extends BaseResourceReturningMethodBinding {
}
if (theLimit != null) {
b.append(haveParam ? '&' : '?');
haveParam = true;
b.append(Constants.PARAM_COUNT).append('=').append(theLimit);
}
if (theAt != null) {
for (DateParam next : theAt.getValuesAsQueryTokens()) {
b.append(haveParam ? '&' : '?');
haveParam = true;
b.append(Constants.PARAM_AT);
b.append("=");
b.append(next.getValueAsQueryToken(theContext));
}
}
HttpGetClientInvocation retVal = new HttpGetClientInvocation(theContext, b.toString());
return retVal;

View File

@ -53,7 +53,7 @@ public class SearchParameter extends BaseQueryParameter {
ourParamTypes.put(StringParam.class, RestSearchParameterTypeEnum.STRING);
ourParamTypes.put(StringOrListParam.class, RestSearchParameterTypeEnum.STRING);
ourParamTypes.put(StringAndListParam.class, RestSearchParameterTypeEnum.STRING);
ourParamQualifiers.put(RestSearchParameterTypeEnum.STRING, CollectionUtil.newSet(Constants.PARAMQUALIFIER_STRING_EXACT, Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING));
ourParamQualifiers.put(RestSearchParameterTypeEnum.STRING, CollectionUtil.newSet(Constants.PARAMQUALIFIER_STRING_EXACT, Constants.PARAMQUALIFIER_STRING_CONTAINS, Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING));
ourParamTypes.put(UriParam.class, RestSearchParameterTypeEnum.URI);
ourParamTypes.put(UriOrListParam.class, RestSearchParameterTypeEnum.URI);

View File

@ -28,10 +28,7 @@ import ca.uhn.fhir.util.StopWatch;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.core.Response;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.*;
/**
* A Http Request based on JaxRs. This is an adapter around the class
@ -41,7 +38,7 @@ import java.util.Map;
*/
public class JaxRsHttpRequest implements IHttpRequest {
private final Map<String, List<String>> myHeaders = new HashMap<String, List<String>>();
private final Map<String, List<String>> myHeaders = new HashMap<>();
private Invocation.Builder myRequest;
private RequestTypeEnum myRequestType;
private Entity<?> myEntity;
@ -55,7 +52,7 @@ public class JaxRsHttpRequest implements IHttpRequest {
@Override
public void addHeader(String theName, String theValue) {
if (!myHeaders.containsKey(theName)) {
myHeaders.put(theName, new LinkedList<String>());
myHeaders.put(theName, new LinkedList<>());
}
myHeaders.get(theName).add(theValue);
getRequest().header(theName, theValue);
@ -71,7 +68,7 @@ public class JaxRsHttpRequest implements IHttpRequest {
@Override
public Map<String, List<String>> getAllHeaders() {
return this.myHeaders;
return Collections.unmodifiableMap(this.myHeaders);
}
/**

View File

@ -502,6 +502,7 @@
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava-testlib</artifactId>

View File

@ -50,6 +50,7 @@ import org.springframework.scheduling.concurrent.ScheduledExecutorFactoryBean;
import org.springframework.scheduling.config.ScheduledTaskRegistrar;
import javax.annotation.Nonnull;
import java.util.concurrent.ScheduledExecutorService;
@Configuration
@EnableScheduling
@ -100,10 +101,11 @@ public abstract class BaseConfig implements SchedulingConfigurer {
}
@Bean()
public ScheduledExecutorFactoryBean scheduledExecutorService() {
public ScheduledExecutorService scheduledExecutorService() {
ScheduledExecutorFactoryBean b = new ScheduledExecutorFactoryBean();
b.setPoolSize(5);
return b;
b.afterPropertiesSet();
return b.getObject();
}
@Bean(autowire = Autowire.BY_TYPE)
@ -147,8 +149,8 @@ public abstract class BaseConfig implements SchedulingConfigurer {
@Bean(name = TASK_EXECUTOR_NAME)
public TaskScheduler taskScheduler() {
ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler();
retVal.setConcurrentExecutor(scheduledExecutorService().getObject());
retVal.setScheduledExecutor(scheduledExecutorService().getObject());
retVal.setConcurrentExecutor(scheduledExecutorService());
retVal.setScheduledExecutor(scheduledExecutorService());
return retVal;
}

View File

@ -8,10 +8,10 @@ import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.model.dstu2.composite.MetaDt;
import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.instance.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.instance.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.instance.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.instance.utils.IResourceValidator.BestPracticeWarningLevel;
import org.hl7.fhir.r4.utils.IResourceValidator;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.context.annotation.Bean;
@ -81,7 +81,7 @@ public class BaseDstu2Config extends BaseConfig {
public IValidatorModule instanceValidatorDstu2() {
FhirInstanceValidator retVal = new FhirInstanceValidator();
retVal.setBestPracticeWarningLevel(IResourceValidator.BestPracticeWarningLevel.Warning);
retVal.setValidationSupport(new ValidationSupportChain(new DefaultProfileValidationSupport(), jpaValidationSupportDstu2()));
retVal.setValidationSupport(new CachingValidationSupport(new ValidationSupportChain(new DefaultProfileValidationSupport(), jpaValidationSupportDstu2())));
return retVal;
}
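Outside of Spring configuration the caching wrapper composes the same way; a standalone DSTU2-flavoured sketch using only the classes imported above (the JPA-backed support is omitted):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.validation.FhirValidator;
import org.hl7.fhir.instance.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.instance.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.instance.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.instance.hapi.validation.ValidationSupportChain;

public class CachedValidationSketch {
    public static void main(String[] args) {
        FhirInstanceValidator module = new FhirInstanceValidator();

        // The cache sits in front of the whole chain, so repeated StructureDefinition
        // and ValueSet lookups during validation are served from memory
        module.setValidationSupport(new CachingValidationSupport(
            new ValidationSupportChain(new DefaultProfileValidationSupport())));

        FhirValidator validator = FhirContext.forDstu2().newValidator();
        validator.registerValidatorModule(module);
    }
}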
@ -91,6 +91,13 @@ public class BaseDstu2Config extends BaseConfig {
return retVal;
}
@Bean(name = "myResourceCountsCache")
public ResourceCountCache resourceCountsCache() {
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu2().getResourceCounts());
retVal.setCacheMillis(60 * DateUtils.MILLIS_PER_SECOND);
return retVal;
}
@Bean(autowire = Autowire.BY_TYPE)
public IFulltextSearchSvc searchDao() {
FulltextSearchSvcImpl searchDao = new FulltextSearchSvcImpl();
@ -121,13 +128,6 @@ public class BaseDstu2Config extends BaseConfig {
return retVal;
}
@Bean(name = "myResourceCountsCache")
public ResourceCountCache resourceCountsCache() {
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu2().getResourceCounts());
retVal.setCacheMillis(60 * DateUtils.MILLIS_PER_SECOND);
return retVal;
}
@Bean(autowire = Autowire.BY_TYPE)
public IHapiTerminologySvc terminologyService() {
return new HapiTerminologySvcDstu2();

View File

@ -3,10 +3,8 @@ package ca.uhn.fhir.jpa.config.dstu3;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.ParserOptions;
import ca.uhn.fhir.jpa.config.BaseConfig;
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.ISearchParamRegistry;
import ca.uhn.fhir.jpa.dao.*;
import ca.uhn.fhir.jpa.dao.dstu3.TransactionProcessorVersionAdapterDstu3;
import ca.uhn.fhir.jpa.dao.dstu3.SearchParamExtractorDstu3;
import ca.uhn.fhir.jpa.dao.dstu3.SearchParamRegistryDstu3;
import ca.uhn.fhir.jpa.provider.dstu3.TerminologyUploaderProviderDstu3;
@ -19,7 +17,9 @@ import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainDstu3;
import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.r4.utils.IResourceValidator;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.context.annotation.Bean;
@ -69,6 +69,16 @@ public class BaseDstu3Config extends BaseConfig {
return retVal;
}
@Bean
public TransactionProcessor.ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
return new TransactionProcessorVersionAdapterDstu3();
}
@Bean
public TransactionProcessor<Bundle, Bundle.BundleEntryComponent> transactionProcessor() {
return new TransactionProcessor<>();
}
@Bean(name = "myInstanceValidatorDstu3")
@Lazy
public IValidatorModule instanceValidatorDstu3() {
@ -78,13 +88,17 @@ public class BaseDstu3Config extends BaseConfig {
return val;
}
@Bean
public JpaValidationSupportChainDstu3 jpaValidationSupportChain() {
return new JpaValidationSupportChainDstu3();
}
@Bean(name = "myJpaValidationSupportDstu3", autowire = Autowire.BY_NAME)
public ca.uhn.fhir.jpa.dao.dstu3.IJpaValidationSupportDstu3 jpaValidationSupportDstu3() {
ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3 retVal = new ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3();
return retVal;
}
@Bean(name = "myResourceCountsCache")
public ResourceCountCache resourceCountsCache() {
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu3().getResourceCounts());
@ -142,7 +156,7 @@ public class BaseDstu3Config extends BaseConfig {
@Primary
@Bean(autowire = Autowire.BY_NAME, name = "myJpaValidationSupportChainDstu3")
public IValidationSupport validationSupportChainDstu3() {
return new JpaValidationSupportChainDstu3();
return new CachingValidationSupport(jpaValidationSupportChain());
}
}

View File

@ -3,12 +3,10 @@ package ca.uhn.fhir.jpa.config.r4;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.ParserOptions;
import ca.uhn.fhir.jpa.config.BaseConfig;
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.ISearchParamRegistry;
import ca.uhn.fhir.jpa.dao.*;
import ca.uhn.fhir.jpa.dao.r4.SearchParamExtractorR4;
import ca.uhn.fhir.jpa.dao.r4.SearchParamRegistryR4;
import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
import ca.uhn.fhir.jpa.graphql.JpaStorageServices;
import ca.uhn.fhir.jpa.provider.r4.TerminologyUploaderProviderR4;
import ca.uhn.fhir.jpa.term.HapiTerminologySvcR4;
@ -21,7 +19,9 @@ import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r4.hapi.rest.server.GraphQLProvider;
import org.hl7.fhir.r4.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.r4.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.utils.GraphQLEngine;
import org.hl7.fhir.r4.utils.IResourceValidator.BestPracticeWarningLevel;
import org.springframework.beans.factory.annotation.Autowire;
@ -72,6 +72,16 @@ public class BaseR4Config extends BaseConfig {
return retVal;
}
@Bean
public TransactionProcessor.ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
return new TransactionProcessorVersionAdapterR4();
}
@Bean
public TransactionProcessor<Bundle, Bundle.BundleEntryComponent> transactionProcessor() {
return new TransactionProcessor<>();
}
@Bean(name = "myGraphQLProvider")
@Lazy
public GraphQLProvider graphQLProvider() {
@ -93,6 +103,11 @@ public class BaseR4Config extends BaseConfig {
return val;
}
@Bean
public JpaValidationSupportChainR4 jpaValidationSupportChain() {
return new JpaValidationSupportChainR4();
}
@Bean(name = "myJpaValidationSupportR4", autowire = Autowire.BY_NAME)
public ca.uhn.fhir.jpa.dao.r4.IJpaValidationSupportR4 jpaValidationSupportR4() {
ca.uhn.fhir.jpa.dao.r4.JpaValidationSupportR4 retVal = new ca.uhn.fhir.jpa.dao.r4.JpaValidationSupportR4();
@ -102,7 +117,7 @@ public class BaseR4Config extends BaseConfig {
@Bean(name = "myResourceCountsCache")
public ResourceCountCache resourceCountsCache() {
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoR4().getResourceCounts());
retVal.setCacheMillis(60 * DateUtils.MILLIS_PER_SECOND);
retVal.setCacheMillis(10 * DateUtils.MILLIS_PER_MINUTE);
return retVal;
}
@ -156,7 +171,7 @@ public class BaseR4Config extends BaseConfig {
@Primary
@Bean(autowire = Autowire.BY_NAME, name = "myJpaValidationSupportChainR4")
public IValidationSupport validationSupportChainR4() {
return new JpaValidationSupportChainR4();
return new CachingValidationSupport(jpaValidationSupportChain());
}
}

View File

@ -1,25 +1,5 @@
package ca.uhn.fhir.jpa.dao;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.*;
import ca.uhn.fhir.jpa.dao.data.*;
import ca.uhn.fhir.jpa.entity.*;
@ -58,7 +38,6 @@ import ca.uhn.fhir.util.*;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
@ -95,7 +74,6 @@ import javax.persistence.criteria.Root;
import javax.xml.stream.events.Characters;
import javax.xml.stream.events.XMLEvent;
import java.io.CharArrayWriter;
import java.io.UnsupportedEncodingException;
import java.text.Normalizer;
import java.util.*;
import java.util.Map.Entry;
@ -104,6 +82,26 @@ import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.*;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
@SuppressWarnings("WeakerAccess")
@Repository
public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao, ApplicationContextAware {
@ -186,6 +184,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
protected IResourceTableDao myResourceTableDao;
@Autowired
protected IResourceTagDao myResourceTagDao;
@Autowired
protected IResourceSearchViewDao myResourceViewDao;
@Autowired(required = true)
private DaoConfig myConfig;
private FhirContext myContext;
@ -199,14 +199,14 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
private ISearchParamPresenceSvc mySearchParamPresenceSvc;
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
@Autowired
private ISearchResultDao mySearchResultDao;
//@Autowired
//private ISearchResultDao mySearchResultDao;
@Autowired
private IResourceIndexedCompositeStringUniqueDao myResourceIndexedCompositeStringUniqueDao;
private ApplicationContext myApplicationContext;
private Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> myResourceTypeToDao;
protected void clearRequestAsProcessingSubRequest(ServletRequestDetails theRequestDetails) {
public static void clearRequestAsProcessingSubRequest(ServletRequestDetails theRequestDetails) {
if (theRequestDetails != null) {
theRequestDetails.getUserData().remove(PROCESSING_SUB_REQUEST);
}
@ -227,6 +227,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
protected ExpungeOutcome doExpunge(String theResourceName, Long theResourceId, Long theVersion, ExpungeOptions theExpungeOptions) {
TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
if (!getConfig().isExpungeEnabled()) {
throw new MethodNotAllowedException("$expunge is not enabled on this server");
@ -245,32 +246,39 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
/*
* Delete historical versions of deleted resources
*/
Pageable page = new PageRequest(0, remainingCount.get());
Slice<Long> resourceIds;
if (theResourceId != null) {
resourceIds = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId, theResourceName);
} else {
if (theResourceName != null) {
resourceIds = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceName);
Pageable page = PageRequest.of(0, remainingCount.get());
Slice<Long> resourceIds = txTemplate.execute(t -> {
if (theResourceId != null) {
return myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId, theResourceName);
} else {
resourceIds = myResourceTableDao.findIdsOfDeletedResources(page);
if (theResourceName != null) {
return myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceName);
} else {
return myResourceTableDao.findIdsOfDeletedResources(page);
}
}
}
});
for (Long next : resourceIds) {
expungeHistoricalVersionsOfId(next, remainingCount);
if (remainingCount.get() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
txTemplate.execute(t -> {
expungeHistoricalVersionsOfId(next, remainingCount);
if (remainingCount.get() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
return null;
});
}
/*
* Delete current versions of deleted resources
*/
for (Long next : resourceIds) {
expungeCurrentVersionOfResource(next);
if (remainingCount.get() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
txTemplate.execute(t -> {
expungeCurrentVersionOfResource(next);
if (remainingCount.get() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
return null;
});
}
}
@ -280,22 +288,26 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
/*
* Delete historical versions of non-deleted resources
*/
Pageable page = new PageRequest(0, remainingCount.get());
Slice<Long> historicalIds;
if (theResourceId != null && theVersion != null) {
historicalIds = toSlice(myResourceHistoryTableDao.findForIdAndVersion(theResourceId, theVersion));
} else {
if (theResourceName != null) {
historicalIds = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page, theResourceName);
Pageable page = PageRequest.of(0, remainingCount.get());
Slice<Long> historicalIds = txTemplate.execute(t -> {
if (theResourceId != null && theVersion != null) {
return toSlice(myResourceHistoryTableDao.findForIdAndVersion(theResourceId, theVersion));
} else {
historicalIds = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page);
if (theResourceName != null) {
return myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page, theResourceName);
} else {
return myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page);
}
}
}
});
for (Long next : historicalIds) {
expungeHistoricalVersion(next);
if (remainingCount.decrementAndGet() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
txTemplate.execute(t -> {
expungeHistoricalVersion(next);
if (remainingCount.decrementAndGet() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
return null;
});
}
}
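The refactored expunge above replaces one long-running transaction with a TransactionTemplate that wraps each unit of work in its own short transaction. A minimal standalone sketch of that pattern follows; the class and helper names are hypothetical and only illustrate the Spring API being used.

import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

public class PerItemTransactionSketch {

	private final TransactionTemplate myTxTemplate;

	public PerItemTransactionSketch(PlatformTransactionManager theTxManager) {
		myTxTemplate = new TransactionTemplate(theTxManager);
	}

	// Each ID is processed in its own transaction, so a failure (or hitting the
	// expunge limit) only affects the current unit of work, not the whole pass.
	public void processAll(Iterable<Long> thePids) {
		for (Long nextPid : thePids) {
			myTxTemplate.execute(status -> {
				processOne(nextPid); // hypothetical per-resource work
				return null;
			});
		}
	}

	private void processOne(Long thePid) {
		// placeholder for the real per-resource expunge logic
	}
}

Keeping each deletion in its own transaction avoids holding a single large transaction open for the duration of a big expunge.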
@ -315,7 +327,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
});
txTemplate.execute(t -> {
doExpungeEverythingQuery("DELETE from " + SearchParamPresent.class.getSimpleName() + " d");
doExpungeEverythingQuery("DELETE from " + SearchParam.class.getSimpleName() + " d");
doExpungeEverythingQuery("DELETE from " + ForcedId.class.getSimpleName() + " d");
doExpungeEverythingQuery("DELETE from " + ResourceIndexedSearchParamDate.class.getSimpleName() + " d");
doExpungeEverythingQuery("DELETE from " + ResourceIndexedSearchParamNumber.class.getSimpleName() + " d");
@ -704,58 +715,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return retVal;
}
@SuppressWarnings("unchecked")
public <R extends IBaseResource> IFhirResourceDao<R> getDao(Class<R> theType) {
Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = getDaos();
IFhirResourceDao<R> dao = (IFhirResourceDao<R>) resourceTypeToDao.get(theType);
return dao;
}
protected IFhirResourceDao<?> getDaoOrThrowException(Class<? extends IBaseResource> theClass) {
IFhirResourceDao<? extends IBaseResource> retVal = getDao(theClass);
if (retVal == null) {
List<String> supportedResourceTypes = getDaos()
.keySet()
.stream()
.map(t->myContext.getResourceDefinition(t).getName())
.sorted()
.collect(Collectors.toList());
throw new InvalidRequestException("Unable to process request, this server does not know how to handle resources of type " + getContext().getResourceDefinition(theClass).getName() + " - Can handle: " + supportedResourceTypes);
}
return retVal;
}
private Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> getDaos() {
if (myResourceTypeToDao == null) {
Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = new HashMap<>();
Map<String, IFhirResourceDao> daos = myApplicationContext.getBeansOfType(IFhirResourceDao.class, false, false);
String[] beanNames = myApplicationContext.getBeanNamesForType(IFhirResourceDao.class);
for (IFhirResourceDao<?> next : daos.values()) {
resourceTypeToDao.put(next.getResourceType(), next);
}
if (this instanceof IFhirResourceDao<?>) {
IFhirResourceDao<?> thiz = (IFhirResourceDao<?>) this;
resourceTypeToDao.put(thiz.getResourceType(), thiz);
}
myResourceTypeToDao = resourceTypeToDao;
}
return Collections.unmodifiableMap(myResourceTypeToDao);
}
@PostConstruct
public void startClearCaches() {
myResourceTypeToDao = null;
}
protected Set<ResourceIndexedSearchParamCoords> extractSearchParamCoords(ResourceTable theEntity, IBaseResource theResource) {
return mySearchParamExtractor.extractSearchParamCoords(theEntity, theResource);
}
@ -910,7 +869,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
param = new ResourceIndexedSearchParamQuantity();
break;
case STRING:
param = new ResourceIndexedSearchParamString();
param = new ResourceIndexedSearchParamString()
.setDaoConfig(myConfig);
break;
case TOKEN:
param = new ResourceIndexedSearchParamToken();
@ -957,18 +917,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return myConfig;
}
@Override
public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException {
/*
* We do a null check here because Smile's module system tries to
* initialize the application context twice if two modules depend on
* the persistence module. The second time sets the dependency's appctx.
*/
if (myApplicationContext == null) {
myApplicationContext = theApplicationContext;
}
}
public void setConfig(DaoConfig theConfig) {
myConfig = theConfig;
}
@ -995,6 +943,50 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
}
@SuppressWarnings("unchecked")
public <R extends IBaseResource> IFhirResourceDao<R> getDao(Class<R> theType) {
Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = getDaos();
IFhirResourceDao<R> dao = (IFhirResourceDao<R>) resourceTypeToDao.get(theType);
return dao;
}
protected IFhirResourceDao<?> getDaoOrThrowException(Class<? extends IBaseResource> theClass) {
IFhirResourceDao<? extends IBaseResource> retVal = getDao(theClass);
if (retVal == null) {
List<String> supportedResourceTypes = getDaos()
.keySet()
.stream()
.map(t -> myContext.getResourceDefinition(t).getName())
.sorted()
.collect(Collectors.toList());
throw new InvalidRequestException("Unable to process request, this server does not know how to handle resources of type " + getContext().getResourceDefinition(theClass).getName() + " - Can handle: " + supportedResourceTypes);
}
return retVal;
}
private Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> getDaos() {
if (myResourceTypeToDao == null) {
Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = new HashMap<>();
Map<String, IFhirResourceDao> daos = myApplicationContext.getBeansOfType(IFhirResourceDao.class, false, false);
String[] beanNames = myApplicationContext.getBeanNamesForType(IFhirResourceDao.class);
for (IFhirResourceDao<?> next : daos.values()) {
resourceTypeToDao.put(next.getResourceType(), next);
}
if (this instanceof IFhirResourceDao<?>) {
IFhirResourceDao<?> thiz = (IFhirResourceDao<?>) this;
resourceTypeToDao.put(thiz.getResourceType(), thiz);
}
myResourceTypeToDao = resourceTypeToDao;
}
return Collections.unmodifiableMap(myResourceTypeToDao);
}
public IResourceIndexedCompositeStringUniqueDao getResourceIndexedCompositeStringUniqueDao() {
return myResourceIndexedCompositeStringUniqueDao;
}
@ -1164,7 +1156,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return false;
}
protected void markRequestAsProcessingSubRequest(ServletRequestDetails theRequestDetails) {
public static void markRequestAsProcessingSubRequest(ServletRequestDetails theRequestDetails) {
if (theRequestDetails != null) {
theRequestDetails.getUserData().put(PROCESSING_SUB_REQUEST, Boolean.TRUE);
}
@ -1172,13 +1164,13 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
@Override
public SearchBuilder newSearchBuilder() {
SearchBuilder builder = new SearchBuilder(getContext(), myEntityManager, myFulltextSearchSvc, this, myResourceIndexedSearchParamUriDao,
myForcedIdDao,
myTerminologySvc, mySerarchParamRegistry, myResourceHistoryTableDao, myResourceTagDao);
SearchBuilder builder = new SearchBuilder(
getContext(), myEntityManager, myFulltextSearchSvc, this, myResourceIndexedSearchParamUriDao,
myForcedIdDao, myTerminologySvc, mySerarchParamRegistry, myResourceTagDao, myResourceViewDao);
return builder;
}
protected void notifyInterceptors(RestOperationTypeEnum theOperationType, ActionRequestDetails theRequestDetails) {
public void notifyInterceptors(RestOperationTypeEnum theOperationType, ActionRequestDetails theRequestDetails) {
if (theRequestDetails.getId() != null && theRequestDetails.getId().hasResourceType() && isNotBlank(theRequestDetails.getResourceType())) {
if (theRequestDetails.getId().getResourceType().equals(theRequestDetails.getResourceType()) == false) {
throw new InternalErrorException(
@ -1223,7 +1215,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
}
private void populateResourceIdFromEntity(BaseHasResource theEntity, final IBaseResource theResource) {
private void populateResourceIdFromEntity(IBaseResourceEntity theEntity, final IBaseResource theResource) {
IIdType id = theEntity.getIdDt();
if (getContext().getVersion().getVersion().isRi()) {
id = getContext().getVersion().newIdType().setValue(id.getValue());
@ -1257,25 +1249,10 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
if (theEntity.getDeleted() == null) {
encoding = myConfig.getResourceEncoding();
IParser parser = encoding.newParser(myContext);
parser.setDontEncodeElements(EXCLUDE_ELEMENTS_IN_ENCODED);
String encoded = parser.encodeResourceToString(theResource);
Set<String> excludeElements = EXCLUDE_ELEMENTS_IN_ENCODED;
theEntity.setFhirVersion(myContext.getVersion().getVersion());
switch (encoding) {
case JSON:
bytes = encoded.getBytes(Charsets.UTF_8);
break;
case JSONC:
bytes = GZipUtil.compress(encoded);
break;
default:
case DEL:
bytes = new byte[0];
break;
}
ourLog.debug("Encoded {} chars of resource body as {} bytes", encoded.length(), bytes.length);
bytes = encodeResource(theResource, encoding, excludeElements, myContext);
if (theUpdateHash) {
HashFunction sha256 = Hashing.sha256();
@ -1308,20 +1285,24 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
}
// Don't keep duplicate tags
Set<ResourceTag> allTagsNew = getAllTagDefinitions(theEntity);
Set<TagDefinition> allDefsPresent = new HashSet<>();
theEntity.getTags().removeIf(theResourceTag -> !allDefsPresent.add(theResourceTag.getTag()));
allTagsNew.forEach(tag -> {
// Remove any tags that have been removed
for (ResourceTag next : allTagsOld) {
if (!allDefs.contains(next)) {
if (shouldDroppedTagBeRemovedOnUpdate(theRequest, next)) {
theEntity.getTags().remove(next);
// Don't keep duplicate tags
if (!allDefsPresent.add(tag.getTag())) {
theEntity.getTags().remove(tag);
}
// Drop any tags that have been removed
if (!allDefs.contains(tag)) {
if (shouldDroppedTagBeRemovedOnUpdate(theRequest, tag)) {
theEntity.getTags().remove(tag);
}
}
}
Set<ResourceTag> allTagsNew = getAllTagDefinitions(theEntity);
});
if (!allTagsOld.equals(allTagsNew)) {
changed = true;
}
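The duplicate-tag handling above relies on Set.add returning false for a value it has already seen. A tiny illustrative sketch of that removeIf idiom, using plain strings in place of ResourceTag entries:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class DeduplicateSketch {
	public static void main(String[] args) {
		List<String> tags = new ArrayList<>(Arrays.asList("security", "profile", "security", "tag"));
		Set<String> seen = new HashSet<>();
		// Set.add returns false for a value that is already present, so removeIf
		// drops every duplicate while keeping the first occurrence.
		tags.removeIf(tag -> !seen.add(tag));
		System.out.println(tags); // prints [security, profile, tag]
	}
}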
@ -1355,7 +1336,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
@SuppressWarnings("unchecked")
private <R extends IBaseResource> R populateResourceMetadataHapi(Class<R> theResourceType, BaseHasResource theEntity, Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IResource res) {
private <R extends IBaseResource> R populateResourceMetadataHapi(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IResource res) {
R retVal = (R) res;
if (theEntity.getDeleted() != null) {
res = (IResource) myContext.getResourceDefinition(theResourceType).newInstance();
@ -1421,7 +1402,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
@SuppressWarnings("unchecked")
private <R extends IBaseResource> R populateResourceMetadataRi(Class<R> theResourceType, BaseHasResource theEntity, Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IAnyResource res) {
private <R extends IBaseResource> R populateResourceMetadataRi(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IAnyResource res) {
R retVal = (R) res;
if (theEntity.getDeleted() != null) {
res = (IAnyResource) myContext.getResourceDefinition(theResourceType).newInstance();
@ -1480,6 +1461,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return retVal;
}
/**
* Subclasses may override to provide behaviour. Called when a pre-existing resource has been deleted from the database
*
* @param theEntity The resource
*/
protected void postDelete(ResourceTable theEntity) {
// nothing
}
/**
* Subclasses may override to provide behaviour. Called when a resource has been inserted into the database for the first time.
*
@ -1536,6 +1526,18 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return retVal;
}
@Override
public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException {
/*
* We do a null check here because Smile's module system tries to
* initialize the application context twice if two modules depend on
* the persistence module. The second time sets the dependency's appctx.
*/
if (myApplicationContext == null) {
myApplicationContext = theApplicationContext;
}
}
private void setUpdatedTime(Collection<? extends BaseResourceIndexedSearchParam> theParams, Date theUpdateTime) {
for (BaseResourceIndexedSearchParam nextSearchParam : theParams) {
nextSearchParam.setUpdated(theUpdateTime);
@ -1592,6 +1594,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return false;
}
@PostConstruct
public void startClearCaches() {
myResourceTypeToDao = null;
}
private ExpungeOutcome toExpungeOutcome(ExpungeOptions theExpungeOptions, AtomicInteger theRemainingCount) {
return new ExpungeOutcome()
.setDeletedCount(theExpungeOptions.getLimit() - theRemainingCount.get());
@ -1601,62 +1608,50 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
public IBaseResource toResource(BaseHasResource theEntity, boolean theForHistoryOperation) {
RuntimeResourceDefinition type = myContext.getResourceDefinition(theEntity.getResourceType());
Class<? extends IBaseResource> resourceType = type.getImplementingClass();
return toResource(resourceType, theEntity, null, null, theForHistoryOperation);
return toResource(resourceType, theEntity, null, theForHistoryOperation);
}
@SuppressWarnings("unchecked")
@Override
public <R extends IBaseResource> R toResource(Class<R> theResourceType, BaseHasResource theEntity, ResourceHistoryTable theHistory, Collection<ResourceTag> theTagList,
boolean theForHistoryOperation) {
public <R extends IBaseResource> R toResource(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<ResourceTag> theTagList, boolean theForHistoryOperation) {
// 1. get the resource, its encoding, and the tags if any
byte[] resourceBytes = null;
ResourceEncodingEnum resourceEncoding = null;
Collection<? extends BaseTag> myTagList = null;
// May 28, 2018 - #936
// Could set historyList to null, if it's not called in the loop for the backward compatibility
ResourceHistoryTable history;
if (theEntity instanceof ResourceHistoryTable) {
history = (ResourceHistoryTable) theEntity;
} else {
if (theHistory == null) {
history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
} else {
history = theHistory;
ResourceHistoryTable history = (ResourceHistoryTable) theEntity;
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
myTagList = history.getTags();
} else if (theEntity instanceof ResourceTable) {
ResourceTable resource = (ResourceTable) theEntity;
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
if (history == null) {
return null;
}
}
if (history == null) {
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
myTagList = resource.getTags();
} else if (theEntity instanceof ResourceSearchView) {
// This is the search View
ResourceSearchView myView = (ResourceSearchView) theEntity;
resourceBytes = myView.getResource();
resourceEncoding = myView.getEncoding();
if (theTagList == null)
myTagList = new HashSet<>();
else
myTagList = theTagList;
} else {
// something wrong
return null;
}
byte[] resourceBytes = history.getResource();
ResourceEncodingEnum resourceEncoding = history.getEncoding();
// 2. get the text
String resourceText = decodeResource(resourceBytes, resourceEncoding);
String resourceText = null;
switch (resourceEncoding) {
case JSON:
try {
resourceText = new String(resourceBytes, "UTF-8");
} catch (UnsupportedEncodingException e) {
throw new Error("Should not happen", e);
}
break;
case JSONC:
resourceText = GZipUtil.decompress(resourceBytes);
break;
case DEL:
break;
}
// get preload the tagList
Collection<? extends BaseTag> myTagList;
if (theTagList == null)
myTagList = theEntity.getTags();
else
myTagList = theTagList;
/*
* Use the appropriate custom type if one is specified in the context
*/
// 3. Use the appropriate custom type if one is specified in the context
Class<R> resourceType = theResourceType;
if (myContext.hasDefaultTypeForProfile()) {
for (BaseTag nextTag : myTagList) {
@ -1674,6 +1669,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
}
// 4. parse the text to FHIR
R retVal;
if (resourceEncoding != ResourceEncodingEnum.DEL) {
IParser parser = resourceEncoding.newParser(getContext(theEntity.getFhirVersion()));
@ -1704,6 +1700,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
// 5. fill MetaData
if (retVal instanceof IResource) {
IResource res = (IResource) retVal;
retVal = populateResourceMetadataHapi(resourceType, theEntity, myTagList, theForHistoryOperation, res);
@ -1712,7 +1709,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
retVal = populateResourceMetadataRi(resourceType, theEntity, myTagList, theForHistoryOperation, res);
}
return retVal;
}
@ -1745,13 +1741,17 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return theResourceType + '/' + theId.toString();
}
}
@SuppressWarnings("unchecked")
protected ResourceTable updateEntity(RequestDetails theRequest, final IBaseResource theResource, ResourceTable
theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing,
boolean theUpdateVersion, Date theUpdateTime, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
Validate.notNull(theEntity);
Validate.isTrue(theDeletedTimestampOrNull != null || theResource != null, "Must have either a resource[{}] or a deleted timestamp[{}] for resource PID[{}]", theDeletedTimestampOrNull != null, theResource != null, theEntity.getId());
ourLog.debug("Starting entity update");
/*
* This should be the very first thing..
*/
@ -1841,6 +1841,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
theEntity.setNarrativeTextParsedIntoWords(null);
theEntity.setContentTextParsedIntoWords(null);
theEntity.setHashSha256(null);
theEntity.setIndexStatus(INDEX_STATUS_INDEXED);
changed = populateResourceIntoEntity(theRequest, theResource, theEntity, true);
} else {
@ -2018,6 +2019,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
postPersist(theEntity, (T) theResource);
} else if (theEntity.getDeleted() != null) {
theEntity = myEntityManager.merge(theEntity);
postDelete(theEntity);
} else {
theEntity = myEntityManager.merge(theEntity);
@ -2029,10 +2035,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
*/
if (theCreateNewHistoryEntry) {
final ResourceHistoryTable historyEntry = theEntity.toHistory();
// if (theEntity.getVersion() > 1) {
// existing = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
// ourLog.warn("Reusing existing history entry entity {}", theEntity.getIdDt().getValue());
// }
historyEntry.setEncoding(changed.getEncoding());
historyEntry.setResource(changed.getResource());
@ -2072,6 +2074,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
if (thePerformIndexing) {
for (ResourceIndexedSearchParamString next : removeCommon(existingStringParams, stringParams)) {
next.setDaoConfig(myConfig);
myEntityManager.remove(next);
theEntity.getParamsString().remove(next);
}
@ -2163,12 +2166,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
} // if thePerformIndexing
theEntity = myEntityManager.merge(theEntity);
if (theResource != null) {
populateResourceIdFromEntity(theEntity, theResource);
}
return theEntity;
}
@ -2286,7 +2288,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
}
protected void validateDeleteConflictsEmptyOrThrowException(List<DeleteConflict> theDeleteConflicts) {
public void validateDeleteConflictsEmptyOrThrowException(List<DeleteConflict> theDeleteConflicts) {
if (theDeleteConflicts.isEmpty()) {
return;
}
@ -2361,6 +2363,45 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
public static String decodeResource(byte[] theResourceBytes, ResourceEncodingEnum theResourceEncoding) {
String resourceText = null;
switch (theResourceEncoding) {
case JSON:
resourceText = new String(theResourceBytes, Charsets.UTF_8);
break;
case JSONC:
resourceText = GZipUtil.decompress(theResourceBytes);
break;
case DEL:
break;
}
return resourceText;
}
public static byte[] encodeResource(IBaseResource theResource, ResourceEncodingEnum theEncoding, Set<String> theExcludeElements, FhirContext theContext) {
byte[] bytes;
IParser parser = theEncoding.newParser(theContext);
parser.setDontEncodeElements(theExcludeElements);
String encoded = parser.encodeResourceToString(theResource);
switch (theEncoding) {
case JSON:
bytes = encoded.getBytes(Charsets.UTF_8);
break;
case JSONC:
bytes = GZipUtil.compress(encoded);
break;
default:
case DEL:
bytes = new byte[0];
break;
}
ourLog.debug("Encoded {} chars of resource body as {} bytes", encoded.length(), bytes.length);
return bytes;
}
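The new static encodeResource/decodeResource helpers centralize the body encoding that updateEntity and toResource previously inlined. A rough round-trip sketch follows; it assumes the ca.uhn.fhir.jpa.dao classes (BaseHapiFhirDao, ResourceEncodingEnum) are importable and passes an empty exclude-element set purely for illustration.

import ca.uhn.fhir.context.FhirContext;
import java.util.Collections;
import org.hl7.fhir.r4.model.Patient;

public class ResourceCodecSketch {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		Patient patient = new Patient();
		patient.addName().setFamily("Example");

		// JSONC stores the resource body as GZIP-compressed JSON; nothing is excluded here
		byte[] bytes = BaseHapiFhirDao.encodeResource(patient, ResourceEncodingEnum.JSONC,
			Collections.<String>emptySet(), ctx);

		// Round-trip back to the JSON text that would have been persisted
		String json = BaseHapiFhirDao.decodeResource(bytes, ResourceEncodingEnum.JSONC);
		System.out.println(json);
	}
}

Note that for the DEL encoding encodeResource returns an empty byte array and decodeResource returns null, which is how deleted versions are represented.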
/**
* This method is used to create a set of all possible combinations of
* parameters across a set of search parameters. An example of why

View File

@ -50,6 +50,7 @@ import ca.uhn.fhir.rest.server.method.SearchMethodBinding;
import ca.uhn.fhir.util.*;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.r4.model.InstantType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.lang.NonNull;
@ -207,7 +208,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
StopWatch w = new StopWatch();
T resourceToDelete = toResource(myResourceType, entity, null, null, false);
T resourceToDelete = toResource(myResourceType, entity, null, false);
// Notify IServerOperationInterceptors about pre-action call
if (theReques != null) {
@ -289,7 +290,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
ResourceTable entity = myEntityManager.find(ResourceTable.class, pid);
deletedResources.add(entity);
T resourceToDelete = toResource(myResourceType, entity, null, null, false);
T resourceToDelete = toResource(myResourceType, entity, null, false);
// Notify IServerOperationInterceptors about pre-action call
if (theRequest != null) {
@ -394,16 +395,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
"This server cannot create an entity with a user-specified numeric ID - Client should not specify an ID when creating a new resource, or should include at least one letter in the ID to force a client-defined ID");
}
createForcedIdIfNeeded(entity, theResource.getIdElement());
if (entity.getForcedId() != null) {
try {
translateForcedIdToPid(getResourceName(), theResource.getIdElement().getIdPart());
throw new UnprocessableEntityException(getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "duplicateCreateForcedId", theResource.getIdElement().getIdPart()));
} catch (ResourceNotFoundException e) {
// good, this ID doesn't exist so we can create it
}
}
}
// Notify interceptors
@ -517,6 +508,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
@Override
@Transactional(propagation = Propagation.NEVER)
public ExpungeOutcome expunge(IIdType theId, ExpungeOptions theExpungeOptions) {
BaseHasResource entity = readEntity(theId);
if (theId.hasVersionIdPart()) {
@ -532,6 +524,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
@Override
@Transactional(propagation = Propagation.NEVER)
public ExpungeOutcome expunge(ExpungeOptions theExpungeOptions) {
ourLog.info("Beginning TYPE[{}] expunge operation", getResourceName());
@ -854,16 +847,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
BaseHasResource entity = readEntity(theId);
validateResourceType(entity);
T retVal = toResource(myResourceType, entity, null, null, false);
T retVal = toResource(myResourceType, entity, null, false);
IPrimitiveType<Date> deleted;
if (retVal instanceof IResource) {
deleted = ResourceMetadataKeyEnum.DELETED_AT.get((IResource) retVal);
} else {
deleted = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) retVal);
}
if (deleted != null && !deleted.isEmpty()) {
throw new ResourceGoneException("Resource was deleted at " + deleted.getValueAsString());
if (entity.getDeleted() != null) {
throw new ResourceGoneException("Resource was deleted at " + new InstantType(entity.getDeleted()).getValueAsString());
}
ourLog.debug("Processed read on {} in {}ms", theId.getValue(), w.getMillisAndRestart());
@ -930,10 +917,14 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Override
public void reindex(T theResource, ResourceTable theEntity) {
ourLog.debug("Indexing resource {} - PID {}", theResource.getIdElement().getValue(), theEntity.getId());
CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE);
updateEntity(null, theResource, theEntity, null, true, false, theEntity.getUpdatedDate(), true, false);
CURRENTLY_REINDEXING.put(theResource, null);
ourLog.debug("Indexing resource {} - PID {}", theEntity.getIdDt().getValue(), theEntity.getId());
if (theResource != null) {
CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE);
}
updateEntity(null, theResource, theEntity, theEntity.getDeleted(), true, false, theEntity.getUpdatedDate(), true, false);
if (theResource != null) {
CURRENTLY_REINDEXING.put(theResource, null);
}
}
@Override
@ -1065,6 +1056,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
mySecondaryPrimaryKeyParamName = theSecondaryPrimaryKeyParamName;
}
@PostConstruct
public void start() {
ourLog.debug("Starting resource DAO for type: {}", getResourceName());
}
protected <MT extends IBaseMetaType> MT toMetaDt(Class<MT> theType, Collection<TagDefinition> tagDefinitions) {
MT retVal;
try {
@ -1205,7 +1201,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
} else {
/*
* Note: resourcdeId will not be null or empty here, because we check it and reject requests in BaseOutcomeReturningMethodBindingWithResourceParam
* Note: resourceId will not be null or empty here, because we
* check it and reject requests in
* BaseOutcomeReturningMethodBindingWithResourceParam
*/
resourceId = theResource.getIdElement();
@ -1336,9 +1334,4 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
}
@PostConstruct
public void start() {
ourLog.debug("Starting resource DAO for type: {}", getResourceName());
}
}

View File

@ -256,6 +256,9 @@ public abstract class BaseHapiFhirSystemDao<T, MT> extends BaseHapiFhirDao<IBase
@Override
@Transactional(propagation = Propagation.NEVER)
public Integer performReindexingPass(final Integer theCount) {
if (getConfig().isStatusBasedReindexingDisabled()) {
return -1;
}
if (!myReindexLock.tryLock()) {
return -1;
}
@ -305,7 +308,8 @@ public abstract class BaseHapiFhirSystemDao<T, MT> extends BaseHapiFhirDao<IBase
final IBaseResource resource = toResource(resourceTable, false);
@SuppressWarnings("rawtypes") final IFhirResourceDao dao = getDaoOrThrowException(resource.getClass());
Class<? extends IBaseResource> resourceClass = getContext().getResourceDefinition(resourceTable.getResourceType()).getImplementingClass();
@SuppressWarnings("rawtypes") final IFhirResourceDao dao = getDaoOrThrowException(resourceClass);
dao.reindex(resource, resourceTable);
return null;

View File

@ -20,45 +20,45 @@ package ca.uhn.fhir.jpa.dao;
* #L%
*/
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.FhirTerser;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.ObjectUtils;
import org.hl7.fhir.instance.model.api.IBaseDatatype;
import org.hl7.fhir.instance.model.api.IBaseExtension;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;
import com.google.common.annotations.VisibleForTesting;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.util.FhirTerser;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
public abstract class BaseSearchParamExtractor implements ISearchParamExtractor {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseSearchParamExtractor.class);
public static final Pattern SPLIT = Pattern.compile("\\||( or )");
public static final Pattern SPLIT = Pattern.compile("\\||( or )");
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseSearchParamExtractor.class);
@Autowired
private FhirContext myContext;
@Autowired
private DaoConfig myDaoConfig;
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
public BaseSearchParamExtractor() {
super();
}
public BaseSearchParamExtractor(FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) {
public BaseSearchParamExtractor(DaoConfig theDaoConfig, FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) {
myContext = theCtx;
mySearchParamRegistry = theSearchParamRegistry;
myDaoConfig = theDaoConfig;
}
@Override
public List<PathAndRef> extractResourceLinks(IBaseResource theResource, RuntimeSearchParam theNextSpDef) {
List<PathAndRef> refs = new ArrayList<PathAndRef>();
@ -75,40 +75,20 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
return refs;
}
protected List<Object> extractValues(String thePaths, IBaseResource theResource) {
List<Object> values = new ArrayList<Object>();
String[] nextPathsSplit = SPLIT.split(thePaths);
FhirTerser t = myContext.newTerser();
for (String nextPath : nextPathsSplit) {
String nextPathTrimmed = nextPath.trim();
try {
List<Object> allValues = t.getValues(theResource, nextPathTrimmed);
for (Object next : allValues) {
if (next instanceof IBaseExtension) {
IBaseDatatype value = ((IBaseExtension) next).getValue();
if (value != null) {
values.add(value);
}
} else {
values.add(next);
}
}
} catch (Exception e) {
RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
ourLog.warn("Failed to index values from path[{}] in resource type[{}]: {}", new Object[] { nextPathTrimmed, def.getName(), e.toString(), e } );
}
}
return values;
}
protected abstract List<Object> extractValues(String thePaths, IBaseResource theResource);
protected FhirContext getContext() {
return myContext;
}
public DaoConfig getDaoConfig() {
return myDaoConfig;
}
public Collection<RuntimeSearchParam> getSearchParams(IBaseResource theResource) {
RuntimeResourceDefinition def = getContext().getResourceDefinition(theResource);
Collection<RuntimeSearchParam> retVal = mySearchParamRegistry.getActiveSearchParams(def.getName()).values();
List<RuntimeSearchParam> defaultList= Collections.emptyList();
List<RuntimeSearchParam> defaultList = Collections.emptyList();
retVal = ObjectUtils.defaultIfNull(retVal, defaultList);
return retVal;
}
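The public SPLIT pattern above is what breaks a composite SearchParameter path on "|" and " or " before each piece is trimmed. A small illustrative sketch, assuming BaseSearchParamExtractor is importable:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class SearchPathSplitSketch {
	public static void main(String[] args) {
		String paths = "Patient.name.family | Patient.name.given or Patient.address.city";
		// Split on "|" or " or ", then trim, mirroring what the extractor does
		List<String> pieces = Arrays.stream(BaseSearchParamExtractor.SPLIT.split(paths))
			.map(String::trim)
			.collect(Collectors.toList());
		System.out.println(pieces); // prints [Patient.name.family, Patient.name.given, Patient.address.city]
	}
}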

View File

@ -8,6 +8,8 @@ import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.r4.model.Bundle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
@ -20,9 +22,9 @@ import java.util.*;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -58,6 +60,10 @@ public class DaoConfig {
* @see #setTranslationCachesExpireAfterWriteInMinutes(Long)
*/
public static final Long DEFAULT_TRANSLATION_CACHES_EXPIRE_AFTER_WRITE_IN_MINUTES = 60L;
/**
* See {@link #setStatusBasedReindexingDisabled(boolean)}
*/
public static final String DISABLE_STATUS_BASED_REINDEX = "disable_status_based_reindex";
/**
* Default value for {@link #setMaximumSearchResultCountInTransaction(Integer)}
*
@ -77,6 +83,7 @@ public class DaoConfig {
Bundle.BundleType.DOCUMENT.toCode(),
Bundle.BundleType.MESSAGE.toCode()
)));
private static final Logger ourLog = LoggerFactory.getLogger(DaoConfig.class);
private IndexEnabledEnum myIndexMissingFieldsEnabled = IndexEnabledEnum.DISABLED;
/**
* update setter javadoc if default changes
@ -89,8 +96,7 @@ public class DaoConfig {
/**
* update setter javadoc if default changes
*/
private boolean myAllowContainsSearches = true;
private boolean myAllowContainsSearches = false;
/**
* update setter javadoc if default changes
*/
@ -139,11 +145,13 @@ public class DaoConfig {
private boolean myAutoCreatePlaceholderReferenceTargets;
private Integer myCacheControlNoStoreMaxResultsUpperLimit = 1000;
private Integer myCountSearchResultsUpTo = null;
private boolean myStatusBasedReindexingDisabled;
private IdStrategyEnum myResourceServerIdStrategy = IdStrategyEnum.SEQUENTIAL_NUMERIC;
private boolean myMarkResourcesForReindexingUponSearchParameterChange;
private boolean myExpungeEnabled;
private int myReindexThreadCount;
private Set<String> myBundleTypesAllowedForStorage;
private boolean myValidateSearchParameterExpressionsOnSave = true;
/**
* Constructor
@ -155,6 +163,38 @@ public class DaoConfig {
setMarkResourcesForReindexingUponSearchParameterChange(true);
setReindexThreadCount(Runtime.getRuntime().availableProcessors());
setBundleTypesAllowedForStorage(DEFAULT_BUNDLE_TYPES_ALLOWED_FOR_STORAGE);
if ("true".equalsIgnoreCase(System.getProperty(DISABLE_STATUS_BASED_REINDEX))) {
ourLog.info("Status based reindexing is DISABLED");
setStatusBasedReindexingDisabled(true);
}
}
/**
* If set to <code>true</code> (default is false), the reindexing of search parameters
* using a query on the HFJ_RESOURCE.SP_INDEX_STATUS column will be disabled completely.
* This query is just not efficient on Oracle and bogs the system down when there are
* a lot of resources. A more efficient way of doing this will be introduced
* in the next release of HAPI FHIR.
*
* @since 3.5.0
*/
public boolean isStatusBasedReindexingDisabled() {
return myStatusBasedReindexingDisabled;
}
/**
* If set to <code>true</code> (default is false), the reindexing of search parameters
* using a query on the HFJ_RESOURCE.SP_INDEX_STATUS column will be disabled completely.
* This query is just not efficient on Oracle and bogs the system down when there are
* a lot of resources. A more efficient way of doing this will be introduced
* in the next release of HAPI FHIR.
*
* @since 3.5.0
*/
public void setStatusBasedReindexingDisabled(boolean theStatusBasedReindexingDisabled) {
myStatusBasedReindexingDisabled = theStatusBasedReindexingDisabled;
}
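A minimal sketch of the two ways this flag can be set: through the DISABLE_STATUS_BASED_REINDEX system property that the constructor reads, or through the setter. The class name here is illustrative.

import ca.uhn.fhir.jpa.dao.DaoConfig;

public class ReindexFlagSketch {
	public static void main(String[] args) {
		// Option 1: set the system property before the DaoConfig is constructed
		System.setProperty(DaoConfig.DISABLE_STATUS_BASED_REINDEX, "true");
		DaoConfig config = new DaoConfig();

		// Option 2: flip the flag programmatically on an existing config
		config.setStatusBasedReindexingDisabled(true);
	}
}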
/**
@ -460,6 +500,16 @@ public class DaoConfig {
myInterceptors = theInterceptors;
}
/**
* This may be used to optionally register server interceptors directly against the DAOs.
*/
public void setInterceptors(IServerInterceptor... theInterceptor) {
setInterceptors(new ArrayList<IServerInterceptor>());
if (theInterceptor != null && theInterceptor.length != 0) {
getInterceptors().addAll(Arrays.asList(theInterceptor));
}
}
/**
* See {@link #setMaximumExpansionSize(int)}
*/
@ -705,9 +755,9 @@ public class DaoConfig {
* references instead of being treated as real references.
* <p>
* A logical reference is a reference which is treated as an identifier, and
* does not neccesarily resolve. See {@link "http://hl7.org/fhir/references.html"} for
* does not necessarily resolve. See <a href="http://hl7.org/fhir/references.html">references</a> for
* a description of logical references. For example, the valueset
* {@link "http://hl7.org/fhir/valueset-quantity-comparator.html"} is a logical
* <a href="http://hl7.org/fhir/valueset-quantity-comparator.html">valueset-quantity-comparator</a> is a logical
* reference.
* </p>
* <p>
@ -730,9 +780,9 @@ public class DaoConfig {
* references instead of being treated as real references.
* <p>
* A logical reference is a reference which is treated as an identifier, and
* does not neccesarily resolve. See {@link "http://hl7.org/fhir/references.html"} for
* does not necessarily resolve. See <a href="http://hl7.org/fhir/references.html">references</a> for
* a description of logical references. For example, the valueset
* {@link "http://hl7.org/fhir/valueset-quantity-comparator.html"} is a logical
* <a href="http://hl7.org/fhir/valueset-quantity-comparator.html">valueset-quantity-comparator</a> is a logical
* reference.
* </p>
* <p>
@ -754,7 +804,15 @@ public class DaoConfig {
* If enabled, the server will support the use of :contains searches,
* which are helpful but can have adverse effects on performance.
* <p>
* Default is <code>true</code>
* Default is <code>false</code> (Note that prior to HAPI FHIR
* 3.5.0 the default was <code>true</code>)
* </p>
* <p>
* Note: If you change this value after data has already
* been stored in the database, you must force a reindexing
* of all data in the database or resources may not be
* searchable.
* </p>
*/
public boolean isAllowContainsSearches() {
return myAllowContainsSearches;
@ -764,7 +822,15 @@ public class DaoConfig {
* If enabled, the server will support the use of :contains searches,
* which are helpful but can have adverse effects on performance.
* <p>
* Default is <code>true</code>
* Default is <code>false</code> (Note that prior to HAPI FHIR
* 3.5.0 the default was <code>true</code>)
* </p>
* <p>
* Note: If you change this value after data has already
* been stored in the database, you must force a reindexing
* of all data in the database or resources may not be
* searchable.
* </p>
*/
public void setAllowContainsSearches(boolean theAllowContainsSearches) {
this.myAllowContainsSearches = theAllowContainsSearches;
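Because the :contains default flips to false in 3.5.0, a server that relies on those searches now has to opt back in explicitly. A minimal sketch (illustrative class name):

import ca.uhn.fhir.jpa.dao.DaoConfig;

public class ContainsSearchSketch {
	public static void main(String[] args) {
		DaoConfig config = new DaoConfig();
		// Re-enable :contains searches; data stored while the flag was off
		// must be reindexed afterwards or it may not be searchable.
		config.setAllowContainsSearches(true);
	}
}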
@ -1171,6 +1237,34 @@ public class DaoConfig {
myUniqueIndexesEnabled = theUniqueIndexesEnabled;
}
/**
* If <code>true</code> (default is <code>true</code>), before allowing a
* SearchParameter resource to be stored (create, update, etc.) the
* expression will be evaluated against an empty resource to ensure that
* the FHIRPath executor is able to process it.
* <p>
* This should probably always be set to true, but is configurable
* in order to support some unit tests.
* </p>
*/
public boolean isValidateSearchParameterExpressionsOnSave() {
return myValidateSearchParameterExpressionsOnSave;
}
/**
* If <code>true</code> (default is <code>true</code>), before allowing a
* SearchParameter resource to be stored (create, update, etc.) the
* expression will be evaluated against an empty resource to ensure that
* the FHIRPath executor is able to process it.
* <p>
* This should probably always be set to true, but is configurable
* in order to support some unit tests.
* </p>
*/
public void setValidateSearchParameterExpressionsOnSave(boolean theValidateSearchParameterExpressionsOnSave) {
myValidateSearchParameterExpressionsOnSave = theValidateSearchParameterExpressionsOnSave;
}
/**
* Do not call this method, it exists only for legacy reasons. It
* will be removed in a future version. Configure the page size on your
@ -1196,16 +1290,6 @@ public class DaoConfig {
// nothing
}
/**
* This may be used to optionally register server interceptors directly against the DAOs.
*/
public void setInterceptors(IServerInterceptor... theInterceptor) {
setInterceptors(new ArrayList<IServerInterceptor>());
if (theInterceptor != null && theInterceptor.length != 0) {
getInterceptors().addAll(Arrays.asList(theInterceptor));
}
}
/**
* @deprecated As of HAPI FHIR 3.0.0, subscriptions no longer use polling for
* detecting changes, so this setting has no effect
@ -1237,18 +1321,6 @@ public class DaoConfig {
setSubscriptionPurgeInactiveAfterMillis(theSeconds * DateUtils.MILLIS_PER_SECOND);
}
private static void validateTreatBaseUrlsAsLocal(String theUrl) {
Validate.notBlank(theUrl, "Base URL must not be null or empty");
int starIdx = theUrl.indexOf('*');
if (starIdx != -1) {
if (starIdx != theUrl.length() - 1) {
throw new IllegalArgumentException("Base URL wildcard character (*) can only appear at the end of the string: " + theUrl);
}
}
}
public enum IndexEnabledEnum {
ENABLED,
DISABLED
@ -1266,4 +1338,16 @@ public class DaoConfig {
UUID
}
private static void validateTreatBaseUrlsAsLocal(String theUrl) {
Validate.notBlank(theUrl, "Base URL must not be null or empty");
int starIdx = theUrl.indexOf('*');
if (starIdx != -1) {
if (starIdx != theUrl.length() - 1) {
throw new IllegalArgumentException("Base URL wildcard character (*) can only appear at the end of the string: " + theUrl);
}
}
}
}

View File

@ -0,0 +1,41 @@
package ca.uhn.fhir.jpa.dao;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.model.dstu2.resource.Composition;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.server.exceptions.NotImplementedOperationException;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.beans.factory.annotation.Autowired;
import javax.servlet.http.HttpServletRequest;
public class FhirResourceDaoCompositionDstu2 extends FhirResourceDaoDstu2<Composition> implements IFhirResourceDaoComposition<Composition> {
@Override
public IBundleProvider getDocumentForComposition(HttpServletRequest theServletRequest, IIdType theId, IPrimitiveType<Integer> theCount, DateRangeParam theLastUpdate, SortSpec theSort, RequestDetails theRequestDetails) {
throw new NotImplementedOperationException("$document not implemented in DSTU2");
}
}

View File

@ -0,0 +1,38 @@
package ca.uhn.fhir.jpa.dao;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.model.dstu2.resource.MessageHeader;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.NotImplementedOperationException;
import org.hl7.fhir.instance.model.api.IBaseBundle;
public class FhirResourceDaoMessageHeaderDstu2 extends FhirResourceDaoDstu2<MessageHeader> implements IFhirResourceDaoMessageHeader<MessageHeader> {
@Override
public IBaseBundle messageHeaderProcessMessage(RequestDetails theRequestDetails, IBaseBundle theMessage) {
return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented();
}
public static IBaseBundle throwProcessMessageNotImplemented() {
throw new NotImplementedOperationException("This operation is not yet implemented on this server");
}
}

View File

@ -29,14 +29,8 @@ import ca.uhn.fhir.model.dstu2.resource.SearchParameter;
import ca.uhn.fhir.model.dstu2.valueset.ResourceTypeEnum;
import ca.uhn.fhir.model.dstu2.valueset.SearchParamTypeEnum;
import ca.uhn.fhir.model.primitive.BoundCodeDt;
import ca.uhn.fhir.model.primitive.CodeDt;
import org.apache.commons.lang3.time.DateUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@ -83,7 +77,7 @@ public class FhirResourceDaoSearchParameterDstu2 extends FhirResourceDaoDstu2<Se
FhirContext context = getContext();
SearchParamTypeEnum type = theResource.getTypeElement().getValueAsEnum();
FhirResourceDaoSearchParameterR4.validateSearchParam(type, status, base, expression, context);
FhirResourceDaoSearchParameterR4.validateSearchParam(type, status, base, expression, context, getConfig());
}

View File

@ -28,6 +28,7 @@ import java.util.*;
import javax.annotation.PostConstruct;
import org.apache.commons.codec.binary.StringUtils;
import org.hl7.fhir.instance.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.instance.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.instance.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.instance.model.api.IIdType;
@ -62,7 +63,7 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2<ValueSet>
@Qualifier("myFhirContextDstu2Hl7Org")
private FhirContext myRiCtx;
private ValidationSupportChain myValidationSupport;
private CachingValidationSupport myValidationSupport;
private void addCompose(String theFilter, ValueSet theValueSetToPopulate, ValueSet theSourceValueSet, CodeSystemConcept theConcept) {
if (isBlank(theFilter)) {
@ -252,7 +253,7 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2<ValueSet>
public void postConstruct() {
super.postConstruct();
myDefaultProfileValidationSupport = new DefaultProfileValidationSupport();
myValidationSupport = new ValidationSupportChain(myDefaultProfileValidationSupport, myJpaValidationSupport);
myValidationSupport = new CachingValidationSupport(new ValidationSupportChain(myDefaultProfileValidationSupport, myJpaValidationSupport));
}
@Override

View File

@ -9,7 +9,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.entity.BaseHasResource;
import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
@ -59,6 +59,6 @@ public interface IDao {
IBaseResource toResource(BaseHasResource theEntity, boolean theForHistoryOperation);
<R extends IBaseResource> R toResource(Class<R> theResourceType, BaseHasResource theEntity, ResourceHistoryTable theHistory, Collection<ResourceTag> theTagList, boolean theForHistoryOperation);
<R extends IBaseResource> R toResource(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<ResourceTag> theTagList, boolean theForHistoryOperation);
}

View File

@ -0,0 +1,37 @@
package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.DateRangeParam;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import javax.servlet.http.HttpServletRequest;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public interface IFhirResourceDaoComposition<T extends IBaseResource> extends IFhirResourceDao<T> {
IBundleProvider getDocumentForComposition(HttpServletRequest theServletRequest, IIdType theId, IPrimitiveType<Integer> theCount, DateRangeParam theLastUpdate, SortSpec theSort, RequestDetails theRequestDetails);
}

View File

@ -0,0 +1,31 @@
package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public interface IFhirResourceDaoMessageHeader<T extends IBaseResource> extends IFhirResourceDao<T> {
IBaseBundle messageHeaderProcessMessage(RequestDetails theRequestDetails, IBaseBundle theMessage);
}

View File

@ -38,8 +38,8 @@ public interface ISearchBuilder {
void loadResourcesByPid(Collection<Long> theIncludePids, List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids, boolean theForHistoryOperation, EntityManager theEntityManager,
FhirContext theContext, IDao theDao);
Set<Long> loadReverseIncludes(IDao theCallingDao, FhirContext theContext, EntityManager theEntityManager, Collection<Long> theMatches, Set<Include> theRevIncludes, boolean theReverseMode,
DateRangeParam theLastUpdated);
Set<Long> loadIncludes(IDao theCallingDao, FhirContext theContext, EntityManager theEntityManager, Collection<Long> theMatches, Set<Include> theRevIncludes, boolean theReverseMode,
DateRangeParam theLastUpdated);
/**
* How many results may be fetched at once

View File

@ -1,58 +1,5 @@
package ca.uhn.fhir.jpa.dao;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.math.BigDecimal;
import java.math.MathContext;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.AbstractQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaBuilder.In;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Expression;
import javax.persistence.criteria.From;
import javax.persistence.criteria.Join;
import javax.persistence.criteria.JoinType;
import javax.persistence.criteria.Order;
import javax.persistence.criteria.Path;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import javax.persistence.criteria.Subquery;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.tuple.Pair;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.query.Query;
import org.hl7.fhir.dstu3.model.BaseResource;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/*
* #%L
* HAPI FHIR JPA Server
@ -72,46 +19,19 @@ import com.google.common.collect.Sets;
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeDeclaredChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeChildChoiceDefinition;
import ca.uhn.fhir.context.RuntimeChildResourceDefinition;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.context.*;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamUriDao;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.entity.BaseHasResource;
import ca.uhn.fhir.jpa.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.entity.ForcedId;
import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamNumber;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.entity.ResourceLink;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.entity.SearchParam;
import ca.uhn.fhir.jpa.entity.SearchParamPresent;
import ca.uhn.fhir.jpa.entity.TagDefinition;
import ca.uhn.fhir.jpa.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.search.JpaRuntimeSearchParam;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
import ca.uhn.fhir.jpa.term.VersionIndependentConcept;
import ca.uhn.fhir.jpa.util.BaseIterator;
import ca.uhn.fhir.jpa.util.ScrollableResultsIterator;
import ca.uhn.fhir.model.api.IPrimitiveDatatype;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.api.*;
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
import ca.uhn.fhir.model.base.composite.BaseIdentifierDt;
import ca.uhn.fhir.model.base.composite.BaseQuantityDt;
@ -123,25 +43,42 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.param.CompositeParam;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.HasParam;
import ca.uhn.fhir.rest.param.NumberParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.param.QuantityParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.param.UriParamQualifierEnum;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.tuple.Pair;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.query.Query;
import org.hibernate.query.criteria.internal.CriteriaBuilderImpl;
import org.hibernate.query.criteria.internal.predicate.BooleanStaticAssertionPredicate;
import org.hl7.fhir.dstu3.model.BaseResource;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import javax.annotation.Nonnull;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.*;
import java.math.BigDecimal;
import java.math.MathContext;
import java.util.*;
import java.util.Map.Entry;
import static org.apache.commons.lang3.StringUtils.*;
/**
* The SearchBuilder is responsible for actually forming the SQL query that handles
@ -154,6 +91,8 @@ public class SearchBuilder implements ISearchBuilder {
private static Long NO_MORE = -1L;
private static HandlerTypeEnum ourLastHandlerMechanismForUnitTest;
private static SearchParameterMap ourLastHandlerParamsForUnitTest;
private static String ourLastHandlerThreadForUnitTest;
private static boolean ourTrackHandlersForUnitTest;
private List<Long> myAlsoIncludePids;
private CriteriaBuilder myBuilder;
private BaseHapiFhirDao<?> myCallingDao;
@ -174,9 +113,9 @@ public class SearchBuilder implements ISearchBuilder {
private IHapiTerminologySvc myTerminologySvc;
private int myFetchSize;
protected IResourceHistoryTableDao myResourceHistoryTableDao;
protected IResourceTagDao myResourceTagDao;
protected IResourceSearchViewDao myResourceSearchViewDao;
/**
* Constructor
*/
@ -184,7 +123,7 @@ public class SearchBuilder implements ISearchBuilder {
IFulltextSearchSvc theFulltextSearchSvc, BaseHapiFhirDao<?> theDao,
IResourceIndexedSearchParamUriDao theResourceIndexedSearchParamUriDao, IForcedIdDao theForcedIdDao,
IHapiTerminologySvc theTerminologySvc, ISearchParamRegistry theSearchParamRegistry,
IResourceHistoryTableDao theResourceHistoryTableDao, IResourceTagDao theResourceTagDao) {
IResourceTagDao theResourceTagDao, IResourceSearchViewDao theResourceViewDao) {
myContext = theFhirContext;
myEntityManager = theEntityManager;
myFulltextSearchSvc = theFulltextSearchSvc;
@ -193,8 +132,8 @@ public class SearchBuilder implements ISearchBuilder {
myForcedIdDao = theForcedIdDao;
myTerminologySvc = theTerminologySvc;
mySearchParamRegistry = theSearchParamRegistry;
myResourceHistoryTableDao = theResourceHistoryTableDao;
myResourceTagDao = theResourceTagDao;
myResourceSearchViewDao = theResourceViewDao;
}
private void addPredicateComposite(String theResourceName, RuntimeSearchParam theParamDef, List<? extends IQueryParameterType> theNextAnd) {
@ -330,7 +269,7 @@ public class SearchBuilder implements ISearchBuilder {
return;
}
List<Predicate> codePredicates = new ArrayList<Predicate>();
List<Predicate> codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
IQueryParameterType params = nextOr;
@ -346,8 +285,9 @@ public class SearchBuilder implements ISearchBuilder {
ParamPrefixEnum prefix = ObjectUtils.defaultIfNull(param.getPrefix(), ParamPrefixEnum.EQUAL);
String invalidMessageName = "invalidNumberPrefix";
Predicate num = createPredicateNumeric(theResourceName, theParamName, join, myBuilder, params, prefix, value, fromObj, invalidMessageName);
codePredicates.add(num);
Predicate predicateNumeric = createPredicateNumeric(theResourceName, theParamName, join, myBuilder, params, prefix, value, fromObj, invalidMessageName);
Predicate predicateOuter = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, predicateNumeric );
codePredicates.add(predicateOuter);
} else {
throw new IllegalArgumentException("Invalid token type: " + params.getClass());
@ -360,11 +300,10 @@ public class SearchBuilder implements ISearchBuilder {
private void addPredicateParamMissing(String theResourceName, String theParamName, boolean theMissing) {
Join<ResourceTable, SearchParamPresent> paramPresentJoin = myResourceTableRoot.join("mySearchParamPresents", JoinType.LEFT);
Join<SearchParamPresent, SearchParam> paramJoin = paramPresentJoin.join("mySearchParam", JoinType.LEFT);
myPredicates.add(myBuilder.equal(paramJoin.get("myResourceName"), theResourceName));
myPredicates.add(myBuilder.equal(paramJoin.get("myParamName"), theParamName));
myPredicates.add(myBuilder.equal(paramPresentJoin.get("myPresent"), !theMissing));
Expression<Long> hashPresence = paramPresentJoin.get("myHashPresence").as(Long.class);
Long hash = SearchParamPresent.calculateHashPresence(theResourceName, theParamName, !theMissing);
myPredicates.add(myBuilder.equal(hashPresence, hash));
}
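
For clarity on the negation above: the presence hash encodes whether the parameter IS present, so :missing=true has to be flipped to present=false. A small hedged illustration; "Patient" and "name" are example values only.

    // Sketch only: hashes corresponding to the two :missing states handled above
    private void exampleMissingHashes() {
        Long matchWhenAbsent = SearchParamPresent.calculateHashPresence("Patient", "name", false); // Patient?name:missing=true
        Long matchWhenPresent = SearchParamPresent.calculateHashPresence("Patient", "name", true); // Patient?name:missing=false
    }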
private void addPredicateParamMissing(String theResourceName, String theParamName, boolean theMissing, Join<ResourceTable, ? extends BaseResourceIndexedSearchParam> theJoin) {
@ -382,7 +321,7 @@ public class SearchBuilder implements ISearchBuilder {
return;
}
List<Predicate> codePredicates = new ArrayList<Predicate>();
List<Predicate> codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
Predicate singleCode = createPredicateQuantity(nextOr, theResourceName, theParamName, myBuilder, join);
@ -405,7 +344,7 @@ public class SearchBuilder implements ISearchBuilder {
Join<ResourceTable, ResourceLink> join = createOrReuseJoin(JoinEnum.REFERENCE, theParamName);
List<Predicate> codePredicates = new ArrayList<Predicate>();
List<Predicate> codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
@ -502,7 +441,7 @@ public class SearchBuilder implements ISearchBuilder {
} else {
RuntimeResourceDefinition resDef = myContext.getResourceDefinition(ref.getResourceType());
resourceTypes = new ArrayList<Class<? extends IBaseResource>>(1);
resourceTypes = new ArrayList<>(1);
resourceTypes.add(resDef.getImplementingClass());
resourceId = ref.getIdPart();
}
@ -547,7 +486,7 @@ public class SearchBuilder implements ISearchBuilder {
IQueryParameterType chainValue;
if (remainingChain != null) {
if (param == null || param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) {
ourLog.debug("Type {} parameter {} is not a reference, can not chain {}", new Object[] {nextType.getSimpleName(), chain, remainingChain});
ourLog.debug("Type {} parameter {} is not a reference, can not chain {}", nextType.getSimpleName(), chain, remainingChain);
continue;
}
@ -568,7 +507,7 @@ public class SearchBuilder implements ISearchBuilder {
Root<ResourceTable> subQfrom = subQ.from(ResourceTable.class);
subQ.select(subQfrom.get("myId").as(Long.class));
List<List<? extends IQueryParameterType>> andOrParams = new ArrayList<List<? extends IQueryParameterType>>();
List<List<? extends IQueryParameterType>> andOrParams = new ArrayList<>();
andOrParams.add(Collections.singletonList(chainValue));
/*
@ -619,7 +558,7 @@ public class SearchBuilder implements ISearchBuilder {
private void addPredicateResourceId(List<List<? extends IQueryParameterType>> theValues) {
for (List<? extends IQueryParameterType> nextValue : theValues) {
Set<Long> orPids = new HashSet<Long>();
Set<Long> orPids = new HashSet<>();
for (IQueryParameterType next : nextValue) {
String value = next.getValueAsQueryToken(myContext);
if (value != null && value.startsWith("|")) {
@ -667,10 +606,9 @@ public class SearchBuilder implements ISearchBuilder {
return;
}
List<Predicate> codePredicates = new ArrayList<Predicate>();
List<Predicate> codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
IQueryParameterType theParameter = nextOr;
Predicate singleCode = createPredicateString(theParameter, theResourceName, theParamName, myBuilder, join);
Predicate singleCode = createPredicateString(nextOr, theResourceName, theParamName, myBuilder, join);
codePredicates.add(singleCode);
}
@ -815,7 +753,7 @@ public class SearchBuilder implements ISearchBuilder {
return;
}
List<Predicate> codePredicates = new ArrayList<Predicate>();
List<Predicate> codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
if (nextOr instanceof TokenParam) {
@ -858,7 +796,6 @@ public class SearchBuilder implements ISearchBuilder {
continue;
}
Predicate predicate;
if (param.getQualifier() == UriParamQualifierEnum.ABOVE) {
/*
@ -887,14 +824,24 @@ public class SearchBuilder implements ISearchBuilder {
continue;
}
predicate = join.get("myUri").as(String.class).in(toFind);
Predicate uriPredicate = join.get("myUri").as(String.class).in(toFind);
Predicate hashAndUriPredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, uriPredicate);
codePredicates.add(hashAndUriPredicate);
} else if (param.getQualifier() == UriParamQualifierEnum.BELOW) {
predicate = myBuilder.like(join.get("myUri").as(String.class), createLeftMatchLikeExpression(value));
Predicate uriPredicate = myBuilder.like(join.get("myUri").as(String.class), createLeftMatchLikeExpression(value));
Predicate hashAndUriPredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, uriPredicate);
codePredicates.add(hashAndUriPredicate);
} else {
predicate = myBuilder.equal(join.get("myUri").as(String.class), value);
long hashUri = ResourceIndexedSearchParamUri.calculateHashUri(theResourceName, theParamName, value);
Predicate hashPredicate = myBuilder.equal(join.get("myHashUri"), hashUri);
codePredicates.add(hashPredicate);
}
codePredicates.add(predicate);
} else {
throw new IllegalArgumentException("Invalid URI type: " + nextOr.getClass());
}
@ -912,16 +859,13 @@ public class SearchBuilder implements ISearchBuilder {
}
Predicate orPredicate = myBuilder.or(toArray(codePredicates));
Predicate outerPredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, orPredicate);
myPredicates.add(outerPredicate);
myPredicates.add(orPredicate);
}
private Predicate combineParamIndexPredicateWithParamNamePredicate(String theResourceName, String theParamName, From<?, ? extends BaseResourceIndexedSearchParam> theFrom, Predicate thePredicate) {
Predicate resourceTypePredicate = myBuilder.equal(theFrom.get("myResourceType"), theResourceName);
Predicate paramNamePredicate = myBuilder.equal(theFrom.get("myParamName"), theParamName);
Predicate outerPredicate = myBuilder.and(resourceTypePredicate, paramNamePredicate, thePredicate);
return outerPredicate;
long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName);
Predicate hashIdentityPredicate = myBuilder.equal(theFrom.get("myHashIdentity"), hashIdentity);
return myBuilder.and(hashIdentityPredicate, thePredicate);
}
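
The helper above now matches index rows by a single precomputed hash instead of comparing resource type and parameter name as strings. A minimal sketch of the resulting predicate shape for a string parameter, assuming a CriteriaBuilder and a join onto the string index table; the "Patient", "family" and "SMITH%" values are illustrative, not from this commit.

    // Illustration only: hash-identity predicate combined with a value predicate,
    // mirroring the pattern used throughout this file after this change
    private Predicate examplePatientFamilyPredicate(CriteriaBuilder theBuilder, From<?, ResourceIndexedSearchParamString> theFrom) {
        long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity("Patient", "family");
        Predicate identityPredicate = theBuilder.equal(theFrom.get("myHashIdentity"), hashIdentity);
        Predicate valuePredicate = theBuilder.like(theFrom.get("myValueNormalized").as(String.class), "SMITH%");
        return theBuilder.and(identityPredicate, valuePredicate);
    }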
private Predicate createCompositeParamPart(String theResourceName, Root<ResourceTable> theRoot, RuntimeSearchParam theParam, IQueryParameterType leftValue) {
@ -1101,7 +1045,7 @@ public class SearchBuilder implements ISearchBuilder {
if (theParamName == null) {
return num;
}
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, num);
return num;
}
private Predicate createPredicateQuantity(IQueryParameterType theParam, String theResourceName, String theParamName, CriteriaBuilder theBuilder,
@ -1127,39 +1071,31 @@ public class SearchBuilder implements ISearchBuilder {
throw new IllegalArgumentException("Invalid quantity type: " + theParam.getClass());
}
Predicate system = null;
if (!isBlank(systemValue)) {
system = theBuilder.equal(theFrom.get("mySystem"), systemValue);
}
Predicate code = null;
if (!isBlank(unitsValue)) {
code = theBuilder.equal(theFrom.get("myUnits"), unitsValue);
Predicate hashPredicate;
if (!isBlank(systemValue) && !isBlank(unitsValue)) {
long hash = ResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits(theResourceName, theParamName, systemValue, unitsValue);
hashPredicate = myBuilder.equal(theFrom.get("myHashIdentitySystemAndUnits"), hash);
} else if (!isBlank(unitsValue)) {
long hash = ResourceIndexedSearchParamQuantity.calculateHashUnits(theResourceName, theParamName, unitsValue);
hashPredicate = myBuilder.equal(theFrom.get("myHashIdentityAndUnits"), hash);
} else {
long hash = BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName);
hashPredicate = myBuilder.equal(theFrom.get("myHashIdentity"), hash);
}
cmpValue = ObjectUtils.defaultIfNull(cmpValue, ParamPrefixEnum.EQUAL);
final Expression<BigDecimal> path = theFrom.get("myValue");
String invalidMessageName = "invalidQuantityPrefix";
Predicate num = createPredicateNumeric(theResourceName, null, theFrom, theBuilder, theParam, cmpValue, valueValue, path, invalidMessageName);
Predicate numericPredicate = createPredicateNumeric(theResourceName, null, theFrom, theBuilder, theParam, cmpValue, valueValue, path, invalidMessageName);
Predicate singleCode;
if (system == null && code == null) {
singleCode = num;
} else if (system == null) {
singleCode = theBuilder.and(code, num);
} else if (code == null) {
singleCode = theBuilder.and(system, num);
} else {
singleCode = theBuilder.and(system, code, num);
}
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
return theBuilder.and(hashPredicate, numericPredicate);
}
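
A hedged illustration of which hash column the quantity logic above selects, using an imagined Observation?value-quantity search; the resource, parameter, system and unit strings are assumptions for the example only.

    // Sketch only (not part of this commit): the three hash choices made above
    private void exampleQuantityHashes() {
        // value-quantity=5.4|http://unitsofmeasure.org|mg  -> system and units both present
        long bySystemAndUnits = ResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits("Observation", "value-quantity", "http://unitsofmeasure.org", "mg");
        // value-quantity=5.4||mg                           -> units only
        long byUnitsOnly = ResourceIndexedSearchParamQuantity.calculateHashUnits("Observation", "value-quantity", "mg");
        // value-quantity=5.4                               -> neither, fall back to the parameter identity hash
        long byIdentityOnly = BaseResourceIndexedSearchParam.calculateHashIdentity("Observation", "value-quantity");
    }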
private Predicate createPredicateString(IQueryParameterType theParameter, String theResourceName, String theParamName, CriteriaBuilder theBuilder,
From<?, ResourceIndexedSearchParamString> theFrom) {
String rawSearchTerm;
DaoConfig daoConfig = myCallingDao.getConfig();
if (theParameter instanceof TokenParam) {
TokenParam id = (TokenParam) theParameter;
if (!id.isText()) {
@ -1170,7 +1106,7 @@ public class SearchBuilder implements ISearchBuilder {
StringParam id = (StringParam) theParameter;
rawSearchTerm = id.getValue();
if (id.isContains()) {
if (!myCallingDao.getConfig().isAllowContainsSearches()) {
if (!daoConfig.isAllowContainsSearches()) {
throw new MethodNotAllowedException(":contains modifier is disabled on this server");
}
}
@ -1186,22 +1122,34 @@ public class SearchBuilder implements ISearchBuilder {
+ ResourceIndexedSearchParamString.MAX_LENGTH + "): " + rawSearchTerm);
}
String likeExpression = BaseHapiFhirDao.normalizeString(rawSearchTerm);
if (theParameter instanceof StringParam &&
((StringParam) theParameter).isContains() &&
myCallingDao.getConfig().isAllowContainsSearches()) {
likeExpression = createLeftAndRightMatchLikeExpression(likeExpression);
boolean exactMatch = theParameter instanceof StringParam && ((StringParam) theParameter).isExact();
if (exactMatch) {
// Exact match
Long hash = ResourceIndexedSearchParamString.calculateHashExact(theResourceName, theParamName, rawSearchTerm);
return theBuilder.equal(theFrom.get("myHashExact").as(Long.class), hash);
} else {
likeExpression = createLeftMatchLikeExpression(likeExpression);
}
Predicate singleCode = theBuilder.like(theFrom.get("myValueNormalized").as(String.class), likeExpression);
if (theParameter instanceof StringParam && ((StringParam) theParameter).isExact()) {
Predicate exactCode = theBuilder.equal(theFrom.get("myValueExact"), rawSearchTerm);
singleCode = theBuilder.and(singleCode, exactCode);
}
// Normalized Match
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
String normalizedString = BaseHapiFhirDao.normalizeString(rawSearchTerm);
String likeExpression;
if (theParameter instanceof StringParam &&
((StringParam) theParameter).isContains() &&
daoConfig.isAllowContainsSearches()) {
likeExpression = createLeftAndRightMatchLikeExpression(normalizedString);
} else {
likeExpression = createLeftMatchLikeExpression(normalizedString);
}
Long hash = ResourceIndexedSearchParamString.calculateHashNormalized(daoConfig, theResourceName, theParamName, normalizedString);
Predicate hashCode = theBuilder.equal(theFrom.get("myHashNormalizedPrefix").as(Long.class), hash);
Predicate singleCode = theBuilder.like(theFrom.get("myValueNormalized").as(String.class), likeExpression);
return theBuilder.and(hashCode, singleCode);
}
}
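
To summarize the branch above with a concrete, illustrative example: a plain string search goes through the normalized prefix hash plus a LIKE on the normalized value, while :exact goes straight to the exact-value hash. The parameter values below are assumptions, not from this commit.

    // Sketch only: the two columns targeted by the logic above
    private void exampleStringHashes(DaoConfig theDaoConfig) {
        // Patient?name=smith        -> myHashNormalizedPrefix equality plus myValueNormalized LIKE 'SMITH%'
        Long normalizedHash = ResourceIndexedSearchParamString.calculateHashNormalized(theDaoConfig, "Patient", "name", BaseHapiFhirDao.normalizeString("smith"));
        // Patient?name:exact=Smith  -> myHashExact equality on the raw (un-normalized) value
        Long exactHash = ResourceIndexedSearchParamString.calculateHashExact("Patient", "name", "Smith");
    }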
private List<Predicate> createPredicateTagList(Path<TagDefinition> theDefJoin, CriteriaBuilder theBuilder, TagTypeEnum theTagType, List<Pair<String, String>> theTokens) {
@ -1256,7 +1204,7 @@ public class SearchBuilder implements ISearchBuilder {
* Process token modifiers (:in, :below, :above)
*/
List<VersionIndependentConcept> codes = null;
List<VersionIndependentConcept> codes;
if (modifier == TokenParamModifier.IN) {
codes = myTerminologySvc.expandValueSet(code);
} else if (modifier == TokenParamModifier.ABOVE) {
@ -1265,81 +1213,53 @@ public class SearchBuilder implements ISearchBuilder {
} else if (modifier == TokenParamModifier.BELOW) {
system = determineSystemIfMissing(theParamName, code, system);
codes = myTerminologySvc.findCodesBelow(system, code);
}
ArrayList<Predicate> singleCodePredicates = new ArrayList<>();
if (codes != null) {
if (codes.isEmpty()) {
// This will never match anything
Predicate codePredicate = theBuilder.isNull(theFrom.get("myMissing"));
singleCodePredicates.add(codePredicate);
} else {
List<Predicate> orPredicates = new ArrayList<Predicate>();
Map<String, List<VersionIndependentConcept>> map = new HashMap<String, List<VersionIndependentConcept>>();
for (VersionIndependentConcept nextCode : codes) {
List<VersionIndependentConcept> systemCodes = map.get(nextCode.getSystem());
if (null == systemCodes) {
systemCodes = new ArrayList<>();
map.put(nextCode.getSystem(), systemCodes);
}
systemCodes.add(nextCode);
}
// Use "in" in case of large numbers of codes due to param modifiers
final Path<String> systemExpression = theFrom.get("mySystem");
final Path<String> valueExpression = theFrom.get("myValue");
for (Map.Entry<String, List<VersionIndependentConcept>> entry : map.entrySet()) {
Predicate systemPredicate = theBuilder.equal(systemExpression, entry.getKey());
In<String> codePredicate = theBuilder.in(valueExpression);
for (VersionIndependentConcept nextCode : entry.getValue()) {
codePredicate.value(nextCode.getCode());
}
orPredicates.add(theBuilder.and(systemPredicate, codePredicate));
}
singleCodePredicates.add(theBuilder.or(orPredicates.toArray(new Predicate[orPredicates.size()])));
}
} else {
codes = Collections.singletonList(new VersionIndependentConcept(system, code));
}
/*
* Ok, this is a normal query
*/
if (codes.isEmpty()) {
// This will never match anything
return new BooleanStaticAssertionPredicate((CriteriaBuilderImpl) theBuilder, false);
}
if (StringUtils.isNotBlank(system)) {
if (modifier != null && modifier == TokenParamModifier.NOT) {
singleCodePredicates.add(theBuilder.notEqual(theFrom.get("mySystem"), system));
} else {
singleCodePredicates.add(theBuilder.equal(theFrom.get("mySystem"), system));
}
} else if (system == null) {
// don't check the system
/*
* Note: A null system value means "match any system", but
* an empty-string system value means "match values that
* explicitly have no system".
*/
boolean haveSystem = codes.get(0).getSystem() != null;
boolean haveCode = isNotBlank(codes.get(0).getCode());
Expression<Long> hashField;
if (!haveSystem && !haveCode) {
// If we have neither, this isn't actually an expression so
// just return 1=1
return new BooleanStaticAssertionPredicate((CriteriaBuilderImpl) theBuilder, true);
} else if (haveSystem && haveCode) {
hashField = theFrom.get("myHashSystemAndValue").as(Long.class);
} else if (haveSystem) {
hashField = theFrom.get("myHashSystem").as(Long.class);
} else {
hashField = theFrom.get("myHashValue").as(Long.class);
}
List<Long> values = new ArrayList<>(codes.size());
for (VersionIndependentConcept next : codes) {
if (haveSystem && haveCode) {
values.add(ResourceIndexedSearchParamToken.calculateHashSystemAndValue(theResourceName, theParamName, next.getSystem(), next.getCode()));
} else if (haveSystem) {
values.add(ResourceIndexedSearchParamToken.calculateHashSystem(theResourceName, theParamName, next.getSystem()));
} else {
// If the system is "", we only match on null systems
singleCodePredicates.add(theBuilder.isNull(theFrom.get("mySystem")));
}
if (StringUtils.isNotBlank(code)) {
if (modifier != null && modifier == TokenParamModifier.NOT) {
singleCodePredicates.add(theBuilder.notEqual(theFrom.get("myValue"), code));
} else {
singleCodePredicates.add(theBuilder.equal(theFrom.get("myValue"), code));
}
} else {
/*
* As of HAPI FHIR 1.5, if the client searched for a token with a system but no specified value this means to
* match all tokens with the given value.
*
* I'm not sure I agree with this, but hey.. FHIR-I voted and this was the result :)
*/
// singleCodePredicates.add(theBuilder.isNull(theFrom.get("myValue")));
values.add(ResourceIndexedSearchParamToken.calculateHashValue(theResourceName, theParamName, next.getCode()));
}
}
Predicate singleCode = theBuilder.and(toArray(singleCodePredicates));
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
Predicate predicate = hashField.in(values);
if (modifier == TokenParamModifier.NOT) {
Predicate identityPredicate = theBuilder.equal(theFrom.get("myHashIdentity").as(Long.class), BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName));
Predicate disjunctionPredicate = theBuilder.not(predicate);
predicate = theBuilder.and(identityPredicate, disjunctionPredicate);
}
return predicate;
}
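
A hedged mapping from token query shapes to the hash columns chosen above, using an imagined Patient?identifier search; note the distinction the comment above draws between a null system (match any system) and an empty system (explicitly no system). The example strings are assumptions, not from this commit.

    // Illustration only: which hash the token logic above ends up using
    private void exampleTokenHashes() {
        // identifier=http://acme|12345 -> system and value present
        long bySystemAndValue = ResourceIndexedSearchParamToken.calculateHashSystemAndValue("Patient", "identifier", "http://acme", "12345");
        // identifier=|12345            -> empty system means "explicitly no system", still hashed together with the value
        long byEmptySystemAndValue = ResourceIndexedSearchParamToken.calculateHashSystemAndValue("Patient", "identifier", "", "12345");
        // identifier=http://acme|      -> system only
        long bySystemOnly = ResourceIndexedSearchParamToken.calculateHashSystem("Patient", "identifier", "http://acme");
        // identifier=12345             -> null system means "any system", so only the value is hashed
        long byValueOnly = ResourceIndexedSearchParamToken.calculateHashValue("Patient", "identifier", "12345");
    }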
@Override
@ -1378,8 +1298,11 @@ public class SearchBuilder implements ISearchBuilder {
}
Set<String> uniqueQueryStrings = BaseHapiFhirDao.extractCompositeStringUniquesValueChains(myResourceName, params);
ourLastHandlerParamsForUnitTest = theParams;
ourLastHandlerMechanismForUnitTest = HandlerTypeEnum.UNIQUE_INDEX;
if (ourTrackHandlersForUnitTest) {
ourLastHandlerParamsForUnitTest = theParams;
ourLastHandlerMechanismForUnitTest = HandlerTypeEnum.UNIQUE_INDEX;
ourLastHandlerThreadForUnitTest = Thread.currentThread().getName();
}
return new UniqueIndexIterator(uniqueQueryStrings);
}
@ -1390,8 +1313,11 @@ public class SearchBuilder implements ISearchBuilder {
}
}
ourLastHandlerParamsForUnitTest = theParams;
ourLastHandlerMechanismForUnitTest = HandlerTypeEnum.STANDARD_QUERY;
if (ourTrackHandlersForUnitTest) {
ourLastHandlerParamsForUnitTest = theParams;
ourLastHandlerMechanismForUnitTest = HandlerTypeEnum.STANDARD_QUERY;
ourLastHandlerThreadForUnitTest = Thread.currentThread().getName();
}
return new QueryIterator();
}
@ -1444,8 +1370,8 @@ public class SearchBuilder implements ISearchBuilder {
if (myParams.getEverythingMode() != null) {
Join<ResourceTable, ResourceLink> join = myResourceTableRoot.join("myResourceLinks", JoinType.LEFT);
if (myParams.get(BaseResource.SP_RES_ID) != null) {
StringParam idParm = (StringParam) myParams.get(BaseResource.SP_RES_ID).get(0).get(0);
if (myParams.get(IAnyResource.SP_RES_ID) != null) {
StringParam idParm = (StringParam) myParams.get(IAnyResource.SP_RES_ID).get(0).get(0);
Long pid = BaseHapiFhirDao.translateForcedIdToPid(myResourceName, idParm.getValue(), myForcedIdDao);
if (myAlsoIncludePids == null) {
myAlsoIncludePids = new ArrayList<>(1);
@ -1535,7 +1461,7 @@ public class SearchBuilder implements ISearchBuilder {
return false;
}
if (BaseResource.SP_RES_ID.equals(theSort.getParamName())) {
if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) {
From<?, ?> forcedIdJoin = theFrom.join("myForcedId", JoinType.LEFT);
if (theSort.getOrder() == null || theSort.getOrder() == SortOrderEnum.ASC) {
theOrders.add(theBuilder.asc(forcedIdJoin.get("myForcedId")));
@ -1675,52 +1601,39 @@ public class SearchBuilder implements ISearchBuilder {
private void doLoadPids(List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids, boolean theForHistoryOperation, EntityManager entityManager, FhirContext context, IDao theDao,
Map<Long, Integer> position, Collection<Long> pids) {
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<ResourceTable> cq = builder.createQuery(ResourceTable.class);
Root<ResourceTable> from = cq.from(ResourceTable.class);
cq.where(from.get("myId").in(pids));
TypedQuery<ResourceTable> q = entityManager.createQuery(cq);
List<ResourceTable> resultList = q.getResultList();
// -- get the resource from the searchView
Collection<ResourceSearchView> resourceSearchViewList = myResourceSearchViewDao.findByResourceIds(pids);
//-- Issue #963: Load resource histories based on pids once to improve the performance
Map<Long, ResourceHistoryTable> historyMap = getResourceHistoryMap(pids);
//-- preload all tags with tag definition if any
Map<Long, Collection<ResourceTag>> tagMap = getResourceTagMap(resultList);
//-- pre-load all forcedId
Map<Long, ForcedId> forcedIdMap = getForcedIdMap(pids);
ForcedId forcedId = null;
Map<Long, Collection<ResourceTag>> tagMap = getResourceTagMap(resourceSearchViewList);
Long resourceId = null;
for (ResourceTable next : resultList) {
for (ResourceSearchView next : resourceSearchViewList) {
Class<? extends IBaseResource> resourceType = context.getResourceDefinition(next.getResourceType()).getImplementingClass();
resourceId = next.getId();
forcedId = forcedIdMap.get(resourceId);
if (forcedId != null)
next.setForcedId(forcedId);
IBaseResource resource = theDao.toResource(resourceType, next, historyMap.get(next.getId()), tagMap.get(next.getId()), theForHistoryOperation);
IBaseResource resource = theDao.toResource(resourceType, next, tagMap.get(resourceId), theForHistoryOperation);
if (resource == null) {
ourLog.warn("Unable to find resource {}/{}/_history/{} in database", next.getResourceType(), next.getIdDt().getIdPart(), next.getVersion());
continue;
}
Integer index = position.get(next.getId());
Integer index = position.get(resourceId);
if (index == null) {
ourLog.warn("Got back unexpected resource PID {}", next.getId());
ourLog.warn("Got back unexpected resource PID {}", resourceId);
continue;
}
if (resource instanceof IResource) {
if (theRevIncludedPids.contains(next.getId())) {
if (theRevIncludedPids.contains(resourceId)) {
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IResource) resource, BundleEntrySearchModeEnum.INCLUDE);
} else {
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IResource) resource, BundleEntrySearchModeEnum.MATCH);
}
} else {
if (theRevIncludedPids.contains(next.getId())) {
if (theRevIncludedPids.contains(resourceId)) {
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IAnyResource) resource, BundleEntrySearchModeEnum.INCLUDE.getCode());
} else {
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IAnyResource) resource, BundleEntrySearchModeEnum.MATCH.getCode());
@ -1731,52 +1644,34 @@ public class SearchBuilder implements ISearchBuilder {
}
}
//-- load all history in to the map
private Map<Long, ResourceHistoryTable> getResourceHistoryMap(Collection<Long> pids) {
private Map<Long, Collection<ResourceTag>> getResourceTagMap(Collection<ResourceSearchView> theResourceSearchViewList) {
Map<Long, ResourceHistoryTable> historyMap = new HashMap<Long, ResourceHistoryTable>();
List<Long> idList = new ArrayList<Long>(theResourceSearchViewList.size());
if (pids.size() == 0)
return historyMap;
Collection<ResourceHistoryTable> historyList = myResourceHistoryTableDao.findByResourceIds(pids);
for (ResourceHistoryTable history : historyList) {
historyMap.put(history.getResourceId(), history);
}
return historyMap;
}
private Map<Long, Collection<ResourceTag>> getResourceTagMap(List<ResourceTable> resourceList) {
List<Long> idList = new ArrayList<Long>(resourceList.size());
//-- find all resources that have tags
for (ResourceTable resource: resourceList) {
for (ResourceSearchView resource: theResourceSearchViewList) {
if (resource.isHasTags())
idList.add(resource.getId());
}
Map<Long, Collection<ResourceTag>> tagMap = new HashMap<Long, Collection<ResourceTag>>();
Map<Long, Collection<ResourceTag>> tagMap = new HashMap<>();
//-- no tags
if (idList.size() == 0)
return tagMap;
//-- get all tags for the idList
Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(idList);
//-- build the map, key = resourceId, value = list of ResourceTag
Long resourceId;
Collection<ResourceTag> tagCol;
for (ResourceTag tag : tagList) {
resourceId = tag.getResourceId();
tagCol = tagMap.get(resourceId);
if (tagCol == null) {
tagCol = new ArrayList<ResourceTag>();
tagCol = new ArrayList<>();
tagCol.add(tag);
tagMap.put(resourceId, tagCol);
} else {
@ -1784,26 +1679,9 @@ public class SearchBuilder implements ISearchBuilder {
}
}
return tagMap;
return tagMap;
}
//-- load all forcedId in to the map
private Map<Long, ForcedId> getForcedIdMap(Collection<Long> pids) {
Map<Long, ForcedId> forceIdMap = new HashMap<Long, ForcedId>();
if (pids.size() == 0)
return forceIdMap;
Collection<ForcedId> forceIdList = myForcedIdDao.findByResourcePids(pids);
for (ForcedId forcedId : forceIdList) {
forceIdMap.put(forcedId.getResourcePid(), forcedId);
}
return forceIdMap;
}
@Override
public void loadResourcesByPid(Collection<Long> theIncludePids, List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids, boolean theForHistoryOperation,
EntityManager entityManager, FhirContext context, IDao theDao) {
@ -1840,18 +1718,16 @@ public class SearchBuilder implements ISearchBuilder {
}
/**
* THIS SHOULD RETURN HASHSET and not jsut Set because we add to it later (so it can't be Collections.emptySet())
*
* @param theLastUpdated
* THIS SHOULD RETURN HASHSET and not just Set because we add to it later (so it can't be Collections.emptySet())
*/
@Override
public HashSet<Long> loadReverseIncludes(IDao theCallingDao, FhirContext theContext, EntityManager theEntityManager, Collection<Long> theMatches, Set<Include> theRevIncludes,
boolean theReverseMode, DateRangeParam theLastUpdated) {
public HashSet<Long> loadIncludes(IDao theCallingDao, FhirContext theContext, EntityManager theEntityManager, Collection<Long> theMatches, Set<Include> theRevIncludes,
boolean theReverseMode, DateRangeParam theLastUpdated) {
if (theMatches.size() == 0) {
return new HashSet<Long>();
return new HashSet<>();
}
if (theRevIncludes == null || theRevIncludes.isEmpty()) {
return new HashSet<Long>();
return new HashSet<>();
}
String searchFieldName = theReverseMode ? "myTargetResourcePid" : "mySourceResourcePid";
@ -1878,7 +1754,7 @@ public class SearchBuilder implements ISearchBuilder {
boolean matchAll = "*".equals(nextInclude.getValue());
if (matchAll) {
String sql;
sql = "SELECT r FROM ResourceLink r WHERE r." + searchFieldName + " IN (:target_pids)";
sql = "SELECT r FROM ResourceLink r WHERE r." + searchFieldName + " IN (:target_pids) ";
TypedQuery<ResourceLink> q = theEntityManager.createQuery(sql, ResourceLink.class);
q.setParameter("target_pids", nextRoundMatches);
List<ResourceLink> results = q.getResultList();
@ -1892,7 +1768,7 @@ public class SearchBuilder implements ISearchBuilder {
} else {
List<String> paths;
RuntimeSearchParam param = null;
RuntimeSearchParam param;
String resType = nextInclude.getParamType();
if (isBlank(resType)) {
continue;
@ -1953,8 +1829,10 @@ public class SearchBuilder implements ISearchBuilder {
}
}
if (theLastUpdated != null && (theLastUpdated.getLowerBoundAsInstant() != null || theLastUpdated.getUpperBoundAsInstant() != null)) {
pidsToInclude = new HashSet<>(filterResourceIdsByLastUpdated(theEntityManager, theLastUpdated, pidsToInclude));
if (theReverseMode) {
if (theLastUpdated != null && (theLastUpdated.getLowerBoundAsInstant() != null || theLastUpdated.getUpperBoundAsInstant() != null)) {
pidsToInclude = new HashSet<>(filterResourceIdsByLastUpdated(theEntityManager, theLastUpdated, pidsToInclude));
}
}
for (Long next : pidsToInclude) {
if (original.contains(next) == false && allAdded.contains(next) == false) {
@ -1966,19 +1844,15 @@ public class SearchBuilder implements ISearchBuilder {
nextRoundMatches = pidsToInclude;
} while (includes.size() > 0 && nextRoundMatches.size() > 0 && addedSomeThisRound);
ourLog.info("Loaded {} {} in {} rounds and {} ms", new Object[] {allAdded.size(), theReverseMode ? "_revincludes" : "_includes", roundCounts, w.getMillisAndRestart()});
ourLog.info("Loaded {} {} in {} rounds and {} ms", allAdded.size(), theReverseMode ? "_revincludes" : "_includes", roundCounts, w.getMillisAndRestart());
return allAdded;
}
private void searchForIdsWithAndOr(SearchParameterMap theParams) {
SearchParameterMap params = theParams;
if (params == null) {
params = new SearchParameterMap();
}
private void searchForIdsWithAndOr(@Nonnull SearchParameterMap theParams) {
myParams = theParams;
for (Entry<String, List<List<? extends IQueryParameterType>>> nextParamEntry : params.entrySet()) {
for (Entry<String, List<List<? extends IQueryParameterType>>> nextParamEntry : myParams.entrySet()) {
String nextParamName = nextParamEntry.getKey();
List<List<? extends IQueryParameterType>> andOrParams = nextParamEntry.getValue();
searchForIdsWithAndOr(myResourceName, nextParamName, andOrParams);
@ -2241,14 +2115,16 @@ public class SearchBuilder implements ISearchBuilder {
}
@VisibleForTesting
public static SearchParameterMap getLastHandlerParamsForUnitTest() {
return ourLastHandlerParamsForUnitTest;
public static String getLastHandlerParamsForUnitTest() {
return ourLastHandlerParamsForUnitTest.toString() + " on thread [" + ourLastHandlerThreadForUnitTest +"]";
}
@VisibleForTesting
public static void resetLastHandlerMechanismForUnitTest() {
ourLastHandlerMechanismForUnitTest = null;
ourLastHandlerParamsForUnitTest = null;
ourLastHandlerThreadForUnitTest = null;
ourTrackHandlersForUnitTest = true;
}
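
A hedged sketch of how a test might exercise the opt-in tracking above; the JUnit wiring and the search call are assumptions, and only the two static methods shown in this diff are relied on.

    // Illustration only: tracking is off until resetLastHandlerMechanismForUnitTest() turns it on
    @Test
    public void exampleTrackLastHandler() {
        SearchBuilder.resetLastHandlerMechanismForUnitTest(); // also sets ourTrackHandlersForUnitTest = true
        // ... run a search through the DAO layer under test here ...
        String lastHandler = SearchBuilder.getLastHandlerParamsForUnitTest();
        // lastHandler now describes the SearchParameterMap and the thread that handled it
    }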
static Predicate[] toArray(List<Predicate> thePredicates) {
@ -2305,7 +2181,7 @@ public class SearchBuilder implements ISearchBuilder {
myCurrentOffset = end;
Collection<Long> pidsToScan = myCurrentPids.subList(start, end);
Set<Include> includes = Collections.singleton(new Include("*", true));
Set<Long> newPids = loadReverseIncludes(myCallingDao, myContext, myEntityManager, pidsToScan, includes, false, myParams.getLastUpdated());
Set<Long> newPids = loadIncludes(myCallingDao, myContext, myEntityManager, pidsToScan, includes, false, myParams.getLastUpdated());
myCurrentIterator = newPids.iterator();
}

View File

@ -19,33 +19,40 @@ package ca.uhn.fhir.jpa.dao;
* limitations under the License.
* #L%
*/
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.math.BigDecimal;
import java.util.*;
import javax.measure.quantity.Quantity;
import javax.measure.unit.NonSI;
import javax.measure.unit.Unit;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.instance.model.api.IBaseResource;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.model.api.*;
import ca.uhn.fhir.model.api.IDatatype;
import ca.uhn.fhir.model.api.IPrimitiveDatatype;
import ca.uhn.fhir.model.api.IValueSetEnumBinder;
import ca.uhn.fhir.model.base.composite.BaseHumanNameDt;
import ca.uhn.fhir.model.dstu2.composite.*;
import ca.uhn.fhir.model.dstu2.composite.BoundCodeableConceptDt;
import ca.uhn.fhir.model.dstu2.resource.*;
import ca.uhn.fhir.model.dstu2.resource.Conformance.RestSecurity;
import ca.uhn.fhir.model.dstu2.resource.Location;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.model.dstu2.resource.Patient.Communication;
import ca.uhn.fhir.model.dstu2.resource.Questionnaire;
import ca.uhn.fhir.model.dstu2.resource.ValueSet;
import ca.uhn.fhir.model.dstu2.valueset.RestfulSecurityServiceEnum;
import ca.uhn.fhir.model.primitive.*;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.FhirTerser;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.instance.model.api.IBaseDatatype;
import org.hl7.fhir.instance.model.api.IBaseExtension;
import org.hl7.fhir.instance.model.api.IBaseResource;
import javax.measure.quantity.Quantity;
import javax.measure.unit.NonSI;
import javax.measure.unit.Unit;
import java.math.BigDecimal;
import java.util.*;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implements ISearchParamExtractor {
@ -59,7 +66,7 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
searchTerm = searchTerm.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
nextEntity.setResource(theEntity);
retVal.add(nextEntity);
}
@ -68,7 +75,7 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
if (value.length() > ResourceIndexedSearchParamString.MAX_LENGTH) {
value = value.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
nextEntity.setResource(theEntity);
retVal.add(nextEntity);
}
@ -81,13 +88,13 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
/*
* (non-Javadoc)
*
*
* @see ca.uhn.fhir.jpa.dao.ISearchParamExtractor#extractSearchParamDates(ca.uhn.fhir.jpa.entity.ResourceTable,
* ca.uhn.fhir.model.api.IResource)
*/
@Override
public Set<ResourceIndexedSearchParamDate> extractSearchParamDates(ResourceTable theEntity, IBaseResource theResource) {
HashSet<ResourceIndexedSearchParamDate> retVal = new HashSet<ResourceIndexedSearchParamDate>();
HashSet<ResourceIndexedSearchParamDate> retVal = new HashSet<>();
Collection<RuntimeSearchParam> searchParams = getSearchParams(theResource);
for (RuntimeSearchParam nextSpDef : searchParams) {
@ -142,7 +149,7 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
/*
* (non-Javadoc)
*
*
* @see ca.uhn.fhir.jpa.dao.ISearchParamExtractor#extractSearchParamNumber(ca.uhn.fhir.jpa.entity.ResourceTable,
* ca.uhn.fhir.model.api.IResource)
*/
@ -196,7 +203,7 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
* org.unitsofmeasurement.quantity.Quantity<?>>)
* UCUMFormat.getCaseInsensitiveInstance().parse(nextValue.getCode().getValue(), null); if
* (unit.isCompatible(UCUM.DAY)) {
*
*
* @SuppressWarnings("unchecked") PhysicsUnit<org.unitsofmeasurement.quantity.Time> timeUnit =
* (PhysicsUnit<Time>) unit; UnitConverter conv = timeUnit.getConverterTo(UCUM.DAY); double
* dayValue = conv.convert(nextValue.getValue().getValue().doubleValue()); DurationDt newValue =
@ -251,7 +258,7 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
/*
* (non-Javadoc)
*
*
* @see ca.uhn.fhir.jpa.dao.ISearchParamExtractor#extractSearchParamQuantity(ca.uhn.fhir.jpa.entity.ResourceTable,
* ca.uhn.fhir.model.api.IResource)
*/
@ -305,7 +312,7 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
/*
* (non-Javadoc)
*
*
* @see ca.uhn.fhir.jpa.dao.ISearchParamExtractor#extractSearchParamStrings(ca.uhn.fhir.jpa.entity.ResourceTable,
* ca.uhn.fhir.model.api.IResource)
*/
@ -314,7 +321,7 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
HashSet<ResourceIndexedSearchParamString> retVal = new HashSet<ResourceIndexedSearchParamString>();
String resourceName = getContext().getResourceDefinition(theResource).getName();
Collection<RuntimeSearchParam> searchParams = getSearchParams(theResource);
for (RuntimeSearchParam nextSpDef : searchParams) {
if (nextSpDef.getParamType() != RestSearchParameterTypeEnum.STRING) {
@ -389,7 +396,7 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
/*
* (non-Javadoc)
*
*
* @see ca.uhn.fhir.jpa.dao.ISearchParamExtractor#extractSearchParamTokens(ca.uhn.fhir.jpa.entity.ResourceTable,
* ca.uhn.fhir.model.api.IResource)
*/
@ -626,6 +633,35 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
}
}
@Override
protected List<Object> extractValues(String thePaths, IBaseResource theResource) {
List<Object> values = new ArrayList<>();
String[] nextPathsSplit = SPLIT.split(thePaths);
FhirTerser t = getContext().newTerser();
for (String nextPath : nextPathsSplit) {
String nextPathTrimmed = nextPath.trim();
List<Object> allValues;
try {
allValues = t.getValues(theResource, nextPathTrimmed);
} catch (Exception e) {
String msg = getContext().getLocalizer().getMessage(BaseSearchParamExtractor.class, "failedToExtractPaths", nextPath, e.toString());
throw new InternalErrorException(msg, e);
}
for (Object next : allValues) {
if (next instanceof IBaseExtension) {
IBaseDatatype value = ((IBaseExtension) next).getValue();
if (value != null) {
values.add(value);
}
} else {
values.add(next);
}
}
}
return values;
}
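
The extractValues override added above unwraps extension values, so a search parameter whose path points at an extension can index the extension's value rather than the extension wrapper itself. A hedged DSTU2 illustration; the extension URL, the path syntax and the eye-colour example are assumptions, not from this commit.

    // Illustration only: given a path that resolves to the extension below, the override above
    // contributes the StringDt value ("blue") to the extracted values
    private void exampleExtensionValue() {
        Patient patient = new Patient();
        ExtensionDt eyeColour = patient.addUndeclaredExtension(false, "http://example.org/eyeColour");
        eyeColour.setValue(new StringDt("blue"));
        // a custom SearchParameter path such as "Patient.extension('http://example.org/eyeColour')"
        // would now yield "blue" for indexing instead of the ExtensionDt wrapper itself
    }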
private static <T extends Enum<?>> String extractSystem(BoundCodeDt<T> theBoundCode) {
if (theBoundCode.getValueAsEnum() != null) {
IValueSetEnumBinder<T> binder = theBoundCode.getBinder();

View File

@ -1,5 +1,24 @@
package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.IQueryParameterAnd;
import ca.uhn.fhir.model.api.IQueryParameterOr;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.util.ObjectUtil;
import ca.uhn.fhir.util.UrlUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import java.util.*;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/*
@ -21,20 +40,6 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* limitations under the License.
* #L%
*/
import java.util.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.*;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.util.ObjectUtil;
import ca.uhn.fhir.util.UrlUtil;
public class SearchParameterMap extends LinkedHashMap<String, List<List<? extends IQueryParameterType>>> {
@ -48,7 +53,7 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
private Integer myLoadSynchronousUpTo;
private Set<Include> myRevIncludes;
private SortSpec mySort;
/**
* Constructor
*/
@ -130,7 +135,7 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
private void addUrlIncludeParams(StringBuilder b, String paramName, Set<Include> theList) {
ArrayList<Include> list = new ArrayList<Include>(theList);
Collections.sort(list, new IncludeComparator());
for (Include nextInclude : list) {
addUrlParamSeparator(b);
@ -158,10 +163,18 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
return myCount;
}
public void setCount(Integer theCount) {
myCount = theCount;
}
public EverythingModeEnum getEverythingMode() {
return myEverythingMode;
}
public void setEverythingMode(EverythingModeEnum theConsolidateMatches) {
myEverythingMode = theConsolidateMatches;
}
public Set<Include> getIncludes() {
if (myIncludes == null) {
myIncludes = new HashSet<Include>();
@ -169,6 +182,10 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
return myIncludes;
}
public void setIncludes(Set<Include> theIncludes) {
myIncludes = theIncludes;
}
/**
* Returns null if there is no last updated value
*/
@ -181,6 +198,10 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
return myLastUpdated;
}
public void setLastUpdated(DateRangeParam theLastUpdated) {
myLastUpdated = theLastUpdated;
}
/**
* Returns null if there is no last updated value, and removes the lastupdated
* value from this map
@ -199,6 +220,19 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
return myLoadSynchronousUpTo;
}
/**
* If set, tells the server to load these results synchronously, and not to load
* more than X results. Note that setting this to a value will also set
* {@link #setLoadSynchronous(boolean)} to true
*/
public SearchParameterMap setLoadSynchronousUpTo(Integer theLoadSynchronousUpTo) {
myLoadSynchronousUpTo = theLoadSynchronousUpTo;
if (myLoadSynchronousUpTo != null) {
setLoadSynchronous(true);
}
return this;
}
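
A hedged usage sketch of the map as reorganized in this file; the add(...) convenience call, the Patient constants and the rendered query string are assumptions for illustration and are not shown in this diff.

    // Illustration only: configure a synchronous, capped search and render its normalized form
    private String exampleBuildMap(FhirContext theContext) {
        SearchParameterMap map = new SearchParameterMap();
        map.add(Patient.SP_FAMILY, new StringParam("smith")); // add(...) is assumed, not shown in this diff
        map.setSort(new SortSpec(Patient.SP_BIRTHDATE));
        map.setLoadSynchronousUpTo(100); // per the javadoc above, this also flips setLoadSynchronous(true)
        return map.toNormalizedQueryString(theContext); // roughly "?family=smith&_sort=birthdate"
    }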
public Set<Include> getRevIncludes() {
if (myRevIncludes == null) {
myRevIncludes = new HashSet<>();
@ -206,10 +240,18 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
return myRevIncludes;
}
public void setRevIncludes(Set<Include> theRevIncludes) {
myRevIncludes = theRevIncludes;
}
public SortSpec getSort() {
return mySort;
}
public void setSort(SortSpec theSort) {
mySort = theSort;
}
/**
* This will only return true if all parameters have no modifier of any kind
*/
@ -234,22 +276,6 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
return myLoadSynchronous;
}
public void setCount(Integer theCount) {
myCount = theCount;
}
public void setEverythingMode(EverythingModeEnum theConsolidateMatches) {
myEverythingMode = theConsolidateMatches;
}
public void setIncludes(Set<Include> theIncludes) {
myIncludes = theIncludes;
}
public void setLastUpdated(DateRangeParam theLastUpdated) {
myLastUpdated = theLastUpdated;
}
/**
* If set, tells the server to load these results synchronously, and not to load
* more than X results
@ -259,27 +285,6 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
return this;
}
/**
* If set, tells the server to load these results synchronously, and not to load
* more than X results. Note that setting this to a value will also set
* {@link #setLoadSynchronous(boolean)} to true
*/
public SearchParameterMap setLoadSynchronousUpTo(Integer theLoadSynchronousUpTo) {
myLoadSynchronousUpTo = theLoadSynchronousUpTo;
if (myLoadSynchronousUpTo != null) {
setLoadSynchronous(true);
}
return this;
}
public void setRevIncludes(Set<Include> theRevIncludes) {
myRevIncludes = theRevIncludes;
}
public void setSort(SortSpec theSort) {
mySort = theSort;
}
public String toNormalizedQueryString(FhirContext theCtx) {
StringBuilder b = new StringBuilder();
@ -298,7 +303,7 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
nextValuesOrsOut.add(nextValueOrIn);
}
}
Collections.sort(nextValuesOrsOut, new QueryParameterTypeComparator(theCtx));
if (nextValuesOrsOut.size() > 0) {
@ -308,7 +313,7 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
} // for AND
Collections.sort(nextValuesAndsOut, new QueryParameterOrComparator(theCtx));
for (List<IQueryParameterType> nextValuesAnd : nextValuesAndsOut) {
addUrlParamSeparator(b);
IQueryParameterType firstValue = nextValuesAnd.get(0);
@ -319,18 +324,18 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
b.append('=');
if (firstValue.getMissing()) {
b.append(Constants.PARAMQUALIFIER_MISSING_TRUE);
}else {
} else {
b.append(Constants.PARAMQUALIFIER_MISSING_FALSE);
}
continue;
}
if (isNotBlank(firstValue.getQueryParameterQualifier())){
if (isNotBlank(firstValue.getQueryParameterQualifier())) {
b.append(firstValue.getQueryParameterQualifier());
}
b.append('=');
for (int i = 0; i < nextValuesAnd.size(); i++) {
IQueryParameterType nextValueOr = nextValuesAnd.get(i);
if (i > 0) {
@ -341,13 +346,13 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
b.append(UrlUtil.escapeUrlParam(valueAsQueryToken));
}
}
} // for keys
SortSpec sort = getSort();
boolean first = true;
while (sort != null) {
if (isNotBlank(sort.getParamName())) {
if (first) {
addUrlParamSeparator(b);
@ -362,32 +367,32 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
}
b.append(sort.getParamName());
}
Validate.isTrue(sort != sort.getChain()); // just in case, shouldn't happen
sort = sort.getChain();
}
addUrlIncludeParams(b, Constants.PARAM_INCLUDE, getIncludes());
addUrlIncludeParams(b, Constants.PARAM_REVINCLUDE, getRevIncludes());
if (getLastUpdated() != null) {
DateParam lb = getLastUpdated().getLowerBound();
addLastUpdateParam(b, lb);
DateParam ub = getLastUpdated().getUpperBound();
addLastUpdateParam(b, ub);
}
if (getCount() != null) {
addUrlParamSeparator(b);
b.append(Constants.PARAM_COUNT);
b.append('=');
b.append(getCount());
}
if (b.length() == 0) {
b.append('?');
}
return b.toString();
}
@ -439,7 +444,10 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
/*
* Don't reorder! We rely on the ordinals
*/
ENCOUNTER_INSTANCE(false, true, true), ENCOUNTER_TYPE(false, true, false), PATIENT_INSTANCE(true, false, true), PATIENT_TYPE(true, false, false);
ENCOUNTER_INSTANCE(false, true, true),
ENCOUNTER_TYPE(false, true, false),
PATIENT_INSTANCE(true, false, true),
PATIENT_TYPE(true, false, false);
private final boolean myEncounter;
@ -447,7 +455,7 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
private final boolean myPatient;
private EverythingModeEnum(boolean thePatient, boolean theEncounter, boolean theInstance) {
EverythingModeEnum(boolean thePatient, boolean theEncounter, boolean theInstance) {
assert thePatient ^ theEncounter;
myPatient = thePatient;
myEncounter = theEncounter;

Some files were not shown because too many files have changed in this diff.