Merge branch 'master' into 3014-prevent-delete-expunge-cascade

Tadgh 2021-09-23 09:33:04 -04:00 committed by GitHub
commit ad9e0f1c9a
331 changed files with 2723 additions and 1414 deletions

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -18,6 +18,9 @@
<dependencies>
<!-- IMPORTANT NOTE: This pom is used by multiple platforms (including Android) so please avoid adding any new
dependencies here. -->
<!-- JSON -->
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>

View File

@ -1986,13 +1986,17 @@ public enum Pointcut implements IPointcut {
* <ul>
* <li>ca.uhn.fhir.rest.server.messaging.ResourceOperationMessage - This parameter should not be modified as processing is complete when this hook is invoked.</li>
* <li>ca.uhn.fhir.rest.server.TransactionLogMessages - This parameter is for informational messages provided by the MDM module during MDM processing.</li>
* <li>ca.uhn.fhir.mdm.api.MdmLinkEvent - Contains information about the change event, including target and golden resource IDs and the operation type.</li>
* </ul>
* </p>
* <p>
* Hooks should return <code>void</code>.
* </p>
*/
MDM_AFTER_PERSISTED_RESOURCE_CHECKED(void.class, "ca.uhn.fhir.rest.server.messaging.ResourceOperationMessage", "ca.uhn.fhir.rest.server.TransactionLogMessages"),
MDM_AFTER_PERSISTED_RESOURCE_CHECKED(void.class,
"ca.uhn.fhir.rest.server.messaging.ResourceOperationMessage",
"ca.uhn.fhir.rest.server.TransactionLogMessages",
"ca.uhn.fhir.mdm.api.MdmLinkEvent"),
/**
* <b>Performance Tracing Hook:</b>

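As a usage sketch of the updated pointcut, a hook method now receives the extra MDM event parameter. The class and method names below are hypothetical; the parameter types mirror the list declared on the pointcut above.

```java
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.mdm.api.MdmLinkEvent;
import ca.uhn.fhir.rest.server.TransactionLogMessages;
import ca.uhn.fhir.rest.server.messaging.ResourceOperationMessage;

@Interceptor
public class MdmAfterCheckedListener {

    @Hook(Pointcut.MDM_AFTER_PERSISTED_RESOURCE_CHECKED)
    public void afterPersistedResourceChecked(ResourceOperationMessage theMessage,
                                              TransactionLogMessages theLogMessages,
                                              MdmLinkEvent theLinkEvent) {
        // theMessage must not be modified: processing is complete when this hook fires.
        // theLinkEvent carries the target/golden resource IDs and the operation type.
    }
}
```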
View File

@ -48,6 +48,7 @@ import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.regex.Matcher;
@ -704,6 +705,21 @@ public class FhirTerser {
* @throws IllegalArgumentException If theTarget does not contain both a resource type and ID
*/
public boolean isSourceInCompartmentForTarget(String theCompartmentName, IBaseResource theSource, IIdType theTarget) {
return isSourceInCompartmentForTarget(theCompartmentName, theSource, theTarget, null);
}
/**
* Returns <code>true</code> if <code>theSource</code> is in the compartment named <code>theCompartmentName</code>
* belonging to resource <code>theTarget</code>
*
* @param theCompartmentName The name of the compartment
* @param theSource The potential member of the compartment
* @param theTarget The owner of the compartment. Note that both the resource type and ID must be filled in on this IIdType or the method will throw an {@link IllegalArgumentException}
* @param theAdditionalCompartmentParamNames If provided, search param names provided here will be considered as included in the given compartment for this comparison.
* @return <code>true</code> if <code>theSource</code> is in the compartment or one of the additional parameters matched.
* @throws IllegalArgumentException If theTarget does not contain both a resource type and ID
*/
public boolean isSourceInCompartmentForTarget(String theCompartmentName, IBaseResource theSource, IIdType theTarget, Set<String> theAdditionalCompartmentParamNames) {
Validate.notBlank(theCompartmentName, "theCompartmentName must not be null or blank");
Validate.notNull(theSource, "theSource must not be null");
Validate.notNull(theTarget, "theTarget must not be null");
@ -720,6 +736,20 @@ public class FhirTerser {
}
List<RuntimeSearchParam> params = sourceDef.getSearchParamsForCompartmentName(theCompartmentName);
// If an additional set of search parameter names was passed in, add them for comparison purposes.
if (theAdditionalCompartmentParamNames != null) {
List<RuntimeSearchParam> additionalParams = theAdditionalCompartmentParamNames.stream().map(sourceDef::getSearchParam)
.filter(Objects::nonNull)
.collect(Collectors.toList());
if (params == null || params.isEmpty()) {
params = additionalParams;
} else {
params.addAll(additionalParams);
}
}
for (RuntimeSearchParam nextParam : params) {
for (String nextPath : nextParam.getPathsSplit()) {

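A hedged usage sketch of the new overload (the resource variables and the extra parameter name are hypothetical):

```java
import java.util.HashSet;
import java.util.Set;

import org.hl7.fhir.r4.model.Device;
import org.hl7.fhir.r4.model.IdType;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.FhirTerser;

public class CompartmentCheckExample {

    public static boolean isDeviceInPatientCompartment(FhirContext theContext, Device theDevice) {
        FhirTerser terser = theContext.newTerser();

        // Treat these extra search parameter names as part of the Patient compartment
        Set<String> additionalParams = new HashSet<>();
        additionalParams.add("patient"); // hypothetical search param name

        // The target must carry both a resource type and an ID
        return terser.isSourceInCompartmentForTarget(
            "Patient", theDevice, new IdType("Patient/123"), additionalParams);
    }
}
```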
View File

@ -192,4 +192,26 @@ public class TestUtil {
return stripReturns(theString).replace(" ", "");
}
public static void sleepAtLeast(long theMillis) {
sleepAtLeast(theMillis, true);
}
@SuppressWarnings("BusyWait")
public static void sleepAtLeast(long theMillis, boolean theLogProgress) {
long start = System.currentTimeMillis();
while (System.currentTimeMillis() <= start + theMillis) {
try {
long timeSinceStarted = System.currentTimeMillis() - start;
long timeToSleep = Math.max(0, theMillis - timeSinceStarted);
if (theLogProgress) {
ourLog.info("Sleeping for {}ms", timeToSleep);
}
Thread.sleep(timeToSleep);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
ourLog.error("Interrupted", e);
}
}
}
}

View File

@ -1,19 +1,22 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<artifactId>hapi-fhir-jpaserver-batch</artifactId>
<artifactId>hapi-fhir-batch</artifactId>
<packaging>jar</packaging>
<name>HAPI FHIR JPA Server - Batch Task Processor</name>
<description>Default implementation of batch job submitter along with constants used by the different hapi-fhir batch
jobs.
</description>
<dependencies>
<dependency>
@ -24,10 +27,6 @@
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-infrastructure</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-jpa</artifactId>
</dependency>
<dependency>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>

View File

@ -30,7 +30,7 @@ public interface IBatchJobSubmitter {
/**
* Given a {@link Job} and a {@link JobParameters}, execute the job with the given parameters.
*
* @param theJob the job to run.
* @param theJob the job to run.
* @param theJobParameters A collection of key-value pairs that are used to parameterize the job.
* @return A {@link JobExecution} representing the job.
* @throws JobParametersInvalidException If validation on the parameters fails.

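A hedged wiring sketch for this interface (the class and field names are hypothetical; the qualifier pattern follows MdmClearJobSubmitterImpl later in this diff):

```java
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;

import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;

@Service
public class DeleteExpungeSubmitterExample {

    @Autowired
    private IBatchJobSubmitter myBatchJobSubmitter;

    @Autowired
    @Qualifier(BatchConstants.DELETE_EXPUNGE_JOB_NAME)
    private Job myDeleteExpungeJob;

    public JobExecution submit(JobParameters theParameters) throws JobParametersInvalidException {
        // Delegates to the Spring Batch JobLauncher under the hood
        return myBatchJobSubmitter.runJob(myDeleteExpungeJob, theParameters);
    }
}
```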
View File

@ -0,0 +1,81 @@
package ca.uhn.fhir.jpa.batch.config;
/*-
* #%L
* HAPI FHIR JPA Server - Batch Task Processor
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
public final class BatchConstants {
public static final String JOB_PARAM_REQUEST_LIST = "url-list";
public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
public static final String JOB_PARAM_START_TIME = "start-time";
public static final String CURRENT_URL_INDEX = "current.url-index";
public static final String CURRENT_THRESHOLD_HIGH = "current.threshold-high";
public static final String JOB_UUID_PARAMETER = "jobUUID";
public static final String JOB_LAUNCHING_TASK_EXECUTOR = "jobLaunchingTaskExecutor";
public static final String BULK_EXPORT_JOB_NAME = "bulkExportJob";
public static final String GROUP_BULK_EXPORT_JOB_NAME = "groupBulkExportJob";
public static final String PATIENT_BULK_EXPORT_JOB_NAME = "patientBulkExportJob";
public static final String BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP = "bulkExportGenerateResourceFilesStep";
public static final String BULK_IMPORT_JOB_NAME = "bulkImportJob";
public static final String BULK_IMPORT_PROCESSING_STEP = "bulkImportProcessingStep";
/**
* Delete Expunge
*/
public static final String DELETE_EXPUNGE_JOB_NAME = "deleteExpungeJob";
/**
* Reindex
*/
public static final String REINDEX_JOB_NAME = "reindexJob";
/**
* Reindex Everything
*/
public static final String REINDEX_EVERYTHING_JOB_NAME = "reindexEverythingJob";
/**
* MDM Clear
*/
public static final String MDM_CLEAR_JOB_NAME = "mdmClearJob";
/**
* This Set contains, for each job type, the step name whose write count should be
* consulted in order to determine the number of processed records.
* <p>
* This is provided since a job might have multiple steps that the same data passes
* through, so you can't just sum up the total of all of them.
* <p>
* For any given batch job type, there should only be one step name in this set
*/
public static Set<String> RECORD_PROCESSING_STEP_NAMES;
static {
HashSet<String> recordProcessingStepNames = new HashSet<>();
recordProcessingStepNames.add(BatchConstants.BULK_IMPORT_PROCESSING_STEP);
recordProcessingStepNames.add(BatchConstants.BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP);
BatchConstants.RECORD_PROCESSING_STEP_NAMES = Collections.unmodifiableSet(recordProcessingStepNames);
}
/**
* Non instantiable
*/
private BatchConstants() {
}
}
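A hedged sketch of the intended use of RECORD_PROCESSING_STEP_NAMES, per the javadoc above (the surrounding class and method are hypothetical):

```java
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.StepExecution;

import ca.uhn.fhir.jpa.batch.config.BatchConstants;

public class JobProgressExample {

    /** Sum write counts only for the single designated record-processing step per job type. */
    public static int countProcessedRecords(JobExecution theJobExecution) {
        return theJobExecution.getStepExecutions().stream()
            .filter(step -> BatchConstants.RECORD_PROCESSING_STEP_NAMES.contains(step.getStepName()))
            .mapToInt(StepExecution::getWriteCount)
            .sum();
    }
}
```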

View File

@ -20,7 +20,6 @@ package ca.uhn.fhir.jpa.batch.config;
* #L%
*/
import ca.uhn.fhir.jpa.batch.BatchConstants;
import org.springframework.batch.core.configuration.annotation.DefaultBatchConfigurer;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.batch.core.explore.support.MapJobExplorerFactoryBean;

View File

@ -46,7 +46,7 @@ public class BatchJobSubmitterImpl implements IBatchJobSubmitter {
private JobRepository myJobRepository;
@Override
public JobExecution runJob(Job theJob, JobParameters theJobParameters) throws JobParametersInvalidException{
public JobExecution runJob(Job theJob, JobParameters theJobParameters) throws JobParametersInvalidException {
try {
return myJobLauncher.run(theJob, theJobParameters);
} catch (JobExecutionAlreadyRunningException | JobRestartException | JobInstanceAlreadyCompleteException e) {

View File

@ -22,7 +22,6 @@ public class BatchJobConfig {
private StepBuilderFactory myStepBuilderFactory;
@Bean
public Job testJob() {
return myJobBuilderFactory.get("testJob")

View File

@ -28,6 +28,7 @@ public class TestBatchConfig {
asyncTaskExecutor.initialize();
return asyncTaskExecutor;
}
@Bean
public BatchConfigurer batchConfigurer() {
return new NonPersistedBatchConfigurer();

View File

@ -8,47 +8,50 @@
</encoder>
</appender>
<logger name="org.springframework.web.socket.handler.ExceptionWebSocketHandlerDecorator" additivity="false" level="info">
<appender-ref ref="STDOUT" />
<logger name="org.springframework.web.socket.handler.ExceptionWebSocketHandlerDecorator" additivity="false"
level="info">
<appender-ref ref="STDOUT"/>
</logger>
<logger name="ca.uhn.fhir.jpa.dao.FhirResourceDaoSubscriptionDstu2" additivity="false" level="info">
<appender-ref ref="STDOUT" />
<appender-ref ref="STDOUT"/>
</logger>
<logger name="org.eclipse.jetty.websocket" additivity="false" level="info">
<appender-ref ref="STDOUT" />
<appender-ref ref="STDOUT"/>
</logger>
<logger name="org.eclipse" additivity="false" level="error">
</logger>
<logger name="ca.uhn.fhir.rest.client" additivity="false" level="info">
<appender-ref ref="STDOUT" />
<appender-ref ref="STDOUT"/>
</logger>
<logger name="ca.uhn.fhir.jpa.dao" additivity="false" level="info">
<appender-ref ref="STDOUT" />
<appender-ref ref="STDOUT"/>
</logger>
<!-- Set to 'trace' to enable SQL logging -->
<logger name="org.hibernate.SQL" additivity="false" level="info">
<appender-ref ref="STDOUT" />
<appender-ref ref="STDOUT"/>
</logger>
<!-- Set to 'trace' to enable SQL Value logging -->
<logger name="org.hibernate.type" additivity="false" level="info">
<appender-ref ref="STDOUT" />
<appender-ref ref="STDOUT"/>
</logger>
<logger name="org.hibernate.search.elasticsearch.request" additivity="false" level="trace">
<appender-ref ref="STDOUT" />
<appender-ref ref="STDOUT"/>
</logger>
<logger name="org.hibernate" additivity="false" level="info">
<appender-ref ref="STDOUT" />
<appender-ref ref="STDOUT"/>
</logger>
<appender name="BATCH_TROUBLESHOOTING" class="ch.qos.logback.core.rolling.RollingFileAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter"><level>INFO</level></filter>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>INFO</level>
</filter>
<file>${smile.basedir}/log/batch-troubleshooting.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
<fileNamePattern>${smile.basedir}/log/batch-troubleshooting.log.%i.gz</fileNamePattern>
@ -59,7 +62,8 @@
<maxFileSize>5MB</maxFileSize>
</triggeringPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n${log.stackfilter.pattern}</pattern>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n${log.stackfilter.pattern}
</pattern>
</encoder>
</appender>
<logger name="ca.uhn.fhir.log.batch_troubleshooting" level="TRACE">
@ -68,7 +72,7 @@
<root level="info">
<appender-ref ref="STDOUT" />
<appender-ref ref="STDOUT"/>
</root>
</configuration>

View File

@ -3,14 +3,14 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -133,7 +133,7 @@
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>hapi-fhir-jpaserver-migrate</artifactId>
<artifactId>hapi-fhir-sql-migrate</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -35,7 +35,7 @@
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-migrate</artifactId>
<artifactId>hapi-fhir-sql-migrate</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -39,6 +39,7 @@ import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Patient;
import java.util.Collections;
import java.util.List;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@ -100,7 +101,7 @@ public class AuthorizationInterceptors {
.denyAll()
.build();
}
// If the user is an admin, allow everything
if (userIsAdmin) {
return new RuleBuilder()
@ -205,6 +206,19 @@ public class AuthorizationInterceptors {
};
//END SNIPPET: bulkExport
//START SNIPPET: advancedCompartment
new AuthorizationInterceptor(PolicyEnum.DENY) {
@Override
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
AdditionalCompartmentSearchParameters additionalSearchParams = new AdditionalCompartmentSearchParameters();
additionalSearchParams.addSearchParameters("device:patient", "device:subject");
return new RuleBuilder()
.allow().read().allResources().inCompartmentWithAdditionalSearchParams("Patient", new IdType("Patient/123"), additionalSearchParams)
.build();
}
};
//END SNIPPET: advancedCompartment
}

View File

@ -28,7 +28,6 @@ import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import org.springframework.context.ApplicationContext;
import javax.annotation.Nonnull;
import java.util.List;
@SuppressWarnings("unused")
@ -121,8 +120,10 @@ public class RepositoryValidatingInterceptorExamples {
.forResourcesOfType("Patient")
.requireValidationToDeclaredProfiles()
// Configure the validator to never reject extensions
.allowAnyExtensions()
// Configure the validator to reject unknown extensions.
// By default all extensions are accepted; to undo this rejection,
// call allowAnyExtensions()
.rejectUnknownExtensions()
// Configure the validator to not perform terminology validation
.disableTerminologyChecks()

View File

@ -1,5 +1,5 @@
---
type: change
issue: 2446
title: "DaoConfig setting for [Populate Identifier In Auto Created Placeholder Reference Targets](https://hapifhir.io/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets(boolean))
title: "DaoConfig setting for [Populate Identifier In Auto Created Placeholder Reference Targets](https://hapifhir.io/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets(boolean))
now defaults to `true`."

View File

@ -0,0 +1,4 @@
---
type: add
issue: 2850
title: "Updated handling of the MDM_AFTER_PERSISTED_RESOURCE_CHECKED pointcut to include additional MDM-related information."

View File

@ -0,0 +1,5 @@
---
type: add
jira: SMILE-1118
title: "Add new RuleBuilder options which allow you to specify additional resources and search parameters which match a given compartment. Further explanation of
these enhancements can be found in [the documentation](/hapi-fhir/docs/security/authorization_interceptor.html#advanced-compartment-authorization)."

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 2995
title: "CodeSystem version is copied to ValueSet.compose.include.version on LOINC terminology upload
to support versioned ValueSet expansion."

View File

@ -0,0 +1,5 @@
---
type: add
issue: 3005
jira: SMILE-723
title: "Open up the visibility of some methods in the generation of the Open API definition files to allow extenders to add support for OIDC authorization."

View File

@ -0,0 +1,4 @@
---
type: fix
issue: 3012
title: "Fixes migration of VARBINARY columns on MS SQL Server."

View File

@ -0,0 +1,4 @@
---
type: add
issue: 3110
title: "Added functionality to reject unknown extensions."

View File

@ -9,7 +9,7 @@ See the [Modules Page](/docs/getting_started/modules.html) for more information
* [Model API (R5)](/apidocs/hapi-fhir-structures-r5/) - hapi-fhir-structures-r5
* [Client API](/apidocs/hapi-fhir-client/) - hapi-fhir-client
* [Plain Server API](/apidocs/hapi-fhir-server/) - hapi-fhir-server
* [JPA Server - API](/apidocs/hapi-fhir-jpaserver-api/) - hapi-fhir-jpaserver-api
* [JPA Server - API](/apidocs/hapi-fhir-storage-api/) - hapi-fhir-storage-api
* [JPA Server - Model](/apidocs/hapi-fhir-jpaserver-model/) - hapi-fhir-jpaserver-model
* [JPA Server - Base](/apidocs/hapi-fhir-jpaserver-base/) - hapi-fhir-jpaserver-base
* [Version Converter API](/apidocs/hapi-fhir-converter/) - hapi-fhir-converter

View File

@ -83,3 +83,14 @@ AuthorizationInterceptor can be used to provide nuanced control over the kinds o
```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/AuthorizationInterceptors.java|bulkExport}}
```
# Advanced Compartment Authorization
AuthorizationInterceptor can be used to provide fine-grained control over compartment reads and writes as well. There is a strict FHIR definition
of which resources and related search parameters fall into a given compartment. However, sometimes the defaults do not suffice. The following is an example
of an R4 ruleset which allows the `device:patient` search parameter to be considered part of the Patient compartment, on top of all the standard search parameters.
```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/AuthorizationInterceptors.java|advancedCompartment}}
```
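The referenced `advancedCompartment` snippet, added in this same commit, expands to approximately the following:

```java
new AuthorizationInterceptor(PolicyEnum.DENY) {
    @Override
    public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
        AdditionalCompartmentSearchParameters additionalSearchParams = new AdditionalCompartmentSearchParameters();
        additionalSearchParams.addSearchParameters("device:patient", "device:subject");
        return new RuleBuilder()
            .allow().read().allResources()
            .inCompartmentWithAdditionalSearchParams("Patient", new IdType("Patient/123"), additionalSearchParams)
            .build();
    }
};
```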

View File

@ -54,9 +54,15 @@ In some cases, you may have references which are <i>Logical References</i>,
which means that they act as an identifier and not necessarily as a literal
web address.
A common use for logical references is in references to conformance resources, such as ValueSets, StructureDefinitions, etc. For example, you might refer to the ValueSet `http://hl7.org/fhir/ValueSet/quantity-comparator` from your own resources. In this case, you are not necessarily telling the server that this is a real address that it should resolve, but rather that this is an identifier for a ValueSet where `ValueSet.url` has the given URI/URL.
A common use for logical references is in references to conformance resources, such as ValueSets, StructureDefinitions,
etc. For example, you might refer to the ValueSet `http://hl7.org/fhir/ValueSet/quantity-comparator` from your own
resources. In this case, you are not necessarily telling the server that this is a real address that it should resolve,
but rather that this is an identifier for a ValueSet where `ValueSet.url` has the given URI/URL.
HAPI can be configured to treat certain URI/URL patterns as logical by using the DaoConfig#setTreatReferencesAsLogical property (see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setTreatReferencesAsLogical(java.util.Set))).
HAPI can be configured to treat certain URI/URL patterns as logical by using the DaoConfig#setTreatReferencesAsLogical
property (
see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setTreatReferencesAsLogical(java.util.Set)))
.
For example:
@ -131,5 +137,5 @@ X-Retry-On-Version-Conflict: retry; max-retries=100
Delete with expunge submits a job to delete and expunge the requested resources. This is done in batches. If the DELETE
?_expunge=true syntax is used to trigger the delete expunge, then the batch size will be determined by the value
of [Expunge Batch Size](/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
of [Expunge Batch Size](/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
property.
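As a hedged illustration (the client setup, base URL, and search parameters are hypothetical), triggering a delete-expunge through the URL syntax with the HAPI generic client might look like:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;

public class DeleteExpungeClientExample {

    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forR4();
        IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8000/fhir"); // hypothetical base URL

        // A conditional delete with _expunge=true submits the batch delete-expunge job
        client.delete()
            .resourceConditionalByUrl("Observation?status=cancelled&_expunge=true")
            .execute();
    }
}
```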

View File

@ -24,9 +24,13 @@ The grouping of Observation resources by `Observation.code` means that the `$las
# Deployment and Configuration
The `$lastn` operation is disabled by default. The operation can be enabled by setting the DaoConfig#setLastNEnabled property (see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setLastNEnabled(boolean))).
The `$lastn` operation is disabled by default. The operation can be enabled by setting the DaoConfig#setLastNEnabled
property (
see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setLastNEnabled(boolean)))
.
In addition, the Elasticsearch client service, `ElasticsearchSvcImpl`, will need to be instantiated with parameters specifying how to connect to the Elasticsearch server, e.g.:
In addition, the Elasticsearch client service, `ElasticsearchSvcImpl`, will need to be instantiated with parameters
specifying how to connect to the Elasticsearch server, e.g.:
```java
@Bean()

View File

@ -240,9 +240,18 @@ The complete raw contents of the resource is stored in the `RES_TEXT` column, us
By default, the **HFJ_RESOURCE.RES_ID** column is used as the resource ID for all server-assigned IDs. For example, if a Patient resource is created in a completely empty database, it will be assigned the ID `Patient/1` by the server and RES_ID will have a value of 1.
However, when client-assigned IDs are used, these may contain text values to allow a client to create an ID such as `Patient/ABC`. When a client-assigned ID is given to a resource, a row is created in the **HFJ_RESOURCE** table. When an **HFJ_FORCED_ID** row exists corresponding to the equivalent **HFJ_RESOURCE** row, the RES_ID value is no longer visible or usable by FHIR clients and it becomes purely an internal ID to the JPA server.
However, when client-assigned IDs are used, these may contain text values to allow a client to create an ID such
as `Patient/ABC`. When a client-assigned ID is given to a resource, a row is created in the **HFJ_RESOURCE** table. When
an **HFJ_FORCED_ID** row exists corresponding to the equivalent **HFJ_RESOURCE** row, the RES_ID value is no longer
visible or usable by FHIR clients and it becomes purely an internal ID to the JPA server.
If the server has been configured with a [Resource Server ID Strategy](/apidocs/hapi-fhir-jpaserver-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum)) of [UUID](/apidocs/hapi-fhir-jpaserver-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.IdStrategyEnum.html#UUID), or the server has been configured with a [Resource Client ID Strategy](/apidocs/hapi-fhir-jpaserver-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceClientIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.ClientIdStrategyEnum)) of [ANY](/apidocs/hapi-fhir-jpaserver-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.ClientIdStrategyEnum.html#ANY) the server will create a Forced ID for all resources (not only resources having textual IDs).
If the server has been configured with
a [Resource Server ID Strategy](/apidocs/hapi-fhir-storage-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum))
of [UUID](/apidocs/hapi-fhir-storage-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.IdStrategyEnum.html#UUID), or
the server has been configured with
a [Resource Client ID Strategy](/apidocs/hapi-fhir-storage-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceClientIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.ClientIdStrategyEnum))
of [ANY](/apidocs/hapi-fhir-storage-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.ClientIdStrategyEnum.html#ANY)
the server will create a Forced ID for all resources (not only resources having textual IDs).
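A hedged configuration sketch of the two strategies referenced above (the surrounding bean setup is assumed):

```java
import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class IdStrategyConfigExample {

    public static DaoConfig daoConfig() {
        DaoConfig daoConfig = new DaoConfig();
        // Server-assigned IDs become UUIDs rather than sequential numerics
        daoConfig.setResourceServerIdStrategy(DaoConfig.IdStrategyEnum.UUID);
        // Clients may supply any ID, so a Forced ID row is created for every resource
        daoConfig.setResourceClientIdStrategy(DaoConfig.ClientIdStrategyEnum.ANY);
        return daoConfig;
    }
}
```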
## Columns

View File

@ -25,7 +25,10 @@ One important caveat is that chaining is currently not supported when using this
## Enabling MDM Expansion
On top of needing to instantiate an MDM module, you must enable this feature in the [DaoConfig](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html) bean, using the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setAllowMdmExpansion(boolean)) property.
On top of needing to instantiate an MDM module, you must enable this feature in
the [DaoConfig](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html) bean, using
the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setAllowMdmExpansion(boolean))
property.
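A hedged sketch, assuming the DaoConfig bean is already defined elsewhere:

```java
import org.springframework.beans.factory.annotation.Autowired;

import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class MdmExpansionConfigExample {

    @Autowired
    private DaoConfig myDaoConfig;

    public void enableMdmExpansion() {
        // Permits MDM-based reference expansion during searches; note the caution below
        myDaoConfig.setAllowMdmExpansion(true);
    }
}
```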
<div class="helpWarningCalloutBox">
It is important to note that enabling this functionality can lead to incorrect data being returned by a request, if your MDM links are incorrect. Use with caution.

View File

@ -601,7 +601,7 @@ This operation takes two optional Parameters.
<td>0..1</td>
<td>
The number of links that should be deleted at a time. If omitted, then the batch size will be determined by the value
of [Expunge Batch Size](/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
of [Expunge Batch Size](/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
property.
</td>
</tr>

View File

@ -53,9 +53,12 @@ In a partitioned repository, it is important to understand that only a single po
This fact can have security implications:
* A client might be blocked from creating `Patient/ABC` in the partition they have access to because this ID is already in use in another partition.
* A client might be blocked from creating `Patient/ABC` in the partition they have access to because this ID is already
in use in another partition.
* In a server using the default configuration of SEQUENTIAL_NUMERIC [Server ID Strategy](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum)) a client may be able to infer the IDs of resources in other partitions based on the ID they were assigned.
* In a server using the default configuration of
SEQUENTIAL_NUMERIC [Server ID Strategy](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum))
a client may be able to infer the IDs of resources in other partitions based on the ID they were assigned.
These considerations can be addressed by using UUID Server ID Strategy, and disallowing client-assigned IDs.
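A hedged sketch of that mitigation (the enum value names are taken from DaoConfig; the bean wiring is assumed):

```java
import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class PartitionIdStrategyExample {

    public static void hardenIdStrategies(DaoConfig theDaoConfig) {
        // Make server-assigned IDs non-guessable across partitions
        theDaoConfig.setResourceServerIdStrategy(DaoConfig.IdStrategyEnum.UUID);
        // Disallow client-assigned IDs entirely
        theDaoConfig.setResourceClientIdStrategy(DaoConfig.ClientIdStrategyEnum.NOT_ALLOWED);
    }
}
```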

View File

@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -118,7 +118,7 @@
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-api</artifactId>
<artifactId>hapi-fhir-storage-api</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

hapi-fhir-jpa/pom.xml
View File

@ -0,0 +1,119 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>hapi-fhir-jpa</artifactId>
<description>This project contains utility classes for working with spring-hibernate jpa and the Quartz Scheduler.
</description>
<dependencies>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-base</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>
</dependency>
<!-- for date logging -->
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-r4</artifactId>
<version>${project.version}</version>
</dependency>
<!-- Hibernate -->
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-entitymanager</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-java8</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-ehcache</artifactId>
<exclusions>
<exclusion>
<groupId>net.sf.ehcache</groupId>
<artifactId>ehcache-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.hibernate.validator</groupId>
<artifactId>hibernate-validator</artifactId>
<exclusions>
<exclusion>
<groupId>com.fasterxml</groupId>
<artifactId>classmate</artifactId>
</exclusion>
<exclusion>
<groupId>org.jboss.logging</groupId>
<artifactId>jboss-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.hibernate.search</groupId>
<artifactId>hibernate-search-mapper-orm</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Spring -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<exclusions>
<exclusion>
<artifactId>xml-apis</artifactId>
<groupId>xml-apis</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context-support</artifactId>
</dependency>
<dependency>
<groupId>net.ttddyy</groupId>
<artifactId>datasource-proxy</artifactId>
</dependency>
<!-- Quartz Scheduler -->
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz</artifactId>
</dependency>
<!-- Apache -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
</dependency>
</dependencies>
</project>

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.config;
/*-
* #%L
* HAPI FHIR JPA Server
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
@ -23,7 +23,6 @@ package ca.uhn.fhir.jpa.config;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.query.criteria.LiteralHandlingMode;
import org.hibernate.resource.jdbc.spi.PhysicalConnectionHandlingMode;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.orm.hibernate5.SpringBeanContainer;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.model.sched;
/*-
* #%L
* HAPI FHIR JPA Model
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.model.sched;
/*-
* #%L
* HAPI FHIR JPA Model
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.model.sched;
/*-
* #%L
* HAPI FHIR JPA Model
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.model.sched;
/*-
* #%L
* HAPI FHIR JPA Model
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.model.sched;
/*-
* #%L
* HAPI FHIR JPA Model
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.sched;
/*-
* #%L
* HAPI FHIR JPA Server
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
@ -20,8 +20,7 @@ package ca.uhn.fhir.jpa.sched;
* #L%
*/
import org.hl7.fhir.r5.model.InstantType;
import org.hl7.fhir.utilities.DateTimeUtil;
import org.hl7.fhir.r4.model.InstantType;
import org.quartz.JobKey;
import org.quartz.spi.TriggerFiredBundle;
import org.slf4j.Logger;

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.sched;
/*-
* #%L
* HAPI FHIR JPA Server
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
@ -27,7 +27,15 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.Validate;
import org.quartz.*;
import org.quartz.JobDataMap;
import org.quartz.JobKey;
import org.quartz.ScheduleBuilder;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.SimpleScheduleBuilder;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.quartz.TriggerKey;
import org.quartz.impl.JobDetailImpl;
import org.quartz.impl.StdSchedulerFactory;
import org.quartz.impl.matchers.GroupMatcher;
@ -41,8 +49,6 @@ import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import static org.quartz.impl.StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME;
public abstract class BaseHapiScheduler implements IHapiScheduler {
private static final Logger ourLog = LoggerFactory.getLogger(BaseHapiScheduler.class);
@ -96,7 +102,7 @@ public abstract class BaseHapiScheduler implements IHapiScheduler {
protected void setProperties() {
addProperty("org.quartz.threadPool.threadCount", "4");
myProperties.setProperty(PROP_SCHED_INSTANCE_NAME, myInstanceName + "-" + nextSchedulerId());
myProperties.setProperty(StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME, myInstanceName + "-" + nextSchedulerId());
addProperty("org.quartz.threadPool.threadNamePrefix", getThreadPrefix());
}

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.sched;
/*-
* #%L
* HAPI FHIR JPA Server
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.sched;
/*-
* #%L
* HAPI FHIR JPA Server
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.sched;
/*-
* #%L
* HAPI FHIR JPA Server
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.sched;
/*-
* #%L
* HAPI FHIR JPA Server
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.sched;
/*-
* #%L
* HAPI FHIR JPA Server
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util;
/*-
* #%L
* HAPI FHIR JPA Server
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util;
/*-
* #%L
* HAPI FHIR JPA Server
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
@ -24,7 +24,7 @@ import ca.uhn.fhir.util.StopWatch;
import com.google.common.collect.Queues;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.collections4.queue.CircularFifoQueue;
import org.hl7.fhir.dstu3.model.InstantType;
import org.hl7.fhir.r4.model.InstantType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util;
/*-
* #%L
* HAPI FHIR JPA Server
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util;
/*-
* #%L
* HAPI FHIR JPA Server
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util;
/*-
* #%L
* HAPI FHIR JPA Server
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util;
/*-
* #%L
* HAPI FHIR JPA Server
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util;
/*-
* #%L
* HAPI FHIR JPA Server
* hapi-fhir-jpa
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
@ -20,7 +20,6 @@ package ca.uhn.fhir.jpa.util;
* #L%
*/
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import com.google.common.collect.ImmutableSet;
@ -221,6 +220,14 @@ public class TestUtil {
}
for (Class<?> innerClass : theClazz.getDeclaredClasses()) {
Embeddable embeddable = innerClass.getAnnotation(Embeddable.class);
if (embeddable != null) {
scanClassOrSuperclass(theNames, innerClass, false, columnNameToLength);
}
}
if (theClazz.getSuperclass().equals(Object.class)) {
return;
}
@ -347,16 +354,12 @@ public class TestUtil {
}
}
public static void sleepAtLeast(long theMillis) {
HapiTransactionService.sleepAtLeast(theMillis, true);
}
public static InstantType getTimestamp(IBaseResource resource) {
return new InstantType(new Date(resource.getMeta().getLastUpdated().getTime()));
}
public static void sleepOneClick() {
sleepAtLeast(1);
ca.uhn.fhir.util.TestUtil.sleepAtLeast(1);
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE5-SNAPSHOT</version>
<version>5.6.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -90,6 +90,11 @@
<artifactId>hapi-fhir-jpaserver-searchparam</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-sql-migrate</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-model</artifactId>
@ -147,13 +152,9 @@
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-batch</artifactId>
<artifactId>hapi-fhir-batch</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>net.ttddyy</groupId>
<artifactId>datasource-proxy</artifactId>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
@ -307,12 +308,6 @@
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz</artifactId>
</dependency>
<!-- Spring -->
<dependency>
<groupId>org.springframework</groupId>
@ -324,24 +319,6 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<exclusions>
<exclusion>
<artifactId>xml-apis</artifactId>
<groupId>xml-apis</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-jpa</artifactId>
@ -376,40 +353,6 @@
<groupId>org.springframework</groupId>
<artifactId>spring-websocket</artifactId>
</dependency>
<!-- Hibernate -->
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-entitymanager</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-java8</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-ehcache</artifactId>
<exclusions>
<exclusion>
<groupId>net.sf.ehcache</groupId>
<artifactId>ehcache-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.hibernate.validator</groupId>
<artifactId>hibernate-validator</artifactId>
<exclusions>
<exclusion>
<groupId>com.fasterxml</groupId>
<artifactId>classmate</artifactId>
</exclusion>
<exclusion>
<groupId>org.jboss.logging</groupId>
<artifactId>jboss-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>javax.el</groupId>
<artifactId>javax.el-api</artifactId>
@ -426,16 +369,6 @@
<artifactId>log4j-to-slf4j</artifactId>
</dependency>
<!-- Hibernate Search -->
<dependency>
<groupId>org.hibernate.search</groupId>
<artifactId>hibernate-search-mapper-orm</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-high-level-client</artifactId>
@ -467,10 +400,6 @@
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>

View File

@ -29,10 +29,6 @@ import ca.uhn.fhir.jpa.reindex.job.ReindexJobConfig;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
@Configuration
//When you define a new batch job, add it here.
@Import({
@ -45,60 +41,4 @@ import java.util.Set;
MdmClearJobConfig.class
})
public class BatchJobsConfig {
/*
* Bulk Export
*/
public static final String BULK_EXPORT_JOB_NAME = "bulkExportJob";
public static final String GROUP_BULK_EXPORT_JOB_NAME = "groupBulkExportJob";
public static final String PATIENT_BULK_EXPORT_JOB_NAME = "patientBulkExportJob";
public static final String BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP = "bulkExportGenerateResourceFilesStep";
/*
* Bulk Import
*/
public static final String BULK_IMPORT_JOB_NAME = "bulkImportJob";
public static final String BULK_IMPORT_PROCESSING_STEP = "bulkImportProcessingStep";
/**
* This Set contains the step names across all job types that are appropriate for
* someone to look at the write count for that given step in order to determine the
* number of processed records.
*
* This is provided since a job might have multiple steps that the same data passes
* through, so you can't just sum up the total of all of them.
*
* For any given batch job type, there should only be one step name in this set
*/
public static final Set<String> RECORD_PROCESSING_STEP_NAMES;
static {
HashSet<String> recordProcessingStepNames = new HashSet<>();
recordProcessingStepNames.add(BULK_IMPORT_PROCESSING_STEP);
recordProcessingStepNames.add(BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP);
RECORD_PROCESSING_STEP_NAMES = Collections.unmodifiableSet(recordProcessingStepNames);
}
/**
* Delete Expunge
*/
public static final String DELETE_EXPUNGE_JOB_NAME = "deleteExpungeJob";
/**
* Reindex
*/
public static final String REINDEX_JOB_NAME = "reindexJob";
/**
* Reindex Everything
*/
public static final String REINDEX_EVERYTHING_JOB_NAME = "reindexEverythingJob";
/**
* MDM Clear
*/
public static final String MDM_CLEAR_JOB_NAME = "mdmClearJob";
}

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.batch.job;
*/
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
@ -29,8 +30,6 @@ import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.JobParametersValidator;
import static ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST;
/**
* This class will prevent a job from running if any of the provided URLs are not valid on this server.
*/
@ -50,7 +49,7 @@ public class MultiUrlJobParameterValidator implements JobParametersValidator {
throw new JobParametersInvalidException("This job requires Parameters: [urlList]");
}
RequestListJson requestListJson = RequestListJson.fromJson(theJobParameters.getString(JOB_PARAM_REQUEST_LIST));
RequestListJson requestListJson = RequestListJson.fromJson(theJobParameters.getString(BatchConstants.JOB_PARAM_REQUEST_LIST));
for (PartitionedUrl partitionedUrl : requestListJson.getPartitionedUrls()) {
String url = partitionedUrl.getUrl();
try {
@ -60,7 +59,7 @@ public class MultiUrlJobParameterValidator implements JobParametersValidator {
throw new JobParametersInvalidException("The resource type " + resourceName + " is not supported on this server.");
}
} catch (UnsupportedOperationException e) {
throw new JobParametersInvalidException("Failed to parse " + theJobParameters.getString(JOB_PARAM_OPERATION_NAME) + " " + JOB_PARAM_REQUEST_LIST + " item " + url + ": " + e.getMessage());
throw new JobParametersInvalidException("Failed to parse " + theJobParameters.getString(JOB_PARAM_OPERATION_NAME) + " " + BatchConstants.JOB_PARAM_REQUEST_LIST + " item " + url + ": " + e.getMessage());
}
}
}

View File

@ -24,8 +24,8 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.batch.mdm.job.ReverseCronologicalBatchMdmLinkPidReader;
@ -55,7 +55,7 @@ public class MdmClearJobSubmitterImpl implements IMdmClearJobSubmitter {
@Autowired
private IBatchJobSubmitter myBatchJobSubmitter;
@Autowired
@Qualifier(BatchJobsConfig.MDM_CLEAR_JOB_NAME)
@Qualifier(BatchConstants.MDM_CLEAR_JOB_NAME)
private Job myMdmClearJob;
@Override

View File

@ -41,7 +41,7 @@ import org.springframework.context.annotation.Lazy;
import java.util.ArrayList;
import java.util.List;
import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.MDM_CLEAR_JOB_NAME;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.MDM_CLEAR_JOB_NAME;
/**
* Spring batch Job configuration file. Contains all necessary plumbing to run a

View File

@ -24,6 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.CommonBatchJobConfig;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
@ -58,22 +59,17 @@ import java.util.function.Function;
/**
* This Spring Batch reader takes 4 parameters:
* {@link #JOB_PARAM_REQUEST_LIST}: A list of URLs to search for along with the partitions those searches should be performed on
* {@link #JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search. If omitted, {@link DaoConfig#getExpungeBatchSize} will be used.
* {@link #JOB_PARAM_START_TIME}: The latest timestamp of entities to search for
* {@link BatchConstants#JOB_PARAM_REQUEST_LIST}: A list of URLs to search for along with the partitions those searches should be performed on
* {@link BatchConstants#JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search. If omitted, {@link DaoConfig#getExpungeBatchSize} will be used.
* {@link BatchConstants#JOB_PARAM_START_TIME}: The latest timestamp of entities to search for
* <p>
* The reader will return at most {@link #JOB_PARAM_BATCH_SIZE} pids every time it is called, or null
* The reader will return at most {@link BatchConstants#JOB_PARAM_BATCH_SIZE} pids every time it is called, or null
* once no more matching entities are available. It returns the resources in reverse chronological order
* and stores where it's at in the Spring Batch execution context with the key {@link #CURRENT_THRESHOLD_HIGH}
* and stores where it's at in the Spring Batch execution context with the key {@link BatchConstants#CURRENT_THRESHOLD_HIGH}
* appended with "." and the index number of the url list item it has gotten up to. This is to permit
* restarting jobs that use this reader so it can pick up where it left off.
*/
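A hedged launch sketch using the buildJobParameters helper defined in this class (the operation name, batch size, and JSON payload are hypothetical; the submitter and job fields are assumed wired as elsewhere in this diff):

```java
// RequestListJson bundles the URLs with the partitions to run them on
RequestListJson requestList = RequestListJson.fromJson(theUrlListJson);
JobParameters parameters = BaseReverseCronologicalBatchPidReader.buildJobParameters(
    "delete-expunge",  // operation name (hypothetical value)
    1000,              // batch size; DaoConfig#getExpungeBatchSize is used when null
    requestList);
JobExecution execution = myBatchJobSubmitter.runJob(myDeleteExpungeJob, parameters);
```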
public abstract class BaseReverseCronologicalBatchPidReader implements ItemReader<List<Long>>, ItemStream {
public static final String JOB_PARAM_REQUEST_LIST = "url-list";
public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
public static final String JOB_PARAM_START_TIME = "start-time";
public static final String CURRENT_URL_INDEX = "current.url-index";
public static final String CURRENT_THRESHOLD_HIGH = "current.threshold-high";
private static final Logger ourLog = LoggerFactory.getLogger(ReverseCronologicalBatchResourcePidReader.class);
private final BatchDateThresholdUpdater myBatchDateThresholdUpdater = new BatchDateThresholdUpdater();
private final Map<Integer, Date> myThresholdHighByUrlIndex = new HashMap<>();
@ -88,30 +84,30 @@ public abstract class BaseReverseCronologicalBatchPidReader implements ItemReade
private Date myStartTime;
private static String highKey(int theIndex) {
return CURRENT_THRESHOLD_HIGH + "." + theIndex;
return BatchConstants.CURRENT_THRESHOLD_HIGH + "." + theIndex;
}
@Nonnull
public static JobParameters buildJobParameters(String theOperationName, Integer theBatchSize, RequestListJson theRequestListJson) {
Map<String, JobParameter> map = new HashMap<>();
map.put(MultiUrlJobParameterValidator.JOB_PARAM_OPERATION_NAME, new JobParameter(theOperationName));
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST, new JobParameter(theRequestListJson.toJson()));
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
map.put(BatchConstants.JOB_PARAM_REQUEST_LIST, new JobParameter(theRequestListJson.toJson()));
map.put(BatchConstants.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
if (theBatchSize != null) {
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
map.put(BatchConstants.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
}
JobParameters parameters = new JobParameters(map);
return parameters;
}
@Autowired
public void setRequestListJson(@Value("#{jobParameters['" + JOB_PARAM_REQUEST_LIST + "']}") String theRequestListJson) {
public void setRequestListJson(@Value("#{jobParameters['" + BatchConstants.JOB_PARAM_REQUEST_LIST + "']}") String theRequestListJson) {
RequestListJson requestListJson = RequestListJson.fromJson(theRequestListJson);
myPartitionedUrls = requestListJson.getPartitionedUrls();
}
@Autowired
public void setStartTime(@Value("#{jobParameters['" + JOB_PARAM_START_TIME + "']}") Date theStartTime) {
public void setStartTime(@Value("#{jobParameters['" + BatchConstants.JOB_PARAM_START_TIME + "']}") Date theStartTime) {
myStartTime = theStartTime;
}
@ -166,8 +162,8 @@ public abstract class BaseReverseCronologicalBatchPidReader implements ItemReade
@Override
public void open(ExecutionContext executionContext) throws ItemStreamException {
if (executionContext.containsKey(CURRENT_URL_INDEX)) {
myUrlIndex = new Long(executionContext.getLong(CURRENT_URL_INDEX)).intValue();
if (executionContext.containsKey(BatchConstants.CURRENT_URL_INDEX)) {
myUrlIndex = new Long(executionContext.getLong(BatchConstants.CURRENT_URL_INDEX)).intValue();
}
for (int index = 0; index < myPartitionedUrls.size(); ++index) {
String key = highKey(index);
@@ -181,7 +177,7 @@ public abstract class BaseReverseCronologicalBatchPidReader implements ItemReade
@Override
public void update(ExecutionContext executionContext) throws ItemStreamException {
executionContext.putLong(CURRENT_URL_INDEX, myUrlIndex);
executionContext.putLong(BatchConstants.CURRENT_URL_INDEX, myUrlIndex);
for (int index = 0; index < myPartitionedUrls.size(); ++index) {
Date date = myThresholdHighByUrlIndex.get(index);
if (date != null) {
@@ -199,7 +195,7 @@ public abstract class BaseReverseCronologicalBatchPidReader implements ItemReade
}
@Autowired
public void setBatchSize(@Value("#{jobParameters['" + JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
public void setBatchSize(@Value("#{jobParameters['" + BatchConstants.JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
myBatchSize = theBatchSize;
}

View File

@@ -25,6 +25,7 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition,
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
@@ -57,7 +58,7 @@ public abstract class BaseBulkItemReader implements ItemReader<List<ResourcePers
@Value("#{stepExecutionContext['resourceType']}")
protected String myResourceType;
@Value("#{jobExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
@Value("#{jobExecutionContext['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
protected String myJobUUID;
@Value("#{jobParameters['" + BulkExportJobConfig.READ_CHUNK_PARAMETER + "']}")
protected Long myReadChunkSize;

View File

@@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.bulk.export.job;
* #L%
*/
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.springframework.batch.core.ExitStatus;
@@ -49,9 +50,9 @@ public class BulkExportGenerateResourceFilesStepListener implements StepExecutio
public ExitStatus afterStep(StepExecution theStepExecution) {
if (theStepExecution.getExitStatus().getExitCode().equals(ExitStatus.FAILED.getExitCode())) {
//Try to fetch it from the parameters first, and if it doesn't exist, fetch it from the context.
String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BatchConstants.JOB_UUID_PARAMETER);
if (jobUuid == null) {
jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BatchConstants.JOB_UUID_PARAMETER);
}
assert isNotBlank(jobUuid);
String exitDescription = theStepExecution.getExitStatus().getExitDescription();
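The parameters-first, context-second lookup above also recurs in BulkImportStepListener further down. As a standalone sketch (the helper name is illustrative):

// Resolve the job UUID from job parameters, falling back to the job
// execution context (populated by CreateBulkExportEntityTasklet).
private static String resolveJobUuid(StepExecution theStepExecution) {
	String jobUuid = theStepExecution.getJobExecution().getJobParameters()
		.getString(BatchConstants.JOB_UUID_PARAMETER);
	if (jobUuid == null) {
		jobUuid = theStepExecution.getJobExecution().getExecutionContext()
			.getString(BatchConstants.JOB_UUID_PARAMETER);
	}
	return jobUuid;
}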

View File

@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.export.job;
* #L%
*/
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.processor.GoldenResourceAnnotatingProcessor;
import ca.uhn.fhir.jpa.batch.processor.PidToIBaseResourceProcessor;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
@@ -51,7 +51,6 @@ import java.util.List;
@Configuration
public class BulkExportJobConfig {
public static final String JOB_UUID_PARAMETER = "jobUUID";
public static final String READ_CHUNK_PARAMETER = "readChunkSize";
public static final String EXPAND_MDM_PARAMETER = "expandMdm";
public static final String GROUP_ID_PARAMETER = "groupId";
@@ -88,7 +87,7 @@ public class BulkExportJobConfig {
@Bean
@Lazy
public Job bulkExportJob() {
return myJobBuilderFactory.get(BatchJobsConfig.BULK_EXPORT_JOB_NAME)
return myJobBuilderFactory.get(BatchConstants.BULK_EXPORT_JOB_NAME)
.validator(bulkExportJobParameterValidator())
.start(createBulkExportEntityStep())
.next(bulkExportPartitionStep())
@@ -111,7 +110,7 @@ public class BulkExportJobConfig {
@Bean
@Lazy
public Job groupBulkExportJob() {
return myJobBuilderFactory.get(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME)
return myJobBuilderFactory.get(BatchConstants.GROUP_BULK_EXPORT_JOB_NAME)
.validator(groupBulkJobParameterValidator())
.validator(bulkExportJobParameterValidator())
.start(createBulkExportEntityStep())
@@ -123,7 +122,7 @@ public class BulkExportJobConfig {
@Bean
@Lazy
public Job patientBulkExportJob() {
return myJobBuilderFactory.get(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME)
return myJobBuilderFactory.get(BatchConstants.PATIENT_BULK_EXPORT_JOB_NAME)
.validator(bulkExportJobParameterValidator())
.start(createBulkExportEntityStep())
.next(patientPartitionStep())
@@ -169,7 +168,7 @@ public class BulkExportJobConfig {
@Bean
public Step bulkExportGenerateResourceFilesStep() {
return myStepBuilderFactory.get(BatchJobsConfig.BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP)
return myStepBuilderFactory.get(BatchConstants.BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP)
.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
.reader(bulkItemReader())
.processor(myPidToIBaseResourceProcessor)
@@ -217,7 +216,7 @@ public class BulkExportJobConfig {
@Bean
public Step bulkExportPartitionStep() {
return myStepBuilderFactory.get("partitionStep")
.partitioner(BatchJobsConfig.BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP, bulkExportResourceTypePartitioner())
.partitioner(BatchConstants.BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP, bulkExportResourceTypePartitioner())
.step(bulkExportGenerateResourceFilesStep())
.build();
}

View File

@@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.bulk.export.job;
* #L%
*/
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import ca.uhn.fhir.rest.api.Constants;
@@ -34,8 +35,6 @@ import org.springframework.transaction.support.TransactionTemplate;
import java.util.Arrays;
import java.util.Optional;
import static org.slf4j.LoggerFactory.getLogger;
/**
* This class will prevent a job from running if the UUID does not exist or is invalid.
*/
@@ -59,7 +58,7 @@ public class BulkExportJobParameterValidator implements JobParametersValidator {
if (readChunkSize == null || readChunkSize < 1) {
errorBuilder.append("There must be a valid number for readChunkSize, which is at least 1. ");
}
String jobUUID = theJobParameters.getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
String jobUUID = theJobParameters.getString(BatchConstants.JOB_UUID_PARAMETER);
Optional<BulkExportJobEntity> oJob = myBulkExportJobDao.findByJobId(jobUUID);
if (!StringUtils.isBlank(jobUUID) && !oJob.isPresent()) {
errorBuilder.append("There is no persisted job that exists with UUID: " + jobUUID + ". ");

View File

@@ -20,10 +20,11 @@ package ca.uhn.fhir.jpa.bulk.export.job;
* #L%
*/
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import org.apache.commons.lang3.StringUtils;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
@@ -42,18 +43,27 @@ public class CreateBulkExportEntityTasklet implements Tasklet {
@Autowired
private IBulkDataExportSvc myBulkDataExportSvc;
public static void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) {
theChunkContext
.getStepContext()
.getStepExecution()
.getJobExecution()
.getExecutionContext()
.putString(BatchConstants.JOB_UUID_PARAMETER, theJobUUID);
}
@Override
public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) throws Exception {
Map<String, Object> jobParameters = theChunkContext.getStepContext().getJobParameters();
//We can leave early if they provided us with an existing job.
if (jobParameters.containsKey(BulkExportJobConfig.JOB_UUID_PARAMETER)) {
addUUIDToJobContext(theChunkContext, (String)jobParameters.get(BulkExportJobConfig.JOB_UUID_PARAMETER));
if (jobParameters.containsKey(BatchConstants.JOB_UUID_PARAMETER)) {
addUUIDToJobContext(theChunkContext, (String) jobParameters.get(BatchConstants.JOB_UUID_PARAMETER));
return RepeatStatus.FINISHED;
} else {
String resourceTypes = (String)jobParameters.get("resourceTypes");
Date since = (Date)jobParameters.get("since");
String filters = (String)jobParameters.get("filters");
String resourceTypes = (String) jobParameters.get("resourceTypes");
Date since = (Date) jobParameters.get("since");
String filters = (String) jobParameters.get("filters");
Set<String> filterSet;
if (StringUtils.isBlank(filters)) {
filterSet = null;
@@ -86,13 +96,4 @@ public class CreateBulkExportEntityTasklet implements Tasklet {
return RepeatStatus.FINISHED;
}
}
public static void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) {
theChunkContext
.getStepContext()
.getStepExecution()
.getJobExecution()
.getExecutionContext()
.putString(BulkExportJobConfig.JOB_UUID_PARAMETER, theJobUUID);
}
}

View File

@@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.bulk.export.job;
* #L%
*/
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.slf4j.Logger;
import org.springframework.batch.core.partition.support.Partitioner;
@@ -60,7 +61,7 @@ public class ResourceTypePartitioner implements Partitioner {
// The worker step needs to know which parent job it is processing for, and which collection entity it will be
// attaching its results to.
context.putString(BulkExportJobConfig.JOB_UUID_PARAMETER, myJobUUID);
context.putString(BatchConstants.JOB_UUID_PARAMETER, myJobUUID);
context.putLong("bulkExportCollectionEntityId", collectionEntityId);
// Name the partition based on the resource type

View File

@@ -30,9 +30,8 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
@@ -49,6 +48,7 @@ import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@@ -114,15 +114,15 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
private IBatchJobSubmitter myJobSubmitter;
@Autowired
@Qualifier(BatchJobsConfig.BULK_EXPORT_JOB_NAME)
@Qualifier(BatchConstants.BULK_EXPORT_JOB_NAME)
private org.springframework.batch.core.Job myBulkExportJob;
@Autowired
@Qualifier(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME)
@Qualifier(BatchConstants.GROUP_BULK_EXPORT_JOB_NAME)
private org.springframework.batch.core.Job myGroupBulkExportJob;
@Autowired
@Qualifier(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME)
@Qualifier(BatchConstants.PATIENT_BULK_EXPORT_JOB_NAME)
private org.springframework.batch.core.Job myPatientBulkExportJob;
private Set<String> myCompartmentResources;
@@ -243,7 +243,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
private void processJob(BulkExportJobEntity theBulkExportJobEntity) {
String theJobUuid = theBulkExportJobEntity.getJobId();
JobParametersBuilder parameters = new JobParametersBuilder()
.addString(BulkExportJobConfig.JOB_UUID_PARAMETER, theJobUuid)
.addString(BatchConstants.JOB_UUID_PARAMETER, theJobUuid)
.addLong(BulkExportJobConfig.READ_CHUNK_PARAMETER, READ_CHUNK_SIZE);
ourLog.info("Submitting bulk export job {} to job scheduler", theJobUuid);

View File

@@ -20,10 +20,9 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L%
*/
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.elasticsearch.client.enrich.ExecutePolicyResponse;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
@@ -39,7 +38,7 @@ public class ActivateBulkImportEntityStepListener implements StepExecutionListen
@Override
public void beforeStep(StepExecution theStepExecution) {
String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BatchConstants.JOB_UUID_PARAMETER);
if (jobUuid != null) {
myBulkImportDaoSvc.setJobToStatus(jobUuid, BulkImportJobStatusEnum.RUNNING);
}

View File

@@ -21,8 +21,8 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
@@ -42,7 +42,7 @@ public class BulkImportFileReader implements ItemReader<ParsedBulkImportRecord>
private IBulkDataImportSvc myBulkDataImportSvc;
@Autowired
private FhirContext myFhirContext;
@Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
@Value("#{stepExecutionContext['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
private String myJobUuid;
@Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}")
private int myFileIndex;

View File

@@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
@@ -39,7 +39,7 @@ import java.util.List;
public class BulkImportFileWriter implements ItemWriter<ParsedBulkImportRecord> {
private static final Logger ourLog = LoggerFactory.getLogger(BulkImportFileWriter.class);
@Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
@Value("#{stepExecutionContext['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
private String myJobUuid;
@Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}")
private int myFileIndex;

View File

@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L%
*/
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.BatchStatus;
@@ -36,7 +36,7 @@ import org.springframework.beans.factory.annotation.Value;
*/
public class BulkImportJobCloser implements Tasklet {
@Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
@Value("#{jobParameters['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
private String myJobUUID;
@Autowired

View File

@@ -21,14 +21,13 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
*/
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.BatchConstants;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.JobRegistry;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.JobScope;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
@@ -50,8 +49,8 @@ import org.springframework.retry.policy.TimeoutRetryPolicy;
import javax.batch.api.chunk.listener.RetryProcessListener;
import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.BULK_IMPORT_JOB_NAME;
import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.BULK_IMPORT_PROCESSING_STEP;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.BULK_IMPORT_JOB_NAME;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.BULK_IMPORT_PROCESSING_STEP;
/**
* Spring batch Job configuration file. Contains all necessary plumbing to run a

View File

@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L%
*/
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import org.apache.commons.lang3.StringUtils;
@@ -52,7 +52,7 @@ public class BulkImportJobParameterValidator implements JobParametersValidator {
TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
String errorMessage = txTemplate.execute(tx -> {
StringBuilder errorBuilder = new StringBuilder();
String jobUUID = theJobParameters.getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
String jobUUID = theJobParameters.getString(BatchConstants.JOB_UUID_PARAMETER);
Optional<BulkImportJobEntity> oJob = myBulkImportJobDao.findByJobId(jobUUID);
if (!StringUtils.isBlank(jobUUID) && !oJob.isPresent()) {
errorBuilder.append("There is no persisted job that exists with UUID: ");

View File

@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L%
*/
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import org.slf4j.Logger;
@@ -43,7 +43,7 @@ public class BulkImportPartitioner implements Partitioner {
private static final Logger ourLog = getLogger(BulkImportPartitioner.class);
@Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
@Value("#{jobParameters['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
private String myJobUUID;
@Autowired
@@ -61,7 +61,7 @@ public class BulkImportPartitioner implements Partitioner {
String fileDescription = myBulkDataImportSvc.getFileDescription(myJobUUID, i);
ExecutionContext context = new ExecutionContext();
context.putString(BulkExportJobConfig.JOB_UUID_PARAMETER, myJobUUID);
context.putString(BatchConstants.JOB_UUID_PARAMETER, myJobUUID);
context.putInt(FILE_INDEX, i);
context.put(ROW_PROCESSING_MODE, job.getProcessingMode());
context.put(JOB_DESCRIPTION, job.getJobDescription());
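For context, a Spring Batch Partitioner returns one ExecutionContext per partition, and each worker step reads its own entries back out. A simplified sketch of the pattern used above (the file count and partition-name prefix are hypothetical):

@Override
public Map<String, ExecutionContext> partition(int theGridSize) {
	Map<String, ExecutionContext> partitions = new HashMap<>();
	int fileCount = 3; // hypothetical: number of staged import files
	for (int i = 0; i < fileCount; i++) {
		ExecutionContext context = new ExecutionContext();
		context.putString(BatchConstants.JOB_UUID_PARAMETER, myJobUUID);
		context.putInt(FILE_INDEX, i);
		partitions.put("file-" + i, context); // one worker step per file
	}
	return partitions;
}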

View File

@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L%
*/
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.ExitStatus;
@@ -52,9 +52,9 @@ public class BulkImportStepListener implements StepExecutionListener, RetryListe
public ExitStatus afterStep(StepExecution theStepExecution) {
if (theStepExecution.getExitStatus().getExitCode().equals(ExitStatus.FAILED.getExitCode())) {
//Try to fetch it from the parameters first, and if it doesn't exist, fetch it from the context.
String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BatchConstants.JOB_UUID_PARAMETER);
if (jobUuid == null) {
jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BatchConstants.JOB_UUID_PARAMETER);
}
assert isNotBlank(jobUuid);

View File

@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L%
*/
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.job.CreateBulkExportEntityTasklet;
import ca.uhn.fhir.util.ValidateUtil;
import org.springframework.batch.core.StepContribution;
@@ -37,8 +37,8 @@ public class CreateBulkImportEntityTasklet implements Tasklet {
Map<String, Object> jobParameters = theChunkContext.getStepContext().getJobParameters();
//We can leave early if they provided us with an existing job.
ValidateUtil.isTrueOrThrowInvalidRequest(jobParameters.containsKey(BulkExportJobConfig.JOB_UUID_PARAMETER), "Job doesn't have a UUID");
CreateBulkExportEntityTasklet.addUUIDToJobContext(theChunkContext, (String) jobParameters.get(BulkExportJobConfig.JOB_UUID_PARAMETER));
ValidateUtil.isTrueOrThrowInvalidRequest(jobParameters.containsKey(BatchConstants.JOB_UUID_PARAMETER), "Job doesn't have a UUID");
CreateBulkExportEntityTasklet.addUUIDToJobContext(theChunkContext, (String) jobParameters.get(BatchConstants.JOB_UUID_PARAMETER));
return RepeatStatus.FINISHED;
}

View File

@@ -21,8 +21,8 @@ package ca.uhn.fhir.jpa.bulk.imprt.svc;
*/
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
@@ -79,7 +79,7 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
@Autowired
private IBatchJobSubmitter myJobSubmitter;
@Autowired
@Qualifier(BatchJobsConfig.BULK_IMPORT_JOB_NAME)
@Qualifier(BatchConstants.BULK_IMPORT_JOB_NAME)
private org.springframework.batch.core.Job myBulkImportJob;
@Autowired
private DaoConfig myDaoConfig;
@@ -271,7 +271,7 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
ValidateUtil.isTrueOrThrowInvalidRequest(batchSize > 0, "Batch size must be positive");
JobParametersBuilder parameters = new JobParametersBuilder()
.addString(BulkExportJobConfig.JOB_UUID_PARAMETER, jobId)
.addString(BatchConstants.JOB_UUID_PARAMETER, jobId)
.addLong(BulkImportJobConfig.JOB_PARAM_COMMIT_INTERVAL, (long) batchSize);
if (isNotBlank(theBulkExportJobEntity.getJobDescription())) {

View File

@@ -11,9 +11,9 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.batch.BatchConstants;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.config.NonPersistedBatchConfigurer;
import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
import ca.uhn.fhir.jpa.batch.mdm.MdmBatchJobSubmitterFactoryImpl;

View File

@@ -32,6 +32,7 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.annotations.VisibleForTesting;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.slf4j.Logger;
@@ -118,7 +119,7 @@ public class HapiTransactionService {
theTransactionDetails.clearUserData(BaseHapiFhirDao.XACT_USERDATA_KEY_EXISTING_SEARCH_PARAMS);
double sleepAmount = (250.0d * i) * Math.random();
long sleepAmountLong = (long) sleepAmount;
sleepAtLeast(sleepAmountLong, false);
TestUtil.sleepAtLeast(sleepAmountLong, false);
ourLog.info("About to start a transaction retry due to conflict or constraint error. Sleeping {}ms first.", sleepAmountLong);
continue;
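The sleep above is a linear backoff with uniform jitter: retry attempt i waits a random duration in [0, 250 * i) milliseconds. A standalone illustration (not HAPI code):

for (int i = 1; i <= 4; i++) {
	double sleepAmount = (250.0d * i) * Math.random(); // uniform in [0, 250 * i)
	System.out.printf("retry %d: sleeping %dms (cap %dms)%n",
		i, (long) sleepAmount, 250L * i);
}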
@@ -164,22 +165,4 @@ public class HapiTransactionService {
super(theThrowable);
}
}
@SuppressWarnings("BusyWait")
public static void sleepAtLeast(long theMillis, boolean theLogProgress) {
long start = System.currentTimeMillis();
while (System.currentTimeMillis() <= start + theMillis) {
try {
long timeSinceStarted = System.currentTimeMillis() - start;
long timeToSleep = Math.max(0, theMillis - timeSinceStarted);
if (theLogProgress) {
ourLog.info("Sleeping for {}ms", timeToSleep);
}
Thread.sleep(timeToSleep);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
ourLog.error("Interrupted", e);
}
}
}
}

View File

@@ -25,8 +25,8 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
@@ -52,7 +52,7 @@ public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter
@Autowired
private IBatchJobSubmitter myBatchJobSubmitter;
@Autowired
@Qualifier(BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME)
@Qualifier(BatchConstants.DELETE_EXPUNGE_JOB_NAME)
private Job myDeleteExpungeJob;
@Autowired
FhirContext myFhirContext;

View File

@@ -20,12 +20,10 @@ package ca.uhn.fhir.jpa.delete.job;
* #L%
*/
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
@@ -39,7 +37,7 @@ import org.springframework.context.annotation.Lazy;
import java.util.List;
import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.DELETE_EXPUNGE_JOB_NAME;
/**
* Spring batch Job configuration file. Contains all necessary plumbing to run a

View File

@@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.entity;
* #L%
*/
import ca.uhn.fhir.mdm.api.IMdmLink;
import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@@ -47,7 +48,7 @@ import java.util.Date;
@Table(name = "MPI_LINK", uniqueConstraints = {
@UniqueConstraint(name = "IDX_EMPI_PERSON_TGT", columnNames = {"PERSON_PID", "TARGET_PID"}),
})
public class MdmLink {
public class MdmLink implements IMdmLink {
public static final int VERSION_LENGTH = 16;
private static final int MATCH_RESULT_LENGTH = 16;
private static final int LINK_SOURCE_LENGTH = 16;

View File

@@ -29,9 +29,7 @@ import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.mdm.log.Logs;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.ReferenceParam;
import joptsimple.internal.Strings;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
@@ -39,8 +37,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import static org.slf4j.LoggerFactory.getLogger;
/**
* This interceptor replaces the auto-generated CapabilityStatement that is generated
* by the HAPI FHIR Server with a static hard-coded resource.

View File

@@ -305,6 +305,15 @@ public final class RepositoryValidatingRuleBuilder implements IRuleRoot {
return this;
}
/**
* Configure the validator to reject unknown extensions
*/
@Nonnull
public FinalizedRequireValidationRule rejectUnknownExtensions() {
myRule.getValidator().setAnyExtensionsAllowed(false);
return this;
}
/**
* Configure the validator to not perform terminology validation
*/
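A hypothetical use of the new rule, sketched against the chained style of this builder (the resource type and the requireValidationToDeclaredProfiles() step are assumptions about the surrounding API, not shown in this diff):

List<IRepositoryValidatingRule> rules = myRuleBuilder
	.forResourcesOfType("Patient")
	.requireValidationToDeclaredProfiles()
	.rejectUnknownExtensions() // the rule added in this change
	.build();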

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.tasks;
/*-
* #%L
* HAPI FHIR JPA Server - Migration
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%

View File

@@ -50,13 +50,13 @@ public class BaseJpaSystemProvider<T, MT> extends BaseJpaProvider implements IJp
* @deprecated
*/
@Deprecated
public static final String MARK_ALL_RESOURCES_FOR_REINDEXING = "$mark-all-resources-for-reindexing";
public static final String MARK_ALL_RESOURCES_FOR_REINDEXING = ProviderConstants.MARK_ALL_RESOURCES_FOR_REINDEXING;
/**
* @see ProviderConstants#OPERATION_REINDEX
* @deprecated
*/
@Deprecated
public static final String PERFORM_REINDEXING_PASS = "$perform-reindexing-pass";
public static final String PERFORM_REINDEXING_PASS = ProviderConstants.PERFORM_REINDEXING_PASS;
private IFhirSystemDao<T, MT> myDao;
@Autowired

Some files were not shown because too many files have changed in this diff.