hapi-fhir-storage-api (#3006)

* rename hapi-fhir-jpaserver-migrate to hapi-fhir-sql-migrate and move hapi-tasks into jpaserver-persistence

* rename hapi-fhir-jpaserver-api to hapi-fhir-storage-api

* move bulk import apis

* create hapi-fhir-jpa

* move CircularQueueCaptureQueriesListener to hapi-fhir-jpa

* move mdm logs to storage-api

* move gziputil to storage-api

* move quartz scheduling to hapi-fhir-jpa

* move default subscription channel factory to storage-api

* consolidated batch constants

* correct accidental Logs change

* remove dependency of cdr-api on cdr-persistence

* fix javadoc

* pom descriptions

* review feedback
Ken Stevens 2021-09-21 11:36:08 -04:00 committed by GitHub
parent f1f9c672ad
commit 060791aeb4
289 changed files with 1489 additions and 1375 deletions

View File

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -18,6 +18,9 @@
   <dependencies>
+    <!-- IMPORTANT NOTE: This pom is used by multiple platforms (including Android) so please avoid adding any new
+         dependencies here. -->
+
     <!-- JSON -->
     <dependency>
       <groupId>com.fasterxml.jackson.datatype</groupId>

View File

@@ -192,4 +192,26 @@ public class TestUtil {
     return stripReturns(theString).replace(" ", "");
   }
 
+  public static void sleepAtLeast(long theMillis) {
+    sleepAtLeast(theMillis, true);
+  }
+
+  @SuppressWarnings("BusyWait")
+  public static void sleepAtLeast(long theMillis, boolean theLogProgress) {
+    long start = System.currentTimeMillis();
+    while (System.currentTimeMillis() <= start + theMillis) {
+      try {
+        long timeSinceStarted = System.currentTimeMillis() - start;
+        long timeToSleep = Math.max(0, theMillis - timeSinceStarted);
+        if (theLogProgress) {
+          ourLog.info("Sleeping for {}ms", timeToSleep);
+        }
+        Thread.sleep(timeToSleep);
+      } catch (InterruptedException e) {
+        Thread.currentThread().interrupt();
+        ourLog.error("Interrupted", e);
+      }
+    }
+  }
 }
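The helper added here (to `ca.uhn.fhir.util.TestUtil`, judging by the later call sites in this commit) keeps looping until the requested wall-clock time has fully elapsed, re-arming the sleep after an `InterruptedException` instead of returning early the way a bare `Thread.sleep()` would. A small usage sketch; the durations are illustrative:

```java
// Wait at least 500ms for an asynchronous task to finish, logging each interval
TestUtil.sleepAtLeast(500);

// Same wait without the progress logging
TestUtil.sleepAtLeast(500, false);
```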

View File

@@ -1,19 +1,22 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xmlns="http://maven.apache.org/POM/4.0.0"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
-  <artifactId>hapi-fhir-jpaserver-batch</artifactId>
+  <artifactId>hapi-fhir-batch</artifactId>
   <packaging>jar</packaging>
   <name>HAPI FHIR JPA Server - Batch Task Processor</name>
+  <description>Default implementation of batch job submitter along with constants used by the different hapi-fhir batch
+    jobs.
+  </description>
 
   <dependencies>
     <dependency>
@@ -24,10 +27,6 @@
       <groupId>org.springframework.batch</groupId>
       <artifactId>spring-batch-infrastructure</artifactId>
     </dependency>
-    <dependency>
-      <groupId>org.springframework.data</groupId>
-      <artifactId>spring-data-jpa</artifactId>
-    </dependency>
     <dependency>
       <groupId>javax.annotation</groupId>
      <artifactId>javax.annotation-api</artifactId>

View File

@@ -30,7 +30,7 @@ public interface IBatchJobSubmitter {
   /**
    * Given a {@link Job} and a {@link JobParameters}, execute the job with the given parameters.
    *
-   * @param theJob the job to run.
+   * @param theJob           the job to run.
    * @param theJobParameters A collection of key-value pairs that are used to parameterize the job.
    * @return A {@link JobExecution} representing the job.
    * @throws JobParametersInvalidException If validation on the parameters fails.
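For context, a hedged sketch of how this interface is used together with the job-name qualifiers this commit consolidates into `BatchConstants`; the surrounding Spring wiring and the `theJobUuid` value are assumptions for illustration:

```java
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;

public class BulkImportSubmitter {

    @Autowired
    private IBatchJobSubmitter myBatchJobSubmitter;

    @Autowired
    @Qualifier(BatchConstants.BULK_IMPORT_JOB_NAME)
    private Job myBulkImportJob;

    public JobExecution submit(String theJobUuid) throws JobParametersInvalidException {
        // Key-value pairs that parameterize the job, as the javadoc above describes
        JobParameters parameters = new JobParametersBuilder()
            .addString(BatchConstants.JOB_UUID_PARAMETER, theJobUuid)
            .toJobParameters();
        return myBatchJobSubmitter.runJob(myBulkImportJob, parameters);
    }
}
```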

View File

@@ -0,0 +1,81 @@
package ca.uhn.fhir.jpa.batch.config;

/*-
 * #%L
 * HAPI FHIR JPA Server - Batch Task Processor
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public final class BatchConstants {
    public static final String JOB_PARAM_REQUEST_LIST = "url-list";
    public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
    public static final String JOB_PARAM_START_TIME = "start-time";
    public static final String CURRENT_URL_INDEX = "current.url-index";
    public static final String CURRENT_THRESHOLD_HIGH = "current.threshold-high";
    public static final String JOB_UUID_PARAMETER = "jobUUID";
    public static final String JOB_LAUNCHING_TASK_EXECUTOR = "jobLaunchingTaskExecutor";
    public static final String BULK_EXPORT_JOB_NAME = "bulkExportJob";
    public static final String GROUP_BULK_EXPORT_JOB_NAME = "groupBulkExportJob";
    public static final String PATIENT_BULK_EXPORT_JOB_NAME = "patientBulkExportJob";
    public static final String BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP = "bulkExportGenerateResourceFilesStep";
    public static final String BULK_IMPORT_JOB_NAME = "bulkImportJob";
    public static final String BULK_IMPORT_PROCESSING_STEP = "bulkImportProcessingStep";

    /**
     * Delete Expunge
     */
    public static final String DELETE_EXPUNGE_JOB_NAME = "deleteExpungeJob";

    /**
     * Reindex
     */
    public static final String REINDEX_JOB_NAME = "reindexJob";

    /**
     * Reindex Everything
     */
    public static final String REINDEX_EVERYTHING_JOB_NAME = "reindexEverythingJob";

    /**
     * MDM Clear
     */
    public static final String MDM_CLEAR_JOB_NAME = "mdmClearJob";

    /**
     * This Set contains the step names across all job types that are appropriate for
     * someone to look at the write count for that given step in order to determine the
     * number of processed records.
     * <p>
     * This is provided since a job might have multiple steps that the same data passes
     * through, so you can't just sum up the total of all of them.
     * <p>
     * For any given batch job type, there should only be one step name in this set
     */
    public static Set<String> RECORD_PROCESSING_STEP_NAMES;

    static {
        HashSet<String> recordProcessingStepNames = new HashSet<>();
        recordProcessingStepNames.add(BatchConstants.BULK_IMPORT_PROCESSING_STEP);
        recordProcessingStepNames.add(BatchConstants.BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP);
        BatchConstants.RECORD_PROCESSING_STEP_NAMES = Collections.unmodifiableSet(recordProcessingStepNames);
    }

    /**
     * Non instantiable
     */
    private BatchConstants() {
    }
}
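The javadoc above describes how `RECORD_PROCESSING_STEP_NAMES` is meant to be read. A minimal sketch, assuming a finished Spring Batch `JobExecution`; the helper class itself is hypothetical, not part of this commit:

```java
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.StepExecution;

public final class BatchRecordCounts {
    private BatchRecordCounts() {
    }

    /**
     * Sum write counts over the designated record-processing steps only,
     * so multi-step jobs are not double counted.
     */
    public static int countProcessedRecords(JobExecution theJobExecution) {
        int total = 0;
        for (StepExecution stepExecution : theJobExecution.getStepExecutions()) {
            if (BatchConstants.RECORD_PROCESSING_STEP_NAMES.contains(stepExecution.getStepName())) {
                total += stepExecution.getWriteCount();
            }
        }
        return total;
    }
}
```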

View File

@@ -20,7 +20,6 @@ package ca.uhn.fhir.jpa.batch.config;
  * #L%
  */
 
-import ca.uhn.fhir.jpa.batch.BatchConstants;
 import org.springframework.batch.core.configuration.annotation.DefaultBatchConfigurer;
 import org.springframework.batch.core.explore.JobExplorer;
 import org.springframework.batch.core.explore.support.MapJobExplorerFactoryBean;

View File

@@ -46,7 +46,7 @@ public class BatchJobSubmitterImpl implements IBatchJobSubmitter {
   private JobRepository myJobRepository;
 
   @Override
-  public JobExecution runJob(Job theJob, JobParameters theJobParameters) throws JobParametersInvalidException{
+  public JobExecution runJob(Job theJob, JobParameters theJobParameters) throws JobParametersInvalidException {
     try {
       return myJobLauncher.run(theJob, theJobParameters);
     } catch (JobExecutionAlreadyRunningException | JobRestartException | JobInstanceAlreadyCompleteException e) {

View File

@@ -22,7 +22,6 @@ public class BatchJobConfig {
   private StepBuilderFactory myStepBuilderFactory;
 
-
   @Bean
   public Job testJob() {
     return myJobBuilderFactory.get("testJob")

View File

@@ -28,6 +28,7 @@ public class TestBatchConfig {
     asyncTaskExecutor.initialize();
     return asyncTaskExecutor;
   }
+
   @Bean
   public BatchConfigurer batchConfigurer() {
     return new NonPersistedBatchConfigurer();

View File

@@ -8,47 +8,50 @@
     </encoder>
   </appender>
 
-  <logger name="org.springframework.web.socket.handler.ExceptionWebSocketHandlerDecorator" additivity="false" level="info">
-    <appender-ref ref="STDOUT" />
+  <logger name="org.springframework.web.socket.handler.ExceptionWebSocketHandlerDecorator" additivity="false"
+          level="info">
+    <appender-ref ref="STDOUT"/>
   </logger>
   <logger name="ca.uhn.fhir.jpa.dao.FhirResourceDaoSubscriptionDstu2" additivity="false" level="info">
-    <appender-ref ref="STDOUT" />
+    <appender-ref ref="STDOUT"/>
   </logger>
   <logger name="org.eclipse.jetty.websocket" additivity="false" level="info">
-    <appender-ref ref="STDOUT" />
+    <appender-ref ref="STDOUT"/>
   </logger>
   <logger name="org.eclipse" additivity="false" level="error">
   </logger>
   <logger name="ca.uhn.fhir.rest.client" additivity="false" level="info">
-    <appender-ref ref="STDOUT" />
+    <appender-ref ref="STDOUT"/>
   </logger>
   <logger name="ca.uhn.fhir.jpa.dao" additivity="false" level="info">
-    <appender-ref ref="STDOUT" />
+    <appender-ref ref="STDOUT"/>
   </logger>
   <!-- Set to 'trace' to enable SQL logging -->
   <logger name="org.hibernate.SQL" additivity="false" level="info">
-    <appender-ref ref="STDOUT" />
+    <appender-ref ref="STDOUT"/>
   </logger>
   <!-- Set to 'trace' to enable SQL Value logging -->
   <logger name="org.hibernate.type" additivity="false" level="info">
-    <appender-ref ref="STDOUT" />
+    <appender-ref ref="STDOUT"/>
   </logger>
   <logger name="org.hibernate.search.elasticsearch.request" additivity="false" level="trace">
-    <appender-ref ref="STDOUT" />
+    <appender-ref ref="STDOUT"/>
   </logger>
   <logger name="org.hibernate" additivity="false" level="info">
-    <appender-ref ref="STDOUT" />
+    <appender-ref ref="STDOUT"/>
   </logger>
 
   <appender name="BATCH_TROUBLESHOOTING" class="ch.qos.logback.core.rolling.RollingFileAppender">
-    <filter class="ch.qos.logback.classic.filter.ThresholdFilter"><level>INFO</level></filter>
+    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+      <level>INFO</level>
+    </filter>
     <file>${smile.basedir}/log/batch-troubleshooting.log</file>
     <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
       <fileNamePattern>${smile.basedir}/log/batch-troubleshooting.log.%i.gz</fileNamePattern>
@@ -59,7 +62,8 @@
       <maxFileSize>5MB</maxFileSize>
     </triggeringPolicy>
     <encoder>
-      <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n${log.stackfilter.pattern}</pattern>
+      <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n${log.stackfilter.pattern}
+      </pattern>
     </encoder>
   </appender>
   <logger name="ca.uhn.fhir.log.batch_troubleshooting" level="TRACE">
@@ -68,7 +72,7 @@
   <root level="info">
-    <appender-ref ref="STDOUT" />
+    <appender-ref ref="STDOUT"/>
   </root>
 </configuration>

View File

@@ -3,14 +3,14 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-fhir-bom</artifactId>
-  <version>5.6.0-PRE5-SNAPSHOT</version>
+  <version>5.6.0-PRE6-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>HAPI FHIR BOM</name>
 
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -133,7 +133,7 @@
       </dependency>
       <dependency>
         <groupId>${project.groupId}</groupId>
-        <artifactId>hapi-fhir-jpaserver-migrate</artifactId>
+        <artifactId>hapi-fhir-sql-migrate</artifactId>
         <version>${project.version}</version>
       </dependency>
       <dependency>

View File

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -35,7 +35,7 @@
     </dependency>
     <dependency>
       <groupId>ca.uhn.hapi.fhir</groupId>
-      <artifactId>hapi-fhir-jpaserver-migrate</artifactId>
+      <artifactId>hapi-fhir-sql-migrate</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>

View File

@@ -6,7 +6,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir-cli</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

View File

@@ -6,7 +6,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../../hapi-deployable-pom</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

View File

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -1,5 +1,5 @@
 ---
 type: change
 issue: 2446
-title: "DaoConfig setting for [Populate Identifier In Auto Created Placeholder Reference Targets](https://hapifhir.io/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets(boolean))
+title: "DaoConfig setting for [Populate Identifier In Auto Created Placeholder Reference Targets](https://hapifhir.io/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets(boolean))
 now defaults to `true`."

View File

@@ -9,7 +9,7 @@ See the [Modules Page](/docs/getting_started/modules.html) for more information
 * [Model API (R5)](/apidocs/hapi-fhir-structures-r5/) - hapi-fhir-structures-r5
 * [Client API](/apidocs/hapi-fhir-client/) - hapi-fhir-client
 * [Plain Server API](/apidocs/hapi-fhir-server/) - hapi-fhir-server
-* [JPA Server - API](/apidocs/hapi-fhir-jpaserver-api/) - hapi-fhir-jpaserver-api
+* [JPA Server - API](/apidocs/hapi-fhir-storage-api/) - hapi-fhir-storage-api
 * [JPA Server - Model](/apidocs/hapi-fhir-jpaserver-model/) - hapi-fhir-jpaserver-model
 * [JPA Server - Base](/apidocs/hapi-fhir-jpaserver-base/) - hapi-fhir-jpaserver-base
 * [Version Converter API](/apidocs/hapi-fhir-converter/) - hapi-fhir-converter

View File

@@ -54,9 +54,15 @@ In some cases, you may have references which are <i>Logical References</i>,
 which means that they act as an identifier and not necessarily as a literal
 web address.
 
-A common use for logical references is in references to conformance resources, such as ValueSets, StructureDefinitions, etc. For example, you might refer to the ValueSet `http://hl7.org/fhir/ValueSet/quantity-comparator` from your own resources. In this case, you are not necessarily telling the server that this is a real address that it should resolve, but rather that this is an identifier for a ValueSet where `ValueSet.url` has the given URI/URL.
+A common use for logical references is in references to conformance resources, such as ValueSets, StructureDefinitions,
+etc. For example, you might refer to the ValueSet `http://hl7.org/fhir/ValueSet/quantity-comparator` from your own
+resources. In this case, you are not necessarily telling the server that this is a real address that it should resolve,
+but rather that this is an identifier for a ValueSet where `ValueSet.url` has the given URI/URL.
 
-HAPI can be configured to treat certain URI/URL patterns as logical by using the DaoConfig#setTreatReferencesAsLogical property (see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setTreatReferencesAsLogical(java.util.Set))).
+HAPI can be configured to treat certain URI/URL patterns as logical by using the DaoConfig#setTreatReferencesAsLogical
+property (
+see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setTreatReferencesAsLogical(java.util.Set)))
+.
 
 For example:
@@ -131,5 +137,5 @@ X-Retry-On-Version-Conflict: retry; max-retries=100
 Delete with expunge submits a job to delete and expunge the requested resources. This is done in batches. If the DELETE
 ?_expunge=true syntax is used to trigger the delete expunge, then the batch size will be determined by the value
-of [Expunge Batch Size](/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
+of [Expunge Batch Size](/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
 property.
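Both knobs referenced on this page live on `DaoConfig`. A minimal configuration sketch, assuming a Spring `@Bean` setup; the URL patterns and batch size are illustrative, and `setExpungeBatchSize` is assumed as the setter counterpart of the `getExpungeBatchSize` accessor linked above:

```java
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import com.google.common.collect.Sets;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class FhirServerConfig {

    @Bean
    public DaoConfig daoConfig() {
        DaoConfig retVal = new DaoConfig();
        // Treat references matching these patterns as logical identifiers rather
        // than resolvable addresses (a trailing * acts as a prefix wildcard)
        retVal.setTreatReferencesAsLogical(Sets.newHashSet(
            "http://hl7.org/fhir/ValueSet/*",
            "http://example.com/identifiers/mrn")); // hypothetical identifier system
        // Batch size used when DELETE ?_expunge=true triggers a delete expunge
        retVal.setExpungeBatchSize(800);
        return retVal;
    }
}
```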

View File

@@ -24,9 +24,13 @@ The grouping of Observation resources by `Observation.code` means that the `$las
 # Deployment and Configuration
 
-The `$lastn` operation is disabled by default. The operation can be enabled by setting the DaoConfig#setLastNEnabled property (see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setLastNEnabled(boolean))).
+The `$lastn` operation is disabled by default. The operation can be enabled by setting the DaoConfig#setLastNEnabled
+property (
+see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setLastNEnabled(boolean)))
+.
 
-In addition, the Elasticsearch client service, `ElasticsearchSvcImpl` will need to be instantiated with parameters specifying how to connect to the Elasticsearch server, for e.g.:
+In addition, the Elasticsearch client service, `ElasticsearchSvcImpl` will need to be instantiated with parameters
+specifying how to connect to the Elasticsearch server, for e.g.:
 
 ```java
 @Bean()

View File

@@ -240,9 +240,18 @@ The complete raw contents of the resource is stored in the `RES_TEXT` column, us
 By default, the **HFJ_RESOURCE.RES_ID** column is used as the resource ID for all server-assigned IDs. For example, if a Patient resource is created in a completely empty database, it will be assigned the ID `Patient/1` by the server and RES_ID will have a value of 1.
 
-However, when client-assigned IDs are used, these may contain text values to allow a client to create an ID such as `Patient/ABC`. When a client-assigned ID is given to a resource, a row is created in the **HFJ_RESOURCE** table. When an **HFJ_FORCED_ID** row exists corresponding to the equivalent **HFJ_RESOURCE** row, the RES_ID value is no longer visible or usable by FHIR clients and it becomes purely an internal ID to the JPA server.
+However, when client-assigned IDs are used, these may contain text values to allow a client to create an ID such
+as `Patient/ABC`. When a client-assigned ID is given to a resource, a row is created in the **HFJ_RESOURCE** table. When
+an **HFJ_FORCED_ID** row exists corresponding to the equivalent **HFJ_RESOURCE** row, the RES_ID value is no longer
+visible or usable by FHIR clients and it becomes purely an internal ID to the JPA server.
 
-If the server has been configured with a [Resource Server ID Strategy](/apidocs/hapi-fhir-jpaserver-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum)) of [UUID](/apidocs/hapi-fhir-jpaserver-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.IdStrategyEnum.html#UUID), or the server has been configured with a [Resource Client ID Strategy](/apidocs/hapi-fhir-jpaserver-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceClientIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.ClientIdStrategyEnum)) of [ANY](/apidocs/hapi-fhir-jpaserver-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.ClientIdStrategyEnum.html#ANY) the server will create a Forced ID for all resources (not only resources having textual IDs).
+If the server has been configured with
+a [Resource Server ID Strategy](/apidocs/hapi-fhir-storage-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum))
+of [UUID](/apidocs/hapi-fhir-storage-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.IdStrategyEnum.html#UUID), or
+the server has been configured with
+a [Resource Client ID Strategy](/apidocs/hapi-fhir-storage-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceClientIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.ClientIdStrategyEnum))
+of [ANY](/apidocs/hapi-fhir-storage-api/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.ClientIdStrategyEnum.html#ANY)
+the server will create a Forced ID for all resources (not only resources having textual IDs).
 
 ## Columns
## Columns ## Columns

View File

@@ -25,7 +25,10 @@ One important caveat is that chaining is currently not supported when using this
 ## Enabling MDM Expansion
 
-On top of needing to instantiate an MDM module, you must enable this feature in the [DaoConfig](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html) bean, using the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setAllowMdmExpansion(boolean)) property.
+On top of needing to instantiate an MDM module, you must enable this feature in
+the [DaoConfig](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html) bean, using
+the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setAllowMdmExpansion(boolean))
+property.
 
 <div class="helpWarningCalloutBox">
 It is important to note that enabling this functionality can lead to incorrect data being returned by a request, if your MDM links are incorrect. Use with caution.
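A sketch of the enabling step itself, assuming the same kind of `DaoConfig` `@Bean` shown in other HAPI configuration examples:

```java
@Bean
public DaoConfig daoConfig() {
    DaoConfig retVal = new DaoConfig();
    retVal.setAllowMdmExpansion(true); // opt in deliberately; see the warning above
    return retVal;
}
```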

View File

@@ -601,7 +601,7 @@ This operation takes two optional Parameters.
       <td>0..1</td>
       <td>
         The number of links that should be deleted at a time. If ommitted, then the batch size will be determined by the value
-        of [Expunge Batch Size](/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
+        of [Expunge Batch Size](/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
         property.
       </td>
     </tr>

View File

@@ -53,9 +53,12 @@ In a partitioned repository, it is important to understand that only a single po
 This fact can have security implications:
 
-* A client might be blocked from creating `Patient/ABC` in the partition they have access to because this ID is already in use in another partition.
+* A client might be blocked from creating `Patient/ABC` in the partition they have access to because this ID is already
+  in use in another partition.
 
-* In a server using the default configuration of SEQUENTIAL_NUMERIC [Server ID Strategy](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum)) a client may be able to infer the IDs of resources in other partitions based on the ID they were assigned.
+* In a server using the default configuration of
+  SEQUENTIAL_NUMERIC [Server ID Strategy](/hapi-fhir/apidocs/hapi-fhir-storage-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum))
+  a client may be able to infer the IDs of resources in other partitions based on the ID they were assigned.
 
 These considerations can be addressed by using UUID Server ID Strategy, and disallowing client-assigned IDs.
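A hedged sketch of that mitigation on `DaoConfig`; the enum names follow the javadoc links above, and `NOT_ALLOWED` is an assumption for the client-ID strategy that rejects client-assigned IDs:

```java
@Bean
public DaoConfig daoConfig() {
    DaoConfig retVal = new DaoConfig();
    // Server-assigned IDs become opaque UUIDs, so nothing can be inferred across partitions
    retVal.setResourceServerIdStrategy(DaoConfig.IdStrategyEnum.UUID);
    // Reject client-assigned IDs entirely
    retVal.setResourceClientIdStrategy(DaoConfig.ClientIdStrategyEnum.NOT_ALLOWED);
    return retVal;
}
```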

View File

@@ -11,7 +11,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -118,7 +118,7 @@
     </dependency>
     <dependency>
       <groupId>ca.uhn.hapi.fhir</groupId>
-      <artifactId>hapi-fhir-jpaserver-api</artifactId>
+      <artifactId>hapi-fhir-storage-api</artifactId>
       <version>${project.version}</version>
     </dependency>
   </dependencies>

View File

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

hapi-fhir-jpa/pom.xml (new file, 119 lines)
View File

@@ -0,0 +1,119 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xmlns="http://maven.apache.org/POM/4.0.0"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>5.6.0-PRE6-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

  <modelVersion>4.0.0</modelVersion>
  <artifactId>hapi-fhir-jpa</artifactId>
  <description>This project contains utility classes for working with spring-hibernate jpa and the Quartz Scheduler.
  </description>

  <dependencies>
    <dependency>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-fhir-base</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>javax.annotation</groupId>
      <artifactId>javax.annotation-api</artifactId>
    </dependency>
    <!-- for date logging -->
    <dependency>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-fhir-structures-r4</artifactId>
      <version>${project.version}</version>
    </dependency>
    <!-- Hibernate -->
    <dependency>
      <groupId>org.hibernate</groupId>
      <artifactId>hibernate-entitymanager</artifactId>
    </dependency>
    <dependency>
      <groupId>org.hibernate</groupId>
      <artifactId>hibernate-java8</artifactId>
    </dependency>
    <dependency>
      <groupId>org.hibernate</groupId>
      <artifactId>hibernate-ehcache</artifactId>
      <exclusions>
        <exclusion>
          <groupId>net.sf.ehcache</groupId>
          <artifactId>ehcache-core</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.hibernate.validator</groupId>
      <artifactId>hibernate-validator</artifactId>
      <exclusions>
        <exclusion>
          <groupId>com.fasterxml</groupId>
          <artifactId>classmate</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.jboss.logging</groupId>
          <artifactId>jboss-logging</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.hibernate.search</groupId>
      <artifactId>hibernate-search-mapper-orm</artifactId>
      <exclusions>
        <exclusion>
          <groupId>org.apache.logging.log4j</groupId>
          <artifactId>log4j-api</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
    <!-- Spring -->
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-beans</artifactId>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-orm</artifactId>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-context</artifactId>
      <exclusions>
        <exclusion>
          <artifactId>xml-apis</artifactId>
          <groupId>xml-apis</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-context-support</artifactId>
    </dependency>
    <dependency>
      <groupId>net.ttddyy</groupId>
      <artifactId>datasource-proxy</artifactId>
    </dependency>
    <!-- Quartz Scheduler -->
    <dependency>
      <groupId>org.quartz-scheduler</groupId>
      <artifactId>quartz</artifactId>
    </dependency>
    <!-- Apache -->
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-collections4</artifactId>
    </dependency>
  </dependencies>
</project>

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.config;
 /*-
  * #%L
- * HAPI FHIR JPA Server
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%
@@ -23,7 +23,6 @@ package ca.uhn.fhir.jpa.config;
 import org.hibernate.cfg.AvailableSettings;
 import org.hibernate.query.criteria.LiteralHandlingMode;
 import org.hibernate.resource.jdbc.spi.PhysicalConnectionHandlingMode;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
 import org.springframework.orm.hibernate5.SpringBeanContainer;
 import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.model.sched;
 /*-
  * #%L
- * HAPI FHIR JPA Model
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.model.sched;
 /*-
  * #%L
- * HAPI FHIR JPA Model
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.model.sched;
 /*-
  * #%L
- * HAPI FHIR JPA Model
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.model.sched;
 /*-
  * #%L
- * HAPI FHIR JPA Model
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.model.sched;
 /*-
  * #%L
- * HAPI FHIR JPA Model
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.sched;
 /*-
  * #%L
- * HAPI FHIR JPA Server
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%
@@ -20,8 +20,7 @@ package ca.uhn.fhir.jpa.sched;
  * #L%
  */
 
-import org.hl7.fhir.r5.model.InstantType;
-import org.hl7.fhir.utilities.DateTimeUtil;
+import org.hl7.fhir.r4.model.InstantType;
 import org.quartz.JobKey;
 import org.quartz.spi.TriggerFiredBundle;
 import org.slf4j.Logger;

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.sched;
 /*-
  * #%L
- * HAPI FHIR JPA Server
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%
@@ -27,7 +27,15 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Sets;
 import org.apache.commons.lang3.Validate;
-import org.quartz.*;
+import org.quartz.JobDataMap;
+import org.quartz.JobKey;
+import org.quartz.ScheduleBuilder;
+import org.quartz.Scheduler;
+import org.quartz.SchedulerException;
+import org.quartz.SimpleScheduleBuilder;
+import org.quartz.Trigger;
+import org.quartz.TriggerBuilder;
+import org.quartz.TriggerKey;
 import org.quartz.impl.JobDetailImpl;
 import org.quartz.impl.StdSchedulerFactory;
 import org.quartz.impl.matchers.GroupMatcher;
@@ -41,8 +49,6 @@ import java.util.Set;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.stream.Collectors;
 
-import static org.quartz.impl.StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME;
-
 public abstract class BaseHapiScheduler implements IHapiScheduler {
 
   private static final Logger ourLog = LoggerFactory.getLogger(BaseHapiScheduler.class);
@@ -96,7 +102,7 @@ public abstract class BaseHapiScheduler implements IHapiScheduler {
   protected void setProperties() {
     addProperty("org.quartz.threadPool.threadCount", "4");
-    myProperties.setProperty(PROP_SCHED_INSTANCE_NAME, myInstanceName + "-" + nextSchedulerId());
+    myProperties.setProperty(StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME, myInstanceName + "-" + nextSchedulerId());
     addProperty("org.quartz.threadPool.threadNamePrefix", getThreadPrefix());
   }

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.sched;
 /*-
  * #%L
- * HAPI FHIR JPA Server
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.sched;
 /*-
  * #%L
- * HAPI FHIR JPA Server
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.sched;
 /*-
  * #%L
- * HAPI FHIR JPA Server
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.sched;
 /*-
  * #%L
- * HAPI FHIR JPA Server
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.sched;
 /*-
  * #%L
- * HAPI FHIR JPA Server
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util;
 /*-
  * #%L
- * HAPI FHIR JPA Server
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util;
 /*-
  * #%L
- * HAPI FHIR JPA Server
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%
@@ -24,7 +24,7 @@ import ca.uhn.fhir.util.StopWatch;
 import com.google.common.collect.Queues;
 import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
 import org.apache.commons.collections4.queue.CircularFifoQueue;
-import org.hl7.fhir.dstu3.model.InstantType;
+import org.hl7.fhir.r4.model.InstantType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util;
 /*-
  * #%L
- * HAPI FHIR JPA Server
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util;
 /*-
  * #%L
- * HAPI FHIR JPA Server
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util;
 /*-
  * #%L
- * HAPI FHIR JPA Server
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util;
 /*-
  * #%L
- * HAPI FHIR JPA Server
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%

View File

@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util;
 /*-
  * #%L
- * HAPI FHIR JPA Server
+ * hapi-fhir-jpa
  * %%
  * Copyright (C) 2014 - 2021 Smile CDR, Inc.
  * %%
@@ -20,7 +20,6 @@ package ca.uhn.fhir.jpa.util;
  * #L%
  */
 
-import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import com.google.common.collect.ImmutableSet;
@@ -347,16 +346,12 @@ public class TestUtil {
     }
   }
 
-  public static void sleepAtLeast(long theMillis) {
-    HapiTransactionService.sleepAtLeast(theMillis, true);
-  }
-
   public static InstantType getTimestamp(IBaseResource resource) {
     return new InstantType(new Date(resource.getMeta().getLastUpdated().getTime()));
   }
 
   public static void sleepOneClick() {
-    sleepAtLeast(1);
+    ca.uhn.fhir.util.TestUtil.sleepAtLeast(1);
   }

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE5-SNAPSHOT</version>
+    <version>5.6.0-PRE6-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -90,6 +90,11 @@
       <artifactId>hapi-fhir-jpaserver-searchparam</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>ca.uhn.hapi.fhir</groupId>
+      <artifactId>hapi-fhir-sql-migrate</artifactId>
+      <version>${project.version}</version>
+    </dependency>
     <dependency>
       <groupId>ca.uhn.hapi.fhir</groupId>
       <artifactId>hapi-fhir-jpaserver-model</artifactId>
@@ -147,13 +152,9 @@
     </dependency>
     <dependency>
       <groupId>ca.uhn.hapi.fhir</groupId>
-      <artifactId>hapi-fhir-jpaserver-batch</artifactId>
+      <artifactId>hapi-fhir-batch</artifactId>
       <version>${project.version}</version>
     </dependency>
-    <dependency>
-      <groupId>net.ttddyy</groupId>
-      <artifactId>datasource-proxy</artifactId>
-    </dependency>
 
     <dependency>
       <groupId>ch.qos.logback</groupId>
@@ -307,12 +308,6 @@
       <scope>provided</scope>
     </dependency>
 
-    <dependency>
-      <groupId>org.quartz-scheduler</groupId>
-      <artifactId>quartz</artifactId>
-    </dependency>
-
     <!-- Spring -->
     <dependency>
       <groupId>org.springframework</groupId>
@@ -324,24 +319,6 @@
         </exclusion>
       </exclusions>
     </dependency>
-    <dependency>
-      <groupId>org.springframework</groupId>
-      <artifactId>spring-orm</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.springframework</groupId>
-      <artifactId>spring-context</artifactId>
-      <exclusions>
-        <exclusion>
-          <artifactId>xml-apis</artifactId>
-          <groupId>xml-apis</groupId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.springframework</groupId>
-      <artifactId>spring-beans</artifactId>
-    </dependency>
     <dependency>
       <groupId>org.springframework.data</groupId>
       <artifactId>spring-data-jpa</artifactId>
@@ -376,40 +353,6 @@
       <groupId>org.springframework</groupId>
       <artifactId>spring-websocket</artifactId>
     </dependency>
-    <!-- Hibernate -->
-    <dependency>
-      <groupId>org.hibernate</groupId>
-      <artifactId>hibernate-entitymanager</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.hibernate</groupId>
-      <artifactId>hibernate-java8</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.hibernate</groupId>
-      <artifactId>hibernate-ehcache</artifactId>
-      <exclusions>
-        <exclusion>
-          <groupId>net.sf.ehcache</groupId>
-          <artifactId>ehcache-core</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.hibernate.validator</groupId>
-      <artifactId>hibernate-validator</artifactId>
-      <exclusions>
-        <exclusion>
-          <groupId>com.fasterxml</groupId>
-          <artifactId>classmate</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.jboss.logging</groupId>
-          <artifactId>jboss-logging</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
     <dependency>
       <groupId>javax.el</groupId>
       <artifactId>javax.el-api</artifactId>
@@ -426,16 +369,6 @@
       <artifactId>log4j-to-slf4j</artifactId>
     </dependency>
     <!-- Hibernate Search -->
-    <dependency>
-      <groupId>org.hibernate.search</groupId>
-      <artifactId>hibernate-search-mapper-orm</artifactId>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.logging.log4j</groupId>
-          <artifactId>log4j-api</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
     <dependency>
       <groupId>org.elasticsearch.client</groupId>
       <artifactId>elasticsearch-rest-high-level-client</artifactId>
@@ -467,10 +400,6 @@
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
     </dependency>
-    <dependency>
-      <groupId>org.apache.commons</groupId>
-      <artifactId>commons-collections4</artifactId>
-    </dependency>
 
     <dependency>
       <groupId>org.eclipse.jetty</groupId>

View File

@@ -29,10 +29,6 @@ import ca.uhn.fhir.jpa.reindex.job.ReindexJobConfig;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-
 @Configuration
 //When you define a new batch job, add it here.
 @Import({
@@ -45,60 +41,4 @@ import java.util.Set;
   MdmClearJobConfig.class
 })
 public class BatchJobsConfig {
-  /*
-   * Bulk Export
-   */
-  public static final String BULK_EXPORT_JOB_NAME = "bulkExportJob";
-  public static final String GROUP_BULK_EXPORT_JOB_NAME = "groupBulkExportJob";
-  public static final String PATIENT_BULK_EXPORT_JOB_NAME = "patientBulkExportJob";
-  public static final String BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP = "bulkExportGenerateResourceFilesStep";
-
-  /*
-   * Bulk Import
-   */
-  public static final String BULK_IMPORT_JOB_NAME = "bulkImportJob";
-  public static final String BULK_IMPORT_PROCESSING_STEP = "bulkImportProcessingStep";
-
-  /**
-   * This Set contains the step names across all job types that are appropriate for
-   * someone to look at the write count for that given step in order to determine the
-   * number of processed records.
-   *
-   * This is provided since a job might have multiple steps that the same data passes
-   * through, so you can't just sum up the total of all of them.
-   *
-   * For any given batch job type, there should only be one step name in this set
-   */
-  public static final Set<String> RECORD_PROCESSING_STEP_NAMES;
-
-  static {
-    HashSet<String> recordProcessingStepNames = new HashSet<>();
-    recordProcessingStepNames.add(BULK_IMPORT_PROCESSING_STEP);
-    recordProcessingStepNames.add(BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP);
-    RECORD_PROCESSING_STEP_NAMES = Collections.unmodifiableSet(recordProcessingStepNames);
-  }
-
-  /**
-   * Delete Expunge
-   */
-  public static final String DELETE_EXPUNGE_JOB_NAME = "deleteExpungeJob";
-
-  /**
-   * Reindex
-   */
-  public static final String REINDEX_JOB_NAME = "reindexJob";
-
-  /**
-   * Reindex Everything
-   */
-  public static final String REINDEX_EVERYTHING_JOB_NAME = "reindexEverythingJob";
-
-  /**
-   * MDM Clear
-   */
-  public static final String MDM_CLEAR_JOB_NAME = "mdmClearJob";
 }

View File

@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.batch.job;
  */
 
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
 import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
 import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
 import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
@@ -29,8 +30,6 @@ import org.springframework.batch.core.JobParameters;
 import org.springframework.batch.core.JobParametersInvalidException;
 import org.springframework.batch.core.JobParametersValidator;
 
-import static ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST;
-
 /**
  * This class will prevent a job from running any of the provided URLs are not valid on this server.
  */
@@ -50,7 +49,7 @@ public class MultiUrlJobParameterValidator implements JobParametersValidator {
       throw new JobParametersInvalidException("This job requires Parameters: [urlList]");
     }
 
-    RequestListJson requestListJson = RequestListJson.fromJson(theJobParameters.getString(JOB_PARAM_REQUEST_LIST));
+    RequestListJson requestListJson = RequestListJson.fromJson(theJobParameters.getString(BatchConstants.JOB_PARAM_REQUEST_LIST));
     for (PartitionedUrl partitionedUrl : requestListJson.getPartitionedUrls()) {
       String url = partitionedUrl.getUrl();
       try {
@@ -60,7 +59,7 @@ public class MultiUrlJobParameterValidator implements JobParametersValidator {
           throw new JobParametersInvalidException("The resource type " + resourceName + " is not supported on this server.");
         }
       } catch (UnsupportedOperationException e) {
-        throw new JobParametersInvalidException("Failed to parse " + theJobParameters.getString(JOB_PARAM_OPERATION_NAME) + " " + JOB_PARAM_REQUEST_LIST + " item " + url + ": " + e.getMessage());
+        throw new JobParametersInvalidException("Failed to parse " + theJobParameters.getString(JOB_PARAM_OPERATION_NAME) + " " + BatchConstants.JOB_PARAM_REQUEST_LIST + " item " + url + ": " + e.getMessage());
       }
     }
   }

View File

@@ -24,8 +24,8 @@ import ca.uhn.fhir.interceptor.api.HookParams;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
 import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
-import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
 import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
 import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
 import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
 import ca.uhn.fhir.jpa.batch.mdm.job.ReverseCronologicalBatchMdmLinkPidReader;
@@ -55,7 +55,7 @@ public class MdmClearJobSubmitterImpl implements IMdmClearJobSubmitter {
   @Autowired
   private IBatchJobSubmitter myBatchJobSubmitter;
   @Autowired
-  @Qualifier(BatchJobsConfig.MDM_CLEAR_JOB_NAME)
+  @Qualifier(BatchConstants.MDM_CLEAR_JOB_NAME)
   private Job myMdmClearJob;
 
   @Override

View File

@ -41,7 +41,7 @@ import org.springframework.context.annotation.Lazy;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.MDM_CLEAR_JOB_NAME; import static ca.uhn.fhir.jpa.batch.config.BatchConstants.MDM_CLEAR_JOB_NAME;
/**
 * Spring batch Job configuration file. Contains all necessary plumbing to run a

View File

@ -24,6 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.CommonBatchJobConfig; import ca.uhn.fhir.jpa.batch.CommonBatchJobConfig;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator; import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl; import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson; import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
@ -58,22 +59,17 @@ import java.util.function.Function;
/**
 * This Spring Batch reader takes the following parameters:
 * {@link BatchConstants#JOB_PARAM_REQUEST_LIST}: A list of URLs to search for along with the partitions those searches should be performed on
 * {@link BatchConstants#JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search. If omitted, {@link DaoConfig#getExpungeBatchSize} will be used.
 * {@link BatchConstants#JOB_PARAM_START_TIME}: The latest timestamp of entities to search for
 * <p>
 * The reader will return at most {@link BatchConstants#JOB_PARAM_BATCH_SIZE} pids every time it is called, or null
 * once no more matching entities are available. It returns the resources in reverse chronological order
 * and stores its position in the Spring Batch execution context under the key {@link BatchConstants#CURRENT_THRESHOLD_HIGH}
 * appended with "." and the index of the url list item it has gotten up to, so that restarted jobs
 * can pick up where they left off.
 */
public abstract class BaseReverseCronologicalBatchPidReader implements ItemReader<List<Long>>, ItemStream { public abstract class BaseReverseCronologicalBatchPidReader implements ItemReader<List<Long>>, ItemStream {
public static final String JOB_PARAM_REQUEST_LIST = "url-list";
public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
public static final String JOB_PARAM_START_TIME = "start-time";
public static final String CURRENT_URL_INDEX = "current.url-index";
public static final String CURRENT_THRESHOLD_HIGH = "current.threshold-high";
private static final Logger ourLog = LoggerFactory.getLogger(ReverseCronologicalBatchResourcePidReader.class); private static final Logger ourLog = LoggerFactory.getLogger(ReverseCronologicalBatchResourcePidReader.class);
private final BatchDateThresholdUpdater myBatchDateThresholdUpdater = new BatchDateThresholdUpdater(); private final BatchDateThresholdUpdater myBatchDateThresholdUpdater = new BatchDateThresholdUpdater();
private final Map<Integer, Date> myThresholdHighByUrlIndex = new HashMap<>(); private final Map<Integer, Date> myThresholdHighByUrlIndex = new HashMap<>();
@ -88,30 +84,30 @@ public abstract class BaseReverseCronologicalBatchPidReader implements ItemReade
private Date myStartTime; private Date myStartTime;
private static String highKey(int theIndex) { private static String highKey(int theIndex) {
return CURRENT_THRESHOLD_HIGH + "." + theIndex; return BatchConstants.CURRENT_THRESHOLD_HIGH + "." + theIndex;
} }
@Nonnull @Nonnull
public static JobParameters buildJobParameters(String theOperationName, Integer theBatchSize, RequestListJson theRequestListJson) { public static JobParameters buildJobParameters(String theOperationName, Integer theBatchSize, RequestListJson theRequestListJson) {
Map<String, JobParameter> map = new HashMap<>(); Map<String, JobParameter> map = new HashMap<>();
map.put(MultiUrlJobParameterValidator.JOB_PARAM_OPERATION_NAME, new JobParameter(theOperationName)); map.put(MultiUrlJobParameterValidator.JOB_PARAM_OPERATION_NAME, new JobParameter(theOperationName));
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST, new JobParameter(theRequestListJson.toJson())); map.put(BatchConstants.JOB_PARAM_REQUEST_LIST, new JobParameter(theRequestListJson.toJson()));
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM))); map.put(BatchConstants.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
if (theBatchSize != null) { if (theBatchSize != null) {
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue())); map.put(BatchConstants.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
} }
JobParameters parameters = new JobParameters(map); JobParameters parameters = new JobParameters(map);
return parameters; return parameters;
} }
@Autowired @Autowired
public void setRequestListJson(@Value("#{jobParameters['" + JOB_PARAM_REQUEST_LIST + "']}") String theRequestListJson) { public void setRequestListJson(@Value("#{jobParameters['" + BatchConstants.JOB_PARAM_REQUEST_LIST + "']}") String theRequestListJson) {
RequestListJson requestListJson = RequestListJson.fromJson(theRequestListJson); RequestListJson requestListJson = RequestListJson.fromJson(theRequestListJson);
myPartitionedUrls = requestListJson.getPartitionedUrls(); myPartitionedUrls = requestListJson.getPartitionedUrls();
} }
@Autowired @Autowired
public void setStartTime(@Value("#{jobParameters['" + JOB_PARAM_START_TIME + "']}") Date theStartTime) { public void setStartTime(@Value("#{jobParameters['" + BatchConstants.JOB_PARAM_START_TIME + "']}") Date theStartTime) {
myStartTime = theStartTime; myStartTime = theStartTime;
} }
@ -166,8 +162,8 @@ public abstract class BaseReverseCronologicalBatchPidReader implements ItemReade
@Override @Override
public void open(ExecutionContext executionContext) throws ItemStreamException { public void open(ExecutionContext executionContext) throws ItemStreamException {
if (executionContext.containsKey(CURRENT_URL_INDEX)) { if (executionContext.containsKey(BatchConstants.CURRENT_URL_INDEX)) {
myUrlIndex = new Long(executionContext.getLong(CURRENT_URL_INDEX)).intValue(); myUrlIndex = new Long(executionContext.getLong(BatchConstants.CURRENT_URL_INDEX)).intValue();
} }
for (int index = 0; index < myPartitionedUrls.size(); ++index) { for (int index = 0; index < myPartitionedUrls.size(); ++index) {
String key = highKey(index); String key = highKey(index);
@ -181,7 +177,7 @@ public abstract class BaseReverseCronologicalBatchPidReader implements ItemReade
@Override @Override
public void update(ExecutionContext executionContext) throws ItemStreamException { public void update(ExecutionContext executionContext) throws ItemStreamException {
executionContext.putLong(CURRENT_URL_INDEX, myUrlIndex); executionContext.putLong(BatchConstants.CURRENT_URL_INDEX, myUrlIndex);
for (int index = 0; index < myPartitionedUrls.size(); ++index) { for (int index = 0; index < myPartitionedUrls.size(); ++index) {
Date date = myThresholdHighByUrlIndex.get(index); Date date = myThresholdHighByUrlIndex.get(index);
if (date != null) { if (date != null) {
@ -199,7 +195,7 @@ public abstract class BaseReverseCronologicalBatchPidReader implements ItemReade
} }
@Autowired @Autowired
public void setBatchSize(@Value("#{jobParameters['" + JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) { public void setBatchSize(@Value("#{jobParameters['" + BatchConstants.JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
myBatchSize = theBatchSize; myBatchSize = theBatchSize;
} }
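
The checkpointing contract described in the javadoc above can be exercised in isolation. A minimal sketch, assuming key strings that mirror BatchConstants (the demo class is illustrative, not part of this commit):

import java.util.Date;

import org.springframework.batch.item.ExecutionContext;

public class CheckpointDemo {

	static final String CURRENT_URL_INDEX = "current.url-index"; // assumed key
	static final String CURRENT_THRESHOLD_HIGH = "current.threshold-high"; // assumed key

	// Same shape as highKey(int) above: one threshold entry per url-list item
	static String highKey(int theIndex) {
		return CURRENT_THRESHOLD_HIGH + "." + theIndex;
	}

	public static void main(String[] args) {
		ExecutionContext context = new ExecutionContext();

		// update(): record which url we are on and its high-water-mark date
		context.putLong(CURRENT_URL_INDEX, 1L);
		context.putLong(highKey(1), new Date().getTime());

		// open() after a restart: restore the same position
		if (context.containsKey(CURRENT_URL_INDEX)) {
			int urlIndex = (int) context.getLong(CURRENT_URL_INDEX);
			Date thresholdHigh = new Date(context.getLong(highKey(urlIndex)));
			System.out.println("Resuming url #" + urlIndex + " below " + thresholdHigh);
		}
	}
}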

View File

@ -25,6 +25,7 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.log.Logs; import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.dao.ISearchBuilder; import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory; import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
@ -57,7 +58,7 @@ public abstract class BaseBulkItemReader implements ItemReader<List<ResourcePers
@Value("#{stepExecutionContext['resourceType']}") @Value("#{stepExecutionContext['resourceType']}")
protected String myResourceType; protected String myResourceType;
@Value("#{jobExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}") @Value("#{jobExecutionContext['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
protected String myJobUUID; protected String myJobUUID;
@Value("#{jobParameters['" + BulkExportJobConfig.READ_CHUNK_PARAMETER + "']}") @Value("#{jobParameters['" + BulkExportJobConfig.READ_CHUNK_PARAMETER + "']}")
protected Long myReadChunkSize; protected Long myReadChunkSize;

View File

@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.bulk.export.job;
* #L% * #L%
*/ */
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc; import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.ExitStatus;
@ -49,9 +50,9 @@ public class BulkExportGenerateResourceFilesStepListener implements StepExecutio
public ExitStatus afterStep(StepExecution theStepExecution) { public ExitStatus afterStep(StepExecution theStepExecution) {
if (theStepExecution.getExitStatus().getExitCode().equals(ExitStatus.FAILED.getExitCode())) { if (theStepExecution.getExitStatus().getExitCode().equals(ExitStatus.FAILED.getExitCode())) {
//Try to fetch it from the parameters first, and if it doesn't exist, fetch it from the context. //Try to fetch it from the parameters first, and if it doesn't exist, fetch it from the context.
String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER); String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BatchConstants.JOB_UUID_PARAMETER);
if (jobUuid == null) { if (jobUuid == null) {
jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BulkExportJobConfig.JOB_UUID_PARAMETER); jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BatchConstants.JOB_UUID_PARAMETER);
} }
assert isNotBlank(jobUuid); assert isNotBlank(jobUuid);
String exitDescription = theStepExecution.getExitStatus().getExitDescription(); String exitDescription = theStepExecution.getExitStatus().getExitDescription();
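
Both failure listeners in this commit resolve the job UUID with the same parameter-then-context fallback, because CreateBulkExportEntityTasklet stashes the UUID in the JobExecution context when the job was created fresh. The lookup in isolation (key name assumed, helper class illustrative):

import org.springframework.batch.core.StepExecution;

public class JobUuidLookup {

	static final String JOB_UUID_PARAMETER = "jobUUID"; // assumed key

	static String findJobUuid(StepExecution theStepExecution) {
		// Prefer the job parameter; fall back to the JobExecution's context
		String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(JOB_UUID_PARAMETER);
		if (jobUuid == null) {
			jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(JOB_UUID_PARAMETER);
		}
		return jobUuid;
	}
}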

View File

@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.export.job;
* #L% * #L%
*/ */
import ca.uhn.fhir.jpa.batch.BatchJobsConfig; import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.processor.GoldenResourceAnnotatingProcessor; import ca.uhn.fhir.jpa.batch.processor.GoldenResourceAnnotatingProcessor;
import ca.uhn.fhir.jpa.batch.processor.PidToIBaseResourceProcessor; import ca.uhn.fhir.jpa.batch.processor.PidToIBaseResourceProcessor;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc; import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
@ -51,7 +51,6 @@ import java.util.List;
@Configuration @Configuration
public class BulkExportJobConfig { public class BulkExportJobConfig {
public static final String JOB_UUID_PARAMETER = "jobUUID";
public static final String READ_CHUNK_PARAMETER = "readChunkSize"; public static final String READ_CHUNK_PARAMETER = "readChunkSize";
public static final String EXPAND_MDM_PARAMETER = "expandMdm"; public static final String EXPAND_MDM_PARAMETER = "expandMdm";
public static final String GROUP_ID_PARAMETER = "groupId"; public static final String GROUP_ID_PARAMETER = "groupId";
@ -88,7 +87,7 @@ public class BulkExportJobConfig {
@Bean @Bean
@Lazy @Lazy
public Job bulkExportJob() { public Job bulkExportJob() {
return myJobBuilderFactory.get(BatchJobsConfig.BULK_EXPORT_JOB_NAME) return myJobBuilderFactory.get(BatchConstants.BULK_EXPORT_JOB_NAME)
.validator(bulkExportJobParameterValidator()) .validator(bulkExportJobParameterValidator())
.start(createBulkExportEntityStep()) .start(createBulkExportEntityStep())
.next(bulkExportPartitionStep()) .next(bulkExportPartitionStep())
@ -111,7 +110,7 @@ public class BulkExportJobConfig {
@Bean @Bean
@Lazy @Lazy
public Job groupBulkExportJob() { public Job groupBulkExportJob() {
return myJobBuilderFactory.get(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME) return myJobBuilderFactory.get(BatchConstants.GROUP_BULK_EXPORT_JOB_NAME)
.validator(groupBulkJobParameterValidator()) .validator(groupBulkJobParameterValidator())
.validator(bulkExportJobParameterValidator()) .validator(bulkExportJobParameterValidator())
.start(createBulkExportEntityStep()) .start(createBulkExportEntityStep())
@ -123,7 +122,7 @@ public class BulkExportJobConfig {
@Bean @Bean
@Lazy @Lazy
public Job patientBulkExportJob() { public Job patientBulkExportJob() {
return myJobBuilderFactory.get(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME) return myJobBuilderFactory.get(BatchConstants.PATIENT_BULK_EXPORT_JOB_NAME)
.validator(bulkExportJobParameterValidator()) .validator(bulkExportJobParameterValidator())
.start(createBulkExportEntityStep()) .start(createBulkExportEntityStep())
.next(patientPartitionStep()) .next(patientPartitionStep())
@ -169,7 +168,7 @@ public class BulkExportJobConfig {
@Bean @Bean
public Step bulkExportGenerateResourceFilesStep() { public Step bulkExportGenerateResourceFilesStep() {
return myStepBuilderFactory.get(BatchJobsConfig.BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP) return myStepBuilderFactory.get(BatchConstants.BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP)
.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time. .<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
.reader(bulkItemReader()) .reader(bulkItemReader())
.processor(myPidToIBaseResourceProcessor) .processor(myPidToIBaseResourceProcessor)
@ -217,7 +216,7 @@ public class BulkExportJobConfig {
@Bean @Bean
public Step bulkExportPartitionStep() { public Step bulkExportPartitionStep() {
return myStepBuilderFactory.get("partitionStep") return myStepBuilderFactory.get("partitionStep")
.partitioner(BatchJobsConfig.BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP, bulkExportResourceTypePartitioner()) .partitioner(BatchConstants.BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP, bulkExportResourceTypePartitioner())
.step(bulkExportGenerateResourceFilesStep()) .step(bulkExportGenerateResourceFilesStep())
.build(); .build();
} }
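
bulkExportPartitionStep() above uses the standard Spring Batch manager/worker partitioning pattern: the named worker step runs once per ExecutionContext produced by the Partitioner. A minimal sketch under assumed bean and step names (not the HAPI wiring):

import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class PartitionDemoConfig {

	@Autowired
	private StepBuilderFactory myStepBuilderFactory;

	@Autowired
	private Partitioner myPartitioner; // e.g. one partition per resource type

	@Bean
	public Step workerStep() {
		// Stand-in for bulkExportGenerateResourceFilesStep()
		return myStepBuilderFactory.get("workerStep")
			.tasklet((theContribution, theChunkContext) -> RepeatStatus.FINISHED)
			.build();
	}

	@Bean
	public Step partitionStep() {
		// The first argument names the step being partitioned; each partition's
		// ExecutionContext drives one workerStep execution
		return myStepBuilderFactory.get("partitionStep")
			.partitioner("workerStep", myPartitioner)
			.step(workerStep())
			.build();
	}
}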

View File

@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.bulk.export.job;
* #L% * #L%
*/ */
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao; import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.Constants;
@ -34,8 +35,6 @@ import org.springframework.transaction.support.TransactionTemplate;
import java.util.Arrays; import java.util.Arrays;
import java.util.Optional; import java.util.Optional;
import static org.slf4j.LoggerFactory.getLogger;
/**
 * This class will prevent a job from running if the UUID does not exist or is invalid.
 */
@ -59,7 +58,7 @@ public class BulkExportJobParameterValidator implements JobParametersValidator {
if (readChunkSize == null || readChunkSize < 1) { if (readChunkSize == null || readChunkSize < 1) {
errorBuilder.append("There must be a valid number for readChunkSize, which is at least 1. "); errorBuilder.append("There must be a valid number for readChunkSize, which is at least 1. ");
} }
String jobUUID = theJobParameters.getString(BulkExportJobConfig.JOB_UUID_PARAMETER); String jobUUID = theJobParameters.getString(BatchConstants.JOB_UUID_PARAMETER);
Optional<BulkExportJobEntity> oJob = myBulkExportJobDao.findByJobId(jobUUID); Optional<BulkExportJobEntity> oJob = myBulkExportJobDao.findByJobId(jobUUID);
if (!StringUtils.isBlank(jobUUID) && !oJob.isPresent()) { if (!StringUtils.isBlank(jobUUID) && !oJob.isPresent()) {
errorBuilder.append("There is no persisted job that exists with UUID: " + jobUUID + ". "); errorBuilder.append("There is no persisted job that exists with UUID: " + jobUUID + ". ");

View File

@ -20,10 +20,11 @@ package ca.uhn.fhir.jpa.bulk.export.job;
* #L% * #L%
*/ */
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions; import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.springframework.batch.core.StepContribution; import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.scope.context.ChunkContext;
@ -42,18 +43,27 @@ public class CreateBulkExportEntityTasklet implements Tasklet {
@Autowired @Autowired
private IBulkDataExportSvc myBulkDataExportSvc; private IBulkDataExportSvc myBulkDataExportSvc;
public static void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) {
theChunkContext
.getStepContext()
.getStepExecution()
.getJobExecution()
.getExecutionContext()
.putString(BatchConstants.JOB_UUID_PARAMETER, theJobUUID);
}
@Override @Override
public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) throws Exception { public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) throws Exception {
Map<String, Object> jobParameters = theChunkContext.getStepContext().getJobParameters(); Map<String, Object> jobParameters = theChunkContext.getStepContext().getJobParameters();
//We can leave early if they provided us with an existing job. //We can leave early if they provided us with an existing job.
if (jobParameters.containsKey(BulkExportJobConfig.JOB_UUID_PARAMETER)) { if (jobParameters.containsKey(BatchConstants.JOB_UUID_PARAMETER)) {
addUUIDToJobContext(theChunkContext, (String)jobParameters.get(BulkExportJobConfig.JOB_UUID_PARAMETER)); addUUIDToJobContext(theChunkContext, (String) jobParameters.get(BatchConstants.JOB_UUID_PARAMETER));
return RepeatStatus.FINISHED; return RepeatStatus.FINISHED;
} else { } else {
String resourceTypes = (String)jobParameters.get("resourceTypes"); String resourceTypes = (String) jobParameters.get("resourceTypes");
Date since = (Date)jobParameters.get("since"); Date since = (Date) jobParameters.get("since");
String filters = (String)jobParameters.get("filters"); String filters = (String) jobParameters.get("filters");
Set<String> filterSet; Set<String> filterSet;
if (StringUtils.isBlank(filters)) { if (StringUtils.isBlank(filters)) {
filterSet = null; filterSet = null;
@ -86,13 +96,4 @@ public class CreateBulkExportEntityTasklet implements Tasklet {
return RepeatStatus.FINISHED; return RepeatStatus.FINISHED;
} }
} }
public static void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) {
theChunkContext
.getStepContext()
.getStepExecution()
.getJobExecution()
.getExecutionContext()
.putString(BulkExportJobConfig.JOB_UUID_PARAMETER, theJobUUID);
}
} }
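
The addUUIDToJobContext() helper moved to the top of the class above exists so the UUID is discoverable in the JobExecution's ExecutionContext even when it arrived only as a job parameter. A standalone sketch of that pattern (key name assumed):

import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;

public class StashJobUuidTasklet implements Tasklet {

	static final String JOB_UUID_PARAMETER = "jobUUID"; // assumed key

	@Override
	public RepeatStatus execute(StepContribution theContribution, ChunkContext theChunkContext) {
		Object uuid = theChunkContext.getStepContext().getJobParameters().get(JOB_UUID_PARAMETER);
		if (uuid != null) {
			// Copy the parameter into the job-wide context so later steps
			// and listeners can read it back
			theChunkContext
				.getStepContext()
				.getStepExecution()
				.getJobExecution()
				.getExecutionContext()
				.putString(JOB_UUID_PARAMETER, (String) uuid);
		}
		return RepeatStatus.FINISHED;
	}
}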

View File

@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.bulk.export.job;
* #L% * #L%
*/ */
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc; import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.springframework.batch.core.partition.support.Partitioner; import org.springframework.batch.core.partition.support.Partitioner;
@ -60,7 +61,7 @@ public class ResourceTypePartitioner implements Partitioner {
// The worker step needs to know which parent job it is processing for, and which collection entity it will be // The worker step needs to know which parent job it is processing for, and which collection entity it will be
// attaching its results to. // attaching its results to.
context.putString(BulkExportJobConfig.JOB_UUID_PARAMETER, myJobUUID); context.putString(BatchConstants.JOB_UUID_PARAMETER, myJobUUID);
context.putLong("bulkExportCollectionEntityId", collectionEntityId); context.putLong("bulkExportCollectionEntityId", collectionEntityId);
// Name the partition based on the resource type // Name the partition based on the resource type

View File

@ -30,9 +30,8 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions; import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
@ -49,6 +48,7 @@ import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails; import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@ -114,15 +114,15 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
private IBatchJobSubmitter myJobSubmitter; private IBatchJobSubmitter myJobSubmitter;
@Autowired @Autowired
@Qualifier(BatchJobsConfig.BULK_EXPORT_JOB_NAME) @Qualifier(BatchConstants.BULK_EXPORT_JOB_NAME)
private org.springframework.batch.core.Job myBulkExportJob; private org.springframework.batch.core.Job myBulkExportJob;
@Autowired @Autowired
@Qualifier(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME) @Qualifier(BatchConstants.GROUP_BULK_EXPORT_JOB_NAME)
private org.springframework.batch.core.Job myGroupBulkExportJob; private org.springframework.batch.core.Job myGroupBulkExportJob;
@Autowired @Autowired
@Qualifier(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME) @Qualifier(BatchConstants.PATIENT_BULK_EXPORT_JOB_NAME)
private org.springframework.batch.core.Job myPatientBulkExportJob; private org.springframework.batch.core.Job myPatientBulkExportJob;
private Set<String> myCompartmentResources; private Set<String> myCompartmentResources;
@ -243,7 +243,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
private void processJob(BulkExportJobEntity theBulkExportJobEntity) { private void processJob(BulkExportJobEntity theBulkExportJobEntity) {
String theJobUuid = theBulkExportJobEntity.getJobId(); String theJobUuid = theBulkExportJobEntity.getJobId();
JobParametersBuilder parameters = new JobParametersBuilder() JobParametersBuilder parameters = new JobParametersBuilder()
.addString(BulkExportJobConfig.JOB_UUID_PARAMETER, theJobUuid) .addString(BatchConstants.JOB_UUID_PARAMETER, theJobUuid)
.addLong(BulkExportJobConfig.READ_CHUNK_PARAMETER, READ_CHUNK_SIZE); .addLong(BulkExportJobConfig.READ_CHUNK_PARAMETER, READ_CHUNK_SIZE);
ourLog.info("Submitting bulk export job {} to job scheduler", theJobUuid); ourLog.info("Submitting bulk export job {} to job scheduler", theJobUuid);

View File

@ -20,10 +20,9 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L% * #L%
*/ */
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.elasticsearch.client.enrich.ExecutePolicyResponse;
import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener; import org.springframework.batch.core.StepExecutionListener;
@ -39,7 +38,7 @@ public class ActivateBulkImportEntityStepListener implements StepExecutionListen
@Override @Override
public void beforeStep(StepExecution theStepExecution) { public void beforeStep(StepExecution theStepExecution) {
String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER); String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BatchConstants.JOB_UUID_PARAMETER);
if (jobUuid != null) { if (jobUuid != null) {
myBulkImportDaoSvc.setJobToStatus(jobUuid, BulkImportJobStatusEnum.RUNNING); myBulkImportDaoSvc.setJobToStatus(jobUuid, BulkImportJobStatusEnum.RUNNING);
} }

View File

@ -21,8 +21,8 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
*/ */
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.log.Logs; import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson; import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord; import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
@ -42,7 +42,7 @@ public class BulkImportFileReader implements ItemReader<ParsedBulkImportRecord>
private IBulkDataImportSvc myBulkDataImportSvc; private IBulkDataImportSvc myBulkDataImportSvc;
@Autowired @Autowired
private FhirContext myFhirContext; private FhirContext myFhirContext;
@Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}") @Value("#{stepExecutionContext['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
private String myJobUuid; private String myJobUuid;
@Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}") @Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}")
private int myFileIndex; private int myFileIndex;

View File

@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum; import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord; import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails; import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
@ -39,7 +39,7 @@ import java.util.List;
public class BulkImportFileWriter implements ItemWriter<ParsedBulkImportRecord> { public class BulkImportFileWriter implements ItemWriter<ParsedBulkImportRecord> {
private static final Logger ourLog = LoggerFactory.getLogger(BulkImportFileWriter.class); private static final Logger ourLog = LoggerFactory.getLogger(BulkImportFileWriter.class);
@Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}") @Value("#{stepExecutionContext['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
private String myJobUuid; private String myJobUuid;
@Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}") @Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}")
private int myFileIndex; private int myFileIndex;

View File

@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L% * #L%
*/ */
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.BatchStatus;
@ -36,7 +36,7 @@ import org.springframework.beans.factory.annotation.Value;
*/ */
public class BulkImportJobCloser implements Tasklet { public class BulkImportJobCloser implements Tasklet {
@Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}") @Value("#{jobParameters['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
private String myJobUUID; private String myJobUUID;
@Autowired @Autowired

View File

@ -21,14 +21,13 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
*/ */
import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.BatchConstants; import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord; import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job; import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersValidator; import org.springframework.batch.core.JobParametersValidator;
import org.springframework.batch.core.Step; import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.JobRegistry;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.JobScope; import org.springframework.batch.core.configuration.annotation.JobScope;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
@ -50,8 +49,8 @@ import org.springframework.retry.policy.TimeoutRetryPolicy;
import javax.batch.api.chunk.listener.RetryProcessListener; import javax.batch.api.chunk.listener.RetryProcessListener;
import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.BULK_IMPORT_JOB_NAME; import static ca.uhn.fhir.jpa.batch.config.BatchConstants.BULK_IMPORT_JOB_NAME;
import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.BULK_IMPORT_PROCESSING_STEP; import static ca.uhn.fhir.jpa.batch.config.BatchConstants.BULK_IMPORT_PROCESSING_STEP;
/**
 * Spring batch Job configuration file. Contains all necessary plumbing to run a

View File

@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L% * #L%
*/ */
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao; import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@ -52,7 +52,7 @@ public class BulkImportJobParameterValidator implements JobParametersValidator {
TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager); TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
String errorMessage = txTemplate.execute(tx -> { String errorMessage = txTemplate.execute(tx -> {
StringBuilder errorBuilder = new StringBuilder(); StringBuilder errorBuilder = new StringBuilder();
String jobUUID = theJobParameters.getString(BulkExportJobConfig.JOB_UUID_PARAMETER); String jobUUID = theJobParameters.getString(BatchConstants.JOB_UUID_PARAMETER);
Optional<BulkImportJobEntity> oJob = myBulkImportJobDao.findByJobId(jobUUID); Optional<BulkImportJobEntity> oJob = myBulkImportJobDao.findByJobId(jobUUID);
if (!StringUtils.isBlank(jobUUID) && !oJob.isPresent()) { if (!StringUtils.isBlank(jobUUID) && !oJob.isPresent()) {
errorBuilder.append("There is no persisted job that exists with UUID: "); errorBuilder.append("There is no persisted job that exists with UUID: ");

View File

@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L% * #L%
*/ */
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson; import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import org.slf4j.Logger; import org.slf4j.Logger;
@ -43,7 +43,7 @@ public class BulkImportPartitioner implements Partitioner {
private static final Logger ourLog = getLogger(BulkImportPartitioner.class); private static final Logger ourLog = getLogger(BulkImportPartitioner.class);
@Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}") @Value("#{jobParameters['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
private String myJobUUID; private String myJobUUID;
@Autowired @Autowired
@ -61,7 +61,7 @@ public class BulkImportPartitioner implements Partitioner {
String fileDescription = myBulkDataImportSvc.getFileDescription(myJobUUID, i); String fileDescription = myBulkDataImportSvc.getFileDescription(myJobUUID, i);
ExecutionContext context = new ExecutionContext(); ExecutionContext context = new ExecutionContext();
context.putString(BulkExportJobConfig.JOB_UUID_PARAMETER, myJobUUID); context.putString(BatchConstants.JOB_UUID_PARAMETER, myJobUUID);
context.putInt(FILE_INDEX, i); context.putInt(FILE_INDEX, i);
context.put(ROW_PROCESSING_MODE, job.getProcessingMode()); context.put(ROW_PROCESSING_MODE, job.getProcessingMode());
context.put(JOB_DESCRIPTION, job.getJobDescription()); context.put(JOB_DESCRIPTION, job.getJobDescription());
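
The partitioner above emits one ExecutionContext per input file, each carrying the job UUID and the file's index. The scheme in isolation, with assumed key names (the real values live in BatchConstants and BulkImportPartitioner):

import java.util.HashMap;
import java.util.Map;

import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.item.ExecutionContext;

public class FileIndexPartitioner implements Partitioner {

	static final String JOB_UUID_PARAMETER = "jobUUID"; // assumed key
	static final String FILE_INDEX = "fileIndex"; // assumed key

	private final String myJobUuid;
	private final int myFileCount;

	public FileIndexPartitioner(String theJobUuid, int theFileCount) {
		myJobUuid = theJobUuid;
		myFileCount = theFileCount;
	}

	@Override
	public Map<String, ExecutionContext> partition(int theGridSize) {
		Map<String, ExecutionContext> partitions = new HashMap<>();
		for (int i = 0; i < myFileCount; i++) {
			ExecutionContext context = new ExecutionContext();
			context.putString(JOB_UUID_PARAMETER, myJobUuid);
			context.putInt(FILE_INDEX, i);
			// Each named partition becomes one worker-step execution
			partitions.put("file-" + i, context);
		}
		return partitions;
	}
}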

View File

@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L% * #L%
*/ */
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.ExitStatus;
@ -52,9 +52,9 @@ public class BulkImportStepListener implements StepExecutionListener, RetryListe
public ExitStatus afterStep(StepExecution theStepExecution) { public ExitStatus afterStep(StepExecution theStepExecution) {
if (theStepExecution.getExitStatus().getExitCode().equals(ExitStatus.FAILED.getExitCode())) { if (theStepExecution.getExitStatus().getExitCode().equals(ExitStatus.FAILED.getExitCode())) {
//Try to fetch it from the parameters first, and if it doesn't exist, fetch it from the context. //Try to fetch it from the parameters first, and if it doesn't exist, fetch it from the context.
String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER); String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BatchConstants.JOB_UUID_PARAMETER);
if (jobUuid == null) { if (jobUuid == null) {
jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BulkExportJobConfig.JOB_UUID_PARAMETER); jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BatchConstants.JOB_UUID_PARAMETER);
} }
assert isNotBlank(jobUuid); assert isNotBlank(jobUuid);

View File

@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L% * #L%
*/ */
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.job.CreateBulkExportEntityTasklet; import ca.uhn.fhir.jpa.bulk.export.job.CreateBulkExportEntityTasklet;
import ca.uhn.fhir.util.ValidateUtil; import ca.uhn.fhir.util.ValidateUtil;
import org.springframework.batch.core.StepContribution; import org.springframework.batch.core.StepContribution;
@ -37,8 +37,8 @@ public class CreateBulkImportEntityTasklet implements Tasklet {
Map<String, Object> jobParameters = theChunkContext.getStepContext().getJobParameters(); Map<String, Object> jobParameters = theChunkContext.getStepContext().getJobParameters();
//We can leave early if they provided us with an existing job. //We can leave early if they provided us with an existing job.
ValidateUtil.isTrueOrThrowInvalidRequest(jobParameters.containsKey(BulkExportJobConfig.JOB_UUID_PARAMETER), "Job doesn't have a UUID"); ValidateUtil.isTrueOrThrowInvalidRequest(jobParameters.containsKey(BatchConstants.JOB_UUID_PARAMETER), "Job doesn't have a UUID");
CreateBulkExportEntityTasklet.addUUIDToJobContext(theChunkContext, (String) jobParameters.get(BulkExportJobConfig.JOB_UUID_PARAMETER)); CreateBulkExportEntityTasklet.addUUIDToJobContext(theChunkContext, (String) jobParameters.get(BatchConstants.JOB_UUID_PARAMETER));
return RepeatStatus.FINISHED; return RepeatStatus.FINISHED;
} }

View File

@ -21,8 +21,8 @@ package ca.uhn.fhir.jpa.bulk.imprt.svc;
*/ */
import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.log.Logs; import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
@ -79,7 +79,7 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
@Autowired @Autowired
private IBatchJobSubmitter myJobSubmitter; private IBatchJobSubmitter myJobSubmitter;
@Autowired @Autowired
@Qualifier(BatchJobsConfig.BULK_IMPORT_JOB_NAME) @Qualifier(BatchConstants.BULK_IMPORT_JOB_NAME)
private org.springframework.batch.core.Job myBulkImportJob; private org.springframework.batch.core.Job myBulkImportJob;
@Autowired @Autowired
private DaoConfig myDaoConfig; private DaoConfig myDaoConfig;
@ -271,7 +271,7 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
ValidateUtil.isTrueOrThrowInvalidRequest(batchSize > 0, "Batch size must be positive"); ValidateUtil.isTrueOrThrowInvalidRequest(batchSize > 0, "Batch size must be positive");
JobParametersBuilder parameters = new JobParametersBuilder() JobParametersBuilder parameters = new JobParametersBuilder()
.addString(BulkExportJobConfig.JOB_UUID_PARAMETER, jobId) .addString(BatchConstants.JOB_UUID_PARAMETER, jobId)
.addLong(BulkImportJobConfig.JOB_PARAM_COMMIT_INTERVAL, (long) batchSize); .addLong(BulkImportJobConfig.JOB_PARAM_COMMIT_INTERVAL, (long) batchSize);
if (isNotBlank(theBulkExportJobEntity.getJobDescription())) { if (isNotBlank(theBulkExportJobEntity.getJobDescription())) {

View File

@ -11,9 +11,9 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao; import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.batch.BatchConstants;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig; import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.config.NonPersistedBatchConfigurer; import ca.uhn.fhir.jpa.batch.config.NonPersistedBatchConfigurer;
import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator; import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
import ca.uhn.fhir.jpa.batch.mdm.MdmBatchJobSubmitterFactoryImpl; import ca.uhn.fhir.jpa.batch.mdm.MdmBatchJobSubmitterFactoryImpl;

View File

@ -32,6 +32,7 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException; import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.slf4j.Logger; import org.slf4j.Logger;
@ -118,7 +119,7 @@ public class HapiTransactionService {
theTransactionDetails.clearUserData(BaseHapiFhirDao.XACT_USERDATA_KEY_EXISTING_SEARCH_PARAMS); theTransactionDetails.clearUserData(BaseHapiFhirDao.XACT_USERDATA_KEY_EXISTING_SEARCH_PARAMS);
double sleepAmount = (250.0d * i) * Math.random(); double sleepAmount = (250.0d * i) * Math.random();
long sleepAmountLong = (long) sleepAmount; long sleepAmountLong = (long) sleepAmount;
sleepAtLeast(sleepAmountLong, false); TestUtil.sleepAtLeast(sleepAmountLong, false);
ourLog.info("About to start a transaction retry due to conflict or constraint error. Sleeping {}ms first.", sleepAmountLong); ourLog.info("About to start a transaction retry due to conflict or constraint error. Sleeping {}ms first.", sleepAmountLong);
continue; continue;
@ -164,22 +165,4 @@ public class HapiTransactionService {
super(theThrowable); super(theThrowable);
} }
} }
@SuppressWarnings("BusyWait")
public static void sleepAtLeast(long theMillis, boolean theLogProgress) {
long start = System.currentTimeMillis();
while (System.currentTimeMillis() <= start + theMillis) {
try {
long timeSinceStarted = System.currentTimeMillis() - start;
long timeToSleep = Math.max(0, theMillis - timeSinceStarted);
if (theLogProgress) {
ourLog.info("Sleeping for {}ms", timeToSleep);
}
Thread.sleep(timeToSleep);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
ourLog.error("Interrupted", e);
}
}
}
} }
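
The retry path above sleeps a random duration of up to 250ms multiplied by the attempt number before re-running the transaction, spreading competing writers apart so a version-conflict retry is unlikely to collide again. The backoff in isolation (demo class illustrative):

public class RetryBackoffDemo {

	public static void main(String[] args) throws InterruptedException {
		int maxRetries = 3;
		for (int i = 1; i <= maxRetries; i++) {
			// Randomized linear backoff: attempt 1 waits up to 250ms,
			// attempt 2 up to 500ms, and so on
			long sleepAmount = (long) ((250.0d * i) * Math.random());
			System.out.println("Attempt " + i + ": sleeping " + sleepAmount + "ms before retrying");
			Thread.sleep(sleepAmount);
			// ... re-run the transactional work here; break on success
		}
	}
}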

View File

@ -25,8 +25,8 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator; import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson; import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader; import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
@ -52,7 +52,7 @@ public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter
@Autowired @Autowired
private IBatchJobSubmitter myBatchJobSubmitter; private IBatchJobSubmitter myBatchJobSubmitter;
@Autowired @Autowired
@Qualifier(BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME) @Qualifier(BatchConstants.DELETE_EXPUNGE_JOB_NAME)
private Job myDeleteExpungeJob; private Job myDeleteExpungeJob;
@Autowired @Autowired
FhirContext myFhirContext; FhirContext myFhirContext;
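
As with the other submitters touched by this commit, the wiring above relies on bean-name qualifiers because several Spring Batch Job beans coexist in the context. The pattern in isolation (job name value assumed, holder class illustrative):

import org.springframework.batch.core.Job;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;

@Service
public class DemoJobHolder {

	public static final String DELETE_EXPUNGE_JOB_NAME = "deleteExpungeJob"; // assumed value

	@Autowired
	@Qualifier(DELETE_EXPUNGE_JOB_NAME)
	private Job myDeleteExpungeJob; // resolves the Job bean registered under that name
}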

View File

@ -20,12 +20,10 @@ package ca.uhn.fhir.jpa.delete.job;
* #L% * #L%
*/ */
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator; import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener; import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader; import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter; import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.batch.core.Job; import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step; import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
@ -39,7 +37,7 @@ import org.springframework.context.annotation.Lazy;
import java.util.List; import java.util.List;
import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME; import static ca.uhn.fhir.jpa.batch.config.BatchConstants.DELETE_EXPUNGE_JOB_NAME;
/**
 * Spring batch Job configuration file. Contains all necessary plumbing to run a

View File

@ -29,9 +29,7 @@ import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.mdm.log.Logs; import ca.uhn.fhir.mdm.log.Logs;
import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.ReferenceParam; import ca.uhn.fhir.rest.param.ReferenceParam;
import joptsimple.internal.Strings;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
@ -39,8 +37,6 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import static org.slf4j.LoggerFactory.getLogger;
/**
 * This interceptor replaces the auto-generated CapabilityStatement that is generated
 * by the HAPI FHIR Server with a static hard-coded resource.

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.migrate.tasks;
/*- /*-
* #%L * #%L
* HAPI FHIR JPA Server - Migration * HAPI FHIR JPA Server
* %% * %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc. * Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %% * %%

View File

@ -50,13 +50,13 @@ public class BaseJpaSystemProvider<T, MT> extends BaseJpaProvider implements IJp
 * @deprecated
 */
@Deprecated @Deprecated
public static final String MARK_ALL_RESOURCES_FOR_REINDEXING = "$mark-all-resources-for-reindexing"; public static final String MARK_ALL_RESOURCES_FOR_REINDEXING = ProviderConstants.MARK_ALL_RESOURCES_FOR_REINDEXING;
/**
 * @see ProviderConstants#OPERATION_REINDEX
 * @deprecated
 */
@Deprecated @Deprecated
public static final String PERFORM_REINDEXING_PASS = "$perform-reindexing-pass"; public static final String PERFORM_REINDEXING_PASS = ProviderConstants.PERFORM_REINDEXING_PASS;
private IFhirSystemDao<T, MT> myDao; private IFhirSystemDao<T, MT> myDao;
@Autowired @Autowired

View File

@ -22,8 +22,8 @@ package ca.uhn.fhir.jpa.reindex;
import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator; import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson; import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.batch.reader.CronologicalBatchAllResourcePidReader; import ca.uhn.fhir.jpa.batch.reader.CronologicalBatchAllResourcePidReader;
@ -53,10 +53,10 @@ public class ReindexJobSubmitterImpl implements IReindexJobSubmitter {
@Autowired @Autowired
private IBatchJobSubmitter myBatchJobSubmitter; private IBatchJobSubmitter myBatchJobSubmitter;
@Autowired @Autowired
@Qualifier(BatchJobsConfig.REINDEX_JOB_NAME) @Qualifier(BatchConstants.REINDEX_JOB_NAME)
private Job myReindexJob; private Job myReindexJob;
@Autowired @Autowired
@Qualifier(BatchJobsConfig.REINDEX_EVERYTHING_JOB_NAME) @Qualifier(BatchConstants.REINDEX_EVERYTHING_JOB_NAME)
private Job myReindexEverythingJob; private Job myReindexEverythingJob;
@Override @Override

View File

@ -35,7 +35,7 @@ import org.springframework.context.annotation.Lazy;
import java.util.List; import java.util.List;
import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.REINDEX_EVERYTHING_JOB_NAME; import static ca.uhn.fhir.jpa.batch.config.BatchConstants.REINDEX_EVERYTHING_JOB_NAME;
/**
 * Spring batch Job configuration file. Contains all necessary plumbing to run a

View File

@ -20,18 +20,14 @@ package ca.uhn.fhir.jpa.reindex.job;
* #L% * #L%
*/ */
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator; import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener; import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader; import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.batch.core.Job; import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step; import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.listener.ExecutionContextPromotionListener; import org.springframework.batch.core.listener.ExecutionContextPromotionListener;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
@ -39,7 +35,7 @@ import org.springframework.context.annotation.Lazy;
import java.util.List; import java.util.List;
import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.REINDEX_JOB_NAME; import static ca.uhn.fhir.jpa.batch.config.BatchConstants.REINDEX_JOB_NAME;
/**
 * Spring batch Job configuration file. Contains all necessary plumbing to run a

View File

@@ -6,8 +6,8 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
-import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobParametersBuilder;
import ca.uhn.fhir.jpa.bulk.export.job.GroupBulkExportJobParametersBuilder;
@@ -102,15 +102,15 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
private BatchJobHelper myBatchJobHelper;
@Autowired
-@Qualifier(BatchJobsConfig.BULK_EXPORT_JOB_NAME)
+@Qualifier(BatchConstants.BULK_EXPORT_JOB_NAME)
private Job myBulkJob;
@Autowired
-@Qualifier(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME)
+@Qualifier(BatchConstants.GROUP_BULK_EXPORT_JOB_NAME)
private Job myGroupBulkJob;
@Autowired
-@Qualifier(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME)
+@Qualifier(BatchConstants.PATIENT_BULK_EXPORT_JOB_NAME)
private Job myPatientBulkJob;
private IIdType myPatientGroupId;
@@ -328,11 +328,11 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
private void awaitAllBulkJobCompletions() {
myBatchJobHelper.awaitAllBulkJobCompletions(
-BatchJobsConfig.BULK_EXPORT_JOB_NAME,
-BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME,
-BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME,
-BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME,
-BatchJobsConfig.MDM_CLEAR_JOB_NAME
+BatchConstants.BULK_EXPORT_JOB_NAME,
+BatchConstants.PATIENT_BULK_EXPORT_JOB_NAME,
+BatchConstants.GROUP_BULK_EXPORT_JOB_NAME,
+BatchConstants.DELETE_EXPUNGE_JOB_NAME,
+BatchConstants.MDM_CLEAR_JOB_NAME
);
}
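The BatchConstants qualifiers used above replace the per-config BatchJobsConfig constants. A minimal sketch of what the consolidated class plausibly looks like; only the constant names come from this diff, and the String values are invented for illustration:

package ca.uhn.fhir.jpa.batch.config;

public final class BatchConstants {
    // Values below are assumptions for illustration; only the constant names
    // are taken from the diff.
    public static final String BULK_EXPORT_JOB_NAME = "bulkExportJob";
    public static final String PATIENT_BULK_EXPORT_JOB_NAME = "patientBulkExportJob";
    public static final String GROUP_BULK_EXPORT_JOB_NAME = "groupBulkExportJob";
    public static final String BULK_IMPORT_JOB_NAME = "bulkImportJob";
    public static final String DELETE_EXPUNGE_JOB_NAME = "deleteExpungeJob";
    public static final String MDM_CLEAR_JOB_NAME = "mdmClearJob";
    public static final String REINDEX_JOB_NAME = "reindexJob";
    public static final String REINDEX_EVERYTHING_JOB_NAME = "reindexEverythingJob";

    private BatchConstants() {
        // static constants only
    }
}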


@@ -5,7 +5,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
-import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
@@ -44,7 +44,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
-import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.BULK_IMPORT_JOB_NAME;
+import static ca.uhn.fhir.jpa.batch.config.BatchConstants.BULK_IMPORT_JOB_NAME;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
@@ -212,7 +212,7 @@ public class BulkDataImportR4Test extends BaseJpaR4Test implements ITestDataBuil
}
protected List<JobExecution> awaitAllBulkImportJobCompletion() {
-return myBatchJobHelper.awaitAllBulkJobCompletions(BatchJobsConfig.BULK_IMPORT_JOB_NAME);
+return myBatchJobHelper.awaitAllBulkJobCompletions(BatchConstants.BULK_IMPORT_JOB_NAME);
}
@Interceptor


@@ -1451,9 +1451,9 @@ public class FhirResourceDaoR4SearchNoHashesTest extends BaseJpaR4Test {
id1b = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
-TestUtil.sleepAtLeast(1100);
+ca.uhn.fhir.util.TestUtil.sleepAtLeast(1100);
DateTimeType beforeR2 = new DateTimeType(new Date(), TemporalPrecisionEnum.MILLI);
-TestUtil.sleepAtLeast(1100);
+ca.uhn.fhir.util.TestUtil.sleepAtLeast(1100);
IIdType id2;
{


@@ -6,11 +6,11 @@ import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
-import ca.uhn.fhir.jpa.util.TestUtil;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.StopWatch;
+import ca.uhn.fhir.util.TestUtil;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IIdType;
@@ -116,7 +116,7 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
}
assertEquals(searchUuid1, searchUuid2);
-TestUtil.sleepAtLeast(reuseCachedSearchResultsForMillis + 1);
+ca.uhn.fhir.util.TestUtil.sleepAtLeast(reuseCachedSearchResultsForMillis + 1);
// We're now past reuseCachedSearchResultsForMillis so we shouldn't reuse the search
@@ -291,7 +291,7 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
}
assertEquals(searchUuid1, searchUuid2);
-TestUtil.sleepAtLeast(reuseCachedSearchResultsForMillis + 1);
+ca.uhn.fhir.util.TestUtil.sleepAtLeast(reuseCachedSearchResultsForMillis + 1);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
// We're now past reuseCachedSearchResultsForMillis so we shouldn't reuse the search
@@ -376,7 +376,7 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
}
});
if (search == null) {
-TestUtil.sleepAtLeast(100);
+ca.uhn.fhir.util.TestUtil.sleepAtLeast(100);
}
}
assertNotNull(search, "Search " + bundleProvider.getUuid() + " not found on disk after 10 seconds");


@@ -22,7 +22,7 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
-import static ca.uhn.fhir.jpa.util.TestUtil.sleepAtLeast;
+import static ca.uhn.fhir.util.TestUtil.sleepAtLeast;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.not;


@@ -72,7 +72,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import java.util.stream.Collectors;
-import static ca.uhn.fhir.jpa.util.TestUtil.sleepAtLeast;
+import static ca.uhn.fhir.util.TestUtil.sleepAtLeast;
import static org.apache.commons.lang3.StringUtils.countMatches;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;


@@ -1,7 +1,7 @@
package ca.uhn.fhir.jpa.delete.job;
-import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterUtil;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
@@ -23,7 +23,7 @@ public class DeleteExpungeJobTest extends BaseJpaR4Test {
@Autowired
private IBatchJobSubmitter myBatchJobSubmitter;
@Autowired
-@Qualifier(BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME)
+@Qualifier(BatchConstants.DELETE_EXPUNGE_JOB_NAME)
private Job myDeleteExpungeJob;
@Autowired
private BatchJobHelper myBatchJobHelper;


@@ -1,8 +1,8 @@
package ca.uhn.fhir.jpa.delete.job;
-import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.CommonBatchJobConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterUtil;
import ca.uhn.fhir.jpa.batch.reader.CronologicalBatchAllResourcePidReader;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
@@ -37,10 +37,10 @@ public class ReindexJobTest extends BaseJpaR4Test {
@Autowired
private IBatchJobSubmitter myBatchJobSubmitter;
@Autowired
-@Qualifier(BatchJobsConfig.REINDEX_JOB_NAME)
+@Qualifier(BatchConstants.REINDEX_JOB_NAME)
private Job myReindexJob;
@Autowired
-@Qualifier(BatchJobsConfig.REINDEX_EVERYTHING_JOB_NAME)
+@Qualifier(BatchConstants.REINDEX_EVERYTHING_JOB_NAME)
private Job myReindexEverythingJob;
@Autowired
private BatchJobHelper myBatchJobHelper;


@@ -0,0 +1,161 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.FlywayMigrator;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import org.apache.commons.dbcp2.BasicDataSource;
import org.intellij.lang.annotations.Language;
import org.junit.jupiter.api.AfterEach;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.ColumnMapRowMapper;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import java.util.stream.Stream;
// TODO KHS copied from hapi-fhir-sql-migrate
public abstract class BaseTest {
    private static final String DATABASE_NAME = "DATABASE";
    private static final Logger ourLog = LoggerFactory.getLogger(BaseTest.class);
    private static int ourDatabaseUrl = 0;
    private BasicDataSource myDataSource;
    private String myUrl;
    private FlywayMigrator myMigrator;
    private DriverTypeEnum.ConnectionProperties myConnectionProperties;
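
    // data() supplies one freshly created in-memory database (H2 or Derby) per call;
    // the ourDatabaseUrl counter keeps each JDBC URL unique so test runs stay isolated.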
    public static Stream<Supplier<TestDatabaseDetails>> data() {
        ourLog.info("H2: {}", org.h2.Driver.class.toString());

        ArrayList<Supplier<TestDatabaseDetails>> retVal = new ArrayList<>();

        // H2
        retVal.add(new Supplier<TestDatabaseDetails>() {
            @Override
            public TestDatabaseDetails get() {
                String url = "jdbc:h2:mem:" + DATABASE_NAME + ourDatabaseUrl++;
                DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "SA", "SA");
                BasicDataSource dataSource = new BasicDataSource();
                dataSource.setUrl(url);
                dataSource.setUsername("SA");
                dataSource.setPassword("SA");
                dataSource.setDriverClassName(DriverTypeEnum.H2_EMBEDDED.getDriverClassName());
                FlywayMigrator migrator = new FlywayMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.H2_EMBEDDED);
                return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
            }

            @Override
            public String toString() {
                return "H2";
            }
        });

        // Derby
        retVal.add(new Supplier<TestDatabaseDetails>() {
            @Override
            public TestDatabaseDetails get() {
                String url = "jdbc:derby:memory:" + DATABASE_NAME + ourDatabaseUrl++ + ";create=true";
                DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "SA", "SA");
                BasicDataSource dataSource = new BasicDataSource();
                dataSource.setUrl(url);
                dataSource.setUsername("SA");
                dataSource.setPassword("SA");
                dataSource.setDriverClassName(DriverTypeEnum.DERBY_EMBEDDED.getDriverClassName());
                FlywayMigrator migrator = new FlywayMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.DERBY_EMBEDDED);
                return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
            }

            @Override
            public String toString() {
                return "Derby";
            }
        });

        return retVal.stream();
    }
    public void before(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
        TestDatabaseDetails testDatabaseDetails = theTestDatabaseDetails.get();
        myUrl = testDatabaseDetails.myUrl;
        myConnectionProperties = testDatabaseDetails.myConnectionProperties;
        myDataSource = testDatabaseDetails.myDataSource;
        myMigrator = testDatabaseDetails.myMigrator;
    }

    public String getUrl() {
        return myUrl;
    }

    public DriverTypeEnum.ConnectionProperties getConnectionProperties() {
        return myConnectionProperties;
    }

    protected BasicDataSource getDataSource() {
        return myDataSource;
    }
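
    // Deletes recorded Flyway migrations (rows with installed_rank > 0) between
    // tests so each test starts against a clean migration history.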
    @AfterEach
    public void resetMigrationVersion() throws SQLException {
        if (getConnectionProperties() != null) {
            Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
            if (tableNames.contains(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME)) {
                executeSql("DELETE from " + SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME + " where \"installed_rank\" > 0");
            }
        }
    }

    protected void executeSql(@Language("SQL") String theSql, Object... theArgs) {
        myConnectionProperties.getTxTemplate().execute(t -> {
            myConnectionProperties.newJdbcTemplate().update(theSql, theArgs);
            return null;
        });
    }

    protected List<Map<String, Object>> executeQuery(@Language("SQL") String theSql, Object... theArgs) {
        return myConnectionProperties.getTxTemplate().execute(t -> {
            return myConnectionProperties.newJdbcTemplate().query(theSql, theArgs, new ColumnMapRowMapper());
        });
    }

    public FlywayMigrator getMigrator() {
        return myMigrator;
    }

    @AfterEach
    public void after() {
        if (myConnectionProperties != null) {
            myConnectionProperties.close();
        }
    }

    protected DriverTypeEnum getDriverType() {
        return myConnectionProperties.getDriverType();
    }

    public static class TestDatabaseDetails {
        private final String myUrl;
        private final DriverTypeEnum.ConnectionProperties myConnectionProperties;
        private final BasicDataSource myDataSource;
        private final FlywayMigrator myMigrator;

        public TestDatabaseDetails(String theUrl, DriverTypeEnum.ConnectionProperties theConnectionProperties, BasicDataSource theDataSource, FlywayMigrator theMigrator) {
            myUrl = theUrl;
            myConnectionProperties = theConnectionProperties;
            myDataSource = theDataSource;
            myMigrator = theMigrator;
        }

        public DriverTypeEnum getDriverType() {
            return myConnectionProperties.getDriverType();
        }
    }
}
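For reference, a migration test would typically consume BaseTest.data() through a JUnit 5 parameterized test, roughly as sketched below; the subclass name and the SQL are illustrative assumptions, not part of this commit.

import java.util.function.Supplier;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

import static org.junit.jupiter.api.Assertions.assertEquals;

public class ExampleMigrationTest extends BaseTest {

    // "{0}" renders the supplier's toString(), so each run is labelled "H2" or "Derby".
    @ParameterizedTest(name = "{0}")
    @MethodSource("data")
    public void testExecuteSqlRoundTrip(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
        before(theTestDatabaseDetails);

        // Hypothetical table and row, purely for illustration
        executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
        executeSql("insert into SOMETABLE (PID, TEXTCOL) values (1, 'HELLO')");

        assertEquals(1, executeQuery("select * from SOMETABLE").size());
    }
}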

Some files were not shown because too many files have changed in this diff.