Merge branch 'master' into custom-elastic-index

This commit is contained in:
Tadgh 2021-09-07 13:56:44 -04:00
commit eabe3e7c74
134 changed files with 2007 additions and 1068 deletions

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -75,6 +75,7 @@ public enum VersionEnum {
V5_4_1,
V5_4_2,
V5_5_0,
V5_5_1,
V5_6_0
;

View File

@ -3,14 +3,14 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -0,0 +1,3 @@
---
release-date: "2021-08-30"
codename: "Quasar"

View File

@ -0,0 +1,4 @@
---
type: fix
issue: 2793
title: "Previously, when using the Expunge Everything operation, caches could retain old invalid values. This has been corrected. Thanks to [Ben Li-Sauerwine](https://github.com/theGOTOguy) for the fix!"

View File

@ -0,0 +1,4 @@
---
type: fix
issue: 2837
title: "The :not modifier does not currently work for observations with multiple codes for the search. This is fixed."

View File

@ -0,0 +1,3 @@
---
type: change
title: "The $mdm-clear operation has been changed to use Spring Batch."

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 2923
title: "$lookup operation cache was based on system and code, it becomes a defect
after adding displayLanguage support. Problem is now fixed."

View File

@ -0,0 +1,7 @@
---
type: add
issue: 2933
backport: 5.5.1
jira: SMILE-3056
title: "Fixed a regression which causes transactions with multiple identical ifNoneExist clauses to create duplicate data."

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 2935
jira: SMILE-3022
title: "No resource returned when search with percent sign. Problem is now fixed"

View File

@ -38,7 +38,7 @@ section.server_plain.title=Plain Server
page.server_plain.server_types=REST Server Types
page.server_plain.introduction=Plain Server Introduction
page.server_plain.get_started=Get Started ⚡
page.server_plain.resource_providers=Resource Providers and Plan Providers
page.server_plain.resource_providers=Resource Providers and Plain Providers
page.server_plain.rest_operations=REST Operations: Overview
page.server_plain.rest_operations_search=REST Operations: Search
page.server_plain.rest_operations_operations=REST Operations: Extended Operations

View File

@ -567,11 +567,15 @@ Note that the request goes to the root of the FHIR server, and not the `Organiza
## Clearing MDM Links
The `$mdm-clear` operation is used to batch-delete MDM links and related Golden Resources from the database. This operation is meant to be used during the rules-tuning phase of the MDM implementation so that you can quickly test your ruleset. It permits the user to reset the state of their MDM system without manual deletion of all related links and Golden Resources.
The `$mdm-clear` operation is used to batch-delete MDM links and related Golden Resources from the database. This
operation is intended to be used during the rules-tuning phase of the MDM implementation so that you can quickly test
your ruleset. It permits the user to reset the state of their MDM system without manual deletion of all related links
and Golden Resources.
After the operation is complete, all targeted MDM links are removed from the system, and their related Golden Resources are deleted and expunged from the server.
After the operation is complete, all targeted MDM links are removed from the system, and their related Golden Resources
are deleted and expunged from the server.
This operation takes a single optional Parameter.
This operation takes two optional Parameters.
<table class="table table-striped table-condensed">
<thead>
@ -584,11 +588,21 @@ This operation takes a single optional Parameter.
</thead>
<tbody>
<tr>
<td>sourceType</td>
<td>resourceType</td>
<td>String</td>
<td>0..*</td>
<td>
The Source resource types you would like to clear. If omitted, all resource types will be cleared.
</td>
</tr>
<tr>
<td>batchSize</td>
<td>Integer</td>
<td>0..1</td>
<td>
The Source Resource type you would like to clear. If omitted, will operate over all links.
The number of links that should be deleted at a time. If omitted, the batch size will be determined by the value
of the [Expunge Batch Size](/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
property.
</td>
</tr>
</tbody>
@ -598,33 +612,27 @@ This operation takes a single optional Parameter.
Use an HTTP POST to the following URL to invoke this operation:
```url
http://example.com/$mdm-clear
```
```http
POST /$mdm-clear
Content-Type: application/fhir+json
```
The following request body could be used:
```json
{
"resourceType": "Parameters",
"parameter": [ {
"name": "sourceType",
"name": "resourceType",
"valueString": "Patient"
}, {
"name": "resourceType",
"valueString": "Practitioner"
}, {
"name": "batchSize",
"valueDecimal": 1000
} ]
}
```
This operation returns the number of MDM links that were cleared. The following is a sample response:
```json
{
"resourceType": "Parameters",
"parameter": [ {
"name": "reset",
"valueDecimal": 5
} ]
}
```
This operation returns the job execution id of the Spring Batch job that will be run to remove all of the links and their
Golden Resources.
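For reference, the same invocation can be made from Java. The following is a minimal, hypothetical sketch using the HAPI FHIR generic client; the server base URL is a placeholder, and this example is not part of the change itself:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.DecimalType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;

public class MdmClearExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		// Placeholder base URL; substitute your own server's endpoint
		IGenericClient client = ctx.newRestfulGenericClient("http://example.com/fhir");

		// Build the same Parameters resource as the JSON example above
		Parameters inParams = new Parameters();
		inParams.addParameter().setName("resourceType").setValue(new StringType("Patient"));
		inParams.addParameter().setName("resourceType").setValue(new StringType("Practitioner"));
		inParams.addParameter().setName("batchSize").setValue(new DecimalType(1000));

		// Invoke $mdm-clear at the server root; the response carries the job execution id
		Parameters outParams = client.operation()
			.onServer()
			.named("$mdm-clear")
			.withParameters(inParams)
			.execute();

		System.out.println(ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outParams));
	}
}
```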
## Batch-creating MDM Links

View File

@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.batch;
* #L%
*/
import ca.uhn.fhir.jpa.batch.mdm.job.MdmClearJobConfig;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig;
import ca.uhn.fhir.jpa.delete.job.DeleteExpungeJobConfig;
@ -40,7 +41,8 @@ import java.util.Set;
BulkImportJobConfig.class,
DeleteExpungeJobConfig.class,
ReindexJobConfig.class,
ReindexEverythingJobConfig.class
ReindexEverythingJobConfig.class,
MdmClearJobConfig.class
})
public class BatchJobsConfig {
@ -94,4 +96,9 @@ public class BatchJobsConfig {
*/
public static final String REINDEX_EVERYTHING_JOB_NAME = "reindexEverythingJob";
/**
* MDM Clear
*/
public static final String MDM_CLEAR_JOB_NAME = "mdmClearJob";
}

View File

@ -20,14 +20,46 @@ package ca.uhn.fhir.jpa.batch;
* #L%
*/
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.processor.GoldenResourceAnnotatingProcessor;
import ca.uhn.fhir.jpa.batch.processor.PidToIBaseResourceProcessor;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.reindex.job.ReindexWriter;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class CommonBatchJobConfig {
public static final int MINUTES_IN_FUTURE_TO_PROCESS_FROM = 1;
@Bean
public MultiUrlJobParameterValidator multiUrlProcessorParameterValidator(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
return new MultiUrlJobParameterValidator(theMatchUrlService, theDaoRegistry);
}
@Bean
@StepScope
public SqlExecutorWriter sqlExecutorWriter() {
return new SqlExecutorWriter();
}
@Bean
@StepScope
public PidReaderCounterListener pidCountRecorderListener() {
return new PidReaderCounterListener();
}
@Bean
@StepScope
public ReverseCronologicalBatchResourcePidReader reverseCronologicalBatchResourcePidReader() {
return new ReverseCronologicalBatchResourcePidReader();
}
@Bean
@StepScope
@ -41,4 +73,10 @@ public class CommonBatchJobConfig {
return new GoldenResourceAnnotatingProcessor();
}
@Bean
@StepScope
public ReindexWriter reindexWriter() {
return new ReindexWriter();
}
}

View File

@ -1,57 +0,0 @@
package ca.uhn.fhir.jpa.batch.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.context.annotation.Bean;
public class MultiUrlProcessorJobConfig {
public static final int MINUTES_IN_FUTURE_TO_PROCESS_FROM = 1;
@Bean
public JobParametersValidator multiUrlProcessorParameterValidator(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
return new MultiUrlJobParameterValidator(theMatchUrlService, theDaoRegistry);
}
@Bean
@StepScope
public SqlExecutorWriter sqlExecutorWriter() {
return new SqlExecutorWriter();
}
@Bean
@StepScope
public PidReaderCounterListener pidCountRecorderListener() {
return new PidReaderCounterListener();
}
@Bean
@StepScope
public ReverseCronologicalBatchResourcePidReader reverseCronologicalBatchResourcePidReader() {
return new ReverseCronologicalBatchResourcePidReader();
}
}

View File

@ -43,6 +43,9 @@ public class PartitionedUrlValidator {
@Autowired
FhirContext myFhirContext;
public PartitionedUrlValidator() {
}
/**
* This method will throw an exception if the user is not allowed to access the requested resource type on the partition determined by the request
*/

View File

@ -0,0 +1,15 @@
package ca.uhn.fhir.jpa.batch.mdm;
import ca.uhn.fhir.mdm.api.IMdmBatchJobSubmitterFactory;
import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
import org.springframework.beans.factory.annotation.Autowired;
public class MdmBatchJobSubmitterFactoryImpl implements IMdmBatchJobSubmitterFactory {
@Autowired
IMdmClearJobSubmitter myMdmClearJobSubmitter;
@Override
public IMdmClearJobSubmitter getClearJobSubmitter() {
return myMdmClearJobSubmitter;
}
}

View File

@ -0,0 +1,84 @@
package ca.uhn.fhir.jpa.batch.mdm;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.batch.mdm.job.ReverseCronologicalBatchMdmLinkPidReader;
import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import javax.transaction.Transactional;
import java.util.List;
public class MdmClearJobSubmitterImpl implements IMdmClearJobSubmitter {
@Autowired
DaoConfig myDaoConfig;
@Autowired
PartitionedUrlValidator myPartitionedUrlValidator;
@Autowired
IInterceptorBroadcaster myInterceptorBroadcaster;
@Autowired
private IBatchJobSubmitter myBatchJobSubmitter;
@Autowired
@Qualifier(BatchJobsConfig.MDM_CLEAR_JOB_NAME)
private Job myMdmClearJob;
@Override
@Transactional(Transactional.TxType.NEVER)
public JobExecution submitJob(Integer theBatchSize, List<String> theUrls, RequestDetails theRequest) throws JobParametersInvalidException {
if (theBatchSize == null) {
theBatchSize = myDaoConfig.getExpungeBatchSize();
}
if (!myDaoConfig.canDeleteExpunge()) {
throw new ForbiddenOperationException("Delete Expunge not allowed: " + myDaoConfig.cannotDeleteExpungeReason());
}
RequestListJson requestListJson = myPartitionedUrlValidator.buildRequestListJson(theRequest, theUrls);
for (String url : theUrls) {
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(String.class, url);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE, params);
}
JobParameters jobParameters = ReverseCronologicalBatchMdmLinkPidReader.buildJobParameters(ProviderConstants.OPERATION_MDM_CLEAR, theBatchSize, requestListJson);
return myBatchJobSubmitter.runJob(myMdmClearJob, jobParameters);
}
}
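As a usage illustration, a caller holding a reference to this submitter might invoke it as in the sketch below. The surrounding Spring wiring and the `Patient?`-style match URLs are assumptions for illustration, not part of this change:

```java
import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.Arrays;

// Hypothetical caller, for illustration only
public class MdmClearCaller {
	@Autowired
	private IMdmClearJobSubmitter myMdmClearJobSubmitter;

	public Long clearLinks(RequestDetails theRequest) throws JobParametersInvalidException {
		// A null batch size falls back to DaoConfig#getExpungeBatchSize() (see submitJob above)
		JobExecution execution = myMdmClearJobSubmitter.submitJob(
			null, Arrays.asList("Patient?", "Practitioner?"), theRequest);
		return execution.getJobId();
	}
}
```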

View File

@ -0,0 +1,125 @@
package ca.uhn.fhir.jpa.batch.mdm.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.delete.job.DeleteExpungeProcessor;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.listener.ExecutionContextPromotionListener;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.support.CompositeItemProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import java.util.ArrayList;
import java.util.List;
import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.MDM_CLEAR_JOB_NAME;
/**
* Spring batch Job configuration file. Contains all necessary plumbing to run a
* $mdm-clear job.
*/
@Configuration
public class MdmClearJobConfig {
public static final String MDM_CLEAR_RESOURCE_LIST_STEP_NAME = "mdm-clear-resource-list-step";
@Autowired
private StepBuilderFactory myStepBuilderFactory;
@Autowired
private JobBuilderFactory myJobBuilderFactory;
@Autowired
private DeleteExpungeProcessor myDeleteExpungeProcessor;
@Autowired
@Qualifier("deleteExpungePromotionListener")
private ExecutionContextPromotionListener myDeleteExpungePromotionListener;
@Autowired
private MultiUrlJobParameterValidator myMultiUrlProcessorParameterValidator;
@Autowired
private PidReaderCounterListener myPidCountRecorderListener;
@Autowired
private SqlExecutorWriter mySqlExecutorWriter;
@Bean(name = MDM_CLEAR_JOB_NAME)
@Lazy
public Job mdmClearJob() {
return myJobBuilderFactory.get(MDM_CLEAR_JOB_NAME)
.validator(myMultiUrlProcessorParameterValidator)
.start(mdmClearUrlListStep())
.build();
}
@Bean
public Step mdmClearUrlListStep() {
return myStepBuilderFactory.get(MDM_CLEAR_RESOURCE_LIST_STEP_NAME)
.<List<Long>, List<String>>chunk(1)
.reader(reverseCronologicalBatchMdmLinkPidReader())
.processor(deleteThenExpungeCompositeProcessor())
.writer(mySqlExecutorWriter)
.listener(myPidCountRecorderListener)
.listener(myDeleteExpungePromotionListener)
.build();
}
@Bean
@StepScope
public ItemProcessor<List<Long>, List<String>> deleteThenExpungeCompositeProcessor() {
CompositeItemProcessor<List<Long>, List<String>> compositeProcessor = new CompositeItemProcessor<>();
List<ItemProcessor<?, ?>> itemProcessors = new ArrayList<>();
itemProcessors.add(mdmLinkDeleter());
itemProcessors.add(myDeleteExpungeProcessor);
compositeProcessor.setDelegates(itemProcessors);
return compositeProcessor;
}
@Bean
@StepScope
public ReverseCronologicalBatchMdmLinkPidReader reverseCronologicalBatchMdmLinkPidReader() {
return new ReverseCronologicalBatchMdmLinkPidReader();
}
@Bean
public MdmLinkDeleter mdmLinkDeleter() {
return new MdmLinkDeleter();
}
@Bean
public ExecutionContextPromotionListener mdmClearPromotionListener() {
ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();
listener.setKeys(new String[]{PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED});
return listener;
}
}

View File

@ -0,0 +1,85 @@
package ca.uhn.fhir.jpa.batch.mdm.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
import ca.uhn.fhir.jpa.dao.expunge.PartitionRunner;
import ca.uhn.fhir.jpa.entity.MdmLink;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.SliceImpl;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.stream.Collectors;
/**
* Take MdmLink pids in and output golden resource pids out
*/
public class MdmLinkDeleter implements ItemProcessor<List<Long>, List<Long>> {
public static final String PROCESS_NAME = "MdmClear";
public static final String THREAD_PREFIX = "mdmClear";
private static final Logger ourLog = LoggerFactory.getLogger(MdmLinkDeleter.class);
@Autowired
protected PlatformTransactionManager myTxManager;
@Autowired
IMdmLinkDao myMdmLinkDao;
@Autowired
DaoConfig myDaoConfig;
@Override
public List<Long> process(List<Long> thePidList) throws Exception {
ConcurrentLinkedQueue<Long> goldenPidAggregator = new ConcurrentLinkedQueue<>();
PartitionRunner partitionRunner = new PartitionRunner(PROCESS_NAME, THREAD_PREFIX, myDaoConfig.getReindexBatchSize(), myDaoConfig.getReindexThreadCount());
partitionRunner.runInPartitionedThreads(new SliceImpl<>(thePidList), pids -> removeLinks(pids, goldenPidAggregator));
return new ArrayList<>(goldenPidAggregator);
}
private void removeLinks(List<Long> pidList, ConcurrentLinkedQueue<Long> theGoldenPidAggregator) {
TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
txTemplate.executeWithoutResult(t -> theGoldenPidAggregator.addAll(deleteMdmLinksAndReturnGoldenResourcePids(pidList)));
}
public List<Long> deleteMdmLinksAndReturnGoldenResourcePids(List<Long> thePids) {
List<MdmLink> links = myMdmLinkDao.findAllById(thePids);
Set<Long> goldenResources = links.stream().map(MdmLink::getGoldenResourcePid).collect(Collectors.toSet());
//TODO GGG this is probably invalid... we are essentially looking for GOLDEN -> GOLDEN links, which are either POSSIBLE_DUPLICATE
//or REDIRECT
goldenResources.addAll(links.stream()
.filter(link -> link.getMatchResult().equals(MdmMatchResultEnum.REDIRECT)
|| link.getMatchResult().equals(MdmMatchResultEnum.POSSIBLE_DUPLICATE))
.map(MdmLink::getSourcePid).collect(Collectors.toSet()));
ourLog.info("Deleting {} MDM link records...", links.size());
myMdmLinkDao.deleteAll(links);
ourLog.info("{} MDM link records deleted", links.size());
return new ArrayList<>(goldenResources);
}
}

View File

@ -0,0 +1,52 @@
package ca.uhn.fhir.jpa.batch.mdm.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.batch.reader.BaseReverseCronologicalBatchPidReader;
import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import java.util.HashSet;
import java.util.Set;
/**
* This is the same as the parent class, except it operates on MdmLink entities instead of resource entities
*/
public class ReverseCronologicalBatchMdmLinkPidReader extends BaseReverseCronologicalBatchPidReader {
@Autowired
IMdmLinkDao myMdmLinkDao;
@Override
protected Set<Long> getNextPidBatch(ResourceSearch resourceSearch) {
String resourceName = resourceSearch.getResourceName();
Pageable pageable = PageRequest.of(0, getBatchSize());
//Expand out the list to handle the REDIRECT/POSSIBLE DUPLICATE ones.
return new HashSet<>(myMdmLinkDao.findPidByResourceNameAndThreshold(resourceName, getCurrentHighThreshold(), pageable));
}
@Override
protected void setDateFromPidFunction(ResourceSearch resourceSearch) {
setDateExtractorFunction(pid -> myMdmLinkDao.findById(pid).get().getCreated());
}
}

View File

@ -0,0 +1,193 @@
package ca.uhn.fhir.jpa.batch.reader;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.CommonBatchJobConfig;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.param.DateRangeParam;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemStream;
import org.springframework.batch.item.ItemStreamException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
/**
* This Spring Batch reader takes 4 parameters:
* {@link #JOB_PARAM_REQUEST_LIST}: A list of URLs to search for along with the partitions those searches should be performed on
* {@link #JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search. If omitted, {@link DaoConfig#getExpungeBatchSize} will be used.
* {@link #JOB_PARAM_START_TIME}: The latest timestamp of entities to search for
* <p>
* The reader will return at most {@link #JOB_PARAM_BATCH_SIZE} pids every time it is called, or null
* once no more matching entities are available. It returns the resources in reverse chronological order
* and stores where it's at in the Spring Batch execution context with the key {@link #CURRENT_THRESHOLD_HIGH}
* appended with "." and the index number of the url list item it has gotten up to. This is to permit
* restarting jobs that use this reader so it can pick up where it left off.
*/
public abstract class BaseReverseCronologicalBatchPidReader implements ItemReader<List<Long>>, ItemStream {
public static final String JOB_PARAM_REQUEST_LIST = "url-list";
public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
public static final String JOB_PARAM_START_TIME = "start-time";
public static final String CURRENT_URL_INDEX = "current.url-index";
public static final String CURRENT_THRESHOLD_HIGH = "current.threshold-high";
private static final Logger ourLog = LoggerFactory.getLogger(ReverseCronologicalBatchResourcePidReader.class);
private final BatchDateThresholdUpdater myBatchDateThresholdUpdater = new BatchDateThresholdUpdater();
private final Map<Integer, Date> myThresholdHighByUrlIndex = new HashMap<>();
private final Map<Integer, Set<Long>> myAlreadyProcessedPidsWithHighDate = new HashMap<>();
@Autowired
private FhirContext myFhirContext;
@Autowired
private MatchUrlService myMatchUrlService;
private List<PartitionedUrl> myPartitionedUrls;
private Integer myBatchSize;
private int myUrlIndex = 0;
private Date myStartTime;
private static String highKey(int theIndex) {
return CURRENT_THRESHOLD_HIGH + "." + theIndex;
}
@Nonnull
public static JobParameters buildJobParameters(String theOperationName, Integer theBatchSize, RequestListJson theRequestListJson) {
Map<String, JobParameter> map = new HashMap<>();
map.put(MultiUrlJobParameterValidator.JOB_PARAM_OPERATION_NAME, new JobParameter(theOperationName));
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST, new JobParameter(theRequestListJson.toJson()));
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
if (theBatchSize != null) {
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
}
JobParameters parameters = new JobParameters(map);
return parameters;
}
@Autowired
public void setRequestListJson(@Value("#{jobParameters['" + JOB_PARAM_REQUEST_LIST + "']}") String theRequestListJson) {
RequestListJson requestListJson = RequestListJson.fromJson(theRequestListJson);
myPartitionedUrls = requestListJson.getPartitionedUrls();
}
@Autowired
public void setStartTime(@Value("#{jobParameters['" + JOB_PARAM_START_TIME + "']}") Date theStartTime) {
myStartTime = theStartTime;
}
@Override
public List<Long> read() throws Exception {
while (myUrlIndex < myPartitionedUrls.size()) {
List<Long> nextBatch = getNextBatch();
if (nextBatch.isEmpty()) {
++myUrlIndex;
continue;
}
return nextBatch;
}
return null;
}
protected List<Long> getNextBatch() {
RequestPartitionId requestPartitionId = myPartitionedUrls.get(myUrlIndex).getRequestPartitionId();
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(myPartitionedUrls.get(myUrlIndex).getUrl(), requestPartitionId);
myAlreadyProcessedPidsWithHighDate.putIfAbsent(myUrlIndex, new HashSet<>());
Set<Long> newPids = getNextPidBatch(resourceSearch);
if (ourLog.isDebugEnabled()) {
ourLog.debug("Search for {}{} returned {} results", resourceSearch.getResourceName(), resourceSearch.getSearchParameterMap().toNormalizedQueryString(myFhirContext), newPids.size());
ourLog.debug("Results: {}", newPids);
}
setDateFromPidFunction(resourceSearch);
List<Long> retval = new ArrayList<>(newPids);
Date newThreshold = myBatchDateThresholdUpdater.updateThresholdAndCache(getCurrentHighThreshold(), myAlreadyProcessedPidsWithHighDate.get(myUrlIndex), retval);
myThresholdHighByUrlIndex.put(myUrlIndex, newThreshold);
return retval;
}
protected Date getCurrentHighThreshold() {
return myThresholdHighByUrlIndex.get(myUrlIndex);
}
protected void setDateExtractorFunction(Function<Long, Date> theDateExtractorFunction) {
myBatchDateThresholdUpdater.setDateFromPid(theDateExtractorFunction);
}
protected void addDateCountAndSortToSearch(ResourceSearch resourceSearch) {
SearchParameterMap map = resourceSearch.getSearchParameterMap();
map.setLastUpdated(new DateRangeParam().setUpperBoundInclusive(getCurrentHighThreshold()));
map.setLoadSynchronousUpTo(myBatchSize);
map.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.DESC));
}
@Override
public void open(ExecutionContext executionContext) throws ItemStreamException {
if (executionContext.containsKey(CURRENT_URL_INDEX)) {
myUrlIndex = (int) executionContext.getLong(CURRENT_URL_INDEX);
}
for (int index = 0; index < myPartitionedUrls.size(); ++index) {
String key = highKey(index);
if (executionContext.containsKey(key)) {
myThresholdHighByUrlIndex.put(index, new Date(executionContext.getLong(key)));
} else {
myThresholdHighByUrlIndex.put(index, myStartTime);
}
}
}
@Override
public void update(ExecutionContext executionContext) throws ItemStreamException {
executionContext.putLong(CURRENT_URL_INDEX, myUrlIndex);
for (int index = 0; index < myPartitionedUrls.size(); ++index) {
Date date = myThresholdHighByUrlIndex.get(index);
if (date != null) {
executionContext.putLong(highKey(index), date.getTime());
}
}
}
@Override
public void close() throws ItemStreamException {
}
protected Integer getBatchSize() {
return myBatchSize;
}
@Autowired
public void setBatchSize(@Value("#{jobParameters['" + JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
myBatchSize = theBatchSize;
}
protected Set<Long> getAlreadySeenPids() {
return myAlreadyProcessedPidsWithHighDate.get(myUrlIndex);
}
protected abstract Set<Long> getNextPidBatch(ResourceSearch resourceSearch);
protected abstract void setDateFromPidFunction(ResourceSearch resourceSearch);
}

View File

@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.batch.reader;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
import ca.uhn.fhir.jpa.batch.CommonBatchJobConfig;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import com.fasterxml.jackson.core.JsonProcessingException;
@ -93,7 +93,7 @@ public class CronologicalBatchAllResourcePidReader implements ItemReader<List<Lo
public static JobParameters buildJobParameters(Integer theBatchSize, RequestPartitionId theRequestPartitionId) {
Map<String, JobParameter> map = new HashMap<>();
map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_REQUEST_PARTITION, new JobParameter(theRequestPartitionId.toJson()));
map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MultiUrlProcessorJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
if (theBatchSize != null) {
map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
}

View File

@ -20,209 +20,52 @@ package ca.uhn.fhir.jpa.batch.reader;
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.param.DateRangeParam;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemStream;
import org.springframework.batch.item.ItemStreamException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
/**
* This Spring Batch reader takes 4 parameters:
* {@link #JOB_PARAM_REQUEST_LIST}: A list of URLs to search for along with the partitions those searches should be performed on
* {@link #JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search. If omitted, {@link DaoConfig#getExpungeBatchSize} will be used.
* {@link #JOB_PARAM_START_TIME}: The latest timestamp of resources to search for
* <p>
* The reader will return at most {@link #JOB_PARAM_BATCH_SIZE} pids every time it is called, or null
* once no more matching resources are available. It returns the resources in reverse chronological order
* and stores where it's at in the Spring Batch execution context with the key {@link #CURRENT_THRESHOLD_HIGH}
* appended with "." and the index number of the url list item it has gotten up to. This is to permit
* restarting jobs that use this reader so it can pick up where it left off.
*/
public class ReverseCronologicalBatchResourcePidReader implements ItemReader<List<Long>>, ItemStream {
private static final Logger ourLog = LoggerFactory.getLogger(ReverseCronologicalBatchResourcePidReader.class);
public static final String JOB_PARAM_REQUEST_LIST = "url-list";
public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
public static final String JOB_PARAM_START_TIME = "start-time";
public static final String CURRENT_URL_INDEX = "current.url-index";
public static final String CURRENT_THRESHOLD_HIGH = "current.threshold-high";
@Autowired
private FhirContext myFhirContext;
@Autowired
private MatchUrlService myMatchUrlService;
public class ReverseCronologicalBatchResourcePidReader extends BaseReverseCronologicalBatchPidReader {
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private BatchResourceSearcher myBatchResourceSearcher;
private final BatchDateThresholdUpdater myBatchDateThresholdUpdater = new BatchDateThresholdUpdater();
private List<PartitionedUrl> myPartitionedUrls;
private Integer myBatchSize;
private final Map<Integer, Date> myThresholdHighByUrlIndex = new HashMap<>();
private final Map<Integer, Set<Long>> myAlreadyProcessedPidsWithHighDate = new HashMap<>();
private int myUrlIndex = 0;
private Date myStartTime;
@Autowired
public void setRequestListJson(@Value("#{jobParameters['" + JOB_PARAM_REQUEST_LIST + "']}") String theRequestListJson) {
RequestListJson requestListJson = RequestListJson.fromJson(theRequestListJson);
myPartitionedUrls = requestListJson.getPartitionedUrls();
}
@Autowired
public void setBatchSize(@Value("#{jobParameters['" + JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
myBatchSize = theBatchSize;
}
@Autowired
public void setStartTime(@Value("#{jobParameters['" + JOB_PARAM_START_TIME + "']}") Date theStartTime) {
myStartTime = theStartTime;
}
@Override
public List<Long> read() throws Exception {
while (myUrlIndex < myPartitionedUrls.size()) {
List<Long> nextBatch = getNextBatch();
if (nextBatch.isEmpty()) {
++myUrlIndex;
continue;
}
return nextBatch;
}
return null;
}
private List<Long> getNextBatch() {
RequestPartitionId requestPartitionId = myPartitionedUrls.get(myUrlIndex).getRequestPartitionId();
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(myPartitionedUrls.get(myUrlIndex).getUrl(), requestPartitionId);
protected Set<Long> getNextPidBatch(ResourceSearch resourceSearch) {
Set<Long> retval = new LinkedHashSet<>();
addDateCountAndSortToSearch(resourceSearch);
// Perform the search
IResultIterator resultIter = myBatchResourceSearcher.performSearch(resourceSearch, myBatchSize);
Set<Long> newPids = new LinkedHashSet<>();
Set<Long> alreadySeenPids = myAlreadyProcessedPidsWithHighDate.computeIfAbsent(myUrlIndex, i -> new HashSet<>());
Integer batchSize = getBatchSize();
IResultIterator resultIter = myBatchResourceSearcher.performSearch(resourceSearch, batchSize);
Set<Long> alreadySeenPids = getAlreadySeenPids();
do {
List<Long> pids = resultIter.getNextResultBatch(myBatchSize).stream().map(ResourcePersistentId::getIdAsLong).collect(Collectors.toList());
newPids.addAll(pids);
newPids.removeAll(alreadySeenPids);
} while (newPids.size() < myBatchSize && resultIter.hasNext());
if (ourLog.isDebugEnabled()) {
ourLog.debug("Search for {}{} returned {} results", resourceSearch.getResourceName(), resourceSearch.getSearchParameterMap().toNormalizedQueryString(myFhirContext), newPids.size());
ourLog.debug("Results: {}", newPids);
}
setDateFromPidFunction(resourceSearch);
List<Long> retval = new ArrayList<>(newPids);
Date newThreshold = myBatchDateThresholdUpdater.updateThresholdAndCache(myThresholdHighByUrlIndex.get(myUrlIndex), myAlreadyProcessedPidsWithHighDate.get(myUrlIndex), retval);
myThresholdHighByUrlIndex.put(myUrlIndex, newThreshold);
List<Long> pids = resultIter.getNextResultBatch(batchSize).stream().map(ResourcePersistentId::getIdAsLong).collect(Collectors.toList());
retval.addAll(pids);
retval.removeAll(alreadySeenPids);
} while (retval.size() < batchSize && resultIter.hasNext());
return retval;
}
private void setDateFromPidFunction(ResourceSearch resourceSearch) {
@Override
protected void setDateFromPidFunction(ResourceSearch resourceSearch) {
final IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceSearch.getResourceName());
myBatchDateThresholdUpdater.setDateFromPid(pid -> {
setDateExtractorFunction(pid -> {
IBaseResource oldestResource = dao.readByPid(new ResourcePersistentId(pid));
return oldestResource.getMeta().getLastUpdated();
});
}
private void addDateCountAndSortToSearch(ResourceSearch resourceSearch) {
SearchParameterMap map = resourceSearch.getSearchParameterMap();
map.setLastUpdated(new DateRangeParam().setUpperBoundInclusive(myThresholdHighByUrlIndex.get(myUrlIndex)));
map.setLoadSynchronousUpTo(myBatchSize);
map.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.DESC));
}
@Override
public void open(ExecutionContext executionContext) throws ItemStreamException {
if (executionContext.containsKey(CURRENT_URL_INDEX)) {
myUrlIndex = new Long(executionContext.getLong(CURRENT_URL_INDEX)).intValue();
}
for (int index = 0; index < myPartitionedUrls.size(); ++index) {
String key = highKey(index);
if (executionContext.containsKey(key)) {
myThresholdHighByUrlIndex.put(index, new Date(executionContext.getLong(key)));
} else {
myThresholdHighByUrlIndex.put(index, myStartTime);
}
}
}
private static String highKey(int theIndex) {
return CURRENT_THRESHOLD_HIGH + "." + theIndex;
}
@Override
public void update(ExecutionContext executionContext) throws ItemStreamException {
executionContext.putLong(CURRENT_URL_INDEX, myUrlIndex);
for (int index = 0; index < myPartitionedUrls.size(); ++index) {
Date date = myThresholdHighByUrlIndex.get(index);
if (date != null) {
executionContext.putLong(highKey(index), date.getTime());
}
}
}
@Override
public void close() throws ItemStreamException {
}
@Nonnull
public static JobParameters buildJobParameters(String theOperationName, Integer theBatchSize, RequestListJson theRequestListJson) {
Map<String, JobParameter> map = new HashMap<>();
map.put(MultiUrlJobParameterValidator.JOB_PARAM_OPERATION_NAME, new JobParameter(theOperationName));
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST, new JobParameter(theRequestListJson.toJson()));
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MultiUrlProcessorJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
if (theBatchSize != null) {
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
}
JobParameters parameters = new JobParameters(map);
return parameters;
}
}

View File

@ -16,6 +16,8 @@ import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.NonPersistedBatchConfigurer;
import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
import ca.uhn.fhir.jpa.batch.mdm.MdmBatchJobSubmitterFactoryImpl;
import ca.uhn.fhir.jpa.batch.mdm.MdmClearJobSubmitterImpl;
import ca.uhn.fhir.jpa.batch.reader.BatchResourceSearcher;
import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl;
import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;
@ -35,7 +37,6 @@ import ca.uhn.fhir.jpa.dao.LegacySearchBuilder;
import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.expunge.DeleteExpungeService;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeOperation;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
@ -137,6 +138,8 @@ import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.validation.JpaResourceLoader;
import ca.uhn.fhir.jpa.validation.ValidationSettings;
import ca.uhn.fhir.mdm.api.IMdmBatchJobSubmitterFactory;
import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.api.server.storage.IReindexJobSubmitter;
@ -517,10 +520,20 @@ public abstract class BaseConfig {
}
@Bean
public MdmLinkExpandSvc myMdmLinkExpandSvc() {
public MdmLinkExpandSvc mdmLinkExpandSvc() {
return new MdmLinkExpandSvc();
}
@Bean
IMdmBatchJobSubmitterFactory mdmBatchJobSubmitterFactory() {
return new MdmBatchJobSubmitterFactoryImpl();
}
@Bean
IMdmClearJobSubmitter mdmClearJobSubmitter() {
return new MdmClearJobSubmitterImpl();
}
@Bean
@Lazy
public TerminologyUploaderProvider terminologyUploaderProvider() {
@ -893,11 +906,6 @@ public abstract class BaseConfig {
return new DaoSearchParamSynchronizer();
}
@Bean
public DeleteExpungeService deleteExpungeService() {
return new DeleteExpungeService();
}
@Bean
public ResourceTableFKProvider resourceTableFKProvider() {
return new ResourceTableFKProvider();

View File

@ -94,6 +94,8 @@ import org.hl7.fhir.r4.model.Task;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.task.SyncTaskExecutor;
import org.springframework.core.task.TaskExecutor;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
@ -153,8 +155,8 @@ public abstract class BaseTransactionProcessor {
@Autowired
private InMemoryResourceMatcher myInMemoryResourceMatcher;
private ThreadPoolTaskExecutor myExecutor ;
private TaskExecutor myExecutor ;
@VisibleForTesting
public void setDaoConfig(DaoConfig theDaoConfig) {
myDaoConfig = theDaoConfig;
@ -172,16 +174,25 @@ public abstract class BaseTransactionProcessor {
@PostConstruct
public void start() {
ourLog.trace("Starting transaction processor");
myExecutor = new ThreadPoolTaskExecutor();
myExecutor.setThreadNamePrefix("bundle_batch_");
// For single thread set the value to 1
//myExecutor.setCorePoolSize(1);
//myExecutor.setMaxPoolSize(1);
myExecutor.setCorePoolSize(myDaoConfig.getBundleBatchPoolSize());
myExecutor.setMaxPoolSize(myDaoConfig.getBundleBatchMaxPoolSize());
myExecutor.setQueueCapacity(DaoConfig.DEFAULT_BUNDLE_BATCH_QUEUE_CAPACITY);
}
myExecutor.initialize();
private TaskExecutor getTaskExecutor() {
if (myExecutor == null) {
if (myDaoConfig.getBundleBatchPoolSize() > 1) {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setThreadNamePrefix("bundle_batch_");
executor.setCorePoolSize(myDaoConfig.getBundleBatchPoolSize());
executor.setMaxPoolSize(myDaoConfig.getBundleBatchMaxPoolSize());
executor.setQueueCapacity(DaoConfig.DEFAULT_BUNDLE_BATCH_QUEUE_CAPACITY);
executor.initialize();
myExecutor = executor;
} else {
SyncTaskExecutor executor = new SyncTaskExecutor();
myExecutor = executor;
}
}
return myExecutor;
}
public <BUNDLE extends IBaseBundle> BUNDLE transaction(RequestDetails theRequestDetails, BUNDLE theRequest, boolean theNestedMode) {
@ -349,7 +360,7 @@ public abstract class BaseTransactionProcessor {
for (int i=0; i<requestEntriesSize; i++ ) {
nextRequestEntry = requestEntries.get(i);
BundleTask bundleTask = new BundleTask(completionLatch, theRequestDetails, responseMap, i, nextRequestEntry, theNestedMode);
myExecutor.submit(bundleTask);
getTaskExecutor().execute(bundleTask);
}
// waiting for all tasks to be completed
@ -1554,10 +1565,10 @@ public abstract class BaseTransactionProcessor {
return theStatusCode + " " + defaultString(Constants.HTTP_STATUS_NAMES.get(theStatusCode));
}
public class BundleTask implements Callable<Void> {
public class BundleTask implements Runnable {
private CountDownLatch myCompletedLatch;
private ServletRequestDetails myRequestDetails;
private RequestDetails myRequestDetails;
private IBase myNextReqEntry;
private Map<Integer, Object> myResponseMap;
private int myResponseOrder;
@ -1565,7 +1576,7 @@ public abstract class BaseTransactionProcessor {
protected BundleTask(CountDownLatch theCompletedLatch, RequestDetails theRequestDetails, Map<Integer, Object> theResponseMap, int theResponseOrder, IBase theNextReqEntry, boolean theNestedMode) {
this.myCompletedLatch = theCompletedLatch;
this.myRequestDetails = (ServletRequestDetails)theRequestDetails;
this.myRequestDetails = theRequestDetails;
this.myNextReqEntry = theNextReqEntry;
this.myResponseMap = theResponseMap;
this.myResponseOrder = theResponseOrder;
@ -1573,10 +1584,8 @@ public abstract class BaseTransactionProcessor {
}
@Override
public Void call() {
public void run() {
BaseServerResponseExceptionHolder caughtEx = new BaseServerResponseExceptionHolder();
try {
IBaseBundle subRequestBundle = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode());
myVersionAdapter.addEntry(subRequestBundle, (IBase) myNextReqEntry);
@ -1609,7 +1618,6 @@ public abstract class BaseTransactionProcessor {
// checking for the parallelism
ourLog.debug("processing bacth for {} is completed", myVersionAdapter.getEntryRequestUrl((IBase)myNextReqEntry));
myCompletedLatch.countDown();
return null;
}
}
}

View File

@ -244,24 +244,28 @@ public class TransactionProcessor extends BaseTransactionProcessor {
if (orPredicates.size() > 1) {
cq.where(cb.or(orPredicates.toArray(EMPTY_PREDICATE_ARRAY)));
Map<Long, MatchUrlToResolve> hashToSearchMap = buildHashToSearchMap(searchParameterMapsToResolve);
Map<Long, List<MatchUrlToResolve>> hashToSearchMap = buildHashToSearchMap(searchParameterMapsToResolve);
TypedQuery<ResourceIndexedSearchParamToken> query = myEntityManager.createQuery(cq);
List<ResourceIndexedSearchParamToken> results = query.getResultList();
for (ResourceIndexedSearchParamToken nextResult : results) {
Optional<MatchUrlToResolve> matchedSearch = Optional.ofNullable(hashToSearchMap.get(nextResult.getHashSystemAndValue()));
Optional<List<MatchUrlToResolve>> matchedSearch = Optional.ofNullable(hashToSearchMap.get(nextResult.getHashSystemAndValue()));
if (!matchedSearch.isPresent()) {
matchedSearch = Optional.ofNullable(hashToSearchMap.get(nextResult.getHashValue()));
}
matchedSearch.ifPresent(matchUrlToResolve -> setSearchToResolvedAndPrefetchFoundResourcePid(theTransactionDetails, idsToPreFetch, nextResult, matchUrlToResolve));
matchedSearch.ifPresent(matchUrlsToResolve -> {
matchUrlsToResolve.forEach(matchUrl -> {
setSearchToResolvedAndPrefetchFoundResourcePid(theTransactionDetails, idsToPreFetch, nextResult, matchUrl);
});
});
}
//For each SP Map which did not return a result, tag it as not found.
searchParameterMapsToResolve.stream()
// No matches
.filter(match -> !match.myResolved)
.forEach(match -> {
ourLog.warn("Was unable to match url {} from database", match.myRequestUrl);
ourLog.debug("Was unable to match url {} from database", match.myRequestUrl);
theTransactionDetails.addResolvedMatchUrl(match.myRequestUrl, TransactionDetails.NOT_FOUND);
});
}
@ -322,22 +326,26 @@ public class TransactionProcessor extends BaseTransactionProcessor {
return hashPredicate;
}
private Map<Long, MatchUrlToResolve> buildHashToSearchMap(List<MatchUrlToResolve> searchParameterMapsToResolve) {
Map<Long, MatchUrlToResolve> hashToSearch = new HashMap<>();
private Map<Long, List<MatchUrlToResolve>> buildHashToSearchMap(List<MatchUrlToResolve> searchParameterMapsToResolve) {
Map<Long, List<MatchUrlToResolve>> hashToSearch = new HashMap<>();
//Build a lookup map so we don't have to iterate over the searches repeatedly.
for (MatchUrlToResolve nextSearchParameterMap : searchParameterMapsToResolve) {
if (nextSearchParameterMap.myHashSystemAndValue != null) {
hashToSearch.put(nextSearchParameterMap.myHashSystemAndValue, nextSearchParameterMap);
List<MatchUrlToResolve> matchUrlsToResolve = hashToSearch.getOrDefault(nextSearchParameterMap.myHashSystemAndValue, new ArrayList<>());
matchUrlsToResolve.add(nextSearchParameterMap);
hashToSearch.put(nextSearchParameterMap.myHashSystemAndValue, matchUrlsToResolve);
}
if (nextSearchParameterMap.myHashValue != null) {
hashToSearch.put(nextSearchParameterMap.myHashValue, nextSearchParameterMap);
List<MatchUrlToResolve> matchUrlsToResolve = hashToSearch.getOrDefault(nextSearchParameterMap.myHashValue, new ArrayList<>());
matchUrlsToResolve.add(nextSearchParameterMap);
hashToSearch.put(nextSearchParameterMap.myHashValue, matchUrlsToResolve);
}
}
return hashToSearch;
}
private void setSearchToResolvedAndPrefetchFoundResourcePid(TransactionDetails theTransactionDetails, List<Long> idsToPreFetch, ResourceIndexedSearchParamToken nextResult, MatchUrlToResolve nextSearchParameterMap) {
ourLog.warn("Matched url {} from database", nextSearchParameterMap.myRequestUrl);
ourLog.debug("Matched url {} from database", nextSearchParameterMap.myRequestUrl);
idsToPreFetch.add(nextResult.getResourcePid());
myMatchResourceUrlService.matchUrlResolved(theTransactionDetails, nextSearchParameterMap.myResourceDefinition.getName(), nextSearchParameterMap.myRequestUrl, new ResourcePersistentId(nextResult.getResourcePid()));
theTransactionDetails.addResolvedMatchUrl(nextSearchParameterMap.myRequestUrl, new ResourcePersistentId(nextResult.getResourcePid()));
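A minimal, self-contained sketch of the multimap change above (class and URL values are illustrative, not HAPI API): with the old Map<Long, MatchUrlToResolve>, two identical conditional-create URLs in one bundle would overwrite each other's bucket, so only one got resolved; keeping a List per hash lets every duplicate be marked resolved. The diff uses getOrDefault plus put; computeIfAbsent is the equivalent idiom.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class HashBucketSketch {
	public static void main(String[] args) {
		Map<Long, List<String>> hashToSearch = new HashMap<>();
		long hash = 42L; // duplicate match URLs produce the same token hash
		hashToSearch.computeIfAbsent(hash, k -> new ArrayList<>()).add("Organization?name=A");
		hashToSearch.computeIfAbsent(hash, k -> new ArrayList<>()).add("Organization?name=A");
		System.out.println(hashToSearch.get(hash).size()); // 2 -> both entries get resolved
	}
}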

View File

@ -20,15 +20,16 @@ package ca.uhn.fhir.jpa.dao.data;
* #L%
*/
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.jpa.entity.MdmLink;
import org.springframework.beans.factory.annotation.Value;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import java.util.Date;
import java.util.List;
@Repository
@ -70,4 +71,6 @@ public interface IMdmLinkDao extends JpaRepository<MdmLink, Long> {
@Query("SELECT ml.myGoldenResourcePid as goldenPid, ml.mySourcePid as sourcePid FROM MdmLink ml WHERE ml.myGoldenResourcePid = :goldenPid and ml.myMatchResult = :matchResult")
List<MdmPidTuple> expandPidsByGoldenResourcePidAndMatchResult(@Param("goldenPid") Long theSourcePid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum);
@Query("SELECT ml.myId FROM MdmLink ml WHERE ml.myMdmSourceType = :resourceName AND ml.myCreated <= :highThreshold ORDER BY ml.myCreated DESC")
List<Long> findPidByResourceNameAndThreshold(@Param("resourceName") String theResourceName, @Param("highThreshold") Date theHighThreshold, Pageable thePageable);
}
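A hedged usage sketch for the new paged query above (the caller and its names are hypothetical; PageRequest is standard Spring Data): since the JPQL orders by myCreated DESC, page zero holds the most recently created links at or before the threshold.
import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
import org.springframework.data.domain.PageRequest;
import java.util.Date;
import java.util.List;
public class MdmLinkQuerySketch {
	// Hypothetical caller for findPidByResourceNameAndThreshold.
	static List<Long> firstPage(IMdmLinkDao theDao, Date theHighThreshold) {
		return theDao.findPidByResourceNameAndThreshold("Patient", theHighThreshold, PageRequest.of(0, 1000));
	}
}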

View File

@ -1,201 +0,0 @@
package ca.uhn.fhir.jpa.dao.expunge;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.delete.job.DeleteExpungeProcessor;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.util.StopWatch;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Slice;
import org.springframework.stereotype.Service;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
@Service
/**
* DeleteExpunge is now performed using the {@link ca.uhn.fhir.jpa.delete.DeleteExpungeJobSubmitterImpl} Spring Batch job.
*/
@Deprecated
public class DeleteExpungeService {
private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeService.class);
@Autowired
protected PlatformTransactionManager myPlatformTransactionManager;
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
private EntityManager myEntityManager;
@Autowired
private FhirContext myFhirContext;
@Autowired
private ResourceTableFKProvider myResourceTableFKProvider;
@Autowired
private IResourceLinkDao myResourceLinkDao;
@Autowired
private IInterceptorBroadcaster myInterceptorBroadcaster;
@Autowired
private DaoConfig myDaoConfig;
@Autowired
private IdHelperService myIdHelper;
public DeleteMethodOutcome expungeByResourcePids(String theUrl, String theResourceName, Slice<Long> thePids, RequestDetails theRequest) {
StopWatch w = new StopWatch();
if (thePids.isEmpty()) {
return new DeleteMethodOutcome();
}
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(String.class, theUrl);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE, params);
TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
txTemplate.executeWithoutResult(t -> validateOkToDeleteAndExpunge(thePids));
ourLog.info("Expunging all records linking to {} resources...", thePids.getNumber());
AtomicLong expungedEntitiesCount = new AtomicLong();
AtomicLong expungedResourcesCount = new AtomicLong();
PartitionRunner partitionRunner = new PartitionRunner(DeleteExpungeProcessor.PROCESS_NAME, DeleteExpungeProcessor.THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount());
partitionRunner.runInPartitionedThreads(thePids, pidChunk -> deleteInTransaction(theResourceName, pidChunk, expungedResourcesCount, expungedEntitiesCount, theRequest));
ourLog.info("Expunged a total of {} records", expungedEntitiesCount);
IBaseOperationOutcome oo;
if (expungedResourcesCount.get() == 0) {
oo = OperationOutcomeUtil.newInstance(myFhirContext);
String message = myFhirContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "unableToDeleteNotFound", theUrl);
String severity = "warning";
String code = "not-found";
OperationOutcomeUtil.addIssue(myFhirContext, oo, severity, message, null, code);
} else {
oo = OperationOutcomeUtil.newInstance(myFhirContext);
String message = myFhirContext.getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "successfulDeletes", expungedResourcesCount.get(), w.getMillis());
String severity = "information";
String code = "informational";
OperationOutcomeUtil.addIssue(myFhirContext, oo, severity, message, null, code);
}
DeleteMethodOutcome retval = new DeleteMethodOutcome();
retval.setExpungedResourcesCount(expungedResourcesCount.get());
retval.setExpungedEntitiesCount(expungedEntitiesCount.get());
retval.setOperationOutcome(oo);
return retval;
}
public void validateOkToDeleteAndExpunge(Slice<Long> theAllTargetPids) {
if (!myDaoConfig.isEnforceReferentialIntegrityOnDelete()) {
ourLog.info("Referential integrity on delete disabled. Skipping referential integrity check.");
return;
}
List<ResourceLink> conflictResourceLinks = Collections.synchronizedList(new ArrayList<>());
PartitionRunner partitionRunner = new PartitionRunner(DeleteExpungeProcessor.PROCESS_NAME, DeleteExpungeProcessor.THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount());
partitionRunner.runInPartitionedThreads(theAllTargetPids, someTargetPids -> findResourceLinksWithTargetPidIn(theAllTargetPids.getContent(), someTargetPids, conflictResourceLinks));
if (conflictResourceLinks.isEmpty()) {
return;
}
ResourceLink firstConflict = conflictResourceLinks.get(0);
//NB-GGG: We previously instantiated these ID values from firstConflict.getSourceResource().getIdDt(), but in a situation where we
//actually had to run delete conflict checks in multiple partitions, the executor service starts its own sessions on a per thread basis, and by the time
//we arrive here, those sessions are closed. So instead, we resolve them from PIDs, which are eagerly loaded.
String sourceResourceId = myIdHelper.resourceIdFromPidOrThrowException(firstConflict.getSourceResourcePid()).toVersionless().getValue();
String targetResourceId = myIdHelper.resourceIdFromPidOrThrowException(firstConflict.getTargetResourcePid()).toVersionless().getValue();
throw new InvalidRequestException("DELETE with _expunge=true failed. Unable to delete " +
targetResourceId + " because " + sourceResourceId + " refers to it via the path " + firstConflict.getSourcePath());
}
public void findResourceLinksWithTargetPidIn(List<Long> theAllTargetPids, List<Long> theSomeTargetPids, List<ResourceLink> theConflictResourceLinks) {
// We only need to find one conflict, so if we found one already in an earlier partition run, we can skip the rest of the searches
if (theConflictResourceLinks.isEmpty()) {
List<ResourceLink> conflictResourceLinks = myResourceLinkDao.findWithTargetPidIn(theSomeTargetPids).stream()
// Filter out resource links for which we are planning to delete the source.
// theAllTargetPids contains a list of all the pids we are planning to delete. So we only want
// to consider a link to be a conflict if the source of that link is not in theAllTargetPids.
.filter(link -> !theAllTargetPids.contains(link.getSourceResourcePid()))
.collect(Collectors.toList());
// We do this in two steps to avoid lock contention on this synchronized list
theConflictResourceLinks.addAll(conflictResourceLinks);
}
}
private void deleteInTransaction(String theResourceName, List<Long> thePidChunk, AtomicLong theExpungedResourcesCount, AtomicLong theExpungedEntitiesCount, RequestDetails theRequest) {
TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
txTemplate.executeWithoutResult(t -> deleteAllRecordsLinkingTo(theResourceName, thePidChunk, theExpungedResourcesCount, theExpungedEntitiesCount, theRequest));
}
private void deleteAllRecordsLinkingTo(String theResourceName, List<Long> thePids, AtomicLong theExpungedResourcesCount, AtomicLong theExpungedEntitiesCount, RequestDetails theRequest) {
HookParams params = new HookParams()
.add(String.class, theResourceName)
.add(List.class, thePids)
.add(AtomicLong.class, theExpungedEntitiesCount)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE_PID_LIST, params);
String pidListString = thePids.toString().replace("[", "(").replace("]", ")");
List<ResourceForeignKey> resourceForeignKeys = myResourceTableFKProvider.getResourceForeignKeys();
for (ResourceForeignKey resourceForeignKey : resourceForeignKeys) {
deleteRecordsByColumn(pidListString, resourceForeignKey, theExpungedEntitiesCount);
}
// Lastly we need to delete records from the resource table all of these other tables link to:
ResourceForeignKey resourceTablePk = new ResourceForeignKey("HFJ_RESOURCE", "RES_ID");
int entitiesDeleted = deleteRecordsByColumn(pidListString, resourceTablePk, theExpungedEntitiesCount);
theExpungedResourcesCount.addAndGet(entitiesDeleted);
}
private int deleteRecordsByColumn(String thePidListString, ResourceForeignKey theResourceForeignKey, AtomicLong theExpungedEntitiesCount) {
int entitesDeleted = myEntityManager.createNativeQuery("DELETE FROM " + theResourceForeignKey.table + " WHERE " + theResourceForeignKey.key + " IN " + thePidListString).executeUpdate();
ourLog.info("Expunged {} records from {}", entitesDeleted, theResourceForeignKey.table);
theExpungedEntitiesCount.addAndGet(entitesDeleted);
return entitesDeleted;
}
}
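For reference, a minimal sketch of the raw-SQL technique the removed service used (table and column names taken from the code above; PID values illustrative): each chunk of resource PIDs is rendered as a parenthesized list and spliced into one native DELETE per dependent table.
import java.util.Arrays;
import java.util.List;
public class PidInClauseSketch {
	public static void main(String[] args) {
		List<Long> pidChunk = Arrays.asList(1L, 2L, 3L);
		// "[1, 2, 3]" -> "(1, 2, 3)", exactly as deleteAllRecordsLinkingTo did
		String pidListString = pidChunk.toString().replace("[", "(").replace("]", ")");
		String sql = "DELETE FROM HFJ_RESOURCE WHERE RES_ID IN " + pidListString;
		System.out.println(sql);
	}
}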

View File

@ -65,6 +65,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.entity.SearchParamPresent;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@ -100,6 +101,9 @@ public class ExpungeEverythingService {
private TransactionTemplate myTxTemplate;
@Autowired
private MemoryCacheService myMemoryCacheService;
@PostConstruct
public void initTxTemplate() {
myTxTemplate = new TransactionTemplate(myPlatformTransactionManager);
@ -122,37 +126,37 @@ public class ExpungeEverythingService {
counter.addAndGet(doExpungeEverythingQuery("UPDATE " + TermCodeSystem.class.getSimpleName() + " d SET d.myCurrentVersion = null"));
return null;
});
counter.addAndGet(expungeEverythingByType(NpmPackageVersionResourceEntity.class));
counter.addAndGet(expungeEverythingByType(NpmPackageVersionEntity.class));
counter.addAndGet(expungeEverythingByType(NpmPackageEntity.class));
counter.addAndGet(expungeEverythingByType(SearchParamPresent.class));
counter.addAndGet(expungeEverythingByType(BulkImportJobFileEntity.class));
counter.addAndGet(expungeEverythingByType(BulkImportJobEntity.class));
counter.addAndGet(expungeEverythingByType(ForcedId.class));
counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamDate.class));
counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamNumber.class));
counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamQuantity.class));
counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamQuantityNormalized.class));
counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamString.class));
counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamToken.class));
counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamUri.class));
counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamCoords.class));
counter.addAndGet(expungeEverythingByType(ResourceIndexedComboStringUnique.class));
counter.addAndGet(expungeEverythingByType(ResourceIndexedComboTokenNonUnique.class));
counter.addAndGet(expungeEverythingByType(ResourceLink.class));
counter.addAndGet(expungeEverythingByType(SearchResult.class));
counter.addAndGet(expungeEverythingByType(SearchInclude.class));
counter.addAndGet(expungeEverythingByType(TermValueSetConceptDesignation.class));
counter.addAndGet(expungeEverythingByType(TermValueSetConcept.class));
counter.addAndGet(expungeEverythingByType(TermValueSet.class));
counter.addAndGet(expungeEverythingByType(TermConceptParentChildLink.class));
counter.addAndGet(expungeEverythingByType(TermConceptMapGroupElementTarget.class));
counter.addAndGet(expungeEverythingByType(TermConceptMapGroupElement.class));
counter.addAndGet(expungeEverythingByType(TermConceptMapGroup.class));
counter.addAndGet(expungeEverythingByType(TermConceptMap.class));
counter.addAndGet(expungeEverythingByType(TermConceptProperty.class));
counter.addAndGet(expungeEverythingByType(TermConceptDesignation.class));
counter.addAndGet(expungeEverythingByType(TermConcept.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(NpmPackageVersionResourceEntity.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(NpmPackageVersionEntity.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(NpmPackageEntity.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(SearchParamPresent.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(BulkImportJobFileEntity.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(BulkImportJobEntity.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ForcedId.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedSearchParamDate.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedSearchParamNumber.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedSearchParamQuantity.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedSearchParamQuantityNormalized.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedSearchParamString.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedSearchParamToken.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedSearchParamUri.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedSearchParamCoords.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedComboStringUnique.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedComboTokenNonUnique.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceLink.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(SearchResult.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(SearchInclude.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermValueSetConceptDesignation.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermValueSetConcept.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermValueSet.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermConceptParentChildLink.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermConceptMapGroupElementTarget.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermConceptMapGroupElement.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermConceptMapGroup.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermConceptMap.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermConceptProperty.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermConceptDesignation.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermConcept.class));
myTxTemplate.execute(t -> {
for (TermCodeSystem next : myEntityManager.createQuery("SELECT c FROM " + TermCodeSystem.class.getName() + " c", TermCodeSystem.class).getResultList()) {
next.setCurrentVersion(null);
@ -160,52 +164,66 @@ public class ExpungeEverythingService {
}
return null;
});
counter.addAndGet(expungeEverythingByType(TermCodeSystemVersion.class));
counter.addAndGet(expungeEverythingByType(TermCodeSystem.class));
counter.addAndGet(expungeEverythingByType(SubscriptionTable.class));
counter.addAndGet(expungeEverythingByType(ResourceHistoryTag.class));
counter.addAndGet(expungeEverythingByType(ResourceTag.class));
counter.addAndGet(expungeEverythingByType(TagDefinition.class));
counter.addAndGet(expungeEverythingByType(ResourceHistoryProvenanceEntity.class));
counter.addAndGet(expungeEverythingByType(ResourceHistoryTable.class));
counter.addAndGet(expungeEverythingByType(ResourceTable.class));
counter.addAndGet(expungeEverythingByType(PartitionEntity.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermCodeSystemVersion.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermCodeSystem.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(SubscriptionTable.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceHistoryTag.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceTag.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(TagDefinition.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceHistoryProvenanceEntity.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceHistoryTable.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceTable.class));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(PartitionEntity.class));
myTxTemplate.execute(t -> {
counter.addAndGet(doExpungeEverythingQuery("DELETE from " + Search.class.getSimpleName() + " d"));
return null;
});
purgeAllCaches();
ourLog.info("COMPLETED GLOBAL $expunge - Deleted {} rows", counter.get());
}
private void purgeAllCaches() {
myTxTemplate.execute(t -> {
myMemoryCacheService.invalidateAllCaches();
return null;
});
}
private int expungeEverythingByTypeWithoutPurging(Class<?> theEntityType) {
int outcome = 0;
while (true) {
StopWatch sw = new StopWatch();
@SuppressWarnings("ConstantConditions")
int count = myTxTemplate.execute(t -> {
CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
CriteriaQuery<?> cq = cb.createQuery(theEntityType);
cq.from(theEntityType);
TypedQuery<?> query = myEntityManager.createQuery(cq);
query.setMaxResults(1000);
List<?> results = query.getResultList();
for (Object result : results) {
myEntityManager.remove(result);
}
return results.size();
});
outcome += count;
if (count == 0) {
break;
}
ourLog.info("Have deleted {} entities of type {} in {}", outcome, theEntityType.getSimpleName(), sw.toString());
}
return outcome;
}
public int expungeEverythingByType(Class<?> theEntityType) {
int outcome = 0;
while (true) {
StopWatch sw = new StopWatch();
@SuppressWarnings("ConstantConditions")
int count = myTxTemplate.execute(t -> {
CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
CriteriaQuery<?> cq = cb.createQuery(theEntityType);
cq.from(theEntityType);
TypedQuery<?> query = myEntityManager.createQuery(cq);
query.setMaxResults(1000);
List<?> results = query.getResultList();
for (Object result : results) {
myEntityManager.remove(result);
}
return results.size();
});
outcome += count;
if (count == 0) {
break;
}
ourLog.info("Have deleted {} entities of type {} in {}", outcome, theEntityType.getSimpleName(), sw.toString());
}
return outcome;
int result = expungeEverythingByTypeWithoutPurging(theEntityType);
purgeAllCaches();
return result;
}
private int doExpungeEverythingQuery(String theQuery) {

View File

@ -45,9 +45,12 @@ public class MdmLinkExpandSvc {
@Autowired
private IdHelperService myIdHelperService;
public MdmLinkExpandSvc() {
}
/**
* Given a source resource, perform MDM expansion and return all the resource IDs of all resources that are
* MDM-Matched to this resource.
*
* @param theResource The resource to MDM-Expand
* @return A set of strings representing the FHIR IDs of the expanded resources.

View File

@ -21,8 +21,9 @@ package ca.uhn.fhir.jpa.delete.job;
*/
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.batch.core.Job;
@ -45,7 +46,7 @@ import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME;
* Delete Expunge job.
*/
@Configuration
public class DeleteExpungeJobConfig extends MultiUrlProcessorJobConfig {
public class DeleteExpungeJobConfig {
public static final String DELETE_EXPUNGE_URL_LIST_STEP_NAME = "delete-expunge-url-list-step";
@Autowired
@ -53,11 +54,23 @@ public class DeleteExpungeJobConfig extends MultiUrlProcessorJobConfig {
@Autowired
private JobBuilderFactory myJobBuilderFactory;
@Autowired
private MultiUrlJobParameterValidator myMultiUrlProcessorParameterValidator;
@Autowired
private PidReaderCounterListener myPidCountRecorderListener;
@Autowired
private ReverseCronologicalBatchResourcePidReader myReverseCronologicalBatchResourcePidReader;
@Autowired
private SqlExecutorWriter mySqlExecutorWriter;
@Bean(name = DELETE_EXPUNGE_JOB_NAME)
@Lazy
public Job deleteExpungeJob(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
public Job deleteExpungeJob() {
return myJobBuilderFactory.get(DELETE_EXPUNGE_JOB_NAME)
.validator(multiUrlProcessorParameterValidator(theMatchUrlService, theDaoRegistry))
.validator(myMultiUrlProcessorParameterValidator)
.start(deleteExpungeUrlListStep())
.build();
}
@ -66,10 +79,10 @@ public class DeleteExpungeJobConfig extends MultiUrlProcessorJobConfig {
public Step deleteExpungeUrlListStep() {
return myStepBuilderFactory.get(DELETE_EXPUNGE_URL_LIST_STEP_NAME)
.<List<Long>, List<String>>chunk(1)
.reader(reverseCronologicalBatchResourcePidReader())
.reader(myReverseCronologicalBatchResourcePidReader)
.processor(deleteExpungeProcessor())
.writer(sqlExecutorWriter())
.listener(pidCountRecorderListener())
.writer(mySqlExecutorWriter)
.listener(myPidCountRecorderListener)
.listener(deleteExpungePromotionListener())
.build();
}
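One plausible minimal sketch of the wiring pattern this refactor moves to (class and bean names are generic, not HAPI's): each job config injects shared step components from a common @Configuration instead of creating private copies through local @Bean factory methods, so every job reuses a single definition of each reader, writer, and listener.
import java.util.List;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class ExampleJobConfig {
	@Autowired
	private StepBuilderFactory myStepBuilderFactory;
	@Autowired
	private ItemReader<List<Long>> mySharedPidReader; // declared once, in a shared config
	@Autowired
	private ItemWriter<List<Long>> mySharedWriter;    // ditto
	@Bean
	public Step examplePidStep() {
		return myStepBuilderFactory.get("example-pid-step")
			.<List<Long>, List<Long>>chunk(1)
			.reader(mySharedPidReader)
			.writer(mySharedWriter)
			.build();
	}
}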

View File

@ -49,6 +49,10 @@ public class ReindexEverythingJobConfig {
private StepBuilderFactory myStepBuilderFactory;
@Autowired
private JobBuilderFactory myJobBuilderFactory;
@Autowired
private ReindexWriter myReindexWriter;
@Autowired
private PidReaderCounterListener myPidCountRecorderListener;
@Bean(name = REINDEX_EVERYTHING_JOB_NAME)
@Lazy
@ -63,8 +67,8 @@ public class ReindexEverythingJobConfig {
return myStepBuilderFactory.get(REINDEX_EVERYTHING_STEP_NAME)
.<List<Long>, List<Long>>chunk(1)
.reader(cronologicalBatchAllResourcePidReader())
.writer(reindexWriter())
.listener(reindexEverythingPidCountRecorderListener())
.writer(myReindexWriter)
.listener(myPidCountRecorderListener)
.listener(reindexEverythingPromotionListener())
.build();
}
@ -75,18 +79,6 @@ public class ReindexEverythingJobConfig {
return new CronologicalBatchAllResourcePidReader();
}
@Bean
@StepScope
public ReindexWriter reindexWriter() {
return new ReindexWriter();
}
@Bean
@StepScope
public PidReaderCounterListener reindexEverythingPidCountRecorderListener() {
return new PidReaderCounterListener();
}
@Bean
public ExecutionContextPromotionListener reindexEverythingPromotionListener() {
ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();

View File

@ -21,15 +21,17 @@ package ca.uhn.fhir.jpa.reindex.job;
*/
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.listener.ExecutionContextPromotionListener;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@ -44,19 +46,30 @@ import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.REINDEX_JOB_NAME;
* Reindex job.
*/
@Configuration
public class ReindexJobConfig extends MultiUrlProcessorJobConfig {
public class ReindexJobConfig {
public static final String REINDEX_URL_LIST_STEP_NAME = "reindex-url-list-step";
@Autowired
private StepBuilderFactory myStepBuilderFactory;
@Autowired
private JobBuilderFactory myJobBuilderFactory;
@Autowired
private ReindexWriter myReindexWriter;
@Autowired
private MultiUrlJobParameterValidator myMultiUrlProcessorParameterValidator;
@Autowired
private PidReaderCounterListener myPidCountRecorderListener;
@Autowired
private ReverseCronologicalBatchResourcePidReader myReverseCronologicalBatchResourcePidReader;
@Bean(name = REINDEX_JOB_NAME)
@Lazy
public Job reindexJob(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
public Job reindexJob() {
return myJobBuilderFactory.get(REINDEX_JOB_NAME)
.validator(multiUrlProcessorParameterValidator(theMatchUrlService, theDaoRegistry))
.validator(myMultiUrlProcessorParameterValidator)
.start(reindexUrlListStep())
.build();
}
@ -65,19 +78,13 @@ public class ReindexJobConfig extends MultiUrlProcessorJobConfig {
public Step reindexUrlListStep() {
return myStepBuilderFactory.get(REINDEX_URL_LIST_STEP_NAME)
.<List<Long>, List<Long>>chunk(1)
.reader(reverseCronologicalBatchResourcePidReader())
.writer(reindexWriter())
.listener(pidCountRecorderListener())
.reader(myReverseCronologicalBatchResourcePidReader)
.writer(myReindexWriter)
.listener(myPidCountRecorderListener)
.listener(reindexPromotionListener())
.build();
}
@Bean
@StepScope
public ReindexWriter reindexWriter() {
return new ReindexWriter();
}
@Bean
public ExecutionContextPromotionListener reindexPromotionListener() {
ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();

View File

@ -988,9 +988,12 @@ public class QueryStack {
String theSpnamePrefix, RuntimeSearchParam theSearchParam, List<? extends IQueryParameterType> theList,
SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) {
List<IQueryParameterType> tokens = new ArrayList<>();
boolean paramInverted = false;
TokenParamModifier modifier = null;
for (IQueryParameterType nextOr : theList) {
if (nextOr instanceof TokenParam) {
if (!((TokenParam) nextOr).isEmpty()) {
TokenParam id = (TokenParam) nextOr;
@ -1009,17 +1012,20 @@ public class QueryStack {
}
return createPredicateString(theSourceJoinColumn, theResourceName, theSpnamePrefix, theSearchParam, theList, null, theRequestPartitionId);
}
modifier = id.getModifier();
// for :not modifier, create a token and remove the :not modifier
if (modifier != null && modifier == TokenParamModifier.NOT) {
tokens.add(new TokenParam(((TokenParam) nextOr).getSystem(), ((TokenParam) nextOr).getValue()));
paramInverted = true;
} else {
tokens.add(nextOr);
}
tokens.add(nextOr);
}
} else {
tokens.add(nextOr);
}
}
if (tokens.isEmpty()) {
@ -1027,14 +1033,37 @@ public class QueryStack {
}
String paramName = getParamNameWithPrefix(theSpnamePrefix, theSearchParam.getName());
Condition predicate;
BaseJoiningPredicateBuilder join;
if (paramInverted) {
SearchQueryBuilder sqlBuilder = mySqlBuilder.newChildSqlBuilder();
TokenPredicateBuilder tokenSelector = sqlBuilder.addTokenPredicateBuilder(null);
sqlBuilder.addPredicate(tokenSelector.createPredicateToken(tokens, theResourceName, theSpnamePrefix, theSearchParam, theRequestPartitionId));
SelectQuery sql = sqlBuilder.getSelect();
Expression subSelect = new Subquery(sql);
join = mySqlBuilder.getOrCreateFirstPredicateBuilder();
if (theSourceJoinColumn == null) {
predicate = new InCondition(join.getResourceIdColumn(), subSelect).setNegate(true);
} else {
//-- for the resource link, need join with target_resource_id
predicate = new InCondition(theSourceJoinColumn, subSelect).setNegate(true);
}
} else {
TokenPredicateBuilder tokenJoin = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.TOKEN, theSourceJoinColumn, paramName, () -> mySqlBuilder.addTokenPredicateBuilder(theSourceJoinColumn)).getResult();
TokenPredicateBuilder join = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.TOKEN, theSourceJoinColumn, paramName, () -> mySqlBuilder.addTokenPredicateBuilder(theSourceJoinColumn)).getResult();
if (theList.get(0).getMissing() != null) {
return tokenJoin.createPredicateParamMissingForNonReference(theResourceName, paramName, theList.get(0).getMissing(), theRequestPartitionId);
}
if (theList.get(0).getMissing() != null) {
return join.createPredicateParamMissingForNonReference(theResourceName, paramName, theList.get(0).getMissing(), theRequestPartitionId);
}
Condition predicate = join.createPredicateToken(tokens, theResourceName, theSpnamePrefix, theSearchParam, theOperation, theRequestPartitionId);
predicate = tokenJoin.createPredicateToken(tokens, theResourceName, theSpnamePrefix, theSearchParam, theOperation, theRequestPartitionId);
join = tokenJoin;
}
return join.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate);
}
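A small illustrative sketch of the anti-join shape built above (the SQL text is an assumption about roughly what the builder emits; the column names match the HFJ token index referenced elsewhere in this changeset): the :not case selects the ids that do carry the token in a child subquery, then negates the membership test via setNegate(true).
public class NotModifierShapeSketch {
	public static void main(String[] args) {
		// the child SqlBuilder produces roughly this subselect...
		String subSelect = "SELECT t.RES_ID FROM HFJ_SPIDX_TOKEN t WHERE t.HASH_SYS_AND_VALUE = :hash";
		// ...and the outer query excludes its members
		String predicate = "r.RES_ID NOT IN (" + subSelect + ")";
		System.out.println(predicate);
	}
}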

View File

@ -221,11 +221,15 @@ public class SearchBuilder implements ISearchBuilder {
SearchContainedModeEnum searchContainedMode = theParams.getSearchContainedMode();
// Handle _id last, since it can typically be tacked onto a different parameter
List<String> paramNames = myParams.keySet().stream().filter(t -> !t.equals(IAnyResource.SP_RES_ID)).collect(Collectors.toList());
// Handle _id and _tag last, since they can typically be tacked onto a different parameter
List<String> paramNames = myParams.keySet().stream().filter(t -> !t.equals(IAnyResource.SP_RES_ID))
.filter(t -> !t.equals(Constants.PARAM_TAG)).collect(Collectors.toList());
if (myParams.containsKey(IAnyResource.SP_RES_ID)) {
paramNames.add(IAnyResource.SP_RES_ID);
}
if (myParams.containsKey(Constants.PARAM_TAG)) {
paramNames.add(Constants.PARAM_TAG);
}
// Handle each parameter
for (String nextParamName : paramNames) {

View File

@ -62,6 +62,7 @@ import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.SpecialParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@ -338,6 +339,7 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
boolean foundChainMatch = false;
List<String> candidateTargetTypes = new ArrayList<>();
List<Condition> orPredicates = new ArrayList<>();
boolean paramInverted = false;
QueryStack childQueryFactory = myQueryStack.newChildQueryFactoryWithFullBuilderReuse();
for (String nextType : resourceTypes) {
String chain = theReferenceParam.getChain();
@ -383,6 +385,13 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
if (chainValue == null) {
continue;
}
// For the token param, if it's a :not modifier, need switch OR to AND
if (!paramInverted && chainValue instanceof TokenParam) {
if (((TokenParam) chainValue).getModifier() == TokenParamModifier.NOT) {
paramInverted = true;
}
}
foundChainMatch = true;
orValues.add(chainValue);
}
@ -410,10 +419,17 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
warnAboutPerformanceOnUnqualifiedResources(theParamName, theRequest, candidateTargetTypes);
}
Condition multiTypeOrPredicate = toOrPredicate(orPredicates);
// If :not modifier for a token, switch OR with AND in the multi-type case
Condition multiTypePredicate;
if (paramInverted) {
multiTypePredicate = toAndPredicate(orPredicates);
} else {
multiTypePredicate = toOrPredicate(orPredicates);
}
List<String> pathsToMatch = createResourceLinkPaths(theResourceName, theParamName);
Condition pathPredicate = createPredicateSourcePaths(pathsToMatch);
return toAndPredicate(pathPredicate, multiTypeOrPredicate);
return toAndPredicate(pathPredicate, multiTypePredicate);
}
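The OR-to-AND switch is just De Morgan's law; a tiny runnable check (values illustrative): excluding resources that match the inverted token under any candidate target type means every per-type predicate must hold, i.e. a conjunction of the negated predicates.
public class NotModifierDeMorganSketch {
	public static void main(String[] args) {
		boolean matchesAsTypeA = false; // e.g. chain resolved against one target type
		boolean matchesAsTypeB = true;  // e.g. chain resolved against another
		// AND of negated per-type predicates == negation of the OR over types
		boolean andOfNegations = !matchesAsTypeA && !matchesAsTypeB;
		boolean negatedUnion = !(matchesAsTypeA || matchesAsTypeB);
		System.out.println(andOfNegations == negatedUnion); // true for all inputs
	}
}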
@Nonnull

View File

@ -225,15 +225,15 @@ public class StringPredicateBuilder extends BaseSearchParamPredicateBuilder {
}
public static String createLeftAndRightMatchLikeExpression(String likeExpression) {
return "%" + likeExpression.replace("%", "[%]") + "%";
return "%" + likeExpression.replace("%", "\\%") + "%";
}
public static String createLeftMatchLikeExpression(String likeExpression) {
return likeExpression.replace("%", "[%]") + "%";
return likeExpression.replace("%", "\\%") + "%";
}
public static String createRightMatchLikeExpression(String likeExpression) {
return "%" + likeExpression.replace("%", "[%]");
return "%" + likeExpression.replace("%", "\\%");
}
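A quick before/after demo of the escaping change (assuming the target dialects treat backslash as the LIKE escape character, which is why the SQL Server-specific "[%]" bracket form was dropped):
public class LikeEscapeSketch {
	public static void main(String[] args) {
		String input = "50% glucose";
		String oldStyle = "%" + input.replace("%", "[%]") + "%"; // bracket escape: SQL Server only
		String newStyle = "%" + input.replace("%", "\\%") + "%"; // backslash escape
		System.out.println(oldStyle); // %50[%] glucose%
		System.out.println(newStyle); // %50\% glucose%
	}
}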

View File

@ -2005,9 +2005,9 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
result.setCodeDisplay(code.getDisplay());
for (TermConceptDesignation next : code.getDesignations()) {
IValidationSupport.ConceptDesignation designation = new IValidationSupport.ConceptDesignation();
// filter out the designation based on displayLanguage if any
if (isDisplayLanguageMatch(theDisplayLanguage, next.getLanguage())) {
IValidationSupport.ConceptDesignation designation = new IValidationSupport.ConceptDesignation();
designation.setLanguage(next.getLanguage());
designation.setUseSystem(next.getUseSystem());
designation.setUseCode(next.getUseCode());

View File

@ -35,6 +35,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.InstantType;
import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.Embedded;
import javax.persistence.EmbeddedId;
import javax.persistence.Entity;
@ -96,7 +97,7 @@ public class TestUtil {
.collect(Collectors.toSet());
}
ImmutableSet<ClassInfo> classes = ClassPath.from(TestUtil.class.getClassLoader()).getTopLevelClasses(packageName);
ImmutableSet<ClassInfo> classes = ClassPath.from(TestUtil.class.getClassLoader()).getTopLevelClassesRecursive(packageName);
Set<String> names = new HashSet<String>();
if (classes.size() <= 1) {
@ -106,7 +107,8 @@ public class TestUtil {
for (ClassInfo classInfo : classes) {
Class<?> clazz = Class.forName(classInfo.getName());
Entity entity = clazz.getAnnotation(Entity.class);
if (entity == null) {
Embeddable embeddable = clazz.getAnnotation(Embeddable.class);
if (entity == null && embeddable == null) {
continue;
}

View File

@ -331,7 +331,8 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
BatchJobsConfig.BULK_EXPORT_JOB_NAME,
BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME,
BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME,
BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME
BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME,
BatchJobsConfig.MDM_CLEAR_JOB_NAME
);
}

View File

@ -63,6 +63,7 @@ import org.hl7.fhir.dstu3.model.AllergyIntolerance;
import org.hl7.fhir.dstu3.model.Appointment;
import org.hl7.fhir.dstu3.model.AuditEvent;
import org.hl7.fhir.dstu3.model.Binary;
import org.hl7.fhir.dstu3.model.BodySite;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.CarePlan;
import org.hl7.fhir.dstu3.model.CodeSystem;
@ -95,6 +96,7 @@ import org.hl7.fhir.dstu3.model.Organization;
import org.hl7.fhir.dstu3.model.Patient;
import org.hl7.fhir.dstu3.model.Practitioner;
import org.hl7.fhir.dstu3.model.PractitionerRole;
import org.hl7.fhir.dstu3.model.Procedure;
import org.hl7.fhir.dstu3.model.ProcedureRequest;
import org.hl7.fhir.dstu3.model.Questionnaire;
import org.hl7.fhir.dstu3.model.QuestionnaireResponse;
@ -317,6 +319,12 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
@Qualifier("myTaskDaoDstu3")
protected IFhirResourceDao<Task> myTaskDao;
@Autowired
@Qualifier("myBodySiteDaoDstu3")
protected IFhirResourceDao<BodySite> myBodySiteDao;
@Autowired
@Qualifier("myProcedureDaoDstu3")
protected IFhirResourceDao<Procedure> myProcedureDao;
@Autowired
protected ITermConceptDao myTermConceptDao;
@Autowired
protected ITermCodeSystemDao myTermCodeSystemDao;

View File

@ -116,12 +116,16 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete());
myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
myDaoConfig.setMaximumDeleteConflictQueryCount(new DaoConfig().getMaximumDeleteConflictQueryCount());
myDaoConfig.setBundleBatchPoolSize(new DaoConfig().getBundleBatchPoolSize());
myDaoConfig.setBundleBatchMaxPoolSize(new DaoConfig().getBundleBatchMaxPoolSize());
}
@BeforeEach
public void beforeDisableResultReuse() {
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
myDaoConfig.setBundleBatchPoolSize(1);
myDaoConfig.setBundleBatchMaxPoolSize(1);
}
private Bundle createInputTransactionWithPlaceholderIdInMatchUrl(HTTPVerb theVerb) {

View File

@ -10,6 +10,7 @@ import ca.uhn.fhir.jpa.dao.dstu3.BaseJpaDstu3Test;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.test.concurrency.PointcutLatch;
import org.hl7.fhir.dstu3.model.Patient;
import org.hl7.fhir.dstu3.model.Meta;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
@ -40,6 +41,8 @@ public class ExpungeHookTest extends BaseJpaDstu3Test {
@BeforeEach
public void before() {
myDaoConfig.setExpungeEnabled(true);
myDaoConfig.setResourceClientIdStrategy(DaoConfig.ClientIdStrategyEnum.ALPHANUMERIC);
myDaoConfig.setAutoCreatePlaceholderReferenceTargets(true);
myInterceptorService.registerAnonymousInterceptor(Pointcut.STORAGE_PRESTORAGE_EXPUNGE_EVERYTHING, myEverythingLatch);
myInterceptorService.registerAnonymousInterceptor(Pointcut.STORAGE_PRESTORAGE_EXPUNGE_RESOURCE, myExpungeResourceLatch);
}
@ -65,6 +68,42 @@ public class ExpungeHookTest extends BaseJpaDstu3Test {
assertPatientGone(id);
}
@Test
public void expungeEverythingAndRecreate() throws InterruptedException {
// Create a patient.
Patient thePatient = new Patient();
thePatient.setId("ABC123");
Meta theMeta = new Meta();
theMeta.addProfile("http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient");
thePatient.setMeta(theMeta);
IIdType id = myPatientDao.update(thePatient, mySrd).getId();
assertNotNull(myPatientDao.read(id));
// Expunge it directly.
myPatientDao.delete(id);
ExpungeOptions options = new ExpungeOptions();
options.setExpungeEverything(true);
options.setExpungeDeletedResources(true);
options.setExpungeOldVersions(true);
myPatientDao.expunge(id.toUnqualifiedVersionless(), options, mySrd);
assertPatientGone(id);
// Create it a second time.
myPatientDao.update(thePatient, mySrd);
assertNotNull(myPatientDao.read(id));
// Expunge everything with the service.
myEverythingLatch.setExpectedCount(1);
myExpungeService.expunge(null, null, null, options, mySrd);
myEverythingLatch.awaitExpected();
assertPatientGone(id);
// Create it a third time.
myPatientDao.update(thePatient, mySrd);
assertNotNull(myPatientDao.read(id));
}
private void assertPatientGone(IIdType theId) {
try {
myPatientDao.read(theId);

View File

@ -20,6 +20,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap.EverythingModeEnum;
@ -126,6 +127,7 @@ import org.hl7.fhir.r4.model.ValueSet;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatchers;
@ -150,12 +152,14 @@ import java.util.stream.Collectors;
import static ca.uhn.fhir.rest.api.Constants.PARAM_TYPE;
import static org.apache.commons.lang3.StringUtils.countMatches;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.hasSize;
@ -1356,6 +1360,34 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
assertThat(actual, contains(id));
}
@Test
@DisplayName("Duplicate Conditional Creates all resolve to the same match")
public void testDuplicateConditionalCreatesOnToken() throws IOException {
String inputString = IOUtils.toString(getClass().getResourceAsStream("/duplicate-conditional-create.json"), StandardCharsets.UTF_8);
Bundle firstBundle = myFhirCtx.newJsonParser().parseResource(Bundle.class, inputString);
//Before you ask, yes, this has to be separately parsed. The reason for this is that the parameters passed to mySystemDao.transaction are _not_ immutable, so we cannot
//simply reuse the original bundle object.
Bundle duplicateBundle = myFhirCtx.newJsonParser().parseResource(Bundle.class, inputString);
Bundle bundleResponse = mySystemDao.transaction(new SystemRequestDetails(), firstBundle);
bundleResponse.getEntry()
.forEach(entry -> assertThat(entry.getResponse().getStatus(), is(equalTo("201 Created"))));
IBundleProvider search = myOrganizationDao.search(new SearchParameterMap().setLoadSynchronous(true));
assertEquals(1, search.getAllResources().size());
//Running the bundle again should just result in 0 new resources created, as the org should already exist, and there is no update to the SR.
bundleResponse = mySystemDao.transaction(new SystemRequestDetails(), duplicateBundle);
bundleResponse.getEntry()
.forEach(entry -> {
assertThat(entry.getResponse().getStatus(), is(equalTo("200 OK")));
});
search = myOrganizationDao.search(new SearchParameterMap().setLoadSynchronous(true), new SystemRequestDetails());
assertEquals(1, search.getAllResources().size());
}
@Test
public void testIndexNoDuplicatesToken() {
Patient res = new Patient();

View File

@ -21,18 +21,29 @@ import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.BodyStructure;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.Device;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.Extension;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Procedure;
import org.hl7.fhir.r4.model.Provenance;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.UriType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@ -906,6 +917,117 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
}
@Test
public void testSearchOnUnderscoreParams_AvoidHFJResourceJoins() {
// This Issue: https://github.com/hapifhir/hapi-fhir/issues/2942
// See this PR for a similar type of Fix: https://github.com/hapifhir/hapi-fhir/pull/2909
SearchParameter searchParameter = new SearchParameter();
searchParameter.addBase("BodySite").addBase("Procedure");
searchParameter.setCode("focalAccess");
searchParameter.setType(Enumerations.SearchParamType.REFERENCE);
searchParameter.setExpression("Procedure.extension('Procedure#focalAccess')");
searchParameter.setXpathUsage(SearchParameter.XPathUsageType.NORMAL);
searchParameter.setStatus(Enumerations.PublicationStatus.ACTIVE);
IIdType spId = mySearchParameterDao.create(searchParameter).getId().toUnqualifiedVersionless();
mySearchParamRegistry.forceRefresh();
BodyStructure bs = new BodyStructure();
bs.setDescription("BodyStructure in R4 replaced BodySite from DSTU4");
IIdType bsId = myBodyStructureDao.create(bs, mySrd).getId().toUnqualifiedVersionless();
Patient patient = new Patient();
patient.setId("P1");
patient.setActive(true);
patient.addName().setFamily("FamilyName");
Extension extParent = patient
.addExtension()
.setUrl("http://hl7.org/fhir/us/core/StructureDefinition/us-core-ethnicity");
extParent
.addExtension()
.setUrl("ombCategory")
.setValue(new CodeableConcept().addCoding(new Coding().setSystem("urn:oid:2.16.840.1.113883.5.50")
.setCode("2186-5")
.setDisplay("Not Hispanic or Latino")));
extParent
.addExtension()
.setUrl("text")
.setValue(new StringType("Not Hispanic or Latino"));
myPatientDao.update(patient);
CodeableConcept categoryCodeableConcept1 = new CodeableConcept().addCoding(new Coding().setSystem("acc_proccat_fkc")
.setCode("CANN")
.setDisplay("Cannulation"));
Procedure procedure = new Procedure();
procedure.setSubject(new Reference("Patient/P1"));
procedure.setStatus(Procedure.ProcedureStatus.COMPLETED);
procedure.setCategory(categoryCodeableConcept1);
Extension extProcedure = procedure
.addExtension()
.setUrl("Procedure#focalAccess")
.setValue(new UriType("BodyStructure/" + bsId.getIdPartAsLong()));
procedure.getMeta()
.addTag("acc_procext_fkc", "1STCANN2NDL", "First Successful Cannulation with 2 Needles");
IIdType procedureId = myProcedureDao.create(procedure).getId().toUnqualifiedVersionless();
logAllResources();
logAllResourceTags();
logAllResourceVersions();
// Search example 1:
// http://FHIR_SERVER/fhir_request/Procedure
// ?status%3Anot=entered-in-error&subject=B
// &category=CANN&focalAccess=BodySite%2F3530342921&_tag=TagValue
// NOTE: This gets sorted once so the order is different once it gets executed!
{
// IMPORTANT: Keep the query param order exactly as shown below!
SearchParameterMap map = SearchParameterMap.newSynchronous();
// _tag, category, status, subject, focalAccess
map.add("_tag", new TokenParam("TagValue"));
map.add("category", new TokenParam("CANN"));
map.add("status", new TokenParam("entered-in-error").setModifier(TokenParamModifier.NOT));
map.add("subject", new ReferenceParam("Patient/P1"));
map.add("focalAccess", new ReferenceParam("BodyStructure/" + bsId.getIdPart()));
myCaptureQueriesListener.clear();
IBundleProvider outcome = myProcedureDao.search(map, new SystemRequestDetails());
ourLog.info("Search returned {} resources.", outcome.getResources(0, 999).size());
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
// Check for a particular WHERE CLAUSE in the generated SQL to make sure we are verifying the correct query
assertEquals(2, StringUtils.countMatches(selectQuery.toLowerCase(), " join hfj_res_link "), selectQuery);
// Ensure that we do NOT see a couple of particular WHERE clauses
assertEquals(0, StringUtils.countMatches(selectQuery.toLowerCase(), ".res_type = 'procedure'"), selectQuery);
assertEquals(0, StringUtils.countMatches(selectQuery.toLowerCase(), ".res_deleted_at is null"), selectQuery);
}
// Search example 2:
// http://FHIR_SERVER/fhir_request/Procedure
// ?status%3Anot=entered-in-error&category=CANN&focalAccess=3692871435
// &_tag=1STCANN1NDL%2C1STCANN2NDL&outcome=SUCCESS&_count=1&_requestTrace=True
// NOTE: This gets sorted once so the order is different once it gets executed!
{
// IMPORTANT: Keep the query param order exactly as shown below!
// NOTE: The "outcome" SearchParameter is not being used below, but it doesn't affect the test.
SearchParameterMap map = SearchParameterMap.newSynchronous();
// _tag, category, status, focalAccess
map.add("_tag", new TokenParam("TagValue"));
map.add("category", new TokenParam("CANN"));
map.add("status", new TokenParam("entered-in-error").setModifier(TokenParamModifier.NOT));
map.add("focalAccess", new ReferenceParam("BodyStructure/" + bsId.getIdPart()));
myCaptureQueriesListener.clear();
IBundleProvider outcome = myProcedureDao.search(map, new SystemRequestDetails());
ourLog.info("Search returned {} resources.", outcome.getResources(0, 999).size());
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
// Check for a particular WHERE CLAUSE in the generated SQL to make sure we are verifying the correct query
assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), " join hfj_res_link "), selectQuery);
// Ensure that we do NOT see a couple of particular WHERE clauses
assertEquals(0, StringUtils.countMatches(selectQuery.toLowerCase(), ".res_type = 'procedure'"), selectQuery);
assertEquals(0, StringUtils.countMatches(selectQuery.toLowerCase(), ".res_deleted_at is null"), selectQuery);
}
}
@AfterEach
public void afterResetDao() {

View File

@ -117,12 +117,16 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
myDaoConfig.setAllowInlineMatchUrlReferences(false);
myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete());
myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
myDaoConfig.setBundleBatchPoolSize(new DaoConfig().getBundleBatchPoolSize());
myDaoConfig.setBundleBatchMaxPoolSize(new DaoConfig().getBundleBatchMaxPoolSize());
}
@BeforeEach
public void beforeDisableResultReuse() {
myInterceptorRegistry.registerInterceptor(myInterceptor);
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
myDaoConfig.setBundleBatchPoolSize(1);
myDaoConfig.setBundleBatchMaxPoolSize(1);
}
private Bundle createInputTransactionWithPlaceholderIdInMatchUrl(HTTPVerb theVerb) {

View File

@ -2272,7 +2272,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(0, StringUtils.countMatches(searchSql, "PARTITION_ID"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "TAG_SYSTEM = 'http://system'"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "t1.HASH_SYS_AND_VALUE ="), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, ".HASH_SYS_AND_VALUE ="), searchSql);
}

View File

@ -1,9 +1,9 @@
package ca.uhn.fhir.jpa.delete.job;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.CommonBatchJobConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterUtil;
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
import ca.uhn.fhir.jpa.batch.reader.CronologicalBatchAllResourcePidReader;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
@ -111,7 +111,7 @@ public class ReindexJobTest extends BaseJpaR4Test {
private JobParameters buildEverythingJobParameters(Long theBatchSize) {
Map<String, JobParameter> map = new HashMap<>();
map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MultiUrlProcessorJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
JobParameters parameters = new JobParameters(map);
return parameters;

View File

@ -0,0 +1,77 @@
package ca.uhn.fhir.jpa.provider.dstu3;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import org.hl7.fhir.dstu3.model.BodySite;
import org.hl7.fhir.dstu3.model.CodeableConcept;
import org.hl7.fhir.dstu3.model.Coding;
import org.hl7.fhir.dstu3.model.Encounter;
import org.hl7.fhir.dstu3.model.Observation;
import org.hl7.fhir.dstu3.model.Patient;
import org.hl7.fhir.dstu3.model.Procedure;
import org.hl7.fhir.dstu3.model.Reference;
import org.hl7.fhir.dstu3.model.SearchParameter;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Collections;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
public class ResourceProviderSearchModifierDstu3Test extends BaseResourceProviderDstu3Test {
@Autowired
MatchUrlService myMatchUrlService;
@Test
public void testReplicateBugWithNotDuringChain() {
Encounter enc = new Encounter();
enc.setType(Collections.singletonList(new CodeableConcept().addCoding(new Coding("system", "value", "display"))));
IIdType encId = myEncounterDao.create(enc).getId();
Observation obs = new Observation();
obs.setContext(new Reference(encId));
myObservationDao.create(obs).getId();
{
//Works when not chained:
String encounterSearchString = "Encounter?type:not=system|value";
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(encounterSearchString);
SearchParameterMap searchParameterMap = resourceSearch.getSearchParameterMap();
IBundleProvider search = myEncounterDao.search(searchParameterMap);
assertThat(search.size(), is(equalTo(0)));
}
{
//Works without the NOT qualifier.
String resultSearchString = "Observation?context.type=system|value";
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(resultSearchString);
SearchParameterMap searchParameterMap = resourceSearch.getSearchParameterMap();
IBundleProvider search = myObservationDao.search(searchParameterMap);
assertThat(search.size(), is(equalTo(1)));
}
{
//Works in a chain
String noResultSearchString = "Observation?context.type:not=system|value";
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(noResultSearchString);
SearchParameterMap searchParameterMap = resourceSearch.getSearchParameterMap();
IBundleProvider search = myObservationDao.search(searchParameterMap);
assertThat(search.size(), is(equalTo(0)));
}
{
//Works in a chain with only value
String noResultSearchString = "Observation?context.type:not=value";
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(noResultSearchString);
SearchParameterMap searchParameterMap = resourceSearch.getSearchParameterMap();
IBundleProvider search = myObservationDao.search(searchParameterMap);
assertThat(search.size(), is(equalTo(0)));
}
}
}
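For readers unfamiliar with the programmatic form, a minimal sketch of the unchained query above built directly on a SearchParameterMap instead of through MatchUrlService; TokenParamModifier.NOT is HAPI's standard token modifier and the rest mirrors the test:

// Equivalent of "Encounter?type:not=system|value" without going through a match URL
SearchParameterMap map = new SearchParameterMap();
map.setLoadSynchronous(true); // resolve results eagerly, as DAO-level tests do
TokenParam notType = new TokenParam("system", "value");
notType.setModifier(TokenParamModifier.NOT);
map.add("type", notType);
IBundleProvider result = myEncounterDao.search(map);

The chained variants are easiest to express as match URLs, which is exactly what the test does.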

View File

@ -15,6 +15,7 @@ import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.hapi.rest.server.helper.BatchHelperR4;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.DecimalType;
@ -99,8 +100,7 @@ public class MultitenantBatchOperationR4Test extends BaseMultitenantResourceProv
assertEquals("Patient", interceptor.resourceDefs.get(0).getName());
myInterceptorRegistry.unregisterInterceptor(interceptor);
DecimalType jobIdPrimitive = (DecimalType) response.getParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID);
Long jobId = jobIdPrimitive.getValue().longValue();
Long jobId = BatchHelperR4.jobIdFromParameters(response);
assertEquals(1, myBatchJobHelper.getReadCount(jobId));
assertEquals(1, myBatchJobHelper.getWriteCount(jobId));
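BatchHelperR4.jobIdFromParameters replaces the inline extraction deleted above; a sketch consistent with the removed lines (the shipped helper may differ in details):

public static Long jobIdFromParameters(Parameters theResponse) {
	// The $delete-expunge response carries the Spring Batch job id as a decimal out-parameter
	DecimalType jobIdPrimitive = (DecimalType) theResponse.getParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID);
	return jobIdPrimitive.getValue().longValue();
}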

View File

@ -45,33 +45,15 @@ public class ResourceProviderR4CodeSystemDesignationTest extends BaseResourcePro
String resp = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(respParam);
ourLog.info(resp);
//-- The designations should have de-AT and default language
List<ParametersParameterComponent> parameterList = respParam.getParameter();
verifyParameterList(parameterList);
List<ParametersParameterComponent> designationList = getDesignations(parameterList);
assertEquals("display", respParam.getParameter().get(0).getName());
assertEquals(("Systolic blood pressure 12 hour minimum"), ((StringType) respParam.getParameter().get(0).getValue()).getValue());
assertEquals("abstract", respParam.getParameter().get(1).getName());
assertEquals(false, ((BooleanType) respParam.getParameter().get(1).getValue()).getValue());
//-- designationList
assertEquals(2, designationList.size());
// 1. de-AT:Systolic blood pressure 12 hour minimum
ParametersParameterComponent designation = designationList.get(0);
assertEquals("language", designation.getPart().get(0).getName());
assertEquals("de-AT", designation.getPart().get(0).getValue().toString());
assertEquals("value", designation.getPart().get(2).getName());
assertEquals("de-AT:Systolic blood pressure 12 hour minimum", designation.getPart().get(2).getValue().toString());
// 2. Systolic blood pressure 12 hour minimum (no language)
designation = designationList.get(1);
assertEquals("language", designation.getPart().get(0).getName());
assertNull(designation.getPart().get(0).getValue());
assertEquals("value", designation.getPart().get(2).getName());
assertEquals("Systolic blood pressure 12 hour minimum", designation.getPart().get(2).getValue().toString());
// should be de-AT and default
assertEquals(2, designationList.size());
verifyDesignationDeAT(designationList.get(0));
verifyDesignationNoLanguage(designationList.get(1));
}
@ -89,25 +71,14 @@ public class ResourceProviderR4CodeSystemDesignationTest extends BaseResourcePro
String resp = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(respParam);
ourLog.info(resp);
//-- The designations should have default language only
List<ParametersParameterComponent> parameterList = respParam.getParameter();
verifyParameterList(parameterList);
List<ParametersParameterComponent> designationList = getDesignations(parameterList);
assertEquals("display", respParam.getParameter().get(0).getName());
assertEquals(("Systolic blood pressure 12 hour minimum"), ((StringType) respParam.getParameter().get(0).getValue()).getValue());
assertEquals("abstract", respParam.getParameter().get(1).getName());
assertEquals(false, ((BooleanType) respParam.getParameter().get(1).getValue()).getValue());
//-- designationList
assertEquals(1, designationList.size());
// 1. Systolic blood pressure 12 hour minimum (no language)
ParametersParameterComponent designation = designationList.get(0);
assertEquals("language", designation.getPart().get(0).getName());
assertNull(designation.getPart().get(0).getValue());
assertEquals("value", designation.getPart().get(2).getName());
assertEquals("Systolic blood pressure 12 hour minimum", designation.getPart().get(2).getValue().toString());
// should be default only
assertEquals(1, designationList.size());
verifyDesignationNoLanguage(designationList.get(0));
}
@ -124,41 +95,145 @@ public class ResourceProviderR4CodeSystemDesignationTest extends BaseResourcePro
String resp = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(respParam);
ourLog.info(resp);
//-- The designations should have all languages and the default language
List<ParametersParameterComponent> parameterList = respParam.getParameter();
verifyParameterList(parameterList);
List<ParametersParameterComponent> designationList = getDesignations(parameterList);
assertEquals("display", respParam.getParameter().get(0).getName());
assertEquals(("Systolic blood pressure 12 hour minimum"), ((StringType) respParam.getParameter().get(0).getValue()).getValue());
assertEquals("abstract", respParam.getParameter().get(1).getName());
assertEquals(false, ((BooleanType) respParam.getParameter().get(1).getValue()).getValue());
// designation should be fr-FR, de-AT and default
assertEquals(3, designationList.size());
verifyDesignationfrFR(designationList.get(0));
verifyDesignationDeAT(designationList.get(1));
verifyDesignationNoLanguage(designationList.get(2));
//-- designationList
assertEquals(3, designationList.size());
}
@Test
public void testLookupWithDisplayLanguageCaching() {
// 1. fr-FR:Systolic blood pressure 12 hour minimum
ParametersParameterComponent designation = designationList.get(0);
//-- first call with de-AT
Parameters respParam = myClient
.operation()
.onType(CodeSystem.class)
.named("lookup")
.withParameter(Parameters.class, "code", new CodeType("8494-7"))
.andParameter("system", new UriType(CS_ACME_URL))
.andParameter("displayLanguage",new CodeType("de-AT"))
.execute();
String resp = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(respParam);
ourLog.info(resp);
//-- The designations should have de-AT and default language
List<ParametersParameterComponent> parameterList = respParam.getParameter();
verifyParameterList(parameterList);
List<ParametersParameterComponent> designationList = getDesignations(parameterList);
// should be de-AT and default
assertEquals(2, designationList.size());
verifyDesignationDeAT(designationList.get(0));
verifyDesignationNoLanguage(designationList.get(1));
//-- second call with zh-CN (not-exist)
respParam = myClient
.operation()
.onType(CodeSystem.class)
.named("lookup")
.withParameter(Parameters.class, "code", new CodeType("8494-7"))
.andParameter("system", new UriType(CS_ACME_URL))
.andParameter("displayLanguage",new CodeType("zh-CN"))
.execute();
resp = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(respParam);
ourLog.info(resp);
//-- The designations should have default language only
parameterList = respParam.getParameter();
verifyParameterList(parameterList);
designationList = getDesignations(parameterList);
// should be default only
assertEquals(1, designationList.size());
verifyDesignationNoLanguage(designationList.get(0));
//-- third call with no language
respParam = myClient
.operation()
.onType(CodeSystem.class)
.named("lookup")
.withParameter(Parameters.class, "code", new CodeType("8494-7"))
.andParameter("system", new UriType(CS_ACME_URL))
.execute();
resp = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(respParam);
ourLog.info(resp);
//-- The designations should have all languages and the default language
parameterList = respParam.getParameter();
verifyParameterList(parameterList);
designationList = getDesignations(parameterList);
// designation should be fr-FR, de-AT and default
assertEquals(3, designationList.size());
verifyDesignationfrFR(designationList.get(0));
verifyDesignationDeAT(designationList.get(1));
verifyDesignationNoLanguage(designationList.get(2));
//-- fourth call with fr-FR
respParam = myClient
.operation()
.onType(CodeSystem.class)
.named("lookup")
.withParameter(Parameters.class, "code", new CodeType("8494-7"))
.andParameter("system", new UriType(CS_ACME_URL))
.andParameter("displayLanguage",new CodeType("fr-FR"))
.execute();
resp = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(respParam);
ourLog.info(resp);
//-- The designations should have fr-FR languages and the default language
parameterList = respParam.getParameter();
verifyParameterList(parameterList);
designationList = getDesignations(parameterList);
// designation should be fr-FR, default
assertEquals(2, designationList.size());
verifyDesignationfrFR(designationList.get(0));
verifyDesignationNoLanguage(designationList.get(1));
}
private void verifyParameterList(List<ParametersParameterComponent> parameterList) {
assertEquals("display", parameterList.get(0).getName());
assertEquals(("Systolic blood pressure 12 hour minimum"),
((StringType) parameterList.get(0).getValue()).getValue());
assertEquals("abstract", parameterList.get(1).getName());
assertEquals(false, ((BooleanType) parameterList.get(1).getValue()).getValue());
}
private void verifyDesignationfrFR(ParametersParameterComponent designation) {
assertEquals("language", designation.getPart().get(0).getName());
assertEquals("fr-FR", designation.getPart().get(0).getValue().toString());
assertEquals("value", designation.getPart().get(2).getName());
assertEquals("fr-FR:Systolic blood pressure 12 hour minimum", designation.getPart().get(2).getValue().toString());
// 2. de-AT:Systolic blood pressure 12 hour minimum
designation = designationList.get(1);
}
private void verifyDesignationDeAT(ParametersParameterComponent designation) {
assertEquals("language", designation.getPart().get(0).getName());
assertEquals("de-AT", designation.getPart().get(0).getValue().toString());
assertEquals("value", designation.getPart().get(2).getName());
assertEquals("de-AT:Systolic blood pressure 12 hour minimum", designation.getPart().get(2).getValue().toString());
// 3. Systolic blood pressure 12 hour minimum (no language)
designation = designationList.get(2);
}
private void verifyDesignationNoLanguage(ParametersParameterComponent designation) {
assertEquals("language", designation.getPart().get(0).getName());
assertNull(designation.getPart().get(0).getValue());
assertEquals("value", designation.getPart().get(2).getName());
assertEquals("Systolic blood pressure 12 hour minimum", designation.getPart().get(2).getValue().toString());
}
private List<ParametersParameterComponent> getDesignations(List<ParametersParameterComponent> parameterList) {
List<ParametersParameterComponent> designationList = new ArrayList<>();
@ -168,6 +243,5 @@ public class ResourceProviderR4CodeSystemDesignationTest extends BaseResourcePro
designationList.add(parameter);
}
return designationList;
}
}
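These helpers pin down parts 0 (language) and 2 (value) of each designation; in a $lookup response the middle part of a designation is its use coding, which the assertions deliberately skip. If one wanted to cover it, a hedged extra check would be:

// Hypothetical assertion on the skipped middle part; the exact use coding depends on the CodeSystem fixture
assertEquals("use", designation.getPart().get(1).getName());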

View File

@ -295,7 +295,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
myCaptureQueriesListener.logSelectQueries();
}
@Test
public void testSearchWithContainsLowerCase() {
myDaoConfig.setAllowContainsSearches(true);
@ -333,6 +332,37 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
@Test
public void testSearchWithPercentSign() {
myDaoConfig.setAllowContainsSearches(true);
Patient pt1 = new Patient();
pt1.addName().setFamily("Smith%");
String pt1id = myPatientDao.create(pt1).getId().toUnqualifiedVersionless().getValue();
Bundle output = myClient
.search()
.forResource("Patient")
.where(Patient.NAME.contains().value("Smith%"))
.returnBundle(Bundle.class)
.execute();
List<String> ids = output.getEntry().stream().map(t -> t.getResource().getIdElement().toUnqualifiedVersionless().getValue()).collect(Collectors.toList());
assertThat(ids, containsInAnyOrder(pt1id));
Patient pt2 = new Patient();
pt2.addName().setFamily("Sm%ith");
String pt2id = myPatientDao.create(pt2).getId().toUnqualifiedVersionless().getValue();
output = myClient
.search()
.forResource("Patient")
.where(Patient.NAME.contains().value("Sm%ith"))
.returnBundle(Bundle.class)
.execute();
ids = output.getEntry().stream().map(t -> t.getResource().getIdElement().toUnqualifiedVersionless().getValue()).collect(Collectors.toList());
assertThat(ids, containsInAnyOrder(pt2id));
}
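The percent sign matters because it is the SQL LIKE wildcard, so a literal % in a :contains value has to survive escaping in the generated query. Over raw HTTP the character must additionally be URL-encoded; a hedged equivalent of the first client call, using the same server base as the surrounding tests:

// %25 is the URL encoding of a literal percent sign
HttpGet get = new HttpGet(ourServerBase + "/Patient?name:contains=Smith%25");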
@Test
public void testSearchWithDateInvalid() throws IOException {
HttpGet get = new HttpGet(ourServerBase + "/Condition?onset-date=junk");

View File

@ -0,0 +1,249 @@
package ca.uhn.fhir.jpa.provider.r4;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Observation.ObservationStatus;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Quantity;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.client.interceptor.CapturingInterceptor;
public class ResourceProviderSearchModifierR4Test extends BaseResourceProviderR4Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceProviderSearchModifierR4Test.class);
private CapturingInterceptor myCapturingInterceptor = new CapturingInterceptor();
@Override
@AfterEach
public void after() throws Exception {
super.after();
myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete());
myDaoConfig.setAllowExternalReferences(new DaoConfig().isAllowExternalReferences());
myDaoConfig.setReuseCachedSearchResultsForMillis(new DaoConfig().getReuseCachedSearchResultsForMillis());
myDaoConfig.setCountSearchResultsUpTo(new DaoConfig().getCountSearchResultsUpTo());
myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
myDaoConfig.setAllowContainsSearches(new DaoConfig().isAllowContainsSearches());
myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
myClient.unregisterInterceptor(myCapturingInterceptor);
}
@BeforeEach
@Override
public void before() throws Exception {
super.before();
myFhirCtx.setParserErrorHandler(new StrictErrorHandler());
myDaoConfig.setAllowMultipleDelete(true);
myClient.registerInterceptor(myCapturingInterceptor);
myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
}
@BeforeEach
public void beforeDisableResultReuse() {
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
}
@Test
public void testSearch_SingleCode_not_modifier() throws Exception {
List<IIdType> obsList = createObs(10, false);
String uri = ourServerBase + "/Observation?code:not=2345-3";
List<String> ids = searchAndReturnUnqualifiedVersionlessIdValues(uri);
assertEquals(9, ids.size());
assertEquals(obsList.get(0).toString(), ids.get(0));
assertEquals(obsList.get(1).toString(), ids.get(1));
assertEquals(obsList.get(2).toString(), ids.get(2));
assertEquals(obsList.get(4).toString(), ids.get(3));
assertEquals(obsList.get(5).toString(), ids.get(4));
assertEquals(obsList.get(6).toString(), ids.get(5));
assertEquals(obsList.get(7).toString(), ids.get(6));
assertEquals(obsList.get(8).toString(), ids.get(7));
assertEquals(obsList.get(9).toString(), ids.get(8));
}
@Test
public void testSearch_SingleCode_multiple_not_modifier() throws Exception {
List<IIdType> obsList = createObs(10, false);
String uri = ourServerBase + "/Observation?code:not=2345-3&code:not=2345-7&code:not=2345-9";
List<String> ids = searchAndReturnUnqualifiedVersionlessIdValues(uri);
assertEquals(7, ids.size());
assertEquals(obsList.get(0).toString(), ids.get(0));
assertEquals(obsList.get(1).toString(), ids.get(1));
assertEquals(obsList.get(2).toString(), ids.get(2));
assertEquals(obsList.get(4).toString(), ids.get(3));
assertEquals(obsList.get(5).toString(), ids.get(4));
assertEquals(obsList.get(6).toString(), ids.get(5));
assertEquals(obsList.get(8).toString(), ids.get(6));
}
@Test
public void testSearch_SingleCode_mix_modifier() throws Exception {
List<IIdType> obsList = createObs(10, false);
// Observation?code:not=2345-3&code:not=2345-7&code:not=2345-9
// slower than Observation?code:not=2345-3&code=2345-7&code:not=2345-9
String uri = ourServerBase + "/Observation?code:not=2345-3&code=2345-7&code:not=2345-9";
List<String> ids = searchAndReturnUnqualifiedVersionlessIdValues(uri);
assertEquals(1, ids.size());
assertEquals(obsList.get(7).toString(), ids.get(0));
}
@Test
public void testSearch_SingleCode_or_not_modifier() throws Exception {
List<IIdType> obsList = createObs(10, false);
String uri = ourServerBase + "/Observation?code:not=2345-3,2345-7,2345-9";
List<String> ids = searchAndReturnUnqualifiedVersionlessIdValues(uri);
assertEquals(7, ids.size());
assertEquals(obsList.get(0).toString(), ids.get(0));
assertEquals(obsList.get(1).toString(), ids.get(1));
assertEquals(obsList.get(2).toString(), ids.get(2));
assertEquals(obsList.get(4).toString(), ids.get(3));
assertEquals(obsList.get(5).toString(), ids.get(4));
assertEquals(obsList.get(6).toString(), ids.get(5));
assertEquals(obsList.get(8).toString(), ids.get(6));
}
@Test
public void testSearch_MultiCode_not_modifier() throws Exception {
List<IIdType> obsList = createObs(10, true);
String uri = ourServerBase + "/Observation?code:not=2345-3";
List<String> ids = searchAndReturnUnqualifiedVersionlessIdValues(uri);
assertEquals(8, ids.size());
assertEquals(obsList.get(0).toString(), ids.get(0));
assertEquals(obsList.get(1).toString(), ids.get(1));
assertEquals(obsList.get(4).toString(), ids.get(2));
assertEquals(obsList.get(5).toString(), ids.get(3));
assertEquals(obsList.get(6).toString(), ids.get(4));
assertEquals(obsList.get(7).toString(), ids.get(5));
assertEquals(obsList.get(8).toString(), ids.get(6));
assertEquals(obsList.get(9).toString(), ids.get(7));
}
@Test
public void testSearch_MultiCode_multiple_not_modifier() throws Exception {
List<IIdType> obsList = createObs(10, true);
String uri = ourServerBase + "/Observation?code:not=2345-3&code:not=2345-4";
List<String> ids = searchAndReturnUnqualifiedVersionlessIdValues(uri);
assertEquals(7, ids.size());
assertEquals(obsList.get(0).toString(), ids.get(0));
assertEquals(obsList.get(1).toString(), ids.get(1));
assertEquals(obsList.get(5).toString(), ids.get(2));
assertEquals(obsList.get(6).toString(), ids.get(3));
assertEquals(obsList.get(7).toString(), ids.get(4));
assertEquals(obsList.get(8).toString(), ids.get(5));
assertEquals(obsList.get(9).toString(), ids.get(6));
}
@Test
public void testSearch_MultiCode_mix_modifier() throws Exception {
List<IIdType> obsList = createObs(10, true);
String uri = ourServerBase + "/Observation?code:not=2345-3&code=2345-7&code:not=2345-9";
List<String> ids = searchAndReturnUnqualifiedVersionlessIdValues(uri);
assertEquals(2, ids.size());
assertEquals(obsList.get(6).toString(), ids.get(0));
assertEquals(obsList.get(7).toString(), ids.get(1));
}
@Test
public void testSearch_MultiCode_or_not_modifier() throws Exception {
List<IIdType> obsList = createObs(10, true);
String uri = ourServerBase + "/Observation?code:not=2345-3,2345-7,2345-9";
List<String> ids = searchAndReturnUnqualifiedVersionlessIdValues(uri);
assertEquals(4, ids.size());
assertEquals(obsList.get(0).toString(), ids.get(0));
assertEquals(obsList.get(1).toString(), ids.get(1));
assertEquals(obsList.get(4).toString(), ids.get(2));
assertEquals(obsList.get(5).toString(), ids.get(3));
}
private List<String> searchAndReturnUnqualifiedVersionlessIdValues(String uri) throws IOException {
List<String> ids;
HttpGet get = new HttpGet(uri);
try (CloseableHttpResponse response = ourHttpClient.execute(get)) {
String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info("Response was: {}", resp);
Bundle bundle = myFhirCtx.newXmlParser().parseResource(Bundle.class, resp);
ourLog.info("Bundle: \n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(bundle));
ids = toUnqualifiedVersionlessIdValues(bundle);
}
return ids;
}
private List<IIdType> createObs(int obsNum, boolean isMultiple) {
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().setFamily("Tester").addGiven("Joe");
IIdType pid = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
List<IIdType> obsIds = new ArrayList<>();
IIdType obsId = null;
for (int i=0; i<obsNum; i++) {
Observation obs = new Observation();
obs.setStatus(ObservationStatus.FINAL);
obs.getSubject().setReferenceElement(pid);
obs.setEffective(new DateTimeType("2001-02-01"));
CodeableConcept cc = obs.getCode();
cc.addCoding().setCode("2345-"+i).setSystem("http://loinc.org");
obs.setValue(new Quantity().setValue(200));
if (isMultiple) {
cc = obs.getCode();
cc.addCoding().setCode("2345-"+(i+1)).setSystem("http://loinc.org");
obs.setValue(new Quantity().setValue(300));
}
obsId = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
obsIds.add(obsId);
}
return obsIds;
}
}
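To see why the expected counts hold: with isMultiple=false, observation i carries only code 2345-i, so code:not=2345-3 excludes just observation 3 and 9 of 10 remain. With isMultiple=true, observation i carries both 2345-i and 2345-(i+1), so 2345-3 appears on observations 2 and 3 (8 remain); repeated :not parameters AND together, so excluding 2345-3 and 2345-4 removes observations 2, 3 and 4 (7 remain); and the comma list 2345-3,2345-7,2345-9 touches observations 2, 3, 6, 7, 8 and 9, leaving the 4 asserted above.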

View File

@ -55,6 +55,7 @@ import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.hapi.rest.server.helper.BatchHelperR4;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Bundle.BundleType;
import org.hl7.fhir.r4.model.Bundle.HTTPVerb;
@ -817,8 +818,7 @@ public class SystemProviderR4Test extends BaseJpaR4Test {
ourLog.info(ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response));
myBatchJobHelper.awaitAllBulkJobCompletions(BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME);
DecimalType jobIdPrimitive = (DecimalType) response.getParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID);
Long jobId = jobIdPrimitive.getValue().longValue();
Long jobId = BatchHelperR4.jobIdFromParameters(response);
// validate

View File

@ -0,0 +1,66 @@
{
"resourceType": "Bundle",
"type": "transaction",
"entry": [
{
"fullUrl": "urn:uuid:4cd35592-5d4d-462b-8483-e404c023d316",
"resource": {
"resourceType": "Organization",
"identifier": [
{
"system": "https://fhir.tester.ca/NamingSystem/ca-on-health-care-facility-id",
"value": "3972"
}
]
},
"request": {
"method": "POST",
"url": "/Organization",
"ifNoneExist": "Organization?identifier=https://fhir.tester.ca/NamingSystem/ca-on-health-care-facility-id|3972"
}
},
{
"fullUrl": "urn:uuid:02643c1d-94d1-4991-a063-036fa0f57ec2",
"resource": {
"resourceType": "Organization",
"identifier": [
{
"system": "https://fhir.tester.ca/NamingSystem/ca-on-health-care-facility-id",
"value": "3972"
}
]
},
"request": {
"method": "POST",
"url": "/Organization",
"ifNoneExist": "Organization?identifier=https://fhir.tester.ca/NamingSystem/ca-on-health-care-facility-id|3972"
}
},
{
"fullUrl": "urn:uuid:8271e94f-e08b-498e-ad6d-751928c3ff99",
"resource": {
"resourceType": "ServiceRequest",
"identifier": [
{
"system": "https://fhir-tester.ca/NamingSystem/service-request-id",
"value": "1"
}
],
"performer": [
{
"reference": "urn:uuid:4cd35592-5d4d-462b-8483-e404c023d316",
"type": "Organization"
},
{
"reference": "urn:uuid:02643c1d-94d1-4991-a063-036fa0f57ec2",
"type": "Organization"
}
]
},
"request": {
"method": "PUT",
"url": "/ServiceRequest?identifier=https://fhir-tester.ca/NamingSystem/service-request-id|1"
}
}
]
}
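This fixture exercises conditional create: both Organization entries carry the same ifNoneExist clause, so a conformant server creates the Organization once and rewrites both urn:uuid performer references in the ServiceRequest to the same server id. A hedged sketch of submitting it, with the client and context names following the surrounding tests and bundleJson assumed to hold the JSON above:

Bundle bundle = myFhirCtx.newJsonParser().parseResource(Bundle.class, bundleJson);
// Posts the whole bundle as a single FHIR transaction
Bundle response = myClient.transaction().withBundle(bundle).execute();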

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.mdm.config;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.mdm.MdmBatchJobSubmitterFactoryImpl;
import ca.uhn.fhir.jpa.dao.mdm.MdmLinkDeleteSvc;
import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptor;
import ca.uhn.fhir.jpa.mdm.broker.MdmMessageHandler;
@ -31,10 +32,8 @@ import ca.uhn.fhir.jpa.mdm.dao.MdmLinkFactory;
import ca.uhn.fhir.jpa.mdm.interceptor.IMdmStorageInterceptor;
import ca.uhn.fhir.jpa.mdm.interceptor.MdmStorageInterceptor;
import ca.uhn.fhir.jpa.mdm.svc.GoldenResourceMergerSvcImpl;
import ca.uhn.fhir.jpa.mdm.svc.MdmClearSvcImpl;
import ca.uhn.fhir.jpa.mdm.svc.MdmControllerSvcImpl;
import ca.uhn.fhir.jpa.mdm.svc.MdmEidUpdateService;
import ca.uhn.fhir.jpa.mdm.svc.MdmGoldenResourceDeletingSvc;
import ca.uhn.fhir.jpa.mdm.svc.MdmLinkQuerySvcImpl;
import ca.uhn.fhir.jpa.mdm.svc.MdmLinkSvcImpl;
import ca.uhn.fhir.jpa.mdm.svc.MdmLinkUpdaterSvcImpl;
@ -52,8 +51,8 @@ import ca.uhn.fhir.jpa.mdm.svc.candidate.MdmCandidateSearchCriteriaBuilderSvc;
import ca.uhn.fhir.jpa.mdm.svc.candidate.MdmCandidateSearchSvc;
import ca.uhn.fhir.jpa.mdm.svc.candidate.MdmGoldenResourceFindingSvc;
import ca.uhn.fhir.mdm.api.IGoldenResourceMergerSvc;
import ca.uhn.fhir.mdm.api.IMdmBatchJobSubmitterFactory;
import ca.uhn.fhir.mdm.api.IMdmControllerSvc;
import ca.uhn.fhir.mdm.api.IMdmExpungeSvc;
import ca.uhn.fhir.mdm.api.IMdmLinkQuerySvc;
import ca.uhn.fhir.mdm.api.IMdmLinkSvc;
import ca.uhn.fhir.mdm.api.IMdmLinkUpdaterSvc;
@ -183,8 +182,8 @@ public class MdmConsumerConfig {
}
@Bean
IMdmExpungeSvc mdmResetSvc(MdmLinkDaoSvc theMdmLinkDaoSvc, MdmGoldenResourceDeletingSvc theDeletingSvc, IMdmSettings theIMdmSettings) {
return new MdmClearSvcImpl(theMdmLinkDaoSvc, theDeletingSvc, theIMdmSettings);
IMdmBatchJobSubmitterFactory mdmBatchJobSubmitterFactory() {
return new MdmBatchJobSubmitterFactoryImpl();
}
@Bean
@ -251,4 +250,5 @@ public class MdmConsumerConfig {
IMdmControllerSvc mdmControllerSvc() {
return new MdmControllerSvcImpl();
}
}

View File

@ -21,17 +21,15 @@ package ca.uhn.fhir.jpa.mdm.config;
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.mdm.api.IMdmChannelSubmitterSvc;
import ca.uhn.fhir.mdm.api.IMdmSettings;
import ca.uhn.fhir.mdm.api.IMdmSubmitSvc;
import ca.uhn.fhir.mdm.rules.config.MdmRuleValidator;
import ca.uhn.fhir.jpa.dao.mdm.MdmLinkDeleteSvc;
import ca.uhn.fhir.jpa.mdm.interceptor.MdmSubmitterInterceptorLoader;
import ca.uhn.fhir.jpa.mdm.svc.MdmChannelSubmitterSvcImpl;
import ca.uhn.fhir.jpa.mdm.svc.MdmGoldenResourceDeletingSvc;
import ca.uhn.fhir.jpa.mdm.svc.MdmSearchParamSvc;
import ca.uhn.fhir.jpa.mdm.svc.MdmSubmitSvcImpl;
import ca.uhn.fhir.jpa.subscription.channel.api.IChannelFactory;
import ca.uhn.fhir.mdm.api.IMdmChannelSubmitterSvc;
import ca.uhn.fhir.mdm.api.IMdmSubmitSvc;
import ca.uhn.fhir.mdm.rules.config.MdmRuleValidator;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@ -60,11 +58,6 @@ public class MdmSubmitterConfig {
return new MdmLinkDeleteSvc();
}
@Bean
MdmGoldenResourceDeletingSvc mdmGoldenResourceDeletingSvc() {
return new MdmGoldenResourceDeletingSvc();
}
@Bean
@Lazy
IMdmChannelSubmitterSvc mdmChannelSubmitterSvc(FhirContext theFhirContext, IChannelFactory theChannelFactory) {
@ -72,7 +65,7 @@ public class MdmSubmitterConfig {
}
@Bean
IMdmSubmitSvc mdmBatchService(IMdmSettings theMdmSetting) {
IMdmSubmitSvc mdmSubmitService() {
return new MdmSubmitSvcImpl();
}
}

View File

@ -40,13 +40,10 @@ import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
public class MdmLinkDaoSvc {
@ -227,46 +224,6 @@ public class MdmLinkDaoSvc {
return myMdmLinkDao.findAll(example);
}
/**
* Delete all {@link MdmLink} entities, and return all resource PIDs from the source of the relationship.
*
* @return A list of Long representing the related Golden Resource Pids.
*/
@Transactional
public List<Long> deleteAllMdmLinksAndReturnGoldenResourcePids() {
List<MdmLink> all = myMdmLinkDao.findAll();
return deleteMdmLinksAndReturnGoldenResourcePids(all);
}
private List<Long> deleteMdmLinksAndReturnGoldenResourcePids(List<MdmLink> theLinks) {
Set<Long> goldenResources = theLinks.stream().map(MdmLink::getGoldenResourcePid).collect(Collectors.toSet());
//TODO GGG this is probably invalid... we are essentially looking for GOLDEN -> GOLDEN links, which are either POSSIBLE_DUPLICATE
//and REDIRECT
goldenResources.addAll(theLinks.stream()
.filter(link -> link.getMatchResult().equals(MdmMatchResultEnum.REDIRECT)
|| link.getMatchResult().equals(MdmMatchResultEnum.POSSIBLE_DUPLICATE))
.map(MdmLink::getSourcePid).collect(Collectors.toSet()));
ourLog.info("Deleting {} MDM link records...", theLinks.size());
myMdmLinkDao.deleteAll(theLinks);
ourLog.info("{} MDM link records deleted", theLinks.size());
return new ArrayList<>(goldenResources);
}
/**
* Given a valid {@link String}, delete all {@link MdmLink} entities for that type, and get the Pids
* for the Golden Resources which were the sources of the links.
*
* @param theSourceType the type of relationship you would like to delete.
* @return A list of longs representing the Pids of the Golden Resources resources used as the sources of the relationships that were deleted.
*/
public List<Long> deleteAllMdmLinksOfTypeAndReturnGoldenResourcePids(String theSourceType) {
MdmLink link = new MdmLink();
link.setMdmSourceType(theSourceType);
Example<MdmLink> exampleLink = Example.of(link);
List<MdmLink> allOfType = myMdmLinkDao.findAll(exampleLink);
return deleteMdmLinksAndReturnGoldenResourcePids(allOfType);
}
/**
* Persist an MDM link to the database.
*

View File

@ -1,84 +0,0 @@
package ca.uhn.fhir.jpa.mdm.svc;
/*-
* #%L
* HAPI FHIR JPA Server - Master Data Management
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.mdm.api.IMdmExpungeSvc;
import ca.uhn.fhir.mdm.api.IMdmSettings;
import ca.uhn.fhir.mdm.log.Logs;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;
/**
* This class is responsible for clearing out existing MDM links, as well as deleting all Golden Resources related to those MDM Links.
*/
public class MdmClearSvcImpl implements IMdmExpungeSvc {
private static final Logger ourLog = Logs.getMdmTroubleshootingLog();
final MdmLinkDaoSvc myMdmLinkDaoSvc;
final MdmGoldenResourceDeletingSvc myMdmGoldenResourceDeletingSvcImpl;
final IMdmSettings myMdmSettings;
@Autowired
public MdmClearSvcImpl(MdmLinkDaoSvc theMdmLinkDaoSvc, MdmGoldenResourceDeletingSvc theMdmGoldenResourceDeletingSvcImpl, IMdmSettings theIMdmSettings) {
myMdmLinkDaoSvc = theMdmLinkDaoSvc;
myMdmGoldenResourceDeletingSvcImpl = theMdmGoldenResourceDeletingSvcImpl;
myMdmSettings = theIMdmSettings;
}
@Override
public long expungeAllMdmLinksOfSourceType(String theSourceResourceType, ServletRequestDetails theRequestDetails) {
throwExceptionIfInvalidSourceResourceType(theSourceResourceType);
ourLog.info("Clearing all MDM Links for resource type {}...", theSourceResourceType);
List<Long> goldenResourcePids = myMdmLinkDaoSvc.deleteAllMdmLinksOfTypeAndReturnGoldenResourcePids(theSourceResourceType);
DeleteMethodOutcome deleteOutcome = myMdmGoldenResourceDeletingSvcImpl.expungeGoldenResourcePids(goldenResourcePids, theSourceResourceType, theRequestDetails);
ourLog.info("MDM clear operation complete. Removed {} MDM links and {} Golden Resources.", goldenResourcePids.size(), deleteOutcome.getExpungedResourcesCount());
return goldenResourcePids.size();
}
private void throwExceptionIfInvalidSourceResourceType(String theResourceType) {
if (!myMdmSettings.isSupportedMdmType(theResourceType)) {
throw new InvalidRequestException(ProviderConstants.MDM_CLEAR + " does not support resource type: " + theResourceType);
}
}
@Override
public long expungeAllMdmLinks(ServletRequestDetails theRequestDetails) {
ourLog.info("Clearing all MDM Links...");
long retVal = 0;
for(String mdmType : myMdmSettings.getMdmRules().getMdmTypes()) {
List<Long> goldenResourcePids = myMdmLinkDaoSvc.deleteAllMdmLinksAndReturnGoldenResourcePids();
DeleteMethodOutcome deleteOutcome = myMdmGoldenResourceDeletingSvcImpl.expungeGoldenResourcePids(goldenResourcePids, null, theRequestDetails);
ourLog.info("MDM clear operation on type {} complete. Removed {} MDM links and expunged {} Golden resources.", mdmType, goldenResourcePids.size(), deleteOutcome.getExpungedResourcesCount());
retVal += goldenResourcePids.size();
}
ourLog.info("MDM clear completed expunged with a total of {} golden resources cleared.", retVal);
return retVal;
}
}
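This synchronous clear service is removed in favor of the batch-based $mdm-clear flow: the provider now returns a Parameters resource carrying a Spring Batch job id, and callers await that job, as the updated test helpers later in this commit show. A sketch of the new call path using those helpers' names:

// Clear links for all MDM source types, then block until the batch job finishes
Parameters result = (Parameters) myMdmProvider.clearMdmLinks(null, null, myRequestDetails);
myBatchJobHelper.awaitJobExecution(BatchHelperR4.jobIdFromParameters(result));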

View File

@ -20,7 +20,9 @@ package ca.uhn.fhir.jpa.mdm.svc;
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.mdm.api.IGoldenResourceMergerSvc;
import ca.uhn.fhir.mdm.api.IMdmBatchJobSubmitterFactory;
import ca.uhn.fhir.mdm.api.IMdmControllerSvc;
import ca.uhn.fhir.mdm.api.IMdmLinkQuerySvc;
import ca.uhn.fhir.mdm.api.IMdmLinkUpdaterSvc;
@ -31,14 +33,20 @@ import ca.uhn.fhir.mdm.api.paging.MdmPageRequest;
import ca.uhn.fhir.mdm.model.MdmTransactionContext;
import ca.uhn.fhir.mdm.provider.MdmControllerHelper;
import ca.uhn.fhir.mdm.provider.MdmControllerUtil;
import ca.uhn.fhir.rest.server.provider.MultiUrlProcessor;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
import javax.annotation.Nullable;
import java.math.BigDecimal;
import java.util.List;
/**
* This class acts as a layer between MdmProviders and MDM services to support a REST API that's not a FHIR Operation API.
@ -46,6 +54,8 @@ import javax.annotation.Nullable;
@Service
public class MdmControllerSvcImpl implements IMdmControllerSvc {
@Autowired
FhirContext myFhirContext;
@Autowired
MdmControllerHelper myMdmControllerHelper;
@Autowired
@ -54,6 +64,11 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
IMdmLinkQuerySvc myMdmLinkQuerySvc;
@Autowired
IMdmLinkUpdaterSvc myIMdmLinkUpdaterSvc;
@Autowired
IMdmBatchJobSubmitterFactory myMdmBatchJobSubmitterFactory;
public MdmControllerSvcImpl() {
}
@Override
public IAnyResource mergeGoldenResources(String theFromGoldenResourceId, String theToGoldenResourceId, IAnyResource theManuallyMergedGoldenResource, MdmTransactionContext theMdmTransactionContext) {
@ -91,6 +106,12 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
return myIMdmLinkUpdaterSvc.updateLink(goldenResource, source, matchResult, theMdmTransactionContext);
}
@Override
public IBaseParameters submitMdmClearJob(List<String> theUrls, IPrimitiveType<BigDecimal> theBatchSize, ServletRequestDetails theRequestDetails) {
MultiUrlProcessor multiUrlProcessor = new MultiUrlProcessor(myFhirContext, myMdmBatchJobSubmitterFactory.getClearJobSubmitter());
return multiUrlProcessor.processUrls(theUrls, multiUrlProcessor.getBatchSize(theBatchSize), theRequestDetails);
}
@Override
public void notDuplicateGoldenResource(String theGoldenResourceId, String theTargetGoldenResourceId, MdmTransactionContext theMdmTransactionContext) {
IAnyResource goldenResource = myMdmControllerHelper.getLatestGoldenResourceFromIdOrThrowException(ProviderConstants.MDM_UPDATE_LINK_GOLDEN_RESOURCE_ID, theGoldenResourceId);

View File

@ -1,57 +0,0 @@
package ca.uhn.fhir.jpa.mdm.svc;
/*-
* #%L
* HAPI FHIR JPA Server - Master Data Management
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.mdm.log.Logs;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.dao.expunge.DeleteExpungeService;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.SliceImpl;
import org.springframework.stereotype.Service;
import java.util.List;
@Service
public class MdmGoldenResourceDeletingSvc {
private static final Logger ourLog = Logs.getMdmTroubleshootingLog();
/**
* This is here for the case of possible infinite loops. Technically batch conflict deletion should handle this, but this is an escape hatch.
*/
private static final int MAXIMUM_DELETE_ATTEMPTS = 100000;
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private ExpungeService myExpungeService;
@Autowired
DeleteExpungeService myDeleteExpungeService;
public DeleteMethodOutcome expungeGoldenResourcePids(List<Long> theGoldenResourcePids, String theResourceType, ServletRequestDetails theRequestDetails) {
return myDeleteExpungeService.expungeByResourcePids(ProviderConstants.MDM_CLEAR, theResourceType, new SliceImpl<>(theGoldenResourcePids), theRequestDetails);
}
}

View File

@ -20,10 +20,6 @@ package ca.uhn.fhir.jpa.mdm.svc;
* #L%
*/
import ca.uhn.fhir.mdm.api.IMdmChannelSubmitterSvc;
import ca.uhn.fhir.mdm.api.IMdmSettings;
import ca.uhn.fhir.mdm.api.IMdmSubmitSvc;
import ca.uhn.fhir.mdm.log.Logs;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
@ -31,6 +27,10 @@ import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.mdm.api.IMdmChannelSubmitterSvc;
import ca.uhn.fhir.mdm.api.IMdmSettings;
import ca.uhn.fhir.mdm.api.IMdmSubmitSvc;
import ca.uhn.fhir.mdm.log.Logs;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
@ -69,6 +69,9 @@ public class MdmSubmitSvcImpl implements IMdmSubmitSvc {
private int myBufferSize = DEFAULT_BUFFER_SIZE;
public MdmSubmitSvcImpl() {
}
@Override
@Transactional
public long submitAllSourceTypesToMdm(@Nullable String theCriteria) {

View File

@ -1,13 +1,13 @@
package ca.uhn.fhir.jpa.mdm.config;
import ca.uhn.fhir.jpa.mdm.helper.MdmLinkHelper;
import ca.uhn.fhir.mdm.api.IMdmSettings;
import ca.uhn.fhir.mdm.rules.config.MdmRuleValidator;
import ca.uhn.fhir.mdm.rules.config.MdmSettings;
import ca.uhn.fhir.jpa.mdm.helper.MdmLinkHelper;
import ca.uhn.fhir.rest.server.FifoMemoryPagingProvider;
import ca.uhn.fhir.rest.server.IPagingProvider;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@ -41,4 +41,9 @@ public abstract class BaseTestMdmConfig {
MdmLinkHelper mdmLinkHelper() {
return new MdmLinkHelper();
}
@Bean
BatchJobHelper batchJobHelper(JobExplorer theJobExplorer) {
return new BatchJobHelper(theJobExplorer);
}
}

View File

@ -1,9 +1,9 @@
package ca.uhn.fhir.jpa.mdm.provider;
import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.entity.MdmLink;
import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.StringType;
@ -51,6 +51,8 @@ public abstract class BaseLinkR4Test extends BaseProviderR4Test {
saveLink(myLink);
assertEquals(MdmLinkSourceEnum.AUTO, myLink.getLinkSource());
myDaoConfig.setExpungeEnabled(true);
myDaoConfig.setAllowMultipleDelete(true);
myDaoConfig.setDeleteExpungeEnabled(true);
}
@AfterEach

View File

@ -1,23 +1,29 @@
package ca.uhn.fhir.jpa.mdm.provider;
import ca.uhn.fhir.jpa.mdm.BaseMdmR4Test;
import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
import ca.uhn.fhir.mdm.api.IMdmControllerSvc;
import ca.uhn.fhir.mdm.api.IMdmExpungeSvc;
import ca.uhn.fhir.mdm.api.IMdmMatchFinderSvc;
import ca.uhn.fhir.mdm.api.IMdmSubmitSvc;
import ca.uhn.fhir.mdm.provider.MdmProviderDstu3Plus;
import ca.uhn.fhir.mdm.rules.config.MdmSettings;
import ca.uhn.fhir.rest.server.IPagingProvider;
import ca.uhn.fhir.rest.server.IRestfulServerDefaults;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.hapi.rest.server.helper.BatchHelperR4;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.DefaultResourceLoader;
import org.springframework.core.io.Resource;
import javax.annotation.Nonnull;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public abstract class BaseProviderR4Test extends BaseMdmR4Test {
MdmProviderDstu3Plus myMdmProvider;
@ -26,11 +32,13 @@ public abstract class BaseProviderR4Test extends BaseMdmR4Test {
@Autowired
private IMdmControllerSvc myMdmControllerSvc;
@Autowired
private IMdmExpungeSvc myMdmExpungeSvc;
private IMdmClearJobSubmitter myMdmClearJobSubmitter;
@Autowired
private IMdmSubmitSvc myMdmSubmitSvc;
@Autowired
private MdmSettings myMdmSettings;
@Autowired
BatchJobHelper myBatchJobHelper;
private String defaultScript;
@ -44,13 +52,31 @@ public abstract class BaseProviderR4Test extends BaseMdmR4Test {
@BeforeEach
public void before() {
myMdmProvider = new MdmProviderDstu3Plus(myFhirContext, myMdmControllerSvc, myMdmMatchFinderSvc, myMdmExpungeSvc, myMdmSubmitSvc);
myMdmProvider = new MdmProviderDstu3Plus(myFhirContext, myMdmControllerSvc, myMdmMatchFinderSvc, myMdmSubmitSvc, myMdmSettings);
defaultScript = myMdmSettings.getScriptText();
}
@AfterEach
public void after() throws IOException {
super.after();
myMdmSettings.setScriptText(defaultScript);
myMdmResourceMatcherSvc.init(); // This bugger creates new objects from the beans and then ignores them.
}
protected void clearMdmLinks() {
Parameters result = (Parameters) myMdmProvider.clearMdmLinks(null, null, myRequestDetails);
myBatchJobHelper.awaitJobExecution(BatchHelperR4.jobIdFromParameters(result));
}
protected void clearMdmLinks(String theResourceName) {
Parameters result = (Parameters) myMdmProvider.clearMdmLinks(getResourceNames(theResourceName), null, myRequestDetails);
myBatchJobHelper.awaitJobExecution(BatchHelperR4.jobIdFromParameters(result));
}
@Nonnull
protected List<IPrimitiveType<String>> getResourceNames(String theResourceName) {
List<IPrimitiveType<String>> resourceNames = new ArrayList<>();
resourceNames.add(new StringType(theResourceName));
return resourceNames;
}
}

View File

@ -68,7 +68,7 @@ public class MdmProviderBatchR4Test extends BaseLinkR4Test {
@Test
public void testBatchRunOnAllMedications() throws InterruptedException {
StringType criteria = null;
myMdmProvider.clearMdmLinks(null, myRequestDetails);
clearMdmLinks();
afterMdmLatch.runWithExpectedCount(1, () -> myMdmProvider.mdmBatchOnAllSourceResources(new StringType("Medication"), criteria, null));
assertLinkCount(1);
@ -77,32 +77,33 @@ public class MdmProviderBatchR4Test extends BaseLinkR4Test {
@Test
public void testBatchRunOnAllPractitioners() throws InterruptedException {
StringType criteria = null;
myMdmProvider.clearMdmLinks(null, myRequestDetails);
clearMdmLinks();
afterMdmLatch.runWithExpectedCount(1, () -> myMdmProvider.mdmBatchPractitionerType(criteria, null));
assertLinkCount(1);
}
@Test
public void testBatchRunOnSpecificPractitioner() throws InterruptedException {
myMdmProvider.clearMdmLinks(null, myRequestDetails);
clearMdmLinks();
afterMdmLatch.runWithExpectedCount(1, () -> myMdmProvider.mdmBatchPractitionerInstance(myPractitioner.getIdElement(), null));
assertLinkCount(1);
}
@Test
public void testBatchRunOnNonExistentSpecificPractitioner() {
myMdmProvider.clearMdmLinks(null, myRequestDetails);
clearMdmLinks();
try {
myMdmProvider.mdmBatchPractitionerInstance(new IdType("Practitioner/999"), null);
fail();
} catch (ResourceNotFoundException e){}
} catch (ResourceNotFoundException e) {
}
}
@Test
public void testBatchRunOnAllPatients() throws InterruptedException {
assertLinkCount(3);
StringType criteria = null;
myMdmProvider.clearMdmLinks(null, myRequestDetails);
clearMdmLinks();
afterMdmLatch.runWithExpectedCount(1, () -> myMdmProvider.mdmBatchPatientType(criteria, null));
assertLinkCount(1);
}
@ -110,7 +111,7 @@ public class MdmProviderBatchR4Test extends BaseLinkR4Test {
@Test
public void testBatchRunOnSpecificPatient() throws InterruptedException {
assertLinkCount(3);
myMdmProvider.clearMdmLinks(null, myRequestDetails);
clearMdmLinks();
afterMdmLatch.runWithExpectedCount(1, () -> myMdmProvider.mdmBatchPatientInstance(myPatient.getIdElement(), null));
assertLinkCount(1);
}
@ -118,18 +119,19 @@ public class MdmProviderBatchR4Test extends BaseLinkR4Test {
@Test
public void testBatchRunOnNonExistentSpecificPatient() {
assertLinkCount(3);
myMdmProvider.clearMdmLinks(null, myRequestDetails);
clearMdmLinks();
try {
myMdmProvider.mdmBatchPatientInstance(new IdType("Patient/999"), null);
fail();
} catch (ResourceNotFoundException e){}
} catch (ResourceNotFoundException e) {
}
}
@Test
public void testBatchRunOnAllTypes() throws InterruptedException {
assertLinkCount(3);
StringType criteria = new StringType("");
myMdmProvider.clearMdmLinks(null, myRequestDetails);
clearMdmLinks();
afterMdmLatch.runWithExpectedCount(3, () -> {
myMdmProvider.mdmBatchOnAllSourceResources(null, criteria, null);
});
@ -140,12 +142,12 @@ public class MdmProviderBatchR4Test extends BaseLinkR4Test {
public void testBatchRunOnAllTypesWithInvalidCriteria() {
assertLinkCount(3);
StringType criteria = new StringType("death-date=2020-06-01");
myMdmProvider.clearMdmLinks(null, myRequestDetails);
clearMdmLinks();
try {
myMdmProvider.mdmBatchPractitionerType(criteria, null);
fail();
} catch(InvalidRequestException e) {
} catch (InvalidRequestException e) {
assertThat(e.getMessage(), is(equalTo("Failed to parse match URL[death-date=2020-06-01] - Resource type Practitioner does not have a parameter with name: death-date")));
}
}

View File

@ -10,7 +10,6 @@ import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Practitioner;
import org.hl7.fhir.r4.model.StringType;
@ -30,7 +29,6 @@ import static org.hamcrest.Matchers.nullValue;
import static org.junit.jupiter.api.Assertions.fail;
public class MdmProviderClearLinkR4Test extends BaseLinkR4Test {
protected Practitioner myPractitioner;
protected StringType myPractitionerId;
protected IAnyResource myPractitionerGoldenResource;
@ -48,7 +46,7 @@ public class MdmProviderClearLinkR4Test extends BaseLinkR4Test {
@Test
public void testClearAllLinks() {
assertLinkCount(2);
myMdmProvider.clearMdmLinks(null, myRequestDetails);
clearMdmLinks();
assertNoLinksExist();
}
@ -70,12 +68,13 @@ public class MdmProviderClearLinkR4Test extends BaseLinkR4Test {
assertLinkCount(2);
Patient read = myPatientDao.read(new IdDt(mySourcePatientId.getValueAsString()).toVersionless());
assertThat(read, is(notNullValue()));
myMdmProvider.clearMdmLinks(new StringType("Patient"), myRequestDetails);
clearMdmLinks("Patient");
assertNoPatientLinksExist();
try {
myPatientDao.read(new IdDt(mySourcePatientId.getValueAsString()).toVersionless());
fail();
} catch (ResourceNotFoundException e) {}
} catch (ResourceNotFoundException e) {
}
}
@Test
@ -87,7 +86,7 @@ public class MdmProviderClearLinkR4Test extends BaseLinkR4Test {
Patient patientAndUpdateLinks = createPatientAndUpdateLinks(buildJanePatient());
IAnyResource goldenResource = getGoldenResourceFromTargetResource(patientAndUpdateLinks);
assertThat(goldenResource, is(notNullValue()));
myMdmProvider.clearMdmLinks(null, myRequestDetails);
clearMdmLinks();
assertNoPatientLinksExist();
goldenResource = getGoldenResourceFromTargetResource(patientAndUpdateLinks);
assertThat(goldenResource, is(nullValue()));
@ -104,7 +103,7 @@ public class MdmProviderClearLinkR4Test extends BaseLinkR4Test {
linkGoldenResources(goldenResourceFromTarget, goldenResourceFromTarget2);
//SUT
myMdmProvider.clearMdmLinks(null, myRequestDetails);
clearMdmLinks();
assertNoPatientLinksExist();
IBundleProvider search = myPatientDao.search(buildGoldenResourceParameterMap());
@ -135,7 +134,7 @@ public class MdmProviderClearLinkR4Test extends BaseLinkR4Test {
linkGoldenResources(goldenResourceFromTarget2, goldenResourceFromTarget);
//SUT
IBaseParameters parameters = myMdmProvider.clearMdmLinks(null, myRequestDetails);
clearMdmLinks();
printLinks();
@ -157,18 +156,19 @@ public class MdmProviderClearLinkR4Test extends BaseLinkR4Test {
assertLinkCount(2);
Practitioner read = myPractitionerDao.read(new IdDt(myPractitionerGoldenResourceId.getValueAsString()).toVersionless());
assertThat(read, is(notNullValue()));
myMdmProvider.clearMdmLinks(new StringType("Practitioner"), myRequestDetails);
clearMdmLinks("Practitioner");
assertNoPractitionerLinksExist();
try {
myPractitionerDao.read(new IdDt(myPractitionerGoldenResourceId.getValueAsString()).toVersionless());
fail();
} catch (ResourceNotFoundException e) {}
} catch (ResourceNotFoundException e) {
}
}
@Test
public void testClearInvalidTargetType() {
try {
myMdmProvider.clearMdmLinks(new StringType("Observation"), myRequestDetails);
myMdmProvider.clearMdmLinks(getResourceNames("Observation"), null, myRequestDetails);
fail();
} catch (InvalidRequestException e) {
assertThat(e.getMessage(), is(equalTo("$mdm-clear does not support resource type: Observation")));

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -0,0 +1,5 @@
package ca.uhn.fhir.mdm.api;
public interface IMdmBatchJobSubmitterFactory {
IMdmClearJobSubmitter getClearJobSubmitter();
}

View File

@ -20,24 +20,10 @@ package ca.uhn.fhir.mdm.api;
* #L%
*/
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IMultiUrlJobSubmitter;
public interface IMdmExpungeSvc {
/**
* Given a resource type, delete the underlying MDM links, and their related golden resource objects.
*
* @param theSourceResourceType The type of resources
* @param theRequestDetails
* @return the count of deleted MDM links
*/
long expungeAllMdmLinksOfSourceType(String theSourceResourceType, ServletRequestDetails theRequestDetails);
/**
* Delete all MDM links, and their related golden resource objects.
*
* @return the count of deleted MDM links
* @param theRequestDetails
*/
long expungeAllMdmLinks(ServletRequestDetails theRequestDetails);
/**
* Tag interface for Spring autowiring
*/
public interface IMdmClearJobSubmitter extends IMultiUrlJobSubmitter {
}
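IMdmClearJobSubmitter adds no methods of its own; as the comment says, it exists so the MDM-specific submitter can be autowired unambiguously when more than one IMultiUrlJobSubmitter bean is on the classpath. A minimal usage sketch:

@Autowired
private IMdmClearJobSubmitter myMdmClearJobSubmitter; // resolves the MDM flavour, not just any multi-url submitter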

View File

@ -22,10 +22,15 @@ package ca.uhn.fhir.mdm.api;
import ca.uhn.fhir.mdm.api.paging.MdmPageRequest;
import ca.uhn.fhir.mdm.model.MdmTransactionContext;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.data.domain.Page;
import javax.annotation.Nullable;
import java.math.BigDecimal;
import java.util.List;
public interface IMdmControllerSvc {
@ -38,4 +43,6 @@ public interface IMdmControllerSvc {
IAnyResource mergeGoldenResources(String theFromGoldenResourceId, String theToGoldenResourceId, IAnyResource theManuallyMergedGoldenResource, MdmTransactionContext theMdmTransactionContext);
IAnyResource updateLink(String theGoldenResourceId, String theSourceResourceId, String theMatchResult, MdmTransactionContext theMdmTransactionContext);
IBaseParameters submitMdmClearJob(List<String> theUrls, IPrimitiveType<BigDecimal> theBatchSize, ServletRequestDetails theRequestDetails);
}
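IMdmControllerSvc gains submitMdmClearJob, which takes a list of match URLs instead of a single resource type. A minimal caller sketch, assuming an injected service and that a null batch size falls back to a default (both assumptions, not confirmed by this diff):

    import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
    import org.hl7.fhir.instance.model.api.IBaseParameters;
    import java.util.Arrays;
    import java.util.List;

    public class MdmClearJobCaller {
        private final IMdmControllerSvc myMdmControllerSvc;

        public MdmClearJobCaller(IMdmControllerSvc theControllerSvc) {
            myMdmControllerSvc = theControllerSvc;
        }

        IBaseParameters clearPatientsAndPractitioners(ServletRequestDetails theRequestDetails) {
            // One "match everything" URL per resource type, mirroring what the
            // provider builds from its resourceType parameters.
            List<String> urls = Arrays.asList("Patient?", "Practitioner?");
            return myMdmControllerSvc.submitMdmClearJob(urls, null, theRequestDetails);
        }
    }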

View File

@ -22,8 +22,8 @@ package ca.uhn.fhir.mdm.provider;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.mdm.api.IMdmControllerSvc;
import ca.uhn.fhir.mdm.api.IMdmExpungeSvc;
import ca.uhn.fhir.mdm.api.IMdmMatchFinderSvc;
import ca.uhn.fhir.mdm.api.IMdmSettings;
import ca.uhn.fhir.mdm.api.IMdmSubmitSvc;
import ca.uhn.fhir.mdm.api.MatchedTarget;
import ca.uhn.fhir.mdm.api.MdmConstants;
@ -31,7 +31,6 @@ import ca.uhn.fhir.mdm.api.MdmLinkJson;
import ca.uhn.fhir.mdm.api.paging.MdmPageRequest;
import ca.uhn.fhir.mdm.model.MdmTransactionContext;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.model.primitive.IntegerDt;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
@ -58,10 +57,12 @@ import org.springframework.data.domain.Page;
import javax.annotation.Nonnull;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
import static ca.uhn.fhir.rest.api.Constants.PARAM_OFFSET;
import static org.slf4j.LoggerFactory.getLogger;
@ -69,11 +70,10 @@ import static org.slf4j.LoggerFactory.getLogger;
public class MdmProviderDstu3Plus extends BaseMdmProvider {
private static final Logger ourLog = getLogger(MdmProviderDstu3Plus.class);
private final IMdmControllerSvc myMdmControllerSvc;
private final IMdmMatchFinderSvc myMdmMatchFinderSvc;
private final IMdmExpungeSvc myMdmExpungeSvc;
private final IMdmSubmitSvc myMdmSubmitSvc;
private final IMdmSettings myMdmSettings;
public static final int DEFAULT_PAGE_SIZE = 20;
public static final int MAX_PAGE_SIZE = 100;
@ -84,12 +84,12 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
* Note that this is not a spring bean. Any necessary injections should
* happen in the constructor
*/
public MdmProviderDstu3Plus(FhirContext theFhirContext, IMdmControllerSvc theMdmControllerSvc, IMdmMatchFinderSvc theMdmMatchFinderSvc, IMdmExpungeSvc theMdmExpungeSvc, IMdmSubmitSvc theMdmSubmitSvc) {
public MdmProviderDstu3Plus(FhirContext theFhirContext, IMdmControllerSvc theMdmControllerSvc, IMdmMatchFinderSvc theMdmMatchFinderSvc, IMdmSubmitSvc theMdmSubmitSvc, IMdmSettings theIMdmSettings) {
super(theFhirContext);
myMdmControllerSvc = theMdmControllerSvc;
myMdmMatchFinderSvc = theMdmMatchFinderSvc;
myMdmExpungeSvc = theMdmExpungeSvc;
myMdmSubmitSvc = theMdmSubmitSvc;
myMdmSettings = theIMdmSettings;
}
@Operation(name = ProviderConstants.EMPI_MATCH, typeName = "Patient")
@ -180,21 +180,33 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
);
}
@Operation(name = ProviderConstants.MDM_CLEAR, returnParameters = {
@OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT, typeName = "decimal")
@Operation(name = ProviderConstants.OPERATION_MDM_CLEAR, returnParameters = {
@OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "decimal")
})
public IBaseParameters clearMdmLinks(@OperationParam(name = ProviderConstants.MDM_CLEAR_SOURCE_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theSourceType,
public IBaseParameters clearMdmLinks(@OperationParam(name = ProviderConstants.OPERATION_MDM_CLEAR_RESOURCE_NAME, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theResourceNames,
@OperationParam(name = ProviderConstants.OPERATION_MDM_CLEAR_BATCH_SIZE, typeName = "decimal", min = 0, max = 1) IPrimitiveType<BigDecimal> theBatchSize,
ServletRequestDetails theRequestDetails) {
long resetCount;
if (theSourceType == null || StringUtils.isBlank(theSourceType.getValue())) {
resetCount = myMdmExpungeSvc.expungeAllMdmLinks(theRequestDetails);
List<String> resourceNames = new ArrayList<>();
if (theResourceNames != null) {
resourceNames.addAll(theResourceNames.stream().map(IPrimitiveType::getValue).collect(Collectors.toList()));
validateResourceNames(resourceNames);
} else {
resetCount = myMdmExpungeSvc.expungeAllMdmLinksOfSourceType(theSourceType.getValueAsString(), theRequestDetails);
resourceNames.addAll(myMdmSettings.getMdmRules().getMdmTypes());
}
IBaseParameters retval = ParametersUtil.newInstance(myFhirContext);
ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_MDM_CLEAR_OUT_PARAM_DELETED_COUNT, resetCount);
return retval;
List<String> urls = resourceNames.stream().map(s -> s + "?").collect(Collectors.toList());
return myMdmControllerSvc.submitMdmClearJob(urls, theBatchSize, theRequestDetails);
}
private void validateResourceNames(List<String> theResourceNames) {
for (String resourceName : theResourceNames) {
if (!myMdmSettings.isSupportedMdmType(resourceName)) {
throw new InvalidRequestException(ProviderConstants.OPERATION_MDM_CLEAR + " does not support resource type: " + resourceName);
}
}
}
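With this change, $mdm-clear accepts zero or more resource-name parameters, validates them against the MDM rules, and submits one "Type?" URL per name to the batch framework instead of deleting links inline. A sketch of a client invocation using the HAPI generic client, assuming the input parameter name resolves to "resourceType" and using an illustrative endpoint URL:

    import ca.uhn.fhir.context.FhirContext;
    import ca.uhn.fhir.rest.client.api.IGenericClient;
    import org.hl7.fhir.r4.model.Parameters;
    import org.hl7.fhir.r4.model.StringType;

    public class MdmClearClientExample {
        public static void main(String[] args) {
            FhirContext ctx = FhirContext.forR4();
            IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8000/fhir"); // illustrative URL

            Parameters inParams = new Parameters();
            // Repeat for each type; omit entirely to clear every MDM-supported type.
            inParams.addParameter().setName("resourceType").setValue(new StringType("Patient"));

            Parameters outParams = client
                .operation()
                .onServer()
                .named("$mdm-clear")
                .withParameters(inParams)
                .execute();
            System.out.println(ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outParams));
        }
    }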
@Operation(name = ProviderConstants.MDM_QUERY_LINKS, idempotent = true)
@ -202,9 +214,9 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
@OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_RESOURCE_ID, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theResourceId,
@OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_MATCH_RESULT, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theMatchResult,
@OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_LINK_SOURCE, min = 0, max = 1, typeName = "string")
IPrimitiveType<String> theLinkSource,
IPrimitiveType<String> theLinkSource,
@Description(formalDefinition="Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.")
@Description(formalDefinition = "Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.")
@OperationParam(name = PARAM_OFFSET, min = 0, max = 1, typeName = "integer")
IPrimitiveType<Integer> theOffset,
@Description(formalDefinition = "Results from this method are returned across multiple pages. This parameter controls the size of those pages.")
@ -233,7 +245,7 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
MdmPageRequest mdmPageRequest = new MdmPageRequest(theOffset, theCount, DEFAULT_PAGE_SIZE, MAX_PAGE_SIZE);
Page<MdmLinkJson> possibleDuplicates = myMdmControllerSvc.getDuplicateGoldenResources(createMdmContext(theRequestDetails, MdmTransactionContext.OperationType.DUPLICATE_GOLDEN_RESOURCES, (String) null), mdmPageRequest);
Page<MdmLinkJson> possibleDuplicates = myMdmControllerSvc.getDuplicateGoldenResources(createMdmContext(theRequestDetails, MdmTransactionContext.OperationType.DUPLICATE_GOLDEN_RESOURCES, null), mdmPageRequest);
return parametersFromMdmLinks(possibleDuplicates, false, theRequestDetails, mdmPageRequest);
}
@ -255,7 +267,7 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
}
@Operation(name = ProviderConstants.OPERATION_MDM_SUBMIT, idempotent = false, returnParameters = {
@OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT, typeName = "integer")
@OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer")
})
public IBaseParameters mdmBatchOnAllSourceResources(
@OperationParam(name = ProviderConstants.MDM_BATCH_RUN_RESOURCE_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theResourceType,
@ -278,7 +290,7 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
@Operation(name = ProviderConstants.OPERATION_MDM_SUBMIT, idempotent = false, typeName = "Patient", returnParameters = {
@OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT, typeName = "integer")
@OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer")
})
public IBaseParameters mdmBatchPatientInstance(
@IdParam IIdType theIdParam,
@ -288,7 +300,7 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
}
@Operation(name = ProviderConstants.OPERATION_MDM_SUBMIT, idempotent = false, typeName = "Patient", returnParameters = {
@OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT, typeName = "integer")
@OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer")
})
public IBaseParameters mdmBatchPatientType(
@OperationParam(name = ProviderConstants.MDM_BATCH_RUN_CRITERIA, typeName = "string") IPrimitiveType<String> theCriteria,
@ -299,7 +311,7 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
}
@Operation(name = ProviderConstants.OPERATION_MDM_SUBMIT, idempotent = false, typeName = "Practitioner", returnParameters = {
@OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT, typeName = "integer")
@OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer")
})
public IBaseParameters mdmBatchPractitionerInstance(
@IdParam IIdType theIdParam,
@ -309,7 +321,7 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
}
@Operation(name = ProviderConstants.OPERATION_MDM_SUBMIT, idempotent = false, typeName = "Practitioner", returnParameters = {
@OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT, typeName = "integer")
@OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer")
})
public IBaseParameters mdmBatchPractitionerType(
@OperationParam(name = ProviderConstants.MDM_BATCH_RUN_CRITERIA, typeName = "string") IPrimitiveType<String> theCriteria,
@ -324,7 +336,7 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
*/
public IBaseParameters buildMdmOutParametersWithCount(long theCount) {
IBaseParameters retval = ParametersUtil.newInstance(myFhirContext);
ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_MDM_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT, theCount);
ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, theCount);
return retval;
}
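ParametersUtil keeps this helper version independent. A runnable sketch of what it produces for a count of 42, using the same two calls as the method above (the import locations are the usual HAPI packages; treat them as assumptions):

    import ca.uhn.fhir.context.FhirContext;
    import ca.uhn.fhir.rest.server.provider.ProviderConstants;
    import ca.uhn.fhir.util.ParametersUtil;
    import org.hl7.fhir.instance.model.api.IBaseParameters;

    public class OutParametersExample {
        public static void main(String[] args) {
            FhirContext ctx = FhirContext.forR4();
            IBaseParameters retVal = ParametersUtil.newInstance(ctx);
            // Adds a single parameter named after the renamed constant, holding the count.
            ParametersUtil.addParameterToParametersLong(ctx, retVal, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, 42L);
            System.out.println(ctx.newJsonParser().encodeResourceToString(retVal));
        }
    }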

View File

@ -23,12 +23,9 @@ package ca.uhn.fhir.mdm.provider;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.mdm.api.IMdmControllerSvc;
import ca.uhn.fhir.mdm.api.IMdmExpungeSvc;
import ca.uhn.fhir.mdm.api.IMdmMatchFinderSvc;
import ca.uhn.fhir.mdm.api.IMdmSettings;
import ca.uhn.fhir.mdm.api.IMdmSubmitSvc;
import ca.uhn.fhir.rest.server.IPagingProvider;
import ca.uhn.fhir.rest.server.IRestfulServerDefaults;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@ -46,9 +43,9 @@ public class MdmProviderLoader {
@Autowired
private IMdmControllerSvc myMdmControllerSvc;
@Autowired
private IMdmExpungeSvc myMdmExpungeSvc;
@Autowired
private IMdmSubmitSvc myMdmSubmitSvc;
@Autowired
private IMdmSettings myMdmSettings;
private BaseMdmProvider myMdmProvider;
@ -57,7 +54,7 @@ public class MdmProviderLoader {
case DSTU3:
case R4:
myResourceProviderFactory.addSupplier(() -> {
myMdmProvider = new MdmProviderDstu3Plus(myFhirContext, myMdmControllerSvc, myMdmMatchFinderSvc, myMdmExpungeSvc, myMdmSubmitSvc);
myMdmProvider = new MdmProviderDstu3Plus(myFhirContext, myMdmControllerSvc, myMdmMatchFinderSvc, myMdmSubmitSvc, myMdmSettings);
return myMdmProvider;
});
break;
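The loader still registers the provider through a supplier, so construction is deferred until the server collects its resource providers; the lambda now passes myMdmSettings in place of the removed expunge service. A tiny self-contained illustration of the supplier idiom (not HAPI code):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Supplier;

    class SupplierRegistry {
        private final List<Supplier<Object>> mySuppliers = new ArrayList<>();

        void addSupplier(Supplier<Object> theSupplier) {
            mySuppliers.add(theSupplier); // nothing is constructed yet
        }

        List<Object> build() {
            List<Object> built = new ArrayList<>();
            mySuppliers.forEach(s -> built.add(s.get())); // construction happens here
            return built;
        }
    }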

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE2-SNAPSHOT</version>
<version>5.6.0-PRE3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

Some files were not shown because too many files have changed in this diff.