Merge branch 'master' into issue-2901-npe-in-bundle-transaction
commit f04cf02be7

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>5.6.0-PRE2-SNAPSHOT</version>
+    <version>5.6.0-PRE3-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE2-SNAPSHOT</version>
+    <version>5.6.0-PRE3-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE2-SNAPSHOT</version>
+    <version>5.6.0-PRE3-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -3,14 +3,14 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-fhir-bom</artifactId>
-  <version>5.6.0-PRE2-SNAPSHOT</version>
+  <version>5.6.0-PRE3-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>HAPI FHIR BOM</name>
 
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE2-SNAPSHOT</version>
+    <version>5.6.0-PRE3-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE2-SNAPSHOT</version>
+    <version>5.6.0-PRE3-SNAPSHOT</version>
     <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -6,7 +6,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir-cli</artifactId>
-    <version>5.6.0-PRE2-SNAPSHOT</version>
+    <version>5.6.0-PRE3-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -6,7 +6,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE2-SNAPSHOT</version>
+    <version>5.6.0-PRE3-SNAPSHOT</version>
     <relativePath>../../hapi-deployable-pom</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>5.6.0-PRE2-SNAPSHOT</version>
+    <version>5.6.0-PRE3-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE2-SNAPSHOT</version>
+    <version>5.6.0-PRE3-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE2-SNAPSHOT</version>
+    <version>5.6.0-PRE3-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE2-SNAPSHOT</version>
+    <version>5.6.0-PRE3-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>5.6.0-PRE2-SNAPSHOT</version>
+    <version>5.6.0-PRE3-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE2-SNAPSHOT</version>
+    <version>5.6.0-PRE3-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -0,0 +1,3 @@
+---
+type: change
+title: "The $mdm-clear operation has been changed to use Spring Batch."

@@ -0,0 +1,7 @@
+---
+type: fix
+issue: 2967
+jira: SMILE-2899
+title: "Previously, the system would only traverse references to discrete resources while performing a chained search.
+This fix adds support for traversing references to contained resources as well, with the limitation that the reference
+to the contained resource must be the last reference in the chain."

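For illustration only (the URL and parameter below are hypothetical, not taken from the commit): with this fix, a chained search such as the following can now match an Observation whose `subject` reference points at a Patient contained inside the Observation itself, provided that contained reference is the last link in the chain.

```url
http://example.com/Observation?subject.name=Smith
```
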
@@ -567,11 +567,15 @@ Note that the request goes to the root of the FHIR server, and not the `Organiza
 
 ## Clearing MDM Links
 
-The `$mdm-clear` operation is used to batch-delete MDM links and related Golden Resources from the database. This operation is meant to be used during the rules-tuning phase of the MDM implementation so that you can quickly test your ruleset. It permits the user to reset the state of their MDM system without manual deletion of all related links and Golden Resources.
+The `$mdm-clear` operation is used to batch-delete MDM links and related Golden Resources from the database. This
+operation is intended to be used during the rules-tuning phase of the MDM implementation so that you can quickly test
+your ruleset. It permits the user to reset the state of their MDM system without manual deletion of all related links
+and Golden Resources.
 
-After the operation is complete, all targeted MDM links are removed from the system, and their related Golden Resources are deleted and expunged from the server.
+After the operation is complete, all targeted MDM links are removed from the system, and their related Golden Resources
+are deleted and expunged from the server.
 
-This operation takes a single optional Parameter.
+This operation takes two optional Parameters.
 
 <table class="table table-striped table-condensed">
    <thead>

@@ -584,11 +588,21 @@
    </thead>
    <tbody>
    <tr>
-      <td>sourceType</td>
+      <td>resourceType</td>
       <td>String</td>
       <td>0..*</td>
       <td>
-         The Source Resource type you would like to clear. If omitted, will operate over all links.
+         The Source resource types you would like to clear. If omitted, all resource types will be cleared.
       </td>
    </tr>
+   <tr>
+      <td>batchSize</td>
+      <td>Integer</td>
+      <td>0..1</td>
+      <td>
+         The number of links that should be deleted at a time. If omitted, then the batch size will be determined by the value
+         of the [Expunge Batch Size](/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
+         property.
+      </td>
+   </tr>
    </tbody>

@@ -598,33 +612,27 @@
 
 Use an HTTP POST to the following URL to invoke this operation:
 
-```url
-http://example.com/$mdm-clear
-```
+```http
+POST /$mdm-clear
+Content-Type: application/fhir+json
+```
 
 The following request body could be used:
 
 ```json
 {
   "resourceType": "Parameters",
   "parameter": [ {
-    "name": "sourceType",
+    "name": "resourceType",
     "valueString": "Patient"
+  }, {
+    "name": "resourceType",
+    "valueString": "Practitioner"
+  }, {
+    "name": "batchSize",
+    "valueDecimal": 1000
   } ]
 }
 ```
 
-This operation returns the number of MDM links that were cleared. The following is a sample response:
-
-```json
-{
-  "resourceType": "Parameters",
-  "parameter": [ {
-    "name": "reset",
-    "valueDecimal": 5
-  } ]
-}
-```
+This operation returns the job execution id of the Spring Batch job that will be run to remove all the links and their
+golden resources.

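(As a sketch only: the diff does not show the new response body, so the parameter name and shape below are assumed, not confirmed. A response carrying the job execution id might look like:)

```json
{
  "resourceType": "Parameters",
  "parameter": [ {
    "name": "jobExecutionId",
    "valueDecimal": 123
  } ]
}
```
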
 
 ## Batch-creating MDM Links

|
@ -11,7 +11,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.6.0-PRE2-SNAPSHOT</version>
|
||||
<version>5.6.0-PRE3-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE2-SNAPSHOT</version>
+    <version>5.6.0-PRE3-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE2-SNAPSHOT</version>
+    <version>5.6.0-PRE3-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>5.6.0-PRE2-SNAPSHOT</version>
+    <version>5.6.0-PRE3-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.batch;
  * #L%
  */
 
+import ca.uhn.fhir.jpa.batch.mdm.job.MdmClearJobConfig;
 import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
 import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig;
 import ca.uhn.fhir.jpa.delete.job.DeleteExpungeJobConfig;

@@ -40,7 +41,8 @@ import java.util.Set;
     BulkImportJobConfig.class,
     DeleteExpungeJobConfig.class,
     ReindexJobConfig.class,
-    ReindexEverythingJobConfig.class
+    ReindexEverythingJobConfig.class,
+    MdmClearJobConfig.class
 })
 public class BatchJobsConfig {
 

@@ -94,4 +96,9 @@ public class BatchJobsConfig {
      */
     public static final String REINDEX_EVERYTHING_JOB_NAME = "reindexEverythingJob";
 
+    /**
+     * MDM Clear
+     */
+    public static final String MDM_CLEAR_JOB_NAME = "mdmClearJob";
+
 }

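The new constant is the registered bean name of the Spring Batch `Job`; consumers inject the job by qualifier, as the `MdmClearJobSubmitterImpl` added later in this commit does:

```java
@Autowired
@Qualifier(BatchJobsConfig.MDM_CLEAR_JOB_NAME)
private Job myMdmClearJob;
```
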
@@ -20,15 +20,46 @@ package ca.uhn.fhir.jpa.batch;
  * #L%
  */
 
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
+import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
 import ca.uhn.fhir.jpa.batch.processor.GoldenResourceAnnotatingProcessor;
 import ca.uhn.fhir.jpa.batch.processor.PidToIBaseResourceProcessor;
+import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
+import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
 import ca.uhn.fhir.jpa.reindex.job.ReindexWriter;
+import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
+import org.springframework.batch.core.JobParametersValidator;
 import org.springframework.batch.core.configuration.annotation.StepScope;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
 @Configuration
 public class CommonBatchJobConfig {
+    public static final int MINUTES_IN_FUTURE_TO_PROCESS_FROM = 1;
+
+    @Bean
+    public MultiUrlJobParameterValidator multiUrlProcessorParameterValidator(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
+        return new MultiUrlJobParameterValidator(theMatchUrlService, theDaoRegistry);
+    }
+
+    @Bean
+    @StepScope
+    public SqlExecutorWriter sqlExecutorWriter() {
+        return new SqlExecutorWriter();
+    }
+
+    @Bean
+    @StepScope
+    public PidReaderCounterListener pidCountRecorderListener() {
+        return new PidReaderCounterListener();
+    }
+
+    @Bean
+    @StepScope
+    public ReverseCronologicalBatchResourcePidReader reverseCronologicalBatchResourcePidReader() {
+        return new ReverseCronologicalBatchResourcePidReader();
+    }
 
     @Bean
     @StepScope

@@ -1,57 +0,0 @@
-package ca.uhn.fhir.jpa.batch.job;
-
-/*-
- * #%L
- * HAPI FHIR JPA Server
- * %%
- * Copyright (C) 2014 - 2021 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
-import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
-import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
-import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
-import org.springframework.batch.core.JobParametersValidator;
-import org.springframework.batch.core.configuration.annotation.StepScope;
-import org.springframework.context.annotation.Bean;
-
-public class MultiUrlProcessorJobConfig {
-    public static final int MINUTES_IN_FUTURE_TO_PROCESS_FROM = 1;
-
-    @Bean
-    public JobParametersValidator multiUrlProcessorParameterValidator(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
-        return new MultiUrlJobParameterValidator(theMatchUrlService, theDaoRegistry);
-    }
-
-    @Bean
-    @StepScope
-    public SqlExecutorWriter sqlExecutorWriter() {
-        return new SqlExecutorWriter();
-    }
-
-    @Bean
-    @StepScope
-    public PidReaderCounterListener pidCountRecorderListener() {
-        return new PidReaderCounterListener();
-    }
-
-    @Bean
-    @StepScope
-    public ReverseCronologicalBatchResourcePidReader reverseCronologicalBatchResourcePidReader() {
-        return new ReverseCronologicalBatchResourcePidReader();
-    }
-}

@@ -43,6 +43,9 @@ public class PartitionedUrlValidator {
     @Autowired
     FhirContext myFhirContext;
 
+    public PartitionedUrlValidator() {
+    }
+
     /**
      * This method will throw an exception if the user is not allowed to access the requested resource type on the partition determined by the request
      */

@@ -0,0 +1,35 @@
+package ca.uhn.fhir.jpa.batch.mdm;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.mdm.api.IMdmBatchJobSubmitterFactory;
+import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
+import org.springframework.beans.factory.annotation.Autowired;
+
+public class MdmBatchJobSubmitterFactoryImpl implements IMdmBatchJobSubmitterFactory {
+    @Autowired
+    IMdmClearJobSubmitter myMdmClearJobSubmitter;
+
+    @Override
+    public IMdmClearJobSubmitter getClearJobSubmitter() {
+        return myMdmClearJobSubmitter;
+    }
+}

@@ -0,0 +1,84 @@
+package ca.uhn.fhir.jpa.batch.mdm;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.interceptor.api.HookParams;
+import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
+import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
+import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
+import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
+import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
+import ca.uhn.fhir.jpa.batch.mdm.job.ReverseCronologicalBatchMdmLinkPidReader;
+import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
+import ca.uhn.fhir.rest.server.provider.ProviderConstants;
+import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
+import org.springframework.batch.core.Job;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.JobParametersInvalidException;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import javax.transaction.Transactional;
+import java.util.List;
+
+public class MdmClearJobSubmitterImpl implements IMdmClearJobSubmitter {
+    @Autowired
+    DaoConfig myDaoConfig;
+    @Autowired
+    PartitionedUrlValidator myPartitionedUrlValidator;
+    @Autowired
+    IInterceptorBroadcaster myInterceptorBroadcaster;
+    @Autowired
+    private IBatchJobSubmitter myBatchJobSubmitter;
+    @Autowired
+    @Qualifier(BatchJobsConfig.MDM_CLEAR_JOB_NAME)
+    private Job myMdmClearJob;
+
+    @Override
+    @Transactional(Transactional.TxType.NEVER)
+    public JobExecution submitJob(Integer theBatchSize, List<String> theUrls, RequestDetails theRequest) throws JobParametersInvalidException {
+        if (theBatchSize == null) {
+            theBatchSize = myDaoConfig.getExpungeBatchSize();
+        }
+        if (!myDaoConfig.canDeleteExpunge()) {
+            throw new ForbiddenOperationException("Delete Expunge not allowed: " + myDaoConfig.cannotDeleteExpungeReason());
+        }
+
+        RequestListJson requestListJson = myPartitionedUrlValidator.buildRequestListJson(theRequest, theUrls);
+
+        for (String url : theUrls) {
+            HookParams params = new HookParams()
+                .add(RequestDetails.class, theRequest)
+                .addIfMatchesType(ServletRequestDetails.class, theRequest)
+                .add(String.class, url);
+            CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE, params);
+        }
+
+        JobParameters jobParameters = ReverseCronologicalBatchMdmLinkPidReader.buildJobParameters(ProviderConstants.OPERATION_MDM_CLEAR, theBatchSize, requestListJson);
+        return myBatchJobSubmitter.runJob(myMdmClearJob, jobParameters);
+    }
+}

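As a usage sketch (the surrounding caller is hypothetical; only the `submitJob` signature comes from the class above): a null batch size falls back to `DaoConfig#getExpungeBatchSize`, and each URL fires the `STORAGE_PRE_DELETE_EXPUNGE` pointcut before the job starts.

```java
@Autowired
private IMdmClearJobSubmitter myMdmClearJobSubmitter;

void clearPatientLinks(RequestDetails theRequest) throws JobParametersInvalidException {
    // "Patient?" is an illustrative match URL covering all Patient source resources
    JobExecution execution = myMdmClearJobSubmitter.submitJob(null, Collections.singletonList("Patient?"), theRequest);
    ourLog.info("Submitted mdm-clear job; execution id {}", execution.getId()); // ourLog assumed to exist in the caller
}
```
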
@@ -0,0 +1,125 @@
+package ca.uhn.fhir.jpa.batch.mdm.job;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
+import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
+import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
+import ca.uhn.fhir.jpa.delete.job.DeleteExpungeProcessor;
+import org.springframework.batch.core.Job;
+import org.springframework.batch.core.Step;
+import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
+import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
+import org.springframework.batch.core.configuration.annotation.StepScope;
+import org.springframework.batch.core.listener.ExecutionContextPromotionListener;
+import org.springframework.batch.item.ItemProcessor;
+import org.springframework.batch.item.support.CompositeItemProcessor;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Lazy;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.MDM_CLEAR_JOB_NAME;
+
+/**
+ * Spring batch Job configuration file. Contains all necessary plumbing to run a
+ * $mdm-clear job.
+ */
+@Configuration
+public class MdmClearJobConfig {
+    public static final String MDM_CLEAR_RESOURCE_LIST_STEP_NAME = "mdm-clear-resource-list-step";
+
+    @Autowired
+    private StepBuilderFactory myStepBuilderFactory;
+    @Autowired
+    private JobBuilderFactory myJobBuilderFactory;
+    @Autowired
+    private DeleteExpungeProcessor myDeleteExpungeProcessor;
+
+    @Autowired
+    @Qualifier("deleteExpungePromotionListener")
+    private ExecutionContextPromotionListener myDeleteExpungePromotionListener;
+
+    @Autowired
+    private MultiUrlJobParameterValidator myMultiUrlProcessorParameterValidator;
+
+    @Autowired
+    private PidReaderCounterListener myPidCountRecorderListener;
+
+    @Autowired
+    private SqlExecutorWriter mySqlExecutorWriter;
+
+    @Bean(name = MDM_CLEAR_JOB_NAME)
+    @Lazy
+    public Job mdmClearJob() {
+        return myJobBuilderFactory.get(MDM_CLEAR_JOB_NAME)
+            .validator(myMultiUrlProcessorParameterValidator)
+            .start(mdmClearUrlListStep())
+            .build();
+    }
+
+    @Bean
+    public Step mdmClearUrlListStep() {
+        return myStepBuilderFactory.get(MDM_CLEAR_RESOURCE_LIST_STEP_NAME)
+            .<List<Long>, List<String>>chunk(1)
+            .reader(reverseCronologicalBatchMdmLinkPidReader())
+            .processor(deleteThenExpungeCompositeProcessor())
+            .writer(mySqlExecutorWriter)
+            .listener(myPidCountRecorderListener)
+            .listener(myDeleteExpungePromotionListener)
+            .build();
+    }
+
+    @Bean
+    @StepScope
+    public ItemProcessor<List<Long>, List<String>> deleteThenExpungeCompositeProcessor() {
+        CompositeItemProcessor<List<Long>, List<String>> compositeProcessor = new CompositeItemProcessor<>();
+        List itemProcessors = new ArrayList<>();
+        itemProcessors.add(mdmLinkDeleter());
+        itemProcessors.add(myDeleteExpungeProcessor);
+        compositeProcessor.setDelegates(itemProcessors);
+        return compositeProcessor;
+    }
+
+    @Bean
+    @StepScope
+    public ReverseCronologicalBatchMdmLinkPidReader reverseCronologicalBatchMdmLinkPidReader() {
+        return new ReverseCronologicalBatchMdmLinkPidReader();
+    }
+
+    @Bean
+    public MdmLinkDeleter mdmLinkDeleter() {
+        return new MdmLinkDeleter();
+    }
+
+    @Bean
+    public ExecutionContextPromotionListener mdmClearPromotionListener() {
+        ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();
+
+        listener.setKeys(new String[]{PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED});
+
+        return listener;
+    }
+}

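To make the step's plumbing concrete: because the chunk size is 1, each pid list the reader emits is processed and written as its own chunk. The data flow through the beans above (types as declared in this commit) is roughly:

```java
// ReverseCronologicalBatchMdmLinkPidReader : reads List<Long>            (MdmLink pids, newest first)
// MdmLinkDeleter                           : List<Long> -> List<Long>    (deletes links, emits golden resource pids)
// DeleteExpungeProcessor                   : List<Long> -> List<String>  (renders delete/expunge SQL)
// SqlExecutorWriter                        : executes the SQL statements
```
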
@@ -0,0 +1,85 @@
+package ca.uhn.fhir.jpa.batch.mdm.job;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
+import ca.uhn.fhir.jpa.dao.expunge.PartitionRunner;
+import ca.uhn.fhir.jpa.entity.MdmLink;
+import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.batch.item.ItemProcessor;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.SliceImpl;
+import org.springframework.transaction.PlatformTransactionManager;
+import org.springframework.transaction.support.TransactionTemplate;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.stream.Collectors;
+
+/**
+ * Take MdmLink pids in and output golden resource pids out
+ */
+
+public class MdmLinkDeleter implements ItemProcessor<List<Long>, List<Long>> {
+    public static final String PROCESS_NAME = "MdmClear";
+    public static final String THREAD_PREFIX = "mdmClear";
+    private static final Logger ourLog = LoggerFactory.getLogger(MdmLinkDeleter.class);
+    @Autowired
+    protected PlatformTransactionManager myTxManager;
+    @Autowired
+    IMdmLinkDao myMdmLinkDao;
+    @Autowired
+    DaoConfig myDaoConfig;
+
+    @Override
+    public List<Long> process(List<Long> thePidList) throws Exception {
+        ConcurrentLinkedQueue<Long> goldenPidAggregator = new ConcurrentLinkedQueue<>();
+        PartitionRunner partitionRunner = new PartitionRunner(PROCESS_NAME, THREAD_PREFIX, myDaoConfig.getReindexBatchSize(), myDaoConfig.getReindexThreadCount());
+        partitionRunner.runInPartitionedThreads(new SliceImpl<>(thePidList), pids -> removeLinks(thePidList, goldenPidAggregator));
+        return new ArrayList<>(goldenPidAggregator);
+    }
+
+    private void removeLinks(List<Long> pidList, ConcurrentLinkedQueue<Long> theGoldenPidAggregator) {
+        TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
+
+        txTemplate.executeWithoutResult(t -> theGoldenPidAggregator.addAll(deleteMdmLinksAndReturnGoldenResourcePids(pidList)));
+    }
+
+    public List<Long> deleteMdmLinksAndReturnGoldenResourcePids(List<Long> thePids) {
+        List<MdmLink> links = myMdmLinkDao.findAllById(thePids);
+        Set<Long> goldenResources = links.stream().map(MdmLink::getGoldenResourcePid).collect(Collectors.toSet());
+        //TODO GGG this is probably invalid... we are essentially looking for GOLDEN -> GOLDEN links, which are either POSSIBLE_DUPLICATE
+        //and REDIRECT
+        goldenResources.addAll(links.stream()
+            .filter(link -> link.getMatchResult().equals(MdmMatchResultEnum.REDIRECT)
+                || link.getMatchResult().equals(MdmMatchResultEnum.POSSIBLE_DUPLICATE))
+            .map(MdmLink::getSourcePid).collect(Collectors.toSet()));
+        ourLog.info("Deleting {} MDM link records...", links.size());
+        myMdmLinkDao.deleteAll(links);
+        ourLog.info("{} MDM link records deleted", links.size());
+        return new ArrayList<>(goldenResources);
+    }
+}

@@ -0,0 +1,52 @@
+package ca.uhn.fhir.jpa.batch.mdm.job;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.batch.reader.BaseReverseCronologicalBatchPidReader;
+import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
+import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Pageable;
+
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * This is the same as the parent class, except it operates on MdmLink entities instead of resource entities
+ */
+public class ReverseCronologicalBatchMdmLinkPidReader extends BaseReverseCronologicalBatchPidReader {
+    @Autowired
+    IMdmLinkDao myMdmLinkDao;
+
+    @Override
+    protected Set<Long> getNextPidBatch(ResourceSearch resourceSearch) {
+        String resourceName = resourceSearch.getResourceName();
+        Pageable pageable = PageRequest.of(0, getBatchSize());
+        //Expand out the list to handle the REDIRECT/POSSIBLE DUPLICATE ones.
+        return new HashSet<>(myMdmLinkDao.findPidByResourceNameAndThreshold(resourceName, getCurrentHighThreshold(), pageable));
+    }
+
+    @Override
+    protected void setDateFromPidFunction(ResourceSearch resourceSearch) {
+        setDateExtractorFunction(pid -> myMdmLinkDao.findById(pid).get().getCreated());
+    }
+}

@@ -69,7 +69,7 @@ public class PidToIBaseResourceProcessor implements ItemProcessor<List<ResourceP
         List<IBaseResource> outgoing = new ArrayList<>();
         sb.loadResourcesByPid(theResourcePersistentId, Collections.emptyList(), outgoing, false, null);
 
-        ourLog.trace("Loaded resources: {}", outgoing.stream().map(t->t.getIdElement().getValue()).collect(Collectors.joining(", ")));
+        ourLog.trace("Loaded resources: {}", outgoing.stream().filter(t -> t != null).map(t -> t.getIdElement().getValue()).collect(Collectors.joining(", ")));
 
         return outgoing;
 

@@ -0,0 +1,213 @@
+package ca.uhn.fhir.jpa.batch.reader;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.batch.CommonBatchJobConfig;
+import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
+import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
+import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
+import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
+import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.SortOrderEnum;
+import ca.uhn.fhir.rest.api.SortSpec;
+import ca.uhn.fhir.rest.param.DateRangeParam;
+import org.apache.commons.lang3.time.DateUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.batch.core.JobParameter;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.item.ExecutionContext;
+import org.springframework.batch.item.ItemReader;
+import org.springframework.batch.item.ItemStream;
+import org.springframework.batch.item.ItemStreamException;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+
+import javax.annotation.Nonnull;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
+
+/**
+ * This Spring Batch reader takes 4 parameters:
+ * {@link #JOB_PARAM_REQUEST_LIST}: A list of URLs to search for along with the partitions those searches should be performed on
+ * {@link #JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search. If omitted, {@link DaoConfig#getExpungeBatchSize} will be used.
+ * {@link #JOB_PARAM_START_TIME}: The latest timestamp of entities to search for
+ * <p>
+ * The reader will return at most {@link #JOB_PARAM_BATCH_SIZE} pids every time it is called, or null
+ * once no more matching entities are available. It returns the resources in reverse chronological order
+ * and stores where it's at in the Spring Batch execution context with the key {@link #CURRENT_THRESHOLD_HIGH}
+ * appended with "." and the index number of the url list item it has gotten up to. This is to permit
+ * restarting jobs that use this reader so it can pick up where it left off.
+ */
+public abstract class BaseReverseCronologicalBatchPidReader implements ItemReader<List<Long>>, ItemStream {
+    public static final String JOB_PARAM_REQUEST_LIST = "url-list";
+    public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
+    public static final String JOB_PARAM_START_TIME = "start-time";
+    public static final String CURRENT_URL_INDEX = "current.url-index";
+    public static final String CURRENT_THRESHOLD_HIGH = "current.threshold-high";
+    private static final Logger ourLog = LoggerFactory.getLogger(ReverseCronologicalBatchResourcePidReader.class);
+    private final BatchDateThresholdUpdater myBatchDateThresholdUpdater = new BatchDateThresholdUpdater();
+    private final Map<Integer, Date> myThresholdHighByUrlIndex = new HashMap<>();
+    private final Map<Integer, Set<Long>> myAlreadyProcessedPidsWithHighDate = new HashMap<>();
+    @Autowired
+    private FhirContext myFhirContext;
+    @Autowired
+    private MatchUrlService myMatchUrlService;
+    private List<PartitionedUrl> myPartitionedUrls;
+    private Integer myBatchSize;
+    private int myUrlIndex = 0;
+    private Date myStartTime;
+
+    private static String highKey(int theIndex) {
+        return CURRENT_THRESHOLD_HIGH + "." + theIndex;
+    }
+
+    @Nonnull
+    public static JobParameters buildJobParameters(String theOperationName, Integer theBatchSize, RequestListJson theRequestListJson) {
+        Map<String, JobParameter> map = new HashMap<>();
+        map.put(MultiUrlJobParameterValidator.JOB_PARAM_OPERATION_NAME, new JobParameter(theOperationName));
+        map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST, new JobParameter(theRequestListJson.toJson()));
+        map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
+        if (theBatchSize != null) {
+            map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
+        }
+        JobParameters parameters = new JobParameters(map);
+        return parameters;
+    }
+
+    @Autowired
+    public void setRequestListJson(@Value("#{jobParameters['" + JOB_PARAM_REQUEST_LIST + "']}") String theRequestListJson) {
+        RequestListJson requestListJson = RequestListJson.fromJson(theRequestListJson);
+        myPartitionedUrls = requestListJson.getPartitionedUrls();
+    }
+
+    @Autowired
+    public void setStartTime(@Value("#{jobParameters['" + JOB_PARAM_START_TIME + "']}") Date theStartTime) {
+        myStartTime = theStartTime;
+    }
+
+    @Override
+    public List<Long> read() throws Exception {
+        while (myUrlIndex < myPartitionedUrls.size()) {
+            List<Long> nextBatch = getNextBatch();
+            if (nextBatch.isEmpty()) {
+                ++myUrlIndex;
+                continue;
+            }
+
+            return nextBatch;
+        }
+        return null;
+    }
+
+    protected List<Long> getNextBatch() {
+        RequestPartitionId requestPartitionId = myPartitionedUrls.get(myUrlIndex).getRequestPartitionId();
+        ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(myPartitionedUrls.get(myUrlIndex).getUrl(), requestPartitionId);
+        myAlreadyProcessedPidsWithHighDate.putIfAbsent(myUrlIndex, new HashSet<>());
+        Set<Long> newPids = getNextPidBatch(resourceSearch);
+
+        if (ourLog.isDebugEnabled()) {
+            ourLog.debug("Search for {}{} returned {} results", resourceSearch.getResourceName(), resourceSearch.getSearchParameterMap().toNormalizedQueryString(myFhirContext), newPids.size());
+            ourLog.debug("Results: {}", newPids);
+        }
+
+        setDateFromPidFunction(resourceSearch);
+
+        List<Long> retval = new ArrayList<>(newPids);
+        Date newThreshold = myBatchDateThresholdUpdater.updateThresholdAndCache(getCurrentHighThreshold(), myAlreadyProcessedPidsWithHighDate.get(myUrlIndex), retval);
+        myThresholdHighByUrlIndex.put(myUrlIndex, newThreshold);
+
+        return retval;
+    }
+
+    protected Date getCurrentHighThreshold() {
+        return myThresholdHighByUrlIndex.get(myUrlIndex);
+    }
+
+    protected void setDateExtractorFunction(Function<Long, Date> theDateExtractorFunction) {
+        myBatchDateThresholdUpdater.setDateFromPid(theDateExtractorFunction);
+    }
+
+    protected void addDateCountAndSortToSearch(ResourceSearch resourceSearch) {
+        SearchParameterMap map = resourceSearch.getSearchParameterMap();
+        map.setLastUpdated(new DateRangeParam().setUpperBoundInclusive(getCurrentHighThreshold()));
+        map.setLoadSynchronousUpTo(myBatchSize);
+        map.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.DESC));
+    }
+
+    @Override
+    public void open(ExecutionContext executionContext) throws ItemStreamException {
+        if (executionContext.containsKey(CURRENT_URL_INDEX)) {
+            myUrlIndex = new Long(executionContext.getLong(CURRENT_URL_INDEX)).intValue();
+        }
+        for (int index = 0; index < myPartitionedUrls.size(); ++index) {
+            String key = highKey(index);
+            if (executionContext.containsKey(key)) {
+                myThresholdHighByUrlIndex.put(index, new Date(executionContext.getLong(key)));
+            } else {
+                myThresholdHighByUrlIndex.put(index, myStartTime);
+            }
+        }
+    }
+
+    @Override
+    public void update(ExecutionContext executionContext) throws ItemStreamException {
+        executionContext.putLong(CURRENT_URL_INDEX, myUrlIndex);
+        for (int index = 0; index < myPartitionedUrls.size(); ++index) {
+            Date date = myThresholdHighByUrlIndex.get(index);
+            if (date != null) {
+                executionContext.putLong(highKey(index), date.getTime());
+            }
+        }
+    }
+
+    @Override
+    public void close() throws ItemStreamException {
+    }
+
+    protected Integer getBatchSize() {
+        return myBatchSize;
+    }
+
+    @Autowired
+    public void setBatchSize(@Value("#{jobParameters['" + JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
+        myBatchSize = theBatchSize;
+    }
+
+    protected Set<Long> getAlreadySeenPids() {
+        return myAlreadyProcessedPidsWithHighDate.get(myUrlIndex);
+    }
+
+    protected abstract Set<Long> getNextPidBatch(ResourceSearch resourceSearch);
+
+    protected abstract void setDateFromPidFunction(ResourceSearch resourceSearch);
+}

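A short sketch of how the restart bookkeeping above round-trips (illustrative only; `reader` stands for any concrete subclass):

```java
ExecutionContext ctx = new ExecutionContext();
reader.open(ctx);                 // seeds each URL's high threshold from ctx, else from JOB_PARAM_START_TIME
List<Long> pids = reader.read();  // one newest-first batch; advances the current URL's threshold
reader.update(ctx);               // persists "current.url-index" and "current.threshold-high.<n>"
reader.close();                   // a restarted job re-opens with the saved context and resumes
```
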
@@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.batch.reader;
 
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
-import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
+import ca.uhn.fhir.jpa.batch.CommonBatchJobConfig;
 import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import com.fasterxml.jackson.core.JsonProcessingException;

@@ -93,7 +93,7 @@ public class CronologicalBatchAllResourcePidReader implements ItemReader<List<Lo
     public static JobParameters buildJobParameters(Integer theBatchSize, RequestPartitionId theRequestPartitionId) {
         Map<String, JobParameter> map = new HashMap<>();
         map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_REQUEST_PARTITION, new JobParameter(theRequestPartitionId.toJson()));
-        map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MultiUrlProcessorJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
+        map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
         if (theBatchSize != null) {
             map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
         }

@@ -20,209 +20,52 @@ package ca.uhn.fhir.jpa.batch.reader;
  * #L%
  */
 
-import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
-import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
-import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
-import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
-import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
 import ca.uhn.fhir.jpa.dao.IResultIterator;
-import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
 import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
-import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
-import ca.uhn.fhir.rest.api.Constants;
-import ca.uhn.fhir.rest.api.SortOrderEnum;
-import ca.uhn.fhir.rest.api.SortSpec;
 import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
-import ca.uhn.fhir.rest.param.DateRangeParam;
-import org.apache.commons.lang3.time.DateUtils;
 import org.hl7.fhir.instance.model.api.IBaseResource;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.batch.core.JobParameter;
-import org.springframework.batch.core.JobParameters;
-import org.springframework.batch.item.ExecutionContext;
-import org.springframework.batch.item.ItemReader;
-import org.springframework.batch.item.ItemStream;
-import org.springframework.batch.item.ItemStreamException;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
 
-import javax.annotation.Nonnull;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
 import java.util.LinkedHashSet;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
 
-/**
- * This Spring Batch reader takes 4 parameters:
- * {@link #JOB_PARAM_REQUEST_LIST}: A list of URLs to search for along with the partitions those searches should be performed on
- * {@link #JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search. If ommitted, {@link DaoConfig#getExpungeBatchSize} will be used.
- * {@link #JOB_PARAM_START_TIME}: The latest timestamp of resources to search for
- * <p>
- * The reader will return at most {@link #JOB_PARAM_BATCH_SIZE} pids every time it is called, or null
- * once no more matching resources are available. It returns the resources in reverse chronological order
- * and stores where it's at in the Spring Batch execution context with the key {@link #CURRENT_THRESHOLD_HIGH}
- * appended with "." and the index number of the url list item it has gotten up to. This is to permit
- * restarting jobs that use this reader so it can pick up where it left off.
- */
-public class ReverseCronologicalBatchResourcePidReader implements ItemReader<List<Long>>, ItemStream {
-    private static final Logger ourLog = LoggerFactory.getLogger(ReverseCronologicalBatchResourcePidReader.class);
-
-    public static final String JOB_PARAM_REQUEST_LIST = "url-list";
-    public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
-    public static final String JOB_PARAM_START_TIME = "start-time";
-
-    public static final String CURRENT_URL_INDEX = "current.url-index";
-    public static final String CURRENT_THRESHOLD_HIGH = "current.threshold-high";
-
-    @Autowired
-    private FhirContext myFhirContext;
-    @Autowired
-    private MatchUrlService myMatchUrlService;
+public class ReverseCronologicalBatchResourcePidReader extends BaseReverseCronologicalBatchPidReader {
     @Autowired
     private DaoRegistry myDaoRegistry;
     @Autowired
    private BatchResourceSearcher myBatchResourceSearcher;
 
-    private final BatchDateThresholdUpdater myBatchDateThresholdUpdater = new BatchDateThresholdUpdater();
-
-    private List<PartitionedUrl> myPartitionedUrls;
-    private Integer myBatchSize;
-    private final Map<Integer, Date> myThresholdHighByUrlIndex = new HashMap<>();
-    private final Map<Integer, Set<Long>> myAlreadyProcessedPidsWithHighDate = new HashMap<>();
-
-    private int myUrlIndex = 0;
-    private Date myStartTime;
-
-    @Autowired
-    public void setRequestListJson(@Value("#{jobParameters['" + JOB_PARAM_REQUEST_LIST + "']}") String theRequestListJson) {
-        RequestListJson requestListJson = RequestListJson.fromJson(theRequestListJson);
-        myPartitionedUrls = requestListJson.getPartitionedUrls();
-    }
-
-    @Autowired
-    public void setBatchSize(@Value("#{jobParameters['" + JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
-        myBatchSize = theBatchSize;
-    }
-
-    @Autowired
-    public void setStartTime(@Value("#{jobParameters['" + JOB_PARAM_START_TIME + "']}") Date theStartTime) {
-        myStartTime = theStartTime;
-    }
-
-    @Override
-    public List<Long> read() throws Exception {
-        while (myUrlIndex < myPartitionedUrls.size()) {
-            List<Long> nextBatch = getNextBatch();
-            if (nextBatch.isEmpty()) {
-                ++myUrlIndex;
-                continue;
-            }
-
-            return nextBatch;
-        }
-        return null;
-    }
-
-    private List<Long> getNextBatch() {
-        RequestPartitionId requestPartitionId = myPartitionedUrls.get(myUrlIndex).getRequestPartitionId();
-        ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(myPartitionedUrls.get(myUrlIndex).getUrl(), requestPartitionId);
+    @Override
+    protected Set<Long> getNextPidBatch(ResourceSearch resourceSearch) {
+        Set<Long> retval = new LinkedHashSet<>();
         addDateCountAndSortToSearch(resourceSearch);
 
         // Perform the search
-        IResultIterator resultIter = myBatchResourceSearcher.performSearch(resourceSearch, myBatchSize);
-        Set<Long> newPids = new LinkedHashSet<>();
-        Set<Long> alreadySeenPids = myAlreadyProcessedPidsWithHighDate.computeIfAbsent(myUrlIndex, i -> new HashSet<>());
+        Integer batchSize = getBatchSize();
+        IResultIterator resultIter = myBatchResourceSearcher.performSearch(resourceSearch, batchSize);
+        Set<Long> alreadySeenPids = getAlreadySeenPids();
 
         do {
-            List<Long> pids = resultIter.getNextResultBatch(myBatchSize).stream().map(ResourcePersistentId::getIdAsLong).collect(Collectors.toList());
-            newPids.addAll(pids);
-            newPids.removeAll(alreadySeenPids);
-        } while (newPids.size() < myBatchSize && resultIter.hasNext());
-
-        if (ourLog.isDebugEnabled()) {
-            ourLog.debug("Search for {}{} returned {} results", resourceSearch.getResourceName(), resourceSearch.getSearchParameterMap().toNormalizedQueryString(myFhirContext), newPids.size());
-            ourLog.debug("Results: {}", newPids);
-        }
-
-        setDateFromPidFunction(resourceSearch);
-
-        List<Long> retval = new ArrayList<>(newPids);
-        Date newThreshold = myBatchDateThresholdUpdater.updateThresholdAndCache(myThresholdHighByUrlIndex.get(myUrlIndex), myAlreadyProcessedPidsWithHighDate.get(myUrlIndex), retval);
-        myThresholdHighByUrlIndex.put(myUrlIndex, newThreshold);
+            List<Long> pids = resultIter.getNextResultBatch(batchSize).stream().map(ResourcePersistentId::getIdAsLong).collect(Collectors.toList());
+            retval.addAll(pids);
+            retval.removeAll(alreadySeenPids);
+        } while (retval.size() < batchSize && resultIter.hasNext());
 
         return retval;
     }
 
-    private void setDateFromPidFunction(ResourceSearch resourceSearch) {
+    @Override
+    protected void setDateFromPidFunction(ResourceSearch resourceSearch) {
         final IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceSearch.getResourceName());
 
-        myBatchDateThresholdUpdater.setDateFromPid(pid -> {
+        setDateExtractorFunction(pid -> {
             IBaseResource oldestResource = dao.readByPid(new ResourcePersistentId(pid));
             return oldestResource.getMeta().getLastUpdated();
         });
     }
-
-    private void addDateCountAndSortToSearch(ResourceSearch resourceSearch) {
-        SearchParameterMap map = resourceSearch.getSearchParameterMap();
-        map.setLastUpdated(new DateRangeParam().setUpperBoundInclusive(myThresholdHighByUrlIndex.get(myUrlIndex)));
-        map.setLoadSynchronousUpTo(myBatchSize);
-        map.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.DESC));
-    }
-
-    @Override
-    public void open(ExecutionContext executionContext) throws ItemStreamException {
-        if (executionContext.containsKey(CURRENT_URL_INDEX)) {
-            myUrlIndex = new Long(executionContext.getLong(CURRENT_URL_INDEX)).intValue();
-        }
-        for (int index = 0; index < myPartitionedUrls.size(); ++index) {
-            String key = highKey(index);
-            if (executionContext.containsKey(key)) {
-                myThresholdHighByUrlIndex.put(index, new Date(executionContext.getLong(key)));
-            } else {
-                myThresholdHighByUrlIndex.put(index, myStartTime);
-            }
-        }
-    }
-
-    private static String highKey(int theIndex) {
-        return CURRENT_THRESHOLD_HIGH + "." + theIndex;
-    }
-
-    @Override
-    public void update(ExecutionContext executionContext) throws ItemStreamException {
-        executionContext.putLong(CURRENT_URL_INDEX, myUrlIndex);
-        for (int index = 0; index < myPartitionedUrls.size(); ++index) {
-            Date date = myThresholdHighByUrlIndex.get(index);
-            if (date != null) {
-                executionContext.putLong(highKey(index), date.getTime());
-            }
-        }
-    }
-
-    @Override
-    public void close() throws ItemStreamException {
-    }
-
-    @Nonnull
-    public static JobParameters buildJobParameters(String theOperationName, Integer theBatchSize, RequestListJson theRequestListJson) {
-        Map<String, JobParameter> map = new HashMap<>();
-        map.put(MultiUrlJobParameterValidator.JOB_PARAM_OPERATION_NAME, new JobParameter(theOperationName));
-        map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST, new JobParameter(theRequestListJson.toJson()));
-        map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MultiUrlProcessorJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
-        if (theBatchSize != null) {
-            map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
-        }
-        JobParameters parameters = new JobParameters(map);
-        return parameters;
-    }
 }

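The net effect of this hunk is a template-method extraction: paging, thresholds, and restart state now live in `BaseReverseCronologicalBatchPidReader`, and a subclass supplies only two hooks. A minimal hypothetical subclass (not part of the commit) would look like:

```java
import java.util.Collections;
import java.util.Date;
import java.util.Set;

import ca.uhn.fhir.jpa.batch.reader.BaseReverseCronologicalBatchPidReader;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;

public class ExamplePidReader extends BaseReverseCronologicalBatchPidReader {
    @Override
    protected Set<Long> getNextPidBatch(ResourceSearch theResourceSearch) {
        // fetch up to getBatchSize() pids no newer than getCurrentHighThreshold()
        return Collections.emptySet();
    }

    @Override
    protected void setDateFromPidFunction(ResourceSearch theResourceSearch) {
        // tell the threshold updater how to map a pid back to its date
        setDateExtractorFunction(pid -> new Date());
    }
}
```
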
@@ -16,6 +16,8 @@ import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
 import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
 import ca.uhn.fhir.jpa.batch.config.NonPersistedBatchConfigurer;
 import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
+import ca.uhn.fhir.jpa.batch.mdm.MdmBatchJobSubmitterFactoryImpl;
+import ca.uhn.fhir.jpa.batch.mdm.MdmClearJobSubmitterImpl;
 import ca.uhn.fhir.jpa.batch.reader.BatchResourceSearcher;
 import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl;
 import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;

@@ -35,7 +37,6 @@ import ca.uhn.fhir.jpa.dao.LegacySearchBuilder;
 import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
 import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
 import ca.uhn.fhir.jpa.dao.TransactionProcessor;
-import ca.uhn.fhir.jpa.dao.expunge.DeleteExpungeService;
 import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService;
 import ca.uhn.fhir.jpa.dao.expunge.ExpungeOperation;
 import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;

@@ -136,6 +137,8 @@ import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
 import ca.uhn.fhir.jpa.util.MemoryCacheService;
 import ca.uhn.fhir.jpa.validation.JpaResourceLoader;
 import ca.uhn.fhir.jpa.validation.ValidationSettings;
+import ca.uhn.fhir.mdm.api.IMdmBatchJobSubmitterFactory;
+import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
 import ca.uhn.fhir.rest.api.server.storage.IReindexJobSubmitter;

@ -515,10 +518,20 @@ public abstract class BaseConfig {
|
|||
}
|
||||
|
||||
@Bean
|
||||
public MdmLinkExpandSvc myMdmLinkExpandSvc() {
|
||||
public MdmLinkExpandSvc mdmLinkExpandSvc() {
|
||||
return new MdmLinkExpandSvc();
|
||||
}
|
||||
|
||||
@Bean
|
||||
IMdmBatchJobSubmitterFactory mdmBatchJobSubmitterFactory() {
|
||||
return new MdmBatchJobSubmitterFactoryImpl();
|
||||
}
|
||||
|
||||
@Bean
|
||||
IMdmClearJobSubmitter mdmClearJobSubmitter() {
|
||||
return new MdmClearJobSubmitterImpl();
|
||||
}
|
||||
|
||||
@Bean
|
||||
@Lazy
|
||||
public TerminologyUploaderProvider terminologyUploaderProvider() {
|
||||
|
@ -891,11 +904,6 @@ public abstract class BaseConfig {
|
|||
return new DaoSearchParamSynchronizer();
|
||||
}
|
||||
|
||||
@Bean
|
||||
public DeleteExpungeService deleteExpungeService() {
|
||||
return new DeleteExpungeService();
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ResourceTableFKProvider resourceTableFKProvider() {
|
||||
return new ResourceTableFKProvider();
|
||||
|
|
|
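One detail worth calling out in the BaseConfig hunk: with @Bean, the default bean id is the method name, so renaming myMdmLinkExpandSvc() to mdmLinkExpandSvc() also renames the bean itself. A hedged illustration of the effect on by-name lookups (hypothetical helper, not HAPI code):

import org.springframework.context.ApplicationContext;

class BeanLookupExample {
	// After the rename, only the new id resolves; "myMdmLinkExpandSvc" would
	// throw NoSuchBeanDefinitionException unless an alias were declared.
	static Object lookup(ApplicationContext theContext) {
		return theContext.getBean("mdmLinkExpandSvc");
	}
}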
@@ -618,49 +618,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
		skipUpdatingTags |= myConfig.getTagStorageMode() == DaoConfig.TagStorageModeEnum.INLINE;

		if (!skipUpdatingTags) {
			Set<ResourceTag> allDefs = new HashSet<>();
			Set<ResourceTag> allTagsOld = getAllTagDefinitions(theEntity);

			if (theResource instanceof IResource) {
				extractTagsHapi(theTransactionDetails, (IResource) theResource, theEntity, allDefs);
			} else {
				extractTagsRi(theTransactionDetails, (IAnyResource) theResource, theEntity, allDefs);
			}

			RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
			if (def.isStandardType() == false) {
				String profile = def.getResourceProfile("");
				if (isNotBlank(profile)) {
					TagDefinition profileDef = getTagOrNull(theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null);

					ResourceTag tag = theEntity.addTag(profileDef);
					allDefs.add(tag);
					theEntity.setHasTags(true);
				}
			}

			Set<ResourceTag> allTagsNew = getAllTagDefinitions(theEntity);
			Set<TagDefinition> allDefsPresent = new HashSet<>();
			allTagsNew.forEach(tag -> {

				// Don't keep duplicate tags
				if (!allDefsPresent.add(tag.getTag())) {
					theEntity.getTags().remove(tag);
				}

				// Drop any tags that have been removed
				if (!allDefs.contains(tag)) {
					if (shouldDroppedTagBeRemovedOnUpdate(theRequest, tag)) {
						theEntity.getTags().remove(tag);
					}
				}

			});

			if (!allTagsOld.equals(allTagsNew)) {
				changed = true;
			}
			theEntity.setHasTags(!allTagsNew.isEmpty());
			changed |= updateTags(theTransactionDetails, theRequest, theResource, theEntity);
		}

		} else {

@@ -669,7 +627,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
			encoding = ResourceEncodingEnum.DEL;
		}

		if (thePerformIndexing && changed == false) {
		if (thePerformIndexing && !changed) {
			if (theEntity.getId() == null) {
				changed = true;
			} else if (myConfig.isMassIngestionMode()) {

@@ -701,6 +659,50 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
		return retVal;
	}

	private boolean updateTags(TransactionDetails theTransactionDetails, RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity) {
		Set<ResourceTag> allDefs = new HashSet<>();
		Set<ResourceTag> allTagsOld = getAllTagDefinitions(theEntity);

		if (theResource instanceof IResource) {
			extractTagsHapi(theTransactionDetails, (IResource) theResource, theEntity, allDefs);
		} else {
			extractTagsRi(theTransactionDetails, (IAnyResource) theResource, theEntity, allDefs);
		}

		RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
		if (def.isStandardType() == false) {
			String profile = def.getResourceProfile("");
			if (isNotBlank(profile)) {
				TagDefinition profileDef = getTagOrNull(theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null);

				ResourceTag tag = theEntity.addTag(profileDef);
				allDefs.add(tag);
				theEntity.setHasTags(true);
			}
		}

		Set<ResourceTag> allTagsNew = getAllTagDefinitions(theEntity);
		Set<TagDefinition> allDefsPresent = new HashSet<>();
		allTagsNew.forEach(tag -> {

			// Don't keep duplicate tags
			if (!allDefsPresent.add(tag.getTag())) {
				theEntity.getTags().remove(tag);
			}

			// Drop any tags that have been removed
			if (!allDefs.contains(tag)) {
				if (shouldDroppedTagBeRemovedOnUpdate(theRequest, tag)) {
					theEntity.getTags().remove(tag);
				}
			}

		});

		theEntity.setHasTags(!allTagsNew.isEmpty());
		return !allTagsOld.equals(allTagsNew);
	}

	@SuppressWarnings("unchecked")
	private <R extends IBaseResource> R populateResourceMetadataHapi(Class<R> theResourceType, IBaseResourceEntity theEntity, @Nullable Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IResource res, Long theVersion) {
		R retVal = (R) res;

@@ -1287,52 +1289,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
			postUpdate(entity, (T) theResource);
		}

		/*
		 * Create history entry
		 */
		if (theCreateNewHistoryEntry) {
			boolean versionedTags = getConfig().getTagStorageMode() == DaoConfig.TagStorageModeEnum.VERSIONED;
			final ResourceHistoryTable historyEntry = entity.toHistory(versionedTags);
			historyEntry.setEncoding(changed.getEncoding());
			historyEntry.setResource(changed.getResource());

			ourLog.debug("Saving history entry {}", historyEntry.getIdDt());
			myResourceHistoryTableDao.save(historyEntry);

			// Save resource source
			String source = null;
			String requestId = theRequest != null ? theRequest.getRequestId() : null;
			if (theResource != null) {
				if (myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R4)) {
					IBaseMetaType meta = theResource.getMeta();
					source = MetaUtil.getSource(myContext, meta);
				}
				if (myContext.getVersion().getVersion().equals(FhirVersionEnum.DSTU3)) {
					source = ((IBaseHasExtensions) theResource.getMeta())
						.getExtension()
						.stream()
						.filter(t -> HapiExtensions.EXT_META_SOURCE.equals(t.getUrl()))
						.filter(t -> t.getValue() instanceof IPrimitiveType)
						.map(t -> ((IPrimitiveType<?>) t.getValue()).getValueAsString())
						.findFirst()
						.orElse(null);
				}
			}
			boolean haveSource = isNotBlank(source) && myConfig.getStoreMetaSourceInformation().isStoreSourceUri();
			boolean haveRequestId = isNotBlank(requestId) && myConfig.getStoreMetaSourceInformation().isStoreRequestId();
			if (haveSource || haveRequestId) {
				ResourceHistoryProvenanceEntity provenance = new ResourceHistoryProvenanceEntity();
				provenance.setResourceHistoryTable(historyEntry);
				provenance.setResourceTable(entity);
				provenance.setPartitionId(entity.getPartitionId());
				if (haveRequestId) {
					provenance.setRequestId(left(requestId, Constants.REQUEST_ID_LENGTH));
				}
				if (haveSource) {
					provenance.setSourceUri(source);
				}
				myEntityManager.persist(provenance);
			}
			createHistoryEntry(theRequest, theResource, entity, changed);
		}

		/*

@@ -1414,6 +1372,51 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
		return entity;
	}

	private void createHistoryEntry(RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity, EncodedResource theChanged) {
		boolean versionedTags = getConfig().getTagStorageMode() == DaoConfig.TagStorageModeEnum.VERSIONED;
		final ResourceHistoryTable historyEntry = theEntity.toHistory(versionedTags);
		historyEntry.setEncoding(theChanged.getEncoding());
		historyEntry.setResource(theChanged.getResource());

		ourLog.debug("Saving history entry {}", historyEntry.getIdDt());
		myResourceHistoryTableDao.save(historyEntry);

		// Save resource source
		String source = null;
		String requestId = theRequest != null ? theRequest.getRequestId() : null;
		if (theResource != null) {
			if (myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R4)) {
				IBaseMetaType meta = theResource.getMeta();
				source = MetaUtil.getSource(myContext, meta);
			}
			if (myContext.getVersion().getVersion().equals(FhirVersionEnum.DSTU3)) {
				source = ((IBaseHasExtensions) theResource.getMeta())
					.getExtension()
					.stream()
					.filter(t -> HapiExtensions.EXT_META_SOURCE.equals(t.getUrl()))
					.filter(t -> t.getValue() instanceof IPrimitiveType)
					.map(t -> ((IPrimitiveType<?>) t.getValue()).getValueAsString())
					.findFirst()
					.orElse(null);
			}
		}
		boolean haveSource = isNotBlank(source) && myConfig.getStoreMetaSourceInformation().isStoreSourceUri();
		boolean haveRequestId = isNotBlank(requestId) && myConfig.getStoreMetaSourceInformation().isStoreRequestId();
		if (haveSource || haveRequestId) {
			ResourceHistoryProvenanceEntity provenance = new ResourceHistoryProvenanceEntity();
			provenance.setResourceHistoryTable(historyEntry);
			provenance.setResourceTable(theEntity);
			provenance.setPartitionId(theEntity.getPartitionId());
			if (haveRequestId) {
				provenance.setRequestId(left(requestId, Constants.REQUEST_ID_LENGTH));
			}
			if (haveSource) {
				provenance.setSourceUri(source);
			}
			myEntityManager.persist(provenance);
		}
	}

	private void validateIncomingResourceTypeMatchesExisting(IBaseResource theResource, ResourceTable entity) {
		String resourceType = myContext.getResourceType(theResource);
		if (!resourceType.equals(entity.getResourceType())) {
@@ -1409,7 +1409,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
			}
		}

		translateSearchParams(theParams);
		translateListSearchParams(theParams);

		notifySearchInterceptors(theParams, theRequest);

@@ -1434,7 +1434,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
		return retVal;
	}

	private void translateSearchParams(SearchParameterMap theParams) {
	private void translateListSearchParams(SearchParameterMap theParams) {
		Iterator<String> keyIterator = theParams.keySet().iterator();

		// Translate _list=42 to _has=List:item:_id=42
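The comment at the end of the hunk documents what the renamed method does: it rewrites the _list shorthand into the equivalent reverse-chaining _has expression before the query is built. Purely as an illustration of that string-level mapping (hypothetical class, not the HAPI implementation; the on-the-wire form of the parameter name is "_has:List:item:_id"):

class ListParamExample {
	// Illustrative only: "_list=42" asks for resources that List/42 items point at,
	// which _has can express directly.
	static String toHasParameter(String theListId) {
		return "_has:List:item:_id=" + theListId;
	}

	public static void main(String[] args) {
		// Prints: _has:List:item:_id=42
		System.out.println(toHasParameter("42"));
	}
}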
@@ -20,15 +20,16 @@ package ca.uhn.fhir.jpa.dao.data;
 * #L%
 */

import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.jpa.entity.MdmLink;
import org.springframework.beans.factory.annotation.Value;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;

import java.util.Date;
import java.util.List;

@Repository

@@ -70,4 +71,6 @@ public interface IMdmLinkDao extends JpaRepository<MdmLink, Long> {
	@Query("SELECT ml.myGoldenResourcePid as goldenPid, ml.mySourcePid as sourcePid FROM MdmLink ml WHERE ml.myGoldenResourcePid = :goldenPid and ml.myMatchResult = :matchResult")
	List<MdmPidTuple> expandPidsByGoldenResourcePidAndMatchResult(@Param("goldenPid") Long theSourcePid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum);

	@Query("SELECT ml.myId FROM MdmLink ml WHERE ml.myMdmSourceType = :resourceName AND ml.myCreated <= :highThreshold ORDER BY ml.myCreated DESC")
	List<Long> findPidByResourceNameAndThreshold(@Param("resourceName") String theResourceName, @Param("highThreshold") Date theHighThreshold, Pageable thePageable);
}
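The new findPidByResourceNameAndThreshold query leaves the row cap to the Pageable argument rather than encoding a limit in the JPQL. A sketch of how a caller might bound it (imports of the HAPI types elided; the page size is arbitrary):

import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;

import java.util.Date;
import java.util.List;

class MdmLinkQueryExample {
	// Fetches at most 1000 pids of the newest MdmLinks of the given source type
	// created on or before the threshold, matching the ORDER BY ... DESC above.
	static List<Long> newestPids(IMdmLinkDao theDao, String theResourceName, Date theThreshold) {
		Pageable firstThousand = PageRequest.of(0, 1000);
		return theDao.findPidByResourceNameAndThreshold(theResourceName, theThreshold, firstThousand);
	}
}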
@@ -1,201 +0,0 @@
package ca.uhn.fhir.jpa.dao.expunge;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.delete.job.DeleteExpungeProcessor;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.util.StopWatch;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Slice;
import org.springframework.stereotype.Service;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;

@Service
/**
 * DeleteExpunge is now performed using the {@link ca.uhn.fhir.jpa.delete.DeleteExpungeJobSubmitterImpl} Spring Batch job.
 */
@Deprecated
public class DeleteExpungeService {
	private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeService.class);

	@Autowired
	protected PlatformTransactionManager myPlatformTransactionManager;
	@PersistenceContext(type = PersistenceContextType.TRANSACTION)
	private EntityManager myEntityManager;
	@Autowired
	private FhirContext myFhirContext;
	@Autowired
	private ResourceTableFKProvider myResourceTableFKProvider;
	@Autowired
	private IResourceLinkDao myResourceLinkDao;
	@Autowired
	private IInterceptorBroadcaster myInterceptorBroadcaster;
	@Autowired
	private DaoConfig myDaoConfig;
	@Autowired
	private IdHelperService myIdHelper;

	public DeleteMethodOutcome expungeByResourcePids(String theUrl, String theResourceName, Slice<Long> thePids, RequestDetails theRequest) {
		StopWatch w = new StopWatch();
		if (thePids.isEmpty()) {
			return new DeleteMethodOutcome();
		}

		HookParams params = new HookParams()
			.add(RequestDetails.class, theRequest)
			.addIfMatchesType(ServletRequestDetails.class, theRequest)
			.add(String.class, theUrl);
		CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE, params);

		TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
		txTemplate.executeWithoutResult(t -> validateOkToDeleteAndExpunge(thePids));

		ourLog.info("Expunging all records linking to {} resources...", thePids.getNumber());
		AtomicLong expungedEntitiesCount = new AtomicLong();
		AtomicLong expungedResourcesCount = new AtomicLong();
		PartitionRunner partitionRunner = new PartitionRunner(DeleteExpungeProcessor.PROCESS_NAME, DeleteExpungeProcessor.THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount());
		partitionRunner.runInPartitionedThreads(thePids, pidChunk -> deleteInTransaction(theResourceName, pidChunk, expungedResourcesCount, expungedEntitiesCount, theRequest));
		ourLog.info("Expunged a total of {} records", expungedEntitiesCount);

		IBaseOperationOutcome oo;
		if (expungedResourcesCount.get() == 0) {
			oo = OperationOutcomeUtil.newInstance(myFhirContext);
			String message = myFhirContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "unableToDeleteNotFound", theUrl);
			String severity = "warning";
			String code = "not-found";
			OperationOutcomeUtil.addIssue(myFhirContext, oo, severity, message, null, code);
		} else {
			oo = OperationOutcomeUtil.newInstance(myFhirContext);
			String message = myFhirContext.getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "successfulDeletes", expungedResourcesCount.get(), w.getMillis());
			String severity = "information";
			String code = "informational";
			OperationOutcomeUtil.addIssue(myFhirContext, oo, severity, message, null, code);
		}

		DeleteMethodOutcome retval = new DeleteMethodOutcome();
		retval.setExpungedResourcesCount(expungedResourcesCount.get());
		retval.setExpungedEntitiesCount(expungedEntitiesCount.get());
		retval.setOperationOutcome(oo);
		return retval;
	}

	public void validateOkToDeleteAndExpunge(Slice<Long> theAllTargetPids) {
		if (!myDaoConfig.isEnforceReferentialIntegrityOnDelete()) {
			ourLog.info("Referential integrity on delete disabled. Skipping referential integrity check.");
			return;
		}

		List<ResourceLink> conflictResourceLinks = Collections.synchronizedList(new ArrayList<>());
		PartitionRunner partitionRunner = new PartitionRunner(DeleteExpungeProcessor.PROCESS_NAME, DeleteExpungeProcessor.THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount());
		partitionRunner.runInPartitionedThreads(theAllTargetPids, someTargetPids -> findResourceLinksWithTargetPidIn(theAllTargetPids.getContent(), someTargetPids, conflictResourceLinks));

		if (conflictResourceLinks.isEmpty()) {
			return;
		}

		ResourceLink firstConflict = conflictResourceLinks.get(0);

		//NB-GGG: We previously instantiated these ID values from firstConflict.getSourceResource().getIdDt(), but in a situation where we
		//actually had to run delete conflict checks in multiple partitions, the executor service starts its own sessions on a per thread basis, and by the time
		//we arrive here, those sessions are closed. So instead, we resolve them from PIDs, which are eagerly loaded.
		String sourceResourceId = myIdHelper.resourceIdFromPidOrThrowException(firstConflict.getSourceResourcePid()).toVersionless().getValue();
		String targetResourceId = myIdHelper.resourceIdFromPidOrThrowException(firstConflict.getTargetResourcePid()).toVersionless().getValue();

		throw new InvalidRequestException("DELETE with _expunge=true failed. Unable to delete " +
			targetResourceId + " because " + sourceResourceId + " refers to it via the path " + firstConflict.getSourcePath());
	}

	public void findResourceLinksWithTargetPidIn(List<Long> theAllTargetPids, List<Long> theSomeTargetPids, List<ResourceLink> theConflictResourceLinks) {
		// We only need to find one conflict, so if we found one already in an earlier partition run, we can skip the rest of the searches
		if (theConflictResourceLinks.isEmpty()) {
			List<ResourceLink> conflictResourceLinks = myResourceLinkDao.findWithTargetPidIn(theSomeTargetPids).stream()
				// Filter out resource links for which we are planning to delete the source.
				// theAllTargetPids contains a list of all the pids we are planning to delete. So we only want
				// to consider a link to be a conflict if the source of that link is not in theAllTargetPids.
				.filter(link -> !theAllTargetPids.contains(link.getSourceResourcePid()))
				.collect(Collectors.toList());

			// We do this in two steps to avoid lock contention on this synchronized list
			theConflictResourceLinks.addAll(conflictResourceLinks);
		}
	}

	private void deleteInTransaction(String theResourceName, List<Long> thePidChunk, AtomicLong theExpungedResourcesCount, AtomicLong theExpungedEntitiesCount, RequestDetails theRequest) {
		TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
		txTemplate.executeWithoutResult(t -> deleteAllRecordsLinkingTo(theResourceName, thePidChunk, theExpungedResourcesCount, theExpungedEntitiesCount, theRequest));
	}

	private void deleteAllRecordsLinkingTo(String theResourceName, List<Long> thePids, AtomicLong theExpungedResourcesCount, AtomicLong theExpungedEntitiesCount, RequestDetails theRequest) {
		HookParams params = new HookParams()
			.add(String.class, theResourceName)
			.add(List.class, thePids)
			.add(AtomicLong.class, theExpungedEntitiesCount)
			.add(RequestDetails.class, theRequest)
			.addIfMatchesType(ServletRequestDetails.class, theRequest);
		CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE_PID_LIST, params);

		String pidListString = thePids.toString().replace("[", "(").replace("]", ")");
		List<ResourceForeignKey> resourceForeignKeys = myResourceTableFKProvider.getResourceForeignKeys();

		for (ResourceForeignKey resourceForeignKey : resourceForeignKeys) {
			deleteRecordsByColumn(pidListString, resourceForeignKey, theExpungedEntitiesCount);
		}

		// Lastly we need to delete records from the resource table all of these other tables link to:
		ResourceForeignKey resourceTablePk = new ResourceForeignKey("HFJ_RESOURCE", "RES_ID");
		int entitiesDeleted = deleteRecordsByColumn(pidListString, resourceTablePk, theExpungedEntitiesCount);
		theExpungedResourcesCount.addAndGet(entitiesDeleted);
	}

	private int deleteRecordsByColumn(String thePidListString, ResourceForeignKey theResourceForeignKey, AtomicLong theExpungedEntitiesCount) {
		int entitesDeleted = myEntityManager.createNativeQuery("DELETE FROM " + theResourceForeignKey.table + " WHERE " + theResourceForeignKey.key + " IN " + thePidListString).executeUpdate();
		ourLog.info("Expunged {} records from {}", entitesDeleted, theResourceForeignKey.table);
		theExpungedEntitiesCount.addAndGet(entitesDeleted);
		return entitesDeleted;
	}
}
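The deleted service's own javadoc explains the replacement: delete-expunge now runs as the Spring Batch job submitted via DeleteExpungeJobSubmitterImpl. The core idea it implemented survives in the new job: partition the pid list so each native DELETE ... IN (...) statement stays bounded and each chunk runs in its own transaction. A minimal, generic sketch of that chunking idea (illustrative only, not the HAPI API):

import java.util.List;
import java.util.function.Consumer;

class ChunkedDeleteSketch {
	// Split the pid list into fixed-size chunks; the caller supplies the actual
	// delete logic and transaction handling. The chunk size is illustrative.
	static void deleteInChunks(List<Long> thePids, int theChunkSize, Consumer<List<Long>> theDeleter) {
		for (int i = 0; i < thePids.size(); i += theChunkSize) {
			theDeleter.accept(thePids.subList(i, Math.min(i + theChunkSize, thePids.size())));
		}
	}
}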
@@ -45,9 +45,12 @@ public class MdmLinkExpandSvc {
	@Autowired
	private IdHelperService myIdHelperService;

	public MdmLinkExpandSvc() {
	}

	/**
	 * Given a source resource, perform MDM expansion and return all the resource IDs of all resources that are
	 * MDM-Matched to this resource.
	 * Given a source resource, perform MDM expansion and return all the resource IDs of all resources that are
	 * MDM-Matched to this resource.
	 *
	 * @param theResource The resource to MDM-Expand
	 * @return A set of strings representing the FHIR IDs of the expanded resources.
@@ -21,8 +21,9 @@ package ca.uhn.fhir.jpa.delete.job;
 */

import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.batch.core.Job;

@@ -45,7 +46,7 @@ import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME;
 * Delete Expunge job.
 */
@Configuration
public class DeleteExpungeJobConfig extends MultiUrlProcessorJobConfig {
public class DeleteExpungeJobConfig {
	public static final String DELETE_EXPUNGE_URL_LIST_STEP_NAME = "delete-expunge-url-list-step";

	@Autowired

@@ -53,11 +54,23 @@ public class DeleteExpungeJobConfig extends MultiUrlProcessorJobConfig {
	@Autowired
	private JobBuilderFactory myJobBuilderFactory;

	@Autowired
	private MultiUrlJobParameterValidator myMultiUrlProcessorParameterValidator;

	@Autowired
	private PidReaderCounterListener myPidCountRecorderListener;

	@Autowired
	private ReverseCronologicalBatchResourcePidReader myReverseCronologicalBatchResourcePidReader;

	@Autowired
	private SqlExecutorWriter mySqlExecutorWriter;

	@Bean(name = DELETE_EXPUNGE_JOB_NAME)
	@Lazy
	public Job deleteExpungeJob(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
	public Job deleteExpungeJob() {
		return myJobBuilderFactory.get(DELETE_EXPUNGE_JOB_NAME)
			.validator(multiUrlProcessorParameterValidator(theMatchUrlService, theDaoRegistry))
			.validator(myMultiUrlProcessorParameterValidator)
			.start(deleteExpungeUrlListStep())
			.build();
	}

@@ -66,10 +79,10 @@ public class DeleteExpungeJobConfig extends MultiUrlProcessorJobConfig {
	public Step deleteExpungeUrlListStep() {
		return myStepBuilderFactory.get(DELETE_EXPUNGE_URL_LIST_STEP_NAME)
			.<List<Long>, List<String>>chunk(1)
			.reader(reverseCronologicalBatchResourcePidReader())
			.reader(myReverseCronologicalBatchResourcePidReader)
			.processor(deleteExpungeProcessor())
			.writer(sqlExecutorWriter())
			.listener(pidCountRecorderListener())
			.writer(mySqlExecutorWriter)
			.listener(myPidCountRecorderListener)
			.listener(deleteExpungePromotionListener())
			.build();
	}
@@ -51,6 +51,8 @@ public class ReindexEverythingJobConfig {
	private JobBuilderFactory myJobBuilderFactory;
	@Autowired
	private ReindexWriter myReindexWriter;
	@Autowired
	private PidReaderCounterListener myPidCountRecorderListener;

	@Bean(name = REINDEX_EVERYTHING_JOB_NAME)
	@Lazy

@@ -66,7 +68,7 @@ public class ReindexEverythingJobConfig {
			.<List<Long>, List<Long>>chunk(1)
			.reader(cronologicalBatchAllResourcePidReader())
			.writer(myReindexWriter)
			.listener(reindexEverythingPidCountRecorderListener())
			.listener(myPidCountRecorderListener)
			.listener(reindexEverythingPromotionListener())
			.build();
	}

@@ -77,12 +79,6 @@ public class ReindexEverythingJobConfig {
		return new CronologicalBatchAllResourcePidReader();
	}

	@Bean
	@StepScope
	public PidReaderCounterListener reindexEverythingPidCountRecorderListener() {
		return new PidReaderCounterListener();
	}

	@Bean
	public ExecutionContextPromotionListener reindexEverythingPromotionListener() {
		ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();
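The hunk above ends just as the ExecutionContextPromotionListener is being configured. For context, that listener copies named keys from a step's ExecutionContext up to the job's ExecutionContext so later steps can read them; a minimal sketch with a hypothetical key name:

import org.springframework.batch.core.listener.ExecutionContextPromotionListener;

class PromotionListenerSketch {
	static ExecutionContextPromotionListener promotionListener() {
		ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();
		// Keys listed here are promoted after the step completes; the name is illustrative
		listener.setKeys(new String[]{"resource.count"});
		return listener;
	}
}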
@@ -21,15 +21,17 @@ package ca.uhn.fhir.jpa.reindex.job;
 */

import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.listener.ExecutionContextPromotionListener;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@@ -44,7 +46,7 @@ import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.REINDEX_JOB_NAME;
 * Reindex job.
 */
@Configuration
public class ReindexJobConfig extends MultiUrlProcessorJobConfig {
public class ReindexJobConfig {
	public static final String REINDEX_URL_LIST_STEP_NAME = "reindex-url-list-step";

	@Autowired

@@ -54,11 +56,20 @@ public class ReindexJobConfig extends MultiUrlProcessorJobConfig {
	@Autowired
	private ReindexWriter myReindexWriter;

	@Autowired
	private MultiUrlJobParameterValidator myMultiUrlProcessorParameterValidator;

	@Autowired
	private PidReaderCounterListener myPidCountRecorderListener;

	@Autowired
	private ReverseCronologicalBatchResourcePidReader myReverseCronologicalBatchResourcePidReader;

	@Bean(name = REINDEX_JOB_NAME)
	@Lazy
	public Job reindexJob(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
	public Job reindexJob() {
		return myJobBuilderFactory.get(REINDEX_JOB_NAME)
			.validator(multiUrlProcessorParameterValidator(theMatchUrlService, theDaoRegistry))
			.validator(myMultiUrlProcessorParameterValidator)
			.start(reindexUrlListStep())
			.build();
	}

@@ -67,9 +78,9 @@ public class ReindexJobConfig extends MultiUrlProcessorJobConfig {
	public Step reindexUrlListStep() {
		return myStepBuilderFactory.get(REINDEX_URL_LIST_STEP_NAME)
			.<List<Long>, List<Long>>chunk(1)
			.reader(reverseCronologicalBatchResourcePidReader())
			.reader(myReverseCronologicalBatchResourcePidReader)
			.writer(myReindexWriter)
			.listener(pidCountRecorderListener())
			.listener(myPidCountRecorderListener)
			.listener(reindexPromotionListener())
			.build();
	}
@@ -729,10 +729,10 @@ public class QueryStack {
		return predicateBuilder.createPredicate(theRequest, theResourceName, theParamName, theList, theOperation, theRequestPartitionId);
	}

	private Condition createPredicateReferenceForContainedResource(@Nullable DbColumn theSourceJoinColumn,
																						String theResourceName, String theParamName, RuntimeSearchParam theSearchParam,
																						List<? extends IQueryParameterType> theList, SearchFilterParser.CompareOperation theOperation,
																						RequestDetails theRequest, RequestPartitionId theRequestPartitionId) {
	public Condition createPredicateReferenceForContainedResource(@Nullable DbColumn theSourceJoinColumn,
																					  String theResourceName, String theParamName, RuntimeSearchParam theSearchParam,
																					  List<? extends IQueryParameterType> theList, SearchFilterParser.CompareOperation theOperation,
																					  RequestDetails theRequest, RequestPartitionId theRequestPartitionId) {

		String spnamePrefix = theParamName;

@@ -794,31 +794,31 @@ public class QueryStack {

		switch (targetParamDefinition.getParamType()) {
			case DATE:
				containedCondition = createPredicateDate(null, theResourceName, spnamePrefix, targetParamDefinition,
				containedCondition = createPredicateDate(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
					orValues, theOperation, theRequestPartitionId);
				break;
			case NUMBER:
				containedCondition = createPredicateNumber(null, theResourceName, spnamePrefix, targetParamDefinition,
				containedCondition = createPredicateNumber(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
					orValues, theOperation, theRequestPartitionId);
				break;
			case QUANTITY:
				containedCondition = createPredicateQuantity(null, theResourceName, spnamePrefix, targetParamDefinition,
				containedCondition = createPredicateQuantity(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
					orValues, theOperation, theRequestPartitionId);
				break;
			case STRING:
				containedCondition = createPredicateString(null, theResourceName, spnamePrefix, targetParamDefinition,
				containedCondition = createPredicateString(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
					orValues, theOperation, theRequestPartitionId);
				break;
			case TOKEN:
				containedCondition = createPredicateToken(null, theResourceName, spnamePrefix, targetParamDefinition,
				containedCondition = createPredicateToken(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
					orValues, theOperation, theRequestPartitionId);
				break;
			case COMPOSITE:
				containedCondition = createPredicateComposite(null, theResourceName, spnamePrefix, targetParamDefinition,
				containedCondition = createPredicateComposite(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
					orValues, theRequestPartitionId);
				break;
			case URI:
				containedCondition = createPredicateUri(null, theResourceName, spnamePrefix, targetParamDefinition,
				containedCondition = createPredicateUri(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
					orValues, theOperation, theRequest, theRequestPartitionId);
				break;
			case HAS:

@@ -1044,8 +1044,14 @@ public class QueryStack {
		Expression subSelect = new Subquery(sql);

		join = mySqlBuilder.getOrCreateFirstPredicateBuilder();
		predicate = new InCondition(join.getResourceIdColumn(), subSelect).setNegate(true);

		if (theSourceJoinColumn == null) {
			predicate = new InCondition(join.getResourceIdColumn(), subSelect).setNegate(true);
		} else {
			//-- for the resource link, need join with target_resource_id
			predicate = new InCondition(theSourceJoinColumn, subSelect).setNegate(true);
		}

	} else {

		TokenPredicateBuilder tokenJoin = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.TOKEN, theSourceJoinColumn, paramName, () -> mySqlBuilder.addTokenPredicateBuilder(theSourceJoinColumn)).getResult();

@@ -1156,10 +1162,24 @@ public class QueryStack {
				break;
			case REFERENCE:
				for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
					if (theSearchContainedMode.equals(SearchContainedModeEnum.TRUE))
						andPredicates.add(createPredicateReferenceForContainedResource(theSourceJoinColumn, theResourceName, theParamName, nextParamDef, nextAnd, null, theRequest, theRequestPartitionId));
					else
					if (theSearchContainedMode.equals(SearchContainedModeEnum.TRUE)) {
						// TODO: The _contained parameter is not intended to control search chain interpretation like this.
						// See SMILE-2898 for details.
						// For now, leave the incorrect implementation alone, just in case someone is relying on it,
						// until the complete fix is available.
						andPredicates.add(createPredicateReferenceForContainedResource(null, theResourceName, theParamName, nextParamDef, nextAnd, null, theRequest, theRequestPartitionId));
					} else if (isEligibleForContainedResourceSearch(nextAnd)) {
						// TODO for now, restrict contained reference traversal to the last reference in the chain
						// We don't seem to be indexing the outbound references of a contained resource, so we can't
						// include them in search chains.
						// It would be nice to eventually relax this constraint, but no client seems to be asking for it.
						andPredicates.add(toOrPredicate(
							createPredicateReference(theSourceJoinColumn, theResourceName, theParamName, nextAnd, null, theRequest, theRequestPartitionId),
							createPredicateReferenceForContainedResource(theSourceJoinColumn, theResourceName, theParamName, nextParamDef, nextAnd, null, theRequest, theRequestPartitionId)
						));
					} else {
						andPredicates.add(createPredicateReference(theSourceJoinColumn, theResourceName, theParamName, nextAnd, null, theRequest, theRequestPartitionId));
					}
				}
				break;
			case STRING:

@@ -1237,6 +1257,14 @@ public class QueryStack {
		return toAndPredicate(andPredicates);
	}

	private boolean isEligibleForContainedResourceSearch(List<? extends IQueryParameterType> nextAnd) {
		return myModelConfig.isIndexOnContainedResources() &&
			nextAnd.stream()
				.filter(t -> t instanceof ReferenceParam)
				.map(t -> (ReferenceParam) t)
				.noneMatch(t -> t.getChain().contains("."));
	}

	public void addPredicateCompositeUnique(String theIndexString, RequestPartitionId theRequestPartitionId) {
		ComboUniqueSearchParameterPredicateBuilder predicateBuilder = mySqlBuilder.addComboUniquePredicateBuilder();
		Condition predicate = predicateBuilder.createPredicateIndexString(theRequestPartitionId, theIndexString);
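The eligibility check just above admits the contained-resource branch only for single-hop reference chains, since, per the TODO comments, outbound references of contained resources are not indexed. In plain form (standalone restatement, not HAPI code):

import java.util.List;

class ContainedChainEligibilitySketch {
	// A chain qualifies only when it has no further hop, i.e. no '.' remains.
	static boolean isSingleHop(List<String> theChains) {
		return theChains.stream().noneMatch(chain -> chain.contains("."));
	}

	public static void main(String[] args) {
		System.out.println(isSingleHop(List.of("name")));              // true
		System.out.println(isSingleHop(List.of("organization.name"))); // false
	}
}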
@@ -221,11 +221,15 @@ public class SearchBuilder implements ISearchBuilder {

		SearchContainedModeEnum searchContainedMode = theParams.getSearchContainedMode();

		// Handle _id last, since it can typically be tacked onto a different parameter
		List<String> paramNames = myParams.keySet().stream().filter(t -> !t.equals(IAnyResource.SP_RES_ID)).collect(Collectors.toList());
		// Handle _id and _tag last, since they can typically be tacked onto a different parameter
		List<String> paramNames = myParams.keySet().stream().filter(t -> !t.equals(IAnyResource.SP_RES_ID))
			.filter(t -> !t.equals(Constants.PARAM_TAG)).collect(Collectors.toList());
		if (myParams.containsKey(IAnyResource.SP_RES_ID)) {
			paramNames.add(IAnyResource.SP_RES_ID);
		}
		if (myParams.containsKey(Constants.PARAM_TAG)) {
			paramNames.add(Constants.PARAM_TAG);
		}

		// Handle each parameter
		for (String nextParamName : paramNames) {

@@ -373,7 +377,7 @@ public class SearchBuilder implements ISearchBuilder {
		SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(myContext, myDaoConfig.getModelConfig(), myPartitionSettings, myRequestPartitionId, sqlBuilderResourceName, mySqlBuilderFactory, myDialectProvider, theCount);
		QueryStack queryStack3 = new QueryStack(theParams, myDaoConfig, myDaoConfig.getModelConfig(), myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);

		if (theParams.keySet().size() > 1 || theParams.getSort() != null || theParams.keySet().contains(Constants.PARAM_HAS)) {
		if (theParams.keySet().size() > 1 || theParams.getSort() != null || theParams.keySet().contains(Constants.PARAM_HAS) || isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) {
			List<RuntimeSearchParam> activeComboParams = mySearchParamRegistry.getActiveComboSearchParams(myResourceName, theParams.keySet());
			if (activeComboParams.isEmpty()) {
				sqlBuilder.setNeedResourceTableRoot(true);

@@ -483,6 +487,13 @@ public class SearchBuilder implements ISearchBuilder {
		return Optional.of(executor);
	}

	private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) {
		return myModelConfig.isIndexOnContainedResources() && theParams.values().stream()
			.flatMap(Collection::stream)
			.flatMap(Collection::stream)
			.anyMatch(t -> t instanceof ReferenceParam);
	}

	private List<Long> normalizeIdListForLastNInClause(List<Long> lastnResourceIds) {
		/*
			The following is a workaround to a known issue involving Hibernate. If queries are used with "in" clauses with large and varying
@@ -38,20 +38,18 @@ import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderReference;
import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
import ca.uhn.fhir.jpa.search.builder.QueryStack;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.search.builder.QueryStack;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams;
import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.param.CompositeParam;

@@ -62,9 +60,12 @@ import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.SpecialParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import com.google.common.collect.Lists;
import com.healthmarketscience.sqlbuilder.BinaryCondition;
import com.healthmarketscience.sqlbuilder.ComboCondition;

@@ -338,16 +339,28 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
		boolean foundChainMatch = false;
		List<String> candidateTargetTypes = new ArrayList<>();
		List<Condition> orPredicates = new ArrayList<>();
		boolean paramInverted = false;
		QueryStack childQueryFactory = myQueryStack.newChildQueryFactoryWithFullBuilderReuse();
		for (String nextType : resourceTypes) {
			String chain = theReferenceParam.getChain();

			String remainingChain = null;
			int chainDotIndex = chain.indexOf('.');
			if (chainDotIndex != -1) {
				remainingChain = chain.substring(chainDotIndex + 1);
				chain = chain.substring(0, chainDotIndex);
			}
		String chain = theReferenceParam.getChain();

		String remainingChain = null;
		int chainDotIndex = chain.indexOf('.');
		if (chainDotIndex != -1) {
			remainingChain = chain.substring(chainDotIndex + 1);
			chain = chain.substring(0, chainDotIndex);
		}

		int qualifierIndex = chain.indexOf(':');
		String qualifier = null;
		if (qualifierIndex != -1) {
			qualifier = chain.substring(qualifierIndex);
			chain = chain.substring(0, qualifierIndex);
		}

		boolean isMeta = ResourceMetaParams.RESOURCE_META_PARAMS.containsKey(chain);

		for (String nextType : resourceTypes) {

			RuntimeResourceDefinition typeDef = getFhirContext().getResourceDefinition(nextType);
			String subResourceName = typeDef.getName();

@@ -358,14 +371,6 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
				continue;
			}

			int qualifierIndex = chain.indexOf(':');
			String qualifier = null;
			if (qualifierIndex != -1) {
				qualifier = chain.substring(qualifierIndex);
				chain = chain.substring(0, qualifierIndex);
			}

			boolean isMeta = ResourceMetaParams.RESOURCE_META_PARAMS.containsKey(chain);
			RuntimeSearchParam param = null;
			if (!isMeta) {
				param = mySearchParamRegistry.getActiveSearchParam(nextType, chain);

@@ -383,6 +388,13 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
				if (chainValue == null) {
					continue;
				}

				// For the token param, if it's a :not modifier, need switch OR to AND
				if (!paramInverted && chainValue instanceof TokenParam) {
					if (((TokenParam) chainValue).getModifier() == TokenParamModifier.NOT) {
						paramInverted = true;
					}
				}
				foundChainMatch = true;
				orValues.add(chainValue);
			}

@@ -399,7 +411,6 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
			andPredicates.add(childQueryFactory.searchForIdsWithAndOr(myColumnTargetResourceId, subResourceName, chain, chainParamValues, theRequest, theRequestPartitionId, SearchContainedModeEnum.FALSE));

			orPredicates.add(toAndPredicate(andPredicates));

		}

		if (candidateTargetTypes.isEmpty()) {

@@ -410,10 +421,17 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
			warnAboutPerformanceOnUnqualifiedResources(theParamName, theRequest, candidateTargetTypes);
		}

		Condition multiTypeOrPredicate = toOrPredicate(orPredicates);
		// If :not modifier for a token, switch OR with AND in the multi-type case
		Condition multiTypePredicate;
		if (paramInverted) {
			multiTypePredicate = toAndPredicate(orPredicates);
		} else {
			multiTypePredicate = toOrPredicate(orPredicates);
		}

		List<String> pathsToMatch = createResourceLinkPaths(theResourceName, theParamName);
		Condition pathPredicate = createPredicateSourcePaths(pathsToMatch);
		return toAndPredicate(pathPredicate, multiTypeOrPredicate);
		return toAndPredicate(pathPredicate, multiTypePredicate);
	}

	@Nonnull
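Two things happen in the hunks above: chain parsing is hoisted out of the per-target-type loop, and a paramInverted flag is tracked so that when a chained token value carries the :not modifier, the per-type predicates are combined with AND instead of OR (ORing negations across candidate types would make the negation vacuous). The parsing itself splits one chain hop at a time; a standalone restatement (not HAPI code):

class ChainParseSketch {
	public static void main(String[] args) {
		String chain = "organization:Organization.name";

		String remainingChain = null;
		int chainDotIndex = chain.indexOf('.');
		if (chainDotIndex != -1) {
			remainingChain = chain.substring(chainDotIndex + 1);
			chain = chain.substring(0, chainDotIndex);
		}

		String qualifier = null;
		int qualifierIndex = chain.indexOf(':');
		if (qualifierIndex != -1) {
			qualifier = chain.substring(qualifierIndex);
			chain = chain.substring(0, qualifierIndex);
		}

		// Prints: organization / :Organization / name
		System.out.println(chain + " / " + qualifier + " / " + remainingChain);
	}
}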
@@ -549,7 +549,7 @@ public class SearchQueryBuilder {
	}

	public ComboCondition addPredicateLastUpdated(DateRangeParam theDateRange) {
		ResourceTablePredicateBuilder resourceTableRoot = getOrCreateResourceTablePredicateBuilder();
		ResourceTablePredicateBuilder resourceTableRoot = getOrCreateResourceTablePredicateBuilder(false);

		List<Condition> conditions = new ArrayList<>(2);
		if (theDateRange.getLowerBoundAsInstant() != null) {
@@ -172,6 +172,10 @@ public class MemoryCacheService {
		return (Cache<K, T>) myCaches.get(theCache);
	}

	public long getEstimatedSize(CacheEnum theCache) {
		return getCache(theCache).estimatedSize();
	}

	public enum CacheEnum {

		TAG_DEFINITION(TagDefinitionCacheKey.class),
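The new getEstimatedSize() surfaces the cache's estimatedSize(), which in Caffeine (the Cache type the surrounding code appears to use) is deliberately approximate: pending eviction and maintenance work mean it need not equal the exact entry count. A tiny usage sketch:

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;

class CacheSizeSketch {
	public static void main(String[] args) {
		Cache<String, String> cache = Caffeine.newBuilder().maximumSize(100).build();
		cache.put("a", "1");
		// Approximate by design; fine for metrics, not for exact accounting
		System.out.println(cache.estimatedSize());
	}
}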
@@ -126,5 +126,8 @@ public class SqlQuery {

	}

	@Override
	public String toString() {
		return getSql(true, true);
	}
}
@@ -35,6 +35,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.InstantType;

import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.Embedded;
import javax.persistence.EmbeddedId;
import javax.persistence.Entity;

@@ -96,7 +97,7 @@ public class TestUtil {
			.collect(Collectors.toSet());
		}

		ImmutableSet<ClassInfo> classes = ClassPath.from(TestUtil.class.getClassLoader()).getTopLevelClasses(packageName);
		ImmutableSet<ClassInfo> classes = ClassPath.from(TestUtil.class.getClassLoader()).getTopLevelClassesRecursive(packageName);
		Set<String> names = new HashSet<String>();

		if (classes.size() <= 1) {

@@ -106,7 +107,8 @@ public class TestUtil {
		for (ClassInfo classInfo : classes) {
			Class<?> clazz = Class.forName(classInfo.getName());
			Entity entity = clazz.getAnnotation(Entity.class);
			if (entity == null) {
			Embeddable embeddable = clazz.getAnnotation(Embeddable.class);
			if (entity == null && embeddable == null) {
				continue;
			}
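Two fixes land in the TestUtil hunks: the scan switches to getTopLevelClassesRecursive so subpackages are visited too, and the guard now also accepts @Embeddable classes instead of skipping everything lacking @Entity. A small sketch of the recursive scan with Guava's ClassPath (the package name is illustrative):

import com.google.common.reflect.ClassPath;
import com.google.common.reflect.ClassPath.ClassInfo;

import java.io.IOException;

class ClassScanSketch {
	public static void main(String[] args) throws IOException {
		ClassPath classPath = ClassPath.from(ClassScanSketch.class.getClassLoader());
		// Unlike getTopLevelClasses, this also walks nested subpackages
		for (ClassInfo info : classPath.getTopLevelClassesRecursive("com.google.common.reflect")) {
			System.out.println(info.getName());
		}
	}
}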
@@ -331,7 +331,8 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
			BatchJobsConfig.BULK_EXPORT_JOB_NAME,
			BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME,
			BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME,
			BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME
			BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME,
			BatchJobsConfig.MDM_CLEAR_JOB_NAME
		);
	}
@@ -63,6 +63,7 @@ import org.hl7.fhir.dstu3.model.AllergyIntolerance;
import org.hl7.fhir.dstu3.model.Appointment;
import org.hl7.fhir.dstu3.model.AuditEvent;
import org.hl7.fhir.dstu3.model.Binary;
import org.hl7.fhir.dstu3.model.BodySite;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.CarePlan;
import org.hl7.fhir.dstu3.model.CodeSystem;

@@ -95,6 +96,7 @@ import org.hl7.fhir.dstu3.model.Organization;
import org.hl7.fhir.dstu3.model.Patient;
import org.hl7.fhir.dstu3.model.Practitioner;
import org.hl7.fhir.dstu3.model.PractitionerRole;
import org.hl7.fhir.dstu3.model.Procedure;
import org.hl7.fhir.dstu3.model.ProcedureRequest;
import org.hl7.fhir.dstu3.model.Questionnaire;
import org.hl7.fhir.dstu3.model.QuestionnaireResponse;

@@ -317,6 +319,12 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
	@Qualifier("myTaskDaoDstu3")
	protected IFhirResourceDao<Task> myTaskDao;
	@Autowired
	@Qualifier("myBodySiteDaoDstu3")
	protected IFhirResourceDao<BodySite> myBodySiteDao;
	@Autowired
	@Qualifier("myProcedureDaoDstu3")
	protected IFhirResourceDao<Procedure> myProcedureDao;
	@Autowired
	protected ITermConceptDao myTermConceptDao;
	@Autowired
	protected ITermCodeSystemDao myTermCodeSystemDao;
@ -0,0 +1,388 @@
|
|||
package ca.uhn.fhir.jpa.dao.r4;
|
||||
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
||||
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
||||
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.parser.StrictErrorHandler;
|
||||
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.IdType;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.Organization;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.hl7.fhir.r4.model.StringType;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.junit.jupiter.api.Assertions.assertEquals;

public class ChainingR4SearchTest extends BaseJpaR4Test {

    @Autowired
    MatchUrlService myMatchUrlService;

    @AfterEach
    public void after() throws Exception {

        myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete());
        myDaoConfig.setAllowExternalReferences(new DaoConfig().isAllowExternalReferences());
        myDaoConfig.setReuseCachedSearchResultsForMillis(new DaoConfig().getReuseCachedSearchResultsForMillis());
        myDaoConfig.setCountSearchResultsUpTo(new DaoConfig().getCountSearchResultsUpTo());
        myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
        myDaoConfig.setAllowContainsSearches(new DaoConfig().isAllowContainsSearches());
        myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());

        myModelConfig.setIndexOnContainedResources(false);
        myModelConfig.setIndexOnContainedResources(new ModelConfig().isIndexOnContainedResources());
    }

    @BeforeEach
    public void before() throws Exception {
        myFhirCtx.setParserErrorHandler(new StrictErrorHandler());

        myDaoConfig.setAllowMultipleDelete(true);
        myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
        myModelConfig.setIndexOnContainedResources(true);
        myDaoConfig.setReuseCachedSearchResultsForMillis(null);
    }

    @Test
    public void testShouldResolveATwoLinkChainWithStandAloneResources() throws Exception {

        // setup
        IIdType oid1;

        {
            Patient p = new Patient();
            p.setId(IdType.newRandomUuid());
            p.addName().setFamily("Smith").addGiven("John");
            myPatientDao.create(p, mySrd);

            Observation obs = new Observation();
            obs.getCode().setText("Observation 1");
            obs.getSubject().setReference(p.getId());

            oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
        }

        String url = "/Observation?subject.name=Smith";

        // execute
        List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);

        // validate
        assertEquals(1L, oids.size());
        assertThat(oids, contains(oid1.getIdPart()));
    }

    @Test
    public void testShouldResolveATwoLinkChainWithAContainedResource() throws Exception {
        // setup
        IIdType oid1;

        {
            Patient p = new Patient();
            p.setId("pat");
            p.addName().setFamily("Smith").addGiven("John");

            Observation obs = new Observation();
            obs.getContained().add(p);
            obs.getCode().setText("Observation 1");
            obs.setValue(new StringType("Test"));
            obs.getSubject().setReference("#pat");

            oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
        }

        String url = "/Observation?subject.name=Smith";

        // execute
        List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);

        // validate
        assertEquals(1L, oids.size());
        assertThat(oids, contains(oid1.getIdPart()));
    }

    @Test
    public void testShouldResolveAThreeLinkChainWhereAllResourcesStandAlone() throws Exception {

        // setup
        IIdType oid1;

        {
            Organization org = new Organization();
            org.setId(IdType.newRandomUuid());
            org.setName("HealthCo");
            myOrganizationDao.create(org, mySrd);

            Patient p = new Patient();
            p.setId(IdType.newRandomUuid());
            p.addName().setFamily("Smith").addGiven("John");
            p.getManagingOrganization().setReference(org.getId());
            myPatientDao.create(p, mySrd);

            Observation obs = new Observation();
            obs.getCode().setText("Observation 1");
            obs.getSubject().setReference(p.getId());

            oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
        }

        String url = "/Observation?subject.organization.name=HealthCo";

        // execute
        List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);

        // validate
        assertEquals(1L, oids.size());
        assertThat(oids, contains(oid1.getIdPart()));
    }

    @Test
    public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheEndOfTheChain() throws Exception {
        // This is the case that is most relevant to SMILE-2899

        // setup
        IIdType oid1;

        {
            Organization org = new Organization();
            org.setId("org");
            org.setName("HealthCo");

            Patient p = new Patient();
            p.setId(IdType.newRandomUuid());
            p.getContained().add(org);
            p.addName().setFamily("Smith").addGiven("John");
            p.getManagingOrganization().setReference("#org");
            myPatientDao.create(p, mySrd);

            Observation obs = new Observation();
            obs.getCode().setText("Observation 1");
            obs.getSubject().setReference(p.getId());

            oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
        }

        String url = "/Observation?subject.organization.name=HealthCo";

        // execute
        List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);

        // validate
        assertEquals(1L, oids.size());
        assertThat(oids, contains(oid1.getIdPart()));
    }

    @Test
    @Disabled
    public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheBeginningOfTheChain() throws Exception {
        // We do not currently support this case - we may not be indexing the references of contained resources

        // setup
        IIdType oid1;

        {
            Organization org = new Organization();
            org.setId(IdType.newRandomUuid());
            org.setName("HealthCo");
            myOrganizationDao.create(org, mySrd);

            Patient p = new Patient();
            p.setId("pat");
            p.addName().setFamily("Smith").addGiven("John");
            p.getManagingOrganization().setReference(org.getId());

            Observation obs = new Observation();
            obs.getContained().add(p);
            obs.getCode().setText("Observation 1");
            obs.getSubject().setReference("#pat");

            oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
        }

        String url = "/Observation?subject.organization.name=HealthCo";

        // execute
        List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);

        // validate
        assertEquals(1L, oids.size());
        assertThat(oids, contains(oid1.getIdPart()));
    }

    @Test
    public void testShouldResolveAThreeLinkChainWithQualifiersWhereAllResourcesStandAlone() throws Exception {

        // setup
        IIdType oid1;

        {
            Organization org = new Organization();
            org.setId(IdType.newRandomUuid());
            org.setName("HealthCo");
            myOrganizationDao.create(org, mySrd);

            Patient p = new Patient();
            p.setId(IdType.newRandomUuid());
            p.addName().setFamily("Smith").addGiven("John");
            p.getManagingOrganization().setReference(org.getId());
            myPatientDao.create(p, mySrd);

            Observation obs = new Observation();
            obs.getCode().setText("Observation 1");
            obs.getSubject().setReference(p.getId());

            oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
        }

        String url = "/Observation?subject:Patient.organization:Organization.name=HealthCo";

        // execute
        List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);

        // validate
        assertEquals(1L, oids.size());
        assertThat(oids, contains(oid1.getIdPart()));
    }

    @Test
    public void testShouldResolveAThreeLinkChainWithQualifiersWithAContainedResourceAtTheEndOfTheChain() throws Exception {
        // This is the case that is most relevant to SMILE-2899

        // setup
        IIdType oid1;

        {
            Organization org = new Organization();
            org.setId("org");
            org.setName("HealthCo");

            Patient p = new Patient();
            p.setId(IdType.newRandomUuid());
            p.getContained().add(org);
            p.addName().setFamily("Smith").addGiven("John");
            p.getManagingOrganization().setReference("#org");
            myPatientDao.create(p, mySrd);

            Observation obs = new Observation();
            obs.getCode().setText("Observation 1");
            obs.getSubject().setReference(p.getId());

            oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
        }

        String url = "/Observation?subject:Patient.organization:Organization.name=HealthCo";

        // execute
        List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);

        // validate
        assertEquals(1L, oids.size());
        assertThat(oids, contains(oid1.getIdPart()));
    }

    @Test
    public void testShouldResolveAFourLinkChainWhereAllResourcesStandAlone() throws Exception {

        // setup
        IIdType oid1;

        {
            Organization org = new Organization();
            org.setId(IdType.newRandomUuid());
            org.setName("HealthCo");
            myOrganizationDao.create(org, mySrd);

            Organization partOfOrg = new Organization();
            partOfOrg.setId(IdType.newRandomUuid());
            partOfOrg.getPartOf().setReference(org.getId());
            myOrganizationDao.create(partOfOrg, mySrd);

            Patient p = new Patient();
            p.setId(IdType.newRandomUuid());
            p.addName().setFamily("Smith").addGiven("John");
            p.getManagingOrganization().setReference(partOfOrg.getId());
            myPatientDao.create(p, mySrd);

            Observation obs = new Observation();
            obs.getCode().setText("Observation 1");
            obs.getSubject().setReference(p.getId());

            oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
        }

        String url = "/Observation?subject.organization.partof.name=HealthCo";

        // execute
        List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);

        // validate
        assertEquals(1L, oids.size());
        assertThat(oids, contains(oid1.getIdPart()));
    }

    @Test
    public void testShouldResolveAFourLinkChainWhereTheLastReferenceIsContained() throws Exception {

        // setup
        IIdType oid1;

        {
            Organization org = new Organization();
            org.setId("parent");
            org.setName("HealthCo");

            Organization partOfOrg = new Organization();
            partOfOrg.setId(IdType.newRandomUuid());
            partOfOrg.getContained().add(org);
            partOfOrg.getPartOf().setReference("#parent");
            myOrganizationDao.create(partOfOrg, mySrd);

            Patient p = new Patient();
            p.setId(IdType.newRandomUuid());
            p.addName().setFamily("Smith").addGiven("John");
            p.getManagingOrganization().setReference(partOfOrg.getId());
            myPatientDao.create(p, mySrd);

            Observation obs = new Observation();
            obs.getCode().setText("Observation 1");
            obs.getSubject().setReference(p.getId());

            oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
        }

        String url = "/Observation?subject.organization.partof.name=HealthCo";

        // execute
        List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);

        // validate
        assertEquals(1L, oids.size());
        assertThat(oids, contains(oid1.getIdPart()));
    }

    private List<String> searchAndReturnUnqualifiedVersionlessIdValues(String theUrl) throws IOException {
        List<String> ids = new ArrayList<>();

        ResourceSearch search = myMatchUrlService.getResourceSearch(theUrl);
        SearchParameterMap map = search.getSearchParameterMap();
        map.setLoadSynchronous(true);
        IBundleProvider result = myObservationDao.search(map);
        return result.getAllResourceIds();
    }

}

@@ -1,10 +1,10 @@
package ca.uhn.fhir.jpa.dao.r4;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;

import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Address;
import org.hl7.fhir.r4.model.Address.AddressUse;

@@ -26,15 +26,13 @@ import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;

import java.util.stream.Collector;
import java.util.stream.Collectors;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;

public class FhirResourceDaoR4ContainedTest extends BaseJpaR4Test {
    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4ContainedTest.class);

@@ -80,7 +78,6 @@ public class FhirResourceDaoR4ContainedTest extends BaseJpaR4Test {
        map = new SearchParameterMap();
        map.add("subject", new ReferenceParam("name", "Smith"));
        map.setSearchContainedMode(SearchContainedModeEnum.TRUE);

        assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map)), containsInAnyOrder(toValues(id)));
    }

@@ -110,14 +107,16 @@ public class FhirResourceDaoR4ContainedTest extends BaseJpaR4Test {
                .getSingleResult();
            assertEquals(1L, i.longValue());
        });

        SearchParameterMap map;

        map = new SearchParameterMap();
        map.add("subject", new ReferenceParam("name", "Smith"));
        map.setSearchContainedMode(SearchContainedModeEnum.TRUE);

        map.setLoadSynchronous(true);

        assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map)), containsInAnyOrder(toValues(id)));

    }

@@ -16,23 +16,35 @@ import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.SummaryEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.BodyStructure;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.Device;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.Extension;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Procedure;
import org.hl7.fhir.r4.model.Provenance;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.UriType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -906,6 +918,192 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {

    }

    /**
     * Make sure that if we're performing a query where the ONLY param is _lastUpdated,
     * we include a selector for the resource type
     */
    @Test
    public void testSearchByLastUpdatedOnly() {
        Patient p = new Patient();
        p.setId("B");
        myPatientDao.update(p);

        Observation obs = new Observation();
        obs.setId("A");
        obs.setSubject(new Reference("Patient/B"));
        obs.setStatus(Observation.ObservationStatus.FINAL);
        myObservationDao.update(obs);

        // Search using only a _lastUpdated param
        {
            SearchParameterMap map = SearchParameterMap.newSynchronous();
            map.setLastUpdated(new DateRangeParam("ge2021-01-01", null));
            myCaptureQueriesListener.clear();
            IBundleProvider outcome = myObservationDao.search(map, new SystemRequestDetails());
            assertEquals(1, outcome.getResources(0, 999).size());
            myCaptureQueriesListener.logSelectQueriesForCurrentThread();

            String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
            assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), "select t0.res_id from hfj_resource t0"), selectQuery);
            assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), "t0.res_type = 'observation'"), selectQuery);
            assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), "t0.res_deleted_at is null"), selectQuery);
        }
    }
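
    // Illustrative only (the test above asserts fragments, not this exact statement):
    // with _lastUpdated as the sole parameter, the generated SQL is expected to be
    // shaped roughly like
    //   select t0.res_id from hfj_resource t0
    //   where t0.res_type = 'observation'
    //     and t0.res_deleted_at is null
    //     and t0.res_updated >= {lower bound}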

    @Test
    public void testSearchOnUnderscoreParams_AvoidHFJResourceJoins() {
        // This Issue: https://github.com/hapifhir/hapi-fhir/issues/2942
        // See this PR for a similar type of Fix: https://github.com/hapifhir/hapi-fhir/pull/2909
        // SearchParam - focalAccess
        SearchParameter searchParameter1 = new SearchParameter();
        searchParameter1.addBase("BodySite").addBase("Procedure");
        searchParameter1.setCode("focalAccess");
        searchParameter1.setType(Enumerations.SearchParamType.REFERENCE);
        searchParameter1.setExpression("Procedure.extension('Procedure#focalAccess')");
        searchParameter1.setXpathUsage(SearchParameter.XPathUsageType.NORMAL);
        searchParameter1.setStatus(Enumerations.PublicationStatus.ACTIVE);
        IIdType sp1Id = mySearchParameterDao.create(searchParameter1).getId().toUnqualifiedVersionless();
        // SearchParam - activity
        SearchParameter searchParameter2 = new SearchParameter();
        searchParameter2.addBase("Provenance");
        searchParameter2.setCode("activity");
        searchParameter2.setType(Enumerations.SearchParamType.TOKEN);
        searchParameter2.setExpression("Provenance.extension('Provenance#activity')");
        searchParameter2.setXpathUsage(SearchParameter.XPathUsageType.NORMAL);
        searchParameter2.setStatus(Enumerations.PublicationStatus.ACTIVE);
        IIdType sp2Id = mySearchParameterDao.create(searchParameter2).getId().toUnqualifiedVersionless();
        mySearchParamRegistry.forceRefresh();

        BodyStructure bs = new BodyStructure();
        bs.setDescription("BodyStructure in R4 replaced BodySite from DSTU3");
        IIdType bsId = myBodyStructureDao.create(bs, mySrd).getId().toUnqualifiedVersionless();

        Patient patient = new Patient();
        patient.setId("P1");
        patient.setActive(true);
        patient.addName().setFamily("FamilyName");
        Extension extParent = patient
            .addExtension()
            .setUrl("http://hl7.org/fhir/us/core/StructureDefinition/us-core-ethnicity");
        extParent
            .addExtension()
            .setUrl("ombCategory")
            .setValue(new CodeableConcept().addCoding(new Coding().setSystem("urn:oid:2.16.840.1.113883.5.50")
                .setCode("2186-5")
                .setDisplay("Not Hispanic or Latino")));
        extParent
            .addExtension()
            .setUrl("text")
            .setValue(new StringType("Not Hispanic or Latino"));
        myPatientDao.update(patient);
        CodeableConcept categoryCodeableConcept1 = new CodeableConcept().addCoding(new Coding().setSystem("acc_proccat_fkc")
            .setCode("CANN")
            .setDisplay("Cannulation"));
        Procedure procedure = new Procedure();
        procedure.setSubject(new Reference("Patient/P1"));
        procedure.setStatus(Procedure.ProcedureStatus.COMPLETED);
        procedure.setCategory(categoryCodeableConcept1);
        Extension extProcedure = procedure
            .addExtension()
            .setUrl("Procedure#focalAccess")
            .setValue(new UriType("BodyStructure/" + bsId.getIdPartAsLong()));
        procedure.getMeta()
            .addTag("acc_procext_fkc", "1STCANN2NDL", "First Successful Cannulation with 2 Needles");
        IIdType procedureId = myProcedureDao.create(procedure).getId().toUnqualifiedVersionless();

        Device device = new Device();
        device.setManufacturer("Acme");
        IIdType deviceId = myDeviceDao.create(device).getId().toUnqualifiedVersionless();

        Provenance provenance = new Provenance();
        provenance.setActivity(new CodeableConcept().addCoding(new Coding().setSystem("http://hl7.org/fhir/v3/DocumentCompletion").setCode("PA")));
        provenance.addAgent().setWho(new Reference(deviceId));
        IIdType provenanceId = myProvenanceDao.create(provenance).getId().toUnqualifiedVersionless();

        logAllResources();
        logAllResourceTags();
        logAllResourceVersions();

        // Search example 1:
        // http://FHIR_SERVER/fhir_request/Procedure
        // ?status%3Anot=entered-in-error&subject=B
        // &category=CANN&focalAccess=BodySite%2F3530342921&_tag=TagValue
        // NOTE: This gets sorted once so the order is different once it gets executed!
        {
            // IMPORTANT: Keep the query param order exactly as shown below!
            SearchParameterMap map = SearchParameterMap.newSynchronous();
            // _tag, category, status, subject, focalAccess
            map.add("_tag", new TokenParam("TagValue"));
            map.add("category", new TokenParam("CANN"));
            map.add("status", new TokenParam("entered-in-error").setModifier(TokenParamModifier.NOT));
            map.add("subject", new ReferenceParam("Patient/P1"));
            map.add("focalAccess", new ReferenceParam("BodyStructure/" + bsId.getIdPart()));
            myCaptureQueriesListener.clear();
            IBundleProvider outcome = myProcedureDao.search(map, new SystemRequestDetails());
            ourLog.info("Search returned {} resources.", outcome.getResources(0, 999).size());
            myCaptureQueriesListener.logSelectQueriesForCurrentThread();

            String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
            // Check for a particular WHERE CLAUSE in the generated SQL to make sure we are verifying the correct query
            assertEquals(2, StringUtils.countMatches(selectQuery.toLowerCase(), " join hfj_res_link "), selectQuery);

            // Ensure that we do NOT see a couple of particular WHERE clauses
            assertEquals(0, StringUtils.countMatches(selectQuery.toLowerCase(), ".res_type = 'procedure'"), selectQuery);
            assertEquals(0, StringUtils.countMatches(selectQuery.toLowerCase(), ".res_deleted_at is null"), selectQuery);
        }

        // Search example 2:
        // http://FHIR_SERVER/fhir_request/Procedure
        // ?status%3Anot=entered-in-error&category=CANN&focalAccess=3692871435
        // &_tag=1STCANN1NDL%2C1STCANN2NDL&outcome=SUCCESS&_count=1&_requestTrace=True
        // NOTE: This gets sorted once so the order is different once it gets executed!
        {
            // IMPORTANT: Keep the query param order exactly as shown below!
            // NOTE: The "outcome" SearchParameter is not being used below, but it doesn't affect the test.
            SearchParameterMap map = SearchParameterMap.newSynchronous();
            // _tag, category, status, focalAccess
            map.add("_tag", new TokenParam("TagValue"));
            map.add("category", new TokenParam("CANN"));
            map.add("status", new TokenParam("entered-in-error").setModifier(TokenParamModifier.NOT));
            map.add("focalAccess", new ReferenceParam("BodyStructure/" + bsId.getIdPart()));
            myCaptureQueriesListener.clear();
            IBundleProvider outcome = myProcedureDao.search(map, new SystemRequestDetails());
            ourLog.info("Search returned {} resources.", outcome.getResources(0, 999).size());
            myCaptureQueriesListener.logSelectQueriesForCurrentThread();

            String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
            // Check for a particular WHERE CLAUSE in the generated SQL to make sure we are verifying the correct query
            assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), " join hfj_res_link "), selectQuery);

            // Ensure that we do NOT see a couple of particular WHERE clauses
            assertEquals(0, StringUtils.countMatches(selectQuery.toLowerCase(), ".res_type = 'procedure'"), selectQuery);
            assertEquals(0, StringUtils.countMatches(selectQuery.toLowerCase(), ".res_deleted_at is null"), selectQuery);
        }

        // Search example 3:
        // http://FHIR_SERVER/fhir_request/Provenance
        // ?agent=Acme&activity=PA&_lastUpdated=ge2021-01-01&_requestTrace=True
        // NOTE: This gets sorted once so the order is different once it gets executed!
        {
            // IMPORTANT: Keep the query param order exactly as shown below!
            // NOTE: The "outcome" SearchParameter is not being used below, but it doesn't affect the test.
            SearchParameterMap map = SearchParameterMap.newSynchronous();
            map.add("agent", new ReferenceParam("Device/" + deviceId.getIdPart()));
            map.add("activity", new TokenParam("PA"));
            DateRangeParam dateRangeParam = new DateRangeParam("ge2021-01-01", null);
            map.setLastUpdated(dateRangeParam);
            myCaptureQueriesListener.clear();
            IBundleProvider outcome = myProvenanceDao.search(map, new SystemRequestDetails());
            ourLog.info("Search returned {} resources.", outcome.getResources(0, 999).size());
            //assertEquals(1, outcome.getResources(0, 999).size());
            myCaptureQueriesListener.logSelectQueriesForCurrentThread();

            String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
            // Ensure that we do NOT see a couple of particular WHERE clauses
            assertEquals(0, StringUtils.countMatches(selectQuery.toLowerCase(), ".res_type = 'provenance'"), selectQuery);
            assertEquals(0, StringUtils.countMatches(selectQuery.toLowerCase(), ".res_deleted_at is null"), selectQuery);
        }
    }

    @AfterEach
    public void afterResetDao() {
@@ -2272,7 +2272,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
        ourLog.info("Search SQL:\n{}", searchSql);
        assertEquals(0, StringUtils.countMatches(searchSql, "PARTITION_ID"), searchSql);
        assertEquals(1, StringUtils.countMatches(searchSql, "TAG_SYSTEM = 'http://system'"), searchSql);
        assertEquals(1, StringUtils.countMatches(searchSql, "t1.HASH_SYS_AND_VALUE ="), searchSql);
        assertEquals(1, StringUtils.countMatches(searchSql, ".HASH_SYS_AND_VALUE ="), searchSql);

    }
@@ -1,9 +1,9 @@
package ca.uhn.fhir.jpa.delete.job;

import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.CommonBatchJobConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterUtil;
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
import ca.uhn.fhir.jpa.batch.reader.CronologicalBatchAllResourcePidReader;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;

@@ -111,7 +111,7 @@ public class ReindexJobTest extends BaseJpaR4Test {

    private JobParameters buildEverythingJobParameters(Long theBatchSize) {
        Map<String, JobParameter> map = new HashMap<>();
        map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MultiUrlProcessorJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
        map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
        map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
        JobParameters parameters = new JobParameters(map);
        return parameters;
@@ -0,0 +1,77 @@
package ca.uhn.fhir.jpa.provider.dstu3;

import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import org.hl7.fhir.dstu3.model.BodySite;
import org.hl7.fhir.dstu3.model.CodeableConcept;
import org.hl7.fhir.dstu3.model.Coding;
import org.hl7.fhir.dstu3.model.Encounter;
import org.hl7.fhir.dstu3.model.Observation;
import org.hl7.fhir.dstu3.model.Patient;
import org.hl7.fhir.dstu3.model.Procedure;
import org.hl7.fhir.dstu3.model.Reference;
import org.hl7.fhir.dstu3.model.SearchParameter;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.Collections;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;

public class ResourceProviderSearchModifierDstu3Test extends BaseResourceProviderDstu3Test {
    @Autowired
    MatchUrlService myMatchUrlService;

    @Test
    public void testReplicateBugWithNotDuringChain() {
        Encounter enc = new Encounter();
        enc.setType(Collections.singletonList(new CodeableConcept().addCoding(new Coding("system", "value", "display"))));
        IIdType encId = myEncounterDao.create(enc).getId();

        Observation obs = new Observation();
        obs.setContext(new Reference(encId));
        myObservationDao.create(obs).getId();

        {
            //Works when not chained:
            String encounterSearchString = "Encounter?type:not=system|value";
            ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(encounterSearchString);
            SearchParameterMap searchParameterMap = resourceSearch.getSearchParameterMap();
            IBundleProvider search = myEncounterDao.search(searchParameterMap);
            assertThat(search.size(), is(equalTo(0)));
        }
        {
            //Works without the NOT qualifier.
            String resultSearchString = "Observation?context.type=system|value";
            ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(resultSearchString);
            SearchParameterMap searchParameterMap = resourceSearch.getSearchParameterMap();
            IBundleProvider search = myObservationDao.search(searchParameterMap);
            assertThat(search.size(), is(equalTo(1)));
        }

        {
            //Works in a chain
            String noResultSearchString = "Observation?context.type:not=system|value";
            ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(noResultSearchString);
            SearchParameterMap searchParameterMap = resourceSearch.getSearchParameterMap();
            IBundleProvider search = myObservationDao.search(searchParameterMap);
            assertThat(search.size(), is(equalTo(0)));
        }
        {
            //Works in a chain with only value
            String noResultSearchString = "Observation?context.type:not=value";
            ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(noResultSearchString);
            SearchParameterMap searchParameterMap = resourceSearch.getSearchParameterMap();
            IBundleProvider search = myObservationDao.search(searchParameterMap);
            assertThat(search.size(), is(equalTo(0)));

        }

    }
}
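For reference, the un-chained :not case above can also be built programmatically rather than parsed through MatchUrlService. A minimal sketch (not part of the change set), assuming the same Encounter fixture and mirroring the TokenParamModifier usage from FhirResourceDaoR4SearchOptimizedTest earlier in this changeset:

    // Programmatic equivalent of "Encounter?type:not=system|value".
    SearchParameterMap map = SearchParameterMap.newSynchronous();
    map.add("type", new TokenParam("system", "value").setModifier(TokenParamModifier.NOT));
    IBundleProvider search = myEncounterDao.search(map);
    assertThat(search.size(), is(equalTo(0)));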

@@ -15,6 +15,7 @@ import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.hapi.rest.server.helper.BatchHelperR4;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.DecimalType;

@@ -99,8 +100,7 @@ public class MultitenantBatchOperationR4Test extends BaseMultitenantResourceProv
    assertEquals("Patient", interceptor.resourceDefs.get(0).getName());
    myInterceptorRegistry.unregisterInterceptor(interceptor);

    DecimalType jobIdPrimitive = (DecimalType) response.getParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID);
    Long jobId = jobIdPrimitive.getValue().longValue();
    Long jobId = BatchHelperR4.jobIdFromParameters(response);
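    // Note: judging from the two lines it replaces here, BatchHelperR4.jobIdFromParameters(response)
    // presumably just extracts the DecimalType job-id parameter and converts it to a Long.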

    assertEquals(1, myBatchJobHelper.getReadCount(jobId));
    assertEquals(1, myBatchJobHelper.getWriteCount(jobId));
@@ -199,6 +199,7 @@ public class ResourceProviderSearchModifierR4Test extends BaseResourceProviderR4
        assertEquals(obsList.get(5).toString(), ids.get(3));
    }


    private List<String> searchAndReturnUnqualifiedVersionlessIdValues(String uri) throws IOException {
        List<String> ids;
@@ -55,6 +55,7 @@ import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.hapi.rest.server.helper.BatchHelperR4;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Bundle.BundleType;
import org.hl7.fhir.r4.model.Bundle.HTTPVerb;

@@ -817,8 +818,7 @@ public class SystemProviderR4Test extends BaseJpaR4Test {
    ourLog.info(ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response));
    myBatchJobHelper.awaitAllBulkJobCompletions(BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME);

    DecimalType jobIdPrimitive = (DecimalType) response.getParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID);
    Long jobId = jobIdPrimitive.getValue().longValue();
    Long jobId = BatchHelperR4.jobIdFromParameters(response);

    // validate
@@ -6,7 +6,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>5.6.0-PRE2-SNAPSHOT</version>
        <version>5.6.0-PRE3-SNAPSHOT</version>
        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@@ -7,7 +7,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>5.6.0-PRE2-SNAPSHOT</version>
        <version>5.6.0-PRE3-SNAPSHOT</version>
        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@@ -6,7 +6,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>5.6.0-PRE2-SNAPSHOT</version>
        <version>5.6.0-PRE3-SNAPSHOT</version>
        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>
@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.mdm.config;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.mdm.MdmBatchJobSubmitterFactoryImpl;
import ca.uhn.fhir.jpa.dao.mdm.MdmLinkDeleteSvc;
import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptor;
import ca.uhn.fhir.jpa.mdm.broker.MdmMessageHandler;

@@ -31,10 +32,8 @@ import ca.uhn.fhir.jpa.mdm.dao.MdmLinkFactory;
import ca.uhn.fhir.jpa.mdm.interceptor.IMdmStorageInterceptor;
import ca.uhn.fhir.jpa.mdm.interceptor.MdmStorageInterceptor;
import ca.uhn.fhir.jpa.mdm.svc.GoldenResourceMergerSvcImpl;
import ca.uhn.fhir.jpa.mdm.svc.MdmClearSvcImpl;
import ca.uhn.fhir.jpa.mdm.svc.MdmControllerSvcImpl;
import ca.uhn.fhir.jpa.mdm.svc.MdmEidUpdateService;
import ca.uhn.fhir.jpa.mdm.svc.MdmGoldenResourceDeletingSvc;
import ca.uhn.fhir.jpa.mdm.svc.MdmLinkQuerySvcImpl;
import ca.uhn.fhir.jpa.mdm.svc.MdmLinkSvcImpl;
import ca.uhn.fhir.jpa.mdm.svc.MdmLinkUpdaterSvcImpl;

@@ -52,8 +51,8 @@ import ca.uhn.fhir.jpa.mdm.svc.candidate.MdmCandidateSearchCriteriaBuilderSvc;
import ca.uhn.fhir.jpa.mdm.svc.candidate.MdmCandidateSearchSvc;
import ca.uhn.fhir.jpa.mdm.svc.candidate.MdmGoldenResourceFindingSvc;
import ca.uhn.fhir.mdm.api.IGoldenResourceMergerSvc;
import ca.uhn.fhir.mdm.api.IMdmBatchJobSubmitterFactory;
import ca.uhn.fhir.mdm.api.IMdmControllerSvc;
import ca.uhn.fhir.mdm.api.IMdmExpungeSvc;
import ca.uhn.fhir.mdm.api.IMdmLinkQuerySvc;
import ca.uhn.fhir.mdm.api.IMdmLinkSvc;
import ca.uhn.fhir.mdm.api.IMdmLinkUpdaterSvc;

@@ -183,8 +182,8 @@ public class MdmConsumerConfig {
    }

    @Bean
    IMdmExpungeSvc mdmResetSvc(MdmLinkDaoSvc theMdmLinkDaoSvc, MdmGoldenResourceDeletingSvc theDeletingSvc, IMdmSettings theIMdmSettings) {
        return new MdmClearSvcImpl(theMdmLinkDaoSvc, theDeletingSvc, theIMdmSettings);
    IMdmBatchJobSubmitterFactory mdmBatchJobSubmitterFactory() {
        return new MdmBatchJobSubmitterFactoryImpl();
    }

    @Bean

@@ -251,4 +250,5 @@ public class MdmConsumerConfig {
    IMdmControllerSvc mdmControllerSvc() {
        return new MdmControllerSvcImpl();
    }

}
@@ -21,17 +21,15 @@ package ca.uhn.fhir.jpa.mdm.config;
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.mdm.api.IMdmChannelSubmitterSvc;
import ca.uhn.fhir.mdm.api.IMdmSettings;
import ca.uhn.fhir.mdm.api.IMdmSubmitSvc;
import ca.uhn.fhir.mdm.rules.config.MdmRuleValidator;
import ca.uhn.fhir.jpa.dao.mdm.MdmLinkDeleteSvc;
import ca.uhn.fhir.jpa.mdm.interceptor.MdmSubmitterInterceptorLoader;
import ca.uhn.fhir.jpa.mdm.svc.MdmChannelSubmitterSvcImpl;
import ca.uhn.fhir.jpa.mdm.svc.MdmGoldenResourceDeletingSvc;
import ca.uhn.fhir.jpa.mdm.svc.MdmSearchParamSvc;
import ca.uhn.fhir.jpa.mdm.svc.MdmSubmitSvcImpl;
import ca.uhn.fhir.jpa.subscription.channel.api.IChannelFactory;
import ca.uhn.fhir.mdm.api.IMdmChannelSubmitterSvc;
import ca.uhn.fhir.mdm.api.IMdmSubmitSvc;
import ca.uhn.fhir.mdm.rules.config.MdmRuleValidator;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@@ -60,11 +58,6 @@ public class MdmSubmitterConfig {
        return new MdmLinkDeleteSvc();
    }

    @Bean
    MdmGoldenResourceDeletingSvc mdmGoldenResourceDeletingSvc() {
        return new MdmGoldenResourceDeletingSvc();
    }

    @Bean
    @Lazy
    IMdmChannelSubmitterSvc mdmChannelSubmitterSvc(FhirContext theFhirContext, IChannelFactory theChannelFactory) {

@@ -72,7 +65,7 @@ public class MdmSubmitterConfig {
    }

    @Bean
    IMdmSubmitSvc mdmBatchService(IMdmSettings theMdmSetting) {
    IMdmSubmitSvc mdmSubmitService() {
        return new MdmSubmitSvcImpl();
    }
}
@@ -40,13 +40,10 @@ import org.springframework.transaction.annotation.Transactional;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

public class MdmLinkDaoSvc {

@@ -227,46 +224,6 @@ public class MdmLinkDaoSvc {
        return myMdmLinkDao.findAll(example);
    }

    /**
     * Delete all {@link MdmLink} entities, and return all resource PIDs from the source of the relationship.
     *
     * @return A list of Long representing the related Golden Resource Pids.
     */
    @Transactional
    public List<Long> deleteAllMdmLinksAndReturnGoldenResourcePids() {
        List<MdmLink> all = myMdmLinkDao.findAll();
        return deleteMdmLinksAndReturnGoldenResourcePids(all);
    }

    private List<Long> deleteMdmLinksAndReturnGoldenResourcePids(List<MdmLink> theLinks) {
        Set<Long> goldenResources = theLinks.stream().map(MdmLink::getGoldenResourcePid).collect(Collectors.toSet());
        //TODO GGG this is probably invalid... we are essentially looking for GOLDEN -> GOLDEN links, which are either POSSIBLE_DUPLICATE
        //and REDIRECT
        goldenResources.addAll(theLinks.stream()
            .filter(link -> link.getMatchResult().equals(MdmMatchResultEnum.REDIRECT)
                || link.getMatchResult().equals(MdmMatchResultEnum.POSSIBLE_DUPLICATE))
            .map(MdmLink::getSourcePid).collect(Collectors.toSet()));
        ourLog.info("Deleting {} MDM link records...", theLinks.size());
        myMdmLinkDao.deleteAll(theLinks);
        ourLog.info("{} MDM link records deleted", theLinks.size());
        return new ArrayList<>(goldenResources);
    }

    /**
     * Given a valid {@link String}, delete all {@link MdmLink} entities for that type, and get the Pids
     * for the Golden Resources which were the sources of the links.
     *
     * @param theSourceType the type of relationship you would like to delete.
     * @return A list of longs representing the Pids of the Golden Resources used as the sources of the relationships that were deleted.
     */
    public List<Long> deleteAllMdmLinksOfTypeAndReturnGoldenResourcePids(String theSourceType) {
        MdmLink link = new MdmLink();
        link.setMdmSourceType(theSourceType);
        Example<MdmLink> exampleLink = Example.of(link);
        List<MdmLink> allOfType = myMdmLinkDao.findAll(exampleLink);
        return deleteMdmLinksAndReturnGoldenResourcePids(allOfType);
    }

    /**
     * Persist an MDM link to the database.
     *
@@ -1,84 +0,0 @@
package ca.uhn.fhir.jpa.mdm.svc;

/*-
 * #%L
 * HAPI FHIR JPA Server - Master Data Management
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.mdm.api.IMdmExpungeSvc;
import ca.uhn.fhir.mdm.api.IMdmSettings;
import ca.uhn.fhir.mdm.log.Logs;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.List;

/**
 * This class is responsible for clearing out existing MDM links, as well as deleting all Golden Resources related to those MDM Links.
 */
public class MdmClearSvcImpl implements IMdmExpungeSvc {
    private static final Logger ourLog = Logs.getMdmTroubleshootingLog();

    final MdmLinkDaoSvc myMdmLinkDaoSvc;
    final MdmGoldenResourceDeletingSvc myMdmGoldenResourceDeletingSvcImpl;
    final IMdmSettings myMdmSettings;

    @Autowired
    public MdmClearSvcImpl(MdmLinkDaoSvc theMdmLinkDaoSvc, MdmGoldenResourceDeletingSvc theMdmGoldenResourceDeletingSvcImpl, IMdmSettings theIMdmSettings) {
        myMdmLinkDaoSvc = theMdmLinkDaoSvc;
        myMdmGoldenResourceDeletingSvcImpl = theMdmGoldenResourceDeletingSvcImpl;
        myMdmSettings = theIMdmSettings;
    }

    @Override
    public long expungeAllMdmLinksOfSourceType(String theSourceResourceType, ServletRequestDetails theRequestDetails) {
        throwExceptionIfInvalidSourceResourceType(theSourceResourceType);
        ourLog.info("Clearing all MDM Links for resource type {}...", theSourceResourceType);
        List<Long> goldenResourcePids = myMdmLinkDaoSvc.deleteAllMdmLinksOfTypeAndReturnGoldenResourcePids(theSourceResourceType);
        DeleteMethodOutcome deleteOutcome = myMdmGoldenResourceDeletingSvcImpl.expungeGoldenResourcePids(goldenResourcePids, theSourceResourceType, theRequestDetails);
        ourLog.info("MDM clear operation complete. Removed {} MDM links and {} Golden Resources.", goldenResourcePids.size(), deleteOutcome.getExpungedResourcesCount());
        return goldenResourcePids.size();
    }

    private void throwExceptionIfInvalidSourceResourceType(String theResourceType) {
        if (!myMdmSettings.isSupportedMdmType(theResourceType)) {
            throw new InvalidRequestException(ProviderConstants.MDM_CLEAR + " does not support resource type: " + theResourceType);
        }
    }

    @Override
    public long expungeAllMdmLinks(ServletRequestDetails theRequestDetails) {
        ourLog.info("Clearing all MDM Links...");
        long retVal = 0;

        for (String mdmType : myMdmSettings.getMdmRules().getMdmTypes()) {
            List<Long> goldenResourcePids = myMdmLinkDaoSvc.deleteAllMdmLinksAndReturnGoldenResourcePids();
            DeleteMethodOutcome deleteOutcome = myMdmGoldenResourceDeletingSvcImpl.expungeGoldenResourcePids(goldenResourcePids, null, theRequestDetails);
            ourLog.info("MDM clear operation on type {} complete. Removed {} MDM links and expunged {} Golden resources.", mdmType, goldenResourcePids.size(), deleteOutcome.getExpungedResourcesCount());
            retVal += goldenResourcePids.size();
        }
        ourLog.info("MDM clear completed expunged with a total of {} golden resources cleared.", retVal);
        return retVal;
    }
}
@@ -20,7 +20,9 @@ package ca.uhn.fhir.jpa.mdm.svc;
 * #L%
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.mdm.api.IGoldenResourceMergerSvc;
import ca.uhn.fhir.mdm.api.IMdmBatchJobSubmitterFactory;
import ca.uhn.fhir.mdm.api.IMdmControllerSvc;
import ca.uhn.fhir.mdm.api.IMdmLinkQuerySvc;
import ca.uhn.fhir.mdm.api.IMdmLinkUpdaterSvc;

@@ -31,14 +33,20 @@ import ca.uhn.fhir.mdm.api.paging.MdmPageRequest;
import ca.uhn.fhir.mdm.model.MdmTransactionContext;
import ca.uhn.fhir.mdm.provider.MdmControllerHelper;
import ca.uhn.fhir.mdm.provider.MdmControllerUtil;
import ca.uhn.fhir.rest.server.provider.MultiUrlProcessor;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;

import javax.annotation.Nullable;
import java.math.BigDecimal;
import java.util.List;

/**
 * This class acts as a layer between MdmProviders and MDM services to support a REST API that's not a FHIR Operation API.

@@ -46,6 +54,8 @@ import javax.annotation.Nullable;
@Service
public class MdmControllerSvcImpl implements IMdmControllerSvc {

    @Autowired
    FhirContext myFhirContext;
    @Autowired
    MdmControllerHelper myMdmControllerHelper;
    @Autowired

@@ -54,6 +64,11 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
    IMdmLinkQuerySvc myMdmLinkQuerySvc;
    @Autowired
    IMdmLinkUpdaterSvc myIMdmLinkUpdaterSvc;
    @Autowired
    IMdmBatchJobSubmitterFactory myMdmBatchJobSubmitterFactory;

    public MdmControllerSvcImpl() {
    }

    @Override
    public IAnyResource mergeGoldenResources(String theFromGoldenResourceId, String theToGoldenResourceId, IAnyResource theManuallyMergedGoldenResource, MdmTransactionContext theMdmTransactionContext) {

@@ -91,6 +106,12 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
        return myIMdmLinkUpdaterSvc.updateLink(goldenResource, source, matchResult, theMdmTransactionContext);
    }

    @Override
    public IBaseParameters submitMdmClearJob(List<String> theUrls, IPrimitiveType<BigDecimal> theBatchSize, ServletRequestDetails theRequestDetails) {
        MultiUrlProcessor multiUrlProcessor = new MultiUrlProcessor(myFhirContext, myMdmBatchJobSubmitterFactory.getClearJobSubmitter());
        return multiUrlProcessor.processUrls(theUrls, multiUrlProcessor.getBatchSize(theBatchSize), theRequestDetails);
    }
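
    // Per this changeset, $mdm-clear now runs as a batch job submitted through
    // MultiUrlProcessor, rather than synchronously via MdmClearSvcImpl (deleted
    // above), which looped over the MDM types and expunged golden resources inline.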

    @Override
    public void notDuplicateGoldenResource(String theGoldenResourceId, String theTargetGoldenResourceId, MdmTransactionContext theMdmTransactionContext) {
        IAnyResource goldenResource = myMdmControllerHelper.getLatestGoldenResourceFromIdOrThrowException(ProviderConstants.MDM_UPDATE_LINK_GOLDEN_RESOURCE_ID, theGoldenResourceId);
@@ -1,57 +0,0 @@
package ca.uhn.fhir.jpa.mdm.svc;

/*-
 * #%L
 * HAPI FHIR JPA Server - Master Data Management
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.mdm.log.Logs;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.dao.expunge.DeleteExpungeService;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.SliceImpl;
import org.springframework.stereotype.Service;

import java.util.List;

@Service
public class MdmGoldenResourceDeletingSvc {

    private static final Logger ourLog = Logs.getMdmTroubleshootingLog();

    /**
     * This is here for the case of possible infinite loops. Technically batch conflict deletion should handle this, but this is an escape hatch.
     */
    private static final int MAXIMUM_DELETE_ATTEMPTS = 100000;

    @Autowired
    private DaoRegistry myDaoRegistry;
    @Autowired
    private ExpungeService myExpungeService;
    @Autowired
    DeleteExpungeService myDeleteExpungeService;

    public DeleteMethodOutcome expungeGoldenResourcePids(List<Long> theGoldenResourcePids, String theResourceType, ServletRequestDetails theRequestDetails) {
        return myDeleteExpungeService.expungeByResourcePids(ProviderConstants.MDM_CLEAR, theResourceType, new SliceImpl<>(theGoldenResourcePids), theRequestDetails);
    }
}
@@ -20,10 +20,6 @@ package ca.uhn.fhir.jpa.mdm.svc;
 * #L%
 */

import ca.uhn.fhir.mdm.api.IMdmChannelSubmitterSvc;
import ca.uhn.fhir.mdm.api.IMdmSettings;
import ca.uhn.fhir.mdm.api.IMdmSubmitSvc;
import ca.uhn.fhir.mdm.log.Logs;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;

@@ -31,6 +27,10 @@ import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.mdm.api.IMdmChannelSubmitterSvc;
import ca.uhn.fhir.mdm.api.IMdmSettings;
import ca.uhn.fhir.mdm.api.IMdmSubmitSvc;
import ca.uhn.fhir.mdm.log.Logs;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;

@@ -69,6 +69,9 @@ public class MdmSubmitSvcImpl implements IMdmSubmitSvc {

    private int myBufferSize = DEFAULT_BUFFER_SIZE;

    public MdmSubmitSvcImpl() {
    }

    @Override
    @Transactional
    public long submitAllSourceTypesToMdm(@Nullable String theCriteria) {
@@ -1,13 +1,13 @@
package ca.uhn.fhir.jpa.mdm.config;

import ca.uhn.fhir.jpa.mdm.helper.MdmLinkHelper;
import ca.uhn.fhir.mdm.api.IMdmSettings;
import ca.uhn.fhir.mdm.rules.config.MdmRuleValidator;
import ca.uhn.fhir.mdm.rules.config.MdmSettings;
import ca.uhn.fhir.jpa.mdm.helper.MdmLinkHelper;
import ca.uhn.fhir.rest.server.FifoMemoryPagingProvider;
import ca.uhn.fhir.rest.server.IPagingProvider;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@@ -41,4 +41,9 @@ public abstract class BaseTestMdmConfig {
    MdmLinkHelper mdmLinkHelper() {
        return new MdmLinkHelper();
    }

    @Bean
    BatchJobHelper batchJobHelper(JobExplorer theJobExplorer) {
        return new BatchJobHelper(theJobExplorer);
    }
}
@@ -1,9 +1,9 @@
package ca.uhn.fhir.jpa.mdm.provider;

import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.entity.MdmLink;
import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.StringType;

@@ -51,6 +51,8 @@ public abstract class BaseLinkR4Test extends BaseProviderR4Test {
    saveLink(myLink);
    assertEquals(MdmLinkSourceEnum.AUTO, myLink.getLinkSource());
    myDaoConfig.setExpungeEnabled(true);
    myDaoConfig.setAllowMultipleDelete(true);
    myDaoConfig.setDeleteExpungeEnabled(true);
}

@AfterEach
@@ -1,23 +1,29 @@
 package ca.uhn.fhir.jpa.mdm.provider;
 
 import ca.uhn.fhir.jpa.mdm.BaseMdmR4Test;
+import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
 import ca.uhn.fhir.mdm.api.IMdmControllerSvc;
-import ca.uhn.fhir.mdm.api.IMdmExpungeSvc;
 import ca.uhn.fhir.mdm.api.IMdmMatchFinderSvc;
 import ca.uhn.fhir.mdm.api.IMdmSubmitSvc;
 import ca.uhn.fhir.mdm.provider.MdmProviderDstu3Plus;
 import ca.uhn.fhir.mdm.rules.config.MdmSettings;
-import ca.uhn.fhir.rest.server.IPagingProvider;
-import ca.uhn.fhir.rest.server.IRestfulServerDefaults;
+import ca.uhn.fhir.test.utilities.BatchJobHelper;
 import com.google.common.base.Charsets;
 import org.apache.commons.io.IOUtils;
+import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import org.hl7.fhir.r4.hapi.rest.server.helper.BatchHelperR4;
+import org.hl7.fhir.r4.model.Parameters;
+import org.hl7.fhir.r4.model.StringType;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.core.io.DefaultResourceLoader;
 import org.springframework.core.io.Resource;
 
+import javax.annotation.Nonnull;
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 
 public abstract class BaseProviderR4Test extends BaseMdmR4Test {
 	MdmProviderDstu3Plus myMdmProvider;
@@ -26,11 +32,13 @@ public abstract class BaseProviderR4Test extends BaseMdmR4Test {
 	@Autowired
 	private IMdmControllerSvc myMdmControllerSvc;
 	@Autowired
-	private IMdmExpungeSvc myMdmExpungeSvc;
+	private IMdmClearJobSubmitter myMdmClearJobSubmitter;
 	@Autowired
 	private IMdmSubmitSvc myMdmSubmitSvc;
 	@Autowired
 	private MdmSettings myMdmSettings;
+	@Autowired
+	BatchJobHelper myBatchJobHelper;
 
 	private String defaultScript;
 
@@ -44,13 +52,31 @@ public abstract class BaseProviderR4Test extends BaseMdmR4Test {
 
 	@BeforeEach
 	public void before() {
-		myMdmProvider = new MdmProviderDstu3Plus(myFhirContext, myMdmControllerSvc, myMdmMatchFinderSvc, myMdmExpungeSvc, myMdmSubmitSvc);
+		myMdmProvider = new MdmProviderDstu3Plus(myFhirContext, myMdmControllerSvc, myMdmMatchFinderSvc, myMdmSubmitSvc, myMdmSettings);
 		defaultScript = myMdmSettings.getScriptText();
 	}
 
 	@AfterEach
 	public void after() throws IOException {
+		super.after();
 		myMdmSettings.setScriptText(defaultScript);
 		myMdmResourceMatcherSvc.init(); // This bugger creates new objects from the beans and then ignores them.
 	}
+
+	protected void clearMdmLinks() {
+		Parameters result = (Parameters) myMdmProvider.clearMdmLinks(null, null, myRequestDetails);
+		myBatchJobHelper.awaitJobExecution(BatchHelperR4.jobIdFromParameters(result));
+	}
+
+	protected void clearMdmLinks(String theResourceName) {
+		Parameters result = (Parameters) myMdmProvider.clearMdmLinks(getResourceNames(theResourceName), null, myRequestDetails);
+		myBatchJobHelper.awaitJobExecution(BatchHelperR4.jobIdFromParameters(result));
+	}
+
+	@Nonnull
+	protected List<IPrimitiveType<String>> getResourceNames(String theResourceName) {
+		List<IPrimitiveType<String>> resourceNames = new ArrayList<>();
+		resourceNames.add(new StringType(theResourceName));
+		return resourceNames;
+	}
 }

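Reviewer context on the test-side changes above: $mdm-clear is no longer a synchronous expunge, so BaseProviderR4Test now submits the operation and blocks on the resulting Spring Batch job before returning. A minimal sketch of how a subclass test consumes the helper, assuming the fixture state set up by BaseLinkR4Test (the test name itself is hypothetical):

	@Test
	public void testLinksAreGoneAfterClear() {
		// clearMdmLinks() submits $mdm-clear via the provider, then
		// BatchJobHelper.awaitJobExecution() blocks until the Spring Batch
		// job finishes, so the assertion runs against the post-job state.
		clearMdmLinks();
		assertNoLinksExist();
	}
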
@@ -68,7 +68,7 @@ public class MdmProviderBatchR4Test extends BaseLinkR4Test {
 	@Test
 	public void testBatchRunOnAllMedications() throws InterruptedException {
 		StringType criteria = null;
-		myMdmProvider.clearMdmLinks(null, myRequestDetails);
+		clearMdmLinks();
 
 		afterMdmLatch.runWithExpectedCount(1, () -> myMdmProvider.mdmBatchOnAllSourceResources(new StringType("Medication"), criteria, null));
 		assertLinkCount(1);
@@ -77,32 +77,33 @@ public class MdmProviderBatchR4Test extends BaseLinkR4Test {
 	@Test
 	public void testBatchRunOnAllPractitioners() throws InterruptedException {
 		StringType criteria = null;
-		myMdmProvider.clearMdmLinks(null, myRequestDetails);
+		clearMdmLinks();
 
 		afterMdmLatch.runWithExpectedCount(1, () -> myMdmProvider.mdmBatchPractitionerType(criteria, null));
 		assertLinkCount(1);
 	}
 	@Test
 	public void testBatchRunOnSpecificPractitioner() throws InterruptedException {
-		myMdmProvider.clearMdmLinks(null, myRequestDetails);
+		clearMdmLinks();
 		afterMdmLatch.runWithExpectedCount(1, () -> myMdmProvider.mdmBatchPractitionerInstance(myPractitioner.getIdElement(), null));
 		assertLinkCount(1);
 	}
 
 	@Test
 	public void testBatchRunOnNonExistentSpecificPractitioner() {
-		myMdmProvider.clearMdmLinks(null, myRequestDetails);
+		clearMdmLinks();
 		try {
 			myMdmProvider.mdmBatchPractitionerInstance(new IdType("Practitioner/999"), null);
 			fail();
-		} catch (ResourceNotFoundException e){}
+		} catch (ResourceNotFoundException e) {
+		}
 	}
 
 	@Test
 	public void testBatchRunOnAllPatients() throws InterruptedException {
 		assertLinkCount(3);
 		StringType criteria = null;
-		myMdmProvider.clearMdmLinks(null, myRequestDetails);
+		clearMdmLinks();
 		afterMdmLatch.runWithExpectedCount(1, () -> myMdmProvider.mdmBatchPatientType(criteria, null));
 		assertLinkCount(1);
 	}
@@ -110,7 +111,7 @@ public class MdmProviderBatchR4Test extends BaseLinkR4Test {
 	@Test
 	public void testBatchRunOnSpecificPatient() throws InterruptedException {
 		assertLinkCount(3);
-		myMdmProvider.clearMdmLinks(null, myRequestDetails);
+		clearMdmLinks();
 		afterMdmLatch.runWithExpectedCount(1, () -> myMdmProvider.mdmBatchPatientInstance(myPatient.getIdElement(), null));
 		assertLinkCount(1);
 	}
@@ -118,18 +119,19 @@ public class MdmProviderBatchR4Test extends BaseLinkR4Test {
 	@Test
 	public void testBatchRunOnNonExistentSpecificPatient() {
 		assertLinkCount(3);
-		myMdmProvider.clearMdmLinks(null, myRequestDetails);
+		clearMdmLinks();
 		try {
 			myMdmProvider.mdmBatchPatientInstance(new IdType("Patient/999"), null);
 			fail();
-		} catch (ResourceNotFoundException e){}
+		} catch (ResourceNotFoundException e) {
+		}
 	}
 
 	@Test
 	public void testBatchRunOnAllTypes() throws InterruptedException {
 		assertLinkCount(3);
 		StringType criteria = new StringType("");
-		myMdmProvider.clearMdmLinks(null, myRequestDetails);
+		clearMdmLinks();
 		afterMdmLatch.runWithExpectedCount(3, () -> {
 			myMdmProvider.mdmBatchOnAllSourceResources(null, criteria, null);
 		});
@@ -140,12 +142,12 @@ public class MdmProviderBatchR4Test extends BaseLinkR4Test {
 	public void testBatchRunOnAllTypesWithInvalidCriteria() {
 		assertLinkCount(3);
 		StringType criteria = new StringType("death-date=2020-06-01");
-		myMdmProvider.clearMdmLinks(null, myRequestDetails);
+		clearMdmLinks();
 
 		try {
 			myMdmProvider.mdmBatchPractitionerType(criteria, null);
 			fail();
-		} catch(InvalidRequestException e) {
+		} catch (InvalidRequestException e) {
 			assertThat(e.getMessage(), is(equalTo("Failed to parse match URL[death-date=2020-06-01] - Resource type Practitioner does not have a parameter with name: death-date")));
 		}
 	}

@@ -10,7 +10,6 @@ import ca.uhn.fhir.rest.param.TokenParam;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import org.hl7.fhir.instance.model.api.IAnyResource;
-import org.hl7.fhir.instance.model.api.IBaseParameters;
 import org.hl7.fhir.r4.model.Patient;
 import org.hl7.fhir.r4.model.Practitioner;
 import org.hl7.fhir.r4.model.StringType;
@@ -30,7 +29,6 @@ import static org.hamcrest.Matchers.nullValue;
 import static org.junit.jupiter.api.Assertions.fail;
 
 public class MdmProviderClearLinkR4Test extends BaseLinkR4Test {
-
 	protected Practitioner myPractitioner;
 	protected StringType myPractitionerId;
 	protected IAnyResource myPractitionerGoldenResource;
@@ -48,7 +46,7 @@ public class MdmProviderClearLinkR4Test extends BaseLinkR4Test {
 	@Test
 	public void testClearAllLinks() {
 		assertLinkCount(2);
-		myMdmProvider.clearMdmLinks(null, myRequestDetails);
+		clearMdmLinks();
 		assertNoLinksExist();
 	}
 
@@ -70,12 +68,13 @@ public class MdmProviderClearLinkR4Test extends BaseLinkR4Test {
 		assertLinkCount(2);
 		Patient read = myPatientDao.read(new IdDt(mySourcePatientId.getValueAsString()).toVersionless());
 		assertThat(read, is(notNullValue()));
-		myMdmProvider.clearMdmLinks(new StringType("Patient"), myRequestDetails);
+		clearMdmLinks("Patient");
 		assertNoPatientLinksExist();
 		try {
 			myPatientDao.read(new IdDt(mySourcePatientId.getValueAsString()).toVersionless());
 			fail();
-		} catch (ResourceNotFoundException e) {}
+		} catch (ResourceNotFoundException e) {
+		}
 
 	}
 	@Test
@@ -87,7 +86,7 @@ public class MdmProviderClearLinkR4Test extends BaseLinkR4Test {
 		Patient patientAndUpdateLinks = createPatientAndUpdateLinks(buildJanePatient());
 		IAnyResource goldenResource = getGoldenResourceFromTargetResource(patientAndUpdateLinks);
 		assertThat(goldenResource, is(notNullValue()));
-		myMdmProvider.clearMdmLinks(null, myRequestDetails);
+		clearMdmLinks();
 		assertNoPatientLinksExist();
 		goldenResource = getGoldenResourceFromTargetResource(patientAndUpdateLinks);
 		assertThat(goldenResource, is(nullValue()));
@@ -104,7 +103,7 @@ public class MdmProviderClearLinkR4Test extends BaseLinkR4Test {
 		linkGoldenResources(goldenResourceFromTarget, goldenResourceFromTarget2);
 
 		//SUT
-		myMdmProvider.clearMdmLinks(null, myRequestDetails);
+		clearMdmLinks();
 
 		assertNoPatientLinksExist();
 		IBundleProvider search = myPatientDao.search(buildGoldenResourceParameterMap());
@@ -135,7 +134,7 @@ public class MdmProviderClearLinkR4Test extends BaseLinkR4Test {
 		linkGoldenResources(goldenResourceFromTarget2, goldenResourceFromTarget);
 
 		//SUT
-		IBaseParameters parameters = myMdmProvider.clearMdmLinks(null, myRequestDetails);
+		clearMdmLinks();
 
 		printLinks();
 
@@ -157,18 +156,19 @@ public class MdmProviderClearLinkR4Test extends BaseLinkR4Test {
 		assertLinkCount(2);
 		Practitioner read = myPractitionerDao.read(new IdDt(myPractitionerGoldenResourceId.getValueAsString()).toVersionless());
 		assertThat(read, is(notNullValue()));
-		myMdmProvider.clearMdmLinks(new StringType("Practitioner"), myRequestDetails);
+		clearMdmLinks("Practitioner");
 		assertNoPractitionerLinksExist();
 		try {
 			myPractitionerDao.read(new IdDt(myPractitionerGoldenResourceId.getValueAsString()).toVersionless());
 			fail();
-		} catch (ResourceNotFoundException e) {}
+		} catch (ResourceNotFoundException e) {
+		}
 	}
 
 	@Test
 	public void testClearInvalidTargetType() {
 		try {
-			myMdmProvider.clearMdmLinks(new StringType("Observation"), myRequestDetails);
+			myMdmProvider.clearMdmLinks(getResourceNames("Observation"), null, myRequestDetails);
 			fail();
 		} catch (InvalidRequestException e) {
 			assertThat(e.getMessage(), is(equalTo("$mdm-clear does not support resource type: Observation")));

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE2-SNAPSHOT</version>
+		<version>5.6.0-PRE3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
 

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE2-SNAPSHOT</version>
+		<version>5.6.0-PRE3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
 

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE2-SNAPSHOT</version>
+		<version>5.6.0-PRE3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
 

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE2-SNAPSHOT</version>
+		<version>5.6.0-PRE3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
 

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE2-SNAPSHOT</version>
+		<version>5.6.0-PRE3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
 

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>5.6.0-PRE2-SNAPSHOT</version>
+		<version>5.6.0-PRE3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
 

@@ -7,7 +7,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE2-SNAPSHOT</version>
+		<version>5.6.0-PRE3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
 

@@ -0,0 +1,5 @@
+package ca.uhn.fhir.mdm.api;
+
+public interface IMdmBatchJobSubmitterFactory {
+	IMdmClearJobSubmitter getClearJobSubmitter();
+}

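The new IMdmBatchJobSubmitterFactory gives callers a seam for obtaining the batch-backed clear-job submitter without autowiring Spring Batch infrastructure directly. A minimal sketch of what an implementation might look like, assuming the submitter is already a bean in the context (the class name here is hypothetical, not part of this change):

	public class MdmBatchJobSubmitterFactoryImpl implements IMdmBatchJobSubmitterFactory {
		@Autowired
		private IMdmClearJobSubmitter myMdmClearJobSubmitter;

		// Hands out the Spring-managed submitter to code that only sees the factory.
		@Override
		public IMdmClearJobSubmitter getClearJobSubmitter() {
			return myMdmClearJobSubmitter;
		}
	}
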
@@ -20,24 +20,10 @@ package ca.uhn.fhir.mdm.api;
  * #L%
  */
 
-import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.api.server.storage.IMultiUrlJobSubmitter;
 
-public interface IMdmExpungeSvc {
-
-	/**
-	 * Given a resource type, delete the underlying MDM links, and their related golden resource objects.
-	 *
-	 * @param theSourceResourceType The type of resources
-	 * @param theRequestDetails
-	 * @return the count of deleted MDM links
-	 */
-	long expungeAllMdmLinksOfSourceType(String theSourceResourceType, ServletRequestDetails theRequestDetails);
-
-	/**
-	 * Delete all MDM links, and their related golden resource objects.
-	 *
-	 * @return the count of deleted MDM links
-	 * @param theRequestDetails
-	 */
-	long expungeAllMdmLinks(ServletRequestDetails theRequestDetails);
+/**
+ * Tag interface for Spring autowiring
+ */
+public interface IMdmClearJobSubmitter extends IMultiUrlJobSubmitter {
 }

@@ -22,10 +22,15 @@ package ca.uhn.fhir.mdm.api;
 
 import ca.uhn.fhir.mdm.api.paging.MdmPageRequest;
 import ca.uhn.fhir.mdm.model.MdmTransactionContext;
+import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
 import org.hl7.fhir.instance.model.api.IAnyResource;
+import org.hl7.fhir.instance.model.api.IBaseParameters;
+import org.hl7.fhir.instance.model.api.IPrimitiveType;
 import org.springframework.data.domain.Page;
 
 import javax.annotation.Nullable;
+import java.math.BigDecimal;
+import java.util.List;
 
 public interface IMdmControllerSvc {
 
@@ -38,4 +43,6 @@ public interface IMdmControllerSvc {
 	IAnyResource mergeGoldenResources(String theFromGoldenResourceId, String theToGoldenResourceId, IAnyResource theManuallyMergedGoldenResource, MdmTransactionContext theMdmTransactionContext);
 
 	IAnyResource updateLink(String theGoldenResourceId, String theSourceResourceId, String theMatchResult, MdmTransactionContext theMdmTransactionContext);
+
+	IBaseParameters submitMdmClearJob(List<String> theUrls, IPrimitiveType<BigDecimal> theBatchSize, ServletRequestDetails theRequestDetails);
 }

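The submitMdmClearJob addition changes the controller contract: callers pass match URLs plus an optional batch size, and get back a Parameters resource carrying the Spring Batch job id instead of a synchronous deleted-count. A hedged usage sketch, mirroring how the tests above consume it (the autowired fields are assumed to be wired as in BaseProviderR4Test):

	// Submit a clear job for all Patient links, then wait for it to finish.
	List<String> urls = Collections.singletonList("Patient?");
	IBaseParameters result = myMdmControllerSvc.submitMdmClearJob(urls, null, theRequestDetails);
	myBatchJobHelper.awaitJobExecution(BatchHelperR4.jobIdFromParameters((Parameters) result));
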
@@ -22,8 +22,8 @@ package ca.uhn.fhir.mdm.provider;
 
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.mdm.api.IMdmControllerSvc;
-import ca.uhn.fhir.mdm.api.IMdmExpungeSvc;
 import ca.uhn.fhir.mdm.api.IMdmMatchFinderSvc;
+import ca.uhn.fhir.mdm.api.IMdmSettings;
 import ca.uhn.fhir.mdm.api.IMdmSubmitSvc;
 import ca.uhn.fhir.mdm.api.MatchedTarget;
 import ca.uhn.fhir.mdm.api.MdmConstants;
@@ -31,7 +31,6 @@ import ca.uhn.fhir.mdm.api.MdmLinkJson;
 import ca.uhn.fhir.mdm.api.paging.MdmPageRequest;
 import ca.uhn.fhir.mdm.model.MdmTransactionContext;
 import ca.uhn.fhir.model.api.annotation.Description;
-import ca.uhn.fhir.model.primitive.IntegerDt;
 import ca.uhn.fhir.rest.annotation.IdParam;
 import ca.uhn.fhir.rest.annotation.Operation;
 import ca.uhn.fhir.rest.annotation.OperationParam;
@@ -58,10 +57,12 @@ import org.springframework.data.domain.Page;
 
 import javax.annotation.Nonnull;
+import java.math.BigDecimal;
+import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.Date;
 import java.util.List;
 import java.util.UUID;
 import java.util.stream.Collectors;
 
 import static ca.uhn.fhir.rest.api.Constants.PARAM_OFFSET;
 import static org.slf4j.LoggerFactory.getLogger;
@@ -69,11 +70,10 @@ import static org.slf4j.LoggerFactory.getLogger;
 public class MdmProviderDstu3Plus extends BaseMdmProvider {
 	private static final Logger ourLog = getLogger(MdmProviderDstu3Plus.class);
 
-
 	private final IMdmControllerSvc myMdmControllerSvc;
 	private final IMdmMatchFinderSvc myMdmMatchFinderSvc;
-	private final IMdmExpungeSvc myMdmExpungeSvc;
 	private final IMdmSubmitSvc myMdmSubmitSvc;
+	private final IMdmSettings myMdmSettings;
 
 	public static final int DEFAULT_PAGE_SIZE = 20;
 	public static final int MAX_PAGE_SIZE = 100;
@@ -84,12 +84,12 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
 	 * Note that this is not a spring bean. Any necessary injections should
 	 * happen in the constructor
 	 */
-	public MdmProviderDstu3Plus(FhirContext theFhirContext, IMdmControllerSvc theMdmControllerSvc, IMdmMatchFinderSvc theMdmMatchFinderSvc, IMdmExpungeSvc theMdmExpungeSvc, IMdmSubmitSvc theMdmSubmitSvc) {
+	public MdmProviderDstu3Plus(FhirContext theFhirContext, IMdmControllerSvc theMdmControllerSvc, IMdmMatchFinderSvc theMdmMatchFinderSvc, IMdmSubmitSvc theMdmSubmitSvc, IMdmSettings theIMdmSettings) {
 		super(theFhirContext);
 		myMdmControllerSvc = theMdmControllerSvc;
 		myMdmMatchFinderSvc = theMdmMatchFinderSvc;
-		myMdmExpungeSvc = theMdmExpungeSvc;
 		myMdmSubmitSvc = theMdmSubmitSvc;
+		myMdmSettings = theIMdmSettings;
 	}
 
 	@Operation(name = ProviderConstants.EMPI_MATCH, typeName = "Patient")
@@ -180,21 +180,33 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
 		);
 	}
 
-	@Operation(name = ProviderConstants.MDM_CLEAR, returnParameters = {
-		@OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT, typeName = "decimal")
+	@Operation(name = ProviderConstants.OPERATION_MDM_CLEAR, returnParameters = {
+		@OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "decimal")
 	})
-	public IBaseParameters clearMdmLinks(@OperationParam(name = ProviderConstants.MDM_CLEAR_SOURCE_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theSourceType,
+	public IBaseParameters clearMdmLinks(@OperationParam(name = ProviderConstants.OPERATION_MDM_CLEAR_RESOURCE_NAME, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theResourceNames,
+										 @OperationParam(name = ProviderConstants.OPERATION_MDM_CLEAR_BATCH_SIZE, typeName = "decimal", min = 0, max = 1) IPrimitiveType<BigDecimal> theBatchSize,
 										 ServletRequestDetails theRequestDetails) {
-		long resetCount;
-		if (theSourceType == null || StringUtils.isBlank(theSourceType.getValue())) {
-			resetCount = myMdmExpungeSvc.expungeAllMdmLinks(theRequestDetails);
+
+		List<String> resourceNames = new ArrayList<>();
+
+
+		if (theResourceNames != null) {
+			resourceNames.addAll(theResourceNames.stream().map(IPrimitiveType::getValue).collect(Collectors.toList()));
+			validateResourceNames(resourceNames);
 		} else {
-			resetCount = myMdmExpungeSvc.expungeAllMdmLinksOfSourceType(theSourceType.getValueAsString(), theRequestDetails);
+			resourceNames.addAll(myMdmSettings.getMdmRules().getMdmTypes());
 		}
 
-		IBaseParameters retval = ParametersUtil.newInstance(myFhirContext);
-		ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_MDM_CLEAR_OUT_PARAM_DELETED_COUNT, resetCount);
-		return retval;
+		List<String> urls = resourceNames.stream().map(s -> s + "?").collect(Collectors.toList());
+		return myMdmControllerSvc.submitMdmClearJob(urls, theBatchSize, theRequestDetails);
 	}
 
+	private void validateResourceNames(List<String> theResourceNames) {
+		for (String resourceName : theResourceNames) {
+			if (!myMdmSettings.isSupportedMdmType(resourceName)) {
+				throw new InvalidRequestException(ProviderConstants.OPERATION_MDM_CLEAR + " does not support resource type: " + resourceName);
+			}
+		}
+	}
+
 	@Operation(name = ProviderConstants.MDM_QUERY_LINKS, idempotent = true)
@@ -202,9 +214,9 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
 										 @OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_RESOURCE_ID, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theResourceId,
 										 @OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_MATCH_RESULT, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theMatchResult,
 										 @OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_LINK_SOURCE, min = 0, max = 1, typeName = "string")
-											 IPrimitiveType<String> theLinkSource,
+										 IPrimitiveType<String> theLinkSource,
 
-										 @Description(formalDefinition="Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.")
+										 @Description(formalDefinition = "Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.")
 										 @OperationParam(name = PARAM_OFFSET, min = 0, max = 1, typeName = "integer")
 										 IPrimitiveType<Integer> theOffset,
 										 @Description(formalDefinition = "Results from this method are returned across multiple pages. This parameter controls the size of those pages.")
@@ -233,7 +245,7 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
 
 		MdmPageRequest mdmPageRequest = new MdmPageRequest(theOffset, theCount, DEFAULT_PAGE_SIZE, MAX_PAGE_SIZE);
 
-		Page<MdmLinkJson> possibleDuplicates = myMdmControllerSvc.getDuplicateGoldenResources(createMdmContext(theRequestDetails, MdmTransactionContext.OperationType.DUPLICATE_GOLDEN_RESOURCES, (String) null), mdmPageRequest);
+		Page<MdmLinkJson> possibleDuplicates = myMdmControllerSvc.getDuplicateGoldenResources(createMdmContext(theRequestDetails, MdmTransactionContext.OperationType.DUPLICATE_GOLDEN_RESOURCES, null), mdmPageRequest);
 
 		return parametersFromMdmLinks(possibleDuplicates, false, theRequestDetails, mdmPageRequest);
 	}
@@ -255,7 +267,7 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
 	}
 
 	@Operation(name = ProviderConstants.OPERATION_MDM_SUBMIT, idempotent = false, returnParameters = {
-		@OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT, typeName = "integer")
+		@OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer")
 	})
 	public IBaseParameters mdmBatchOnAllSourceResources(
 		@OperationParam(name = ProviderConstants.MDM_BATCH_RUN_RESOURCE_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theResourceType,
@@ -278,7 +290,7 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
 
 
 	@Operation(name = ProviderConstants.OPERATION_MDM_SUBMIT, idempotent = false, typeName = "Patient", returnParameters = {
-		@OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT, typeName = "integer")
+		@OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer")
 	})
 	public IBaseParameters mdmBatchPatientInstance(
 		@IdParam IIdType theIdParam,
@@ -288,7 +300,7 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
 	}
 
 	@Operation(name = ProviderConstants.OPERATION_MDM_SUBMIT, idempotent = false, typeName = "Patient", returnParameters = {
-		@OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT, typeName = "integer")
+		@OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer")
 	})
 	public IBaseParameters mdmBatchPatientType(
 		@OperationParam(name = ProviderConstants.MDM_BATCH_RUN_CRITERIA, typeName = "string") IPrimitiveType<String> theCriteria,
@@ -299,7 +311,7 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
 	}
 
 	@Operation(name = ProviderConstants.OPERATION_MDM_SUBMIT, idempotent = false, typeName = "Practitioner", returnParameters = {
-		@OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT, typeName = "integer")
+		@OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer")
 	})
 	public IBaseParameters mdmBatchPractitionerInstance(
 		@IdParam IIdType theIdParam,
@@ -309,7 +321,7 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
 	}
 
 	@Operation(name = ProviderConstants.OPERATION_MDM_SUBMIT, idempotent = false, typeName = "Practitioner", returnParameters = {
-		@OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT, typeName = "integer")
+		@OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer")
 	})
 	public IBaseParameters mdmBatchPractitionerType(
 		@OperationParam(name = ProviderConstants.MDM_BATCH_RUN_CRITERIA, typeName = "string") IPrimitiveType<String> theCriteria,
@@ -324,7 +336,7 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
 	 */
 	public IBaseParameters buildMdmOutParametersWithCount(long theCount) {
 		IBaseParameters retval = ParametersUtil.newInstance(myFhirContext);
-		ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_MDM_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT, theCount);
+		ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, theCount);
 		return retval;
 	}
 

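For end-to-end verification of the reworked provider, a sketch of invoking the operation over REST with a HAPI generic client; the parameter names come from the ProviderConstants changes further down this diff, while the endpoint URL and batch size are assumptions:

	FhirContext ctx = FhirContext.forR4();
	IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8000/fhir"); // assumed base URL

	Parameters inParams = new Parameters();
	inParams.addParameter().setName(ProviderConstants.OPERATION_MDM_CLEAR_RESOURCE_NAME).setValue(new StringType("Patient"));
	inParams.addParameter().setName(ProviderConstants.OPERATION_MDM_CLEAR_BATCH_SIZE).setValue(new DecimalType(100));

	Parameters outParams = client.operation()
		.onServer()
		.named(ProviderConstants.OPERATION_MDM_CLEAR)
		.withParameters(inParams)
		.execute();
	// The response carries the Spring Batch job id under "jobId"
	// (ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID), not a deleted count.
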
@@ -23,12 +23,9 @@ package ca.uhn.fhir.mdm.provider;
 import ca.uhn.fhir.context.ConfigurationException;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.mdm.api.IMdmControllerSvc;
-import ca.uhn.fhir.mdm.api.IMdmExpungeSvc;
 import ca.uhn.fhir.mdm.api.IMdmMatchFinderSvc;
+import ca.uhn.fhir.mdm.api.IMdmSettings;
 import ca.uhn.fhir.mdm.api.IMdmSubmitSvc;
-import ca.uhn.fhir.rest.server.IPagingProvider;
-import ca.uhn.fhir.rest.server.IRestfulServerDefaults;
-import ca.uhn.fhir.rest.server.RestfulServer;
 import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;
@@ -46,9 +43,9 @@ public class MdmProviderLoader {
 	@Autowired
 	private IMdmControllerSvc myMdmControllerSvc;
 	@Autowired
-	private IMdmExpungeSvc myMdmExpungeSvc;
-	@Autowired
 	private IMdmSubmitSvc myMdmSubmitSvc;
+	@Autowired
+	private IMdmSettings myMdmSettings;
 
 	private BaseMdmProvider myMdmProvider;
 
@@ -57,7 +54,7 @@ public class MdmProviderLoader {
 			case DSTU3:
 			case R4:
 				myResourceProviderFactory.addSupplier(() -> {
-					myMdmProvider = new MdmProviderDstu3Plus(myFhirContext, myMdmControllerSvc, myMdmMatchFinderSvc, myMdmExpungeSvc, myMdmSubmitSvc);
+					myMdmProvider = new MdmProviderDstu3Plus(myFhirContext, myMdmControllerSvc, myMdmMatchFinderSvc, myMdmSubmitSvc, myMdmSettings);
 					return myMdmProvider;
 				});
 				break;

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE2-SNAPSHOT</version>
+		<version>5.6.0-PRE3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
 

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE2-SNAPSHOT</version>
+		<version>5.6.0-PRE3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
 

@@ -32,9 +32,11 @@ import java.math.BigDecimal;
 import java.util.List;
 import java.util.stream.Collectors;
 
-public class DeleteExpungeProvider extends BaseMultiUrlProcessor {
+public class DeleteExpungeProvider {
+	private final MultiUrlProcessor myMultiUrlProcessor;
+
 	public DeleteExpungeProvider(FhirContext theFhirContext, IDeleteExpungeJobSubmitter theDeleteExpungeJobSubmitter) {
-		super(theFhirContext, theDeleteExpungeJobSubmitter);
+		myMultiUrlProcessor = new MultiUrlProcessor(theFhirContext, theDeleteExpungeJobSubmitter);
 	}
 
 	@Operation(name = ProviderConstants.OPERATION_DELETE_EXPUNGE, idempotent = false)
@@ -44,6 +46,7 @@ public class DeleteExpungeProvider extends BaseMultiUrlProcessor {
 		RequestDetails theRequestDetails
 	) {
 		List<String> urls = theUrlsToDeleteExpunge.stream().map(IPrimitiveType::getValue).collect(Collectors.toList());
-		return super.processUrls(urls, getBatchSize(theBatchSize), theRequestDetails);
+		Integer batchSize = myMultiUrlProcessor.getBatchSize(theBatchSize);
+		return myMultiUrlProcessor.processUrls(urls, batchSize, theRequestDetails);
 	}
 }

@@ -34,20 +34,20 @@ import javax.annotation.Nullable;
 import java.math.BigDecimal;
 import java.util.List;
 
-public class BaseMultiUrlProcessor {
-	protected final FhirContext myFhirContext;
+public class MultiUrlProcessor {
+	private final FhirContext myFhirContext;
 	private final IMultiUrlJobSubmitter myMultiUrlProcessorJobSubmitter;
 
-	public BaseMultiUrlProcessor(FhirContext theFhirContext, IMultiUrlJobSubmitter theMultiUrlProcessorJobSubmitter) {
+	public MultiUrlProcessor(FhirContext theFhirContext, IMultiUrlJobSubmitter theMultiUrlProcessorJobSubmitter) {
 		myMultiUrlProcessorJobSubmitter = theMultiUrlProcessorJobSubmitter;
 		myFhirContext = theFhirContext;
 	}
 
-	protected IBaseParameters processUrls(List<String> theUrlsToProcess, Integer theBatchSize, RequestDetails theRequestDetails) {
+	public IBaseParameters processUrls(List<String> theUrlsToProcess, Integer theBatchSize, RequestDetails theRequestDetails) {
 		try {
 			JobExecution jobExecution = myMultiUrlProcessorJobSubmitter.submitJob(theBatchSize, theUrlsToProcess, theRequestDetails);
 			IBaseParameters retval = ParametersUtil.newInstance(myFhirContext);
-			ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID, jobExecution.getJobId());
+			ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, jobExecution.getJobId());
 			return retval;
 		} catch (JobParametersInvalidException e) {
 			throw new InvalidRequestException("Invalid job parameters: " + e.getMessage(), e);
@@ -55,7 +55,7 @@ public class BaseMultiUrlProcessor {
 	}
 
 	@Nullable
-	protected Integer getBatchSize(IPrimitiveType<BigDecimal> theBatchSize) {
+	public Integer getBatchSize(IPrimitiveType<BigDecimal> theBatchSize) {
 		Integer batchSize = null;
 		if (theBatchSize != null && !theBatchSize.isEmpty()) {
 			batchSize = theBatchSize.getValue().intValue();

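The rename from BaseMultiUrlProcessor to MultiUrlProcessor is paired with a shift from inheritance to composition: processUrls and getBatchSize become public, and providers hold the processor as a field rather than extending it, as DeleteExpungeProvider above and ReindexProvider below now do. A minimal sketch of a provider following the same pattern (the provider class and its operation method are hypothetical):

	public class ExampleUrlProvider {
		private final MultiUrlProcessor myMultiUrlProcessor;

		public ExampleUrlProvider(FhirContext theFhirContext, IMultiUrlJobSubmitter theJobSubmitter) {
			// Compose rather than extend: the processor is a collaborator.
			myMultiUrlProcessor = new MultiUrlProcessor(theFhirContext, theJobSubmitter);
		}

		public IBaseParameters process(List<IPrimitiveType<String>> theUrls, IPrimitiveType<BigDecimal> theBatchSize, RequestDetails theRequestDetails) {
			List<String> urls = theUrls.stream().map(IPrimitiveType::getValue).collect(Collectors.toList());
			return myMultiUrlProcessor.processUrls(urls, myMultiUrlProcessor.getBatchSize(theBatchSize), theRequestDetails);
		}
	}
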
@@ -84,21 +84,19 @@ public class ProviderConstants {
 	public static final String MDM_DUPLICATE_GOLDEN_RESOURCES = "$mdm-duplicate-golden-resources";
 	public static final String MDM_NOT_DUPLICATE = "$mdm-not-duplicate";
 
-	public static final String MDM_CLEAR = "$mdm-clear";
-	public static final String MDM_CLEAR_SOURCE_TYPE = "sourceType";
+	public static final String OPERATION_MDM_CLEAR = "$mdm-clear";
+	public static final String OPERATION_MDM_CLEAR_RESOURCE_NAME = "resourceType";
+	public static final String OPERATION_MDM_CLEAR_BATCH_SIZE = "batchSize";
 	public static final String OPERATION_MDM_SUBMIT = "$mdm-submit";
-	public static final String MDM_BATCH_RUN_CRITERIA = "criteria" ;
-	public static final String OPERATION_MDM_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT = "submitted" ;
-	public static final String OPERATION_MDM_CLEAR_OUT_PARAM_DELETED_COUNT = "deleted";
+	public static final String MDM_BATCH_RUN_CRITERIA = "criteria";
 	public static final String MDM_BATCH_RUN_RESOURCE_TYPE = "resourceType";
 
 	/**
 	 * CQL Operations
 	 */
 	public static final String CQL_EVALUATE_MEASURE = "$evaluate-measure";
 
 	/**
-	 * Operation name for the $meta operation 
+	 * Operation name for the $meta operation
 	 */
 	public static final String OPERATION_META = "$meta";
-
@@ -146,7 +144,7 @@ public class ProviderConstants {
 	/**
 	 * The Spring Batch job id of the delete expunge job created by a $delete-expunge operation
 	 */
-	public static final String OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID = "jobId";
+	public static final String OPERATION_BATCH_RESPONSE_JOB_ID = "jobId";
 
 	/**
 	 * Operation name for the $delete-expunge operation

@@ -37,11 +37,14 @@ import java.math.BigDecimal;
 import java.util.List;
 import java.util.stream.Collectors;
 
-public class ReindexProvider extends BaseMultiUrlProcessor {
+public class ReindexProvider {
+	private final FhirContext myFhirContext;
 	private final IReindexJobSubmitter myReindexJobSubmitter;
+	private final MultiUrlProcessor myMultiUrlProcessor;
 
 	public ReindexProvider(FhirContext theFhirContext, IReindexJobSubmitter theReindexJobSubmitter) {
-		super(theFhirContext, theReindexJobSubmitter);
+		myFhirContext = theFhirContext;
+		myMultiUrlProcessor = new MultiUrlProcessor(theFhirContext, theReindexJobSubmitter);
 		myReindexJobSubmitter = theReindexJobSubmitter;
 	}
 
@@ -53,12 +56,12 @@ public class ReindexProvider extends BaseMultiUrlProcessor {
 		RequestDetails theRequestDetails
 	) {
 		Boolean everything = theEverything != null && theEverything.getValue();
-		@Nullable Integer batchSize = getBatchSize(theBatchSize);
+		@Nullable Integer batchSize = myMultiUrlProcessor.getBatchSize(theBatchSize);
 		if (everything) {
 			return processEverything(batchSize, theRequestDetails);
 		} else if (theUrlsToReindex != null && !theUrlsToReindex.isEmpty()) {
 			List<String> urls = theUrlsToReindex.stream().map(IPrimitiveType::getValue).collect(Collectors.toList());
-			return super.processUrls(urls, batchSize, theRequestDetails);
+			return myMultiUrlProcessor.processUrls(urls, batchSize, theRequestDetails);
 		} else {
 			throw new InvalidRequestException(ProviderConstants.OPERATION_REINDEX + " must specify either everything=true or provide at least one value for " + ProviderConstants.OPERATION_REINDEX_PARAM_URL);
 		}
@@ -68,7 +71,7 @@ public class ReindexProvider extends BaseMultiUrlProcessor {
 		try {
 			JobExecution jobExecution = myReindexJobSubmitter.submitEverythingJob(theBatchSize, theRequestDetails);
 			IBaseParameters retval = ParametersUtil.newInstance(myFhirContext);
-			ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID, jobExecution.getJobId());
+			ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, jobExecution.getJobId());
 			return retval;
 		} catch (JobParametersInvalidException e) {
 			throw new InvalidRequestException("Invalid job parameters: " + e.getMessage(), e);

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE2-SNAPSHOT</version>
+		<version>5.6.0-PRE3-SNAPSHOT</version>
 		<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
 

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-spring-boot-samples</artifactId>
-		<version>5.6.0-PRE2-SNAPSHOT</version>
+		<version>5.6.0-PRE3-SNAPSHOT</version>
 	</parent>
 
 	<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-spring-boot-samples</artifactId>
-		<version>5.6.0-PRE2-SNAPSHOT</version>
+		<version>5.6.0-PRE3-SNAPSHOT</version>
 	</parent>
 
 	<artifactId>hapi-fhir-spring-boot-sample-client-okhttp</artifactId>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-spring-boot-samples</artifactId>
-		<version>5.6.0-PRE2-SNAPSHOT</version>
+		<version>5.6.0-PRE3-SNAPSHOT</version>
 	</parent>
 
 	<artifactId>hapi-fhir-spring-boot-sample-server-jersey</artifactId>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-spring-boot</artifactId>
-		<version>5.6.0-PRE2-SNAPSHOT</version>
+		<version>5.6.0-PRE3-SNAPSHOT</version>
 	</parent>
 
 	<artifactId>hapi-fhir-spring-boot-samples</artifactId>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.6.0-PRE2-SNAPSHOT</version>
+		<version>5.6.0-PRE3-SNAPSHOT</version>
 		<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>