3757 move delete expunge to batch2 (#3759)

* begin with failing test

* begin with failing test

* remove unused Spring Batch classes.
convert service classes

* move provider

* just one test left to fix

* fix test

* msg.code

* change log

* bumping hapi to PRE12

* fix test

* review feedback

* license update

* fix intermittent and status update atomicity

* restore url-level partition selection

* fix tests

Co-authored-by: Ken Stevens <ken@smilecdr.com>
Ken Stevens 2022-07-09 17:25:55 -04:00 committed by GitHub
parent 839a1b53e9
commit 0526da080b
143 changed files with 1225 additions and 2785 deletions

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -25,7 +25,7 @@ public final class Msg {
/**
* IMPORTANT: Please update the following comment after you add a new code
-* Last code value: 2100
+* Last code value: 2101
*/
private Msg() {}

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -26,7 +26,7 @@ import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
/**
-* @deprecated we are in the process of converting to batch2
+* @deprecated use IJobCoordinator instead
*/
@Deprecated
public interface IBatchJobSubmitter {
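
For context, the batch2 replacement call path that the new deprecation note points at. A minimal sketch, assuming an illustrative job definition id and a pre-serialized parameters payload (neither is taken from this commit):

// Hedged sketch: starting a batch2 job via IJobCoordinator, the interface that
// replaces IBatchJobSubmitter. Definition id and parameters are assumptions.
import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;

public class Batch2SubmitSketch {
	private final IJobCoordinator myJobCoordinator;

	public Batch2SubmitSketch(IJobCoordinator theJobCoordinator) {
		myJobCoordinator = theJobCoordinator;
	}

	public void submitDeleteExpunge(String theParametersJson) {
		JobInstanceStartRequest request = new JobInstanceStartRequest();
		request.setJobDefinitionId("delete-expunge"); // assumed definition id
		request.setParameters(theParametersJson);     // serialized job parameters
		myJobCoordinator.startInstance(request);      // response carries the new instance id
	}
}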

@@ -3,14 +3,14 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -0,0 +1,4 @@
+---
+type: change
+issue: 3757
+title: "The Delete Expunge operation has been moved from Spring Batch to Batch 2."

@@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

@@ -396,7 +396,7 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListener
public int countDeleteQueries() {
return getDeleteQueries()
.stream()
-.map(t->t.getSize())
+.map(SqlQuery::getSize)
.reduce(0, Integer::sum);
}

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>6.1.0-PRE9-SNAPSHOT</version>
+<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -21,12 +21,10 @@ package ca.uhn.fhir.jpa.batch;
*/
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.mdm.batch.job.MdmClearJobConfig;
import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig;
import ca.uhn.fhir.jpa.config.BatchJobRegisterer;
import ca.uhn.fhir.jpa.delete.job.DeleteExpungeJobConfig;
import ca.uhn.fhir.jpa.term.job.TermCodeSystemDeleteJobConfig;
import ca.uhn.fhir.jpa.term.job.TermCodeSystemVersionDeleteJobConfig;
import org.springframework.batch.core.configuration.JobRegistry;
@@ -45,8 +43,6 @@ import org.springframework.context.annotation.Import;
CommonBatchJobConfig.class,
BulkExportJobConfig.class,
BulkImportJobConfig.class,
DeleteExpungeJobConfig.class,
MdmClearJobConfig.class,
TermCodeSystemDeleteJobConfig.class,
TermCodeSystemVersionDeleteJobConfig.class
// When you define a new batch job, add it here.
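
For contrast with the Spring Batch @Configuration wiring trimmed above: in batch2, a job is described declaratively by a JobDefinition bean. Everything below (ids, step names, the parameter and chunk types, the worker bodies) is an illustrative assumption rather than code from this commit:

// Hedged sketch of a batch2 JobDefinition, the rough counterpart of a Spring Batch job config.
import ca.uhn.fhir.batch2.api.IJobStepWorker;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.VoidModel;
import ca.uhn.fhir.batch2.model.JobDefinition;
import ca.uhn.fhir.model.api.IModelJson;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class DeleteExpungeBatch2ConfigSketch {

	public static class SketchParams implements IModelJson {
		@JsonProperty("url")
		public String myUrl;
	}

	public static class SketchChunk implements IModelJson {
		@JsonProperty("url")
		public String myUrl;
	}

	@Bean
	public JobDefinition<SketchParams> deleteExpungeJobDefinitionSketch() {
		// First step: fan the request out into work chunks for the next step.
		IJobStepWorker<SketchParams, VoidModel, SketchChunk> resolveUrls = (theDetails, theSink) -> {
			SketchChunk chunk = new SketchChunk();
			chunk.myUrl = theDetails.getParameters().myUrl;
			theSink.accept(chunk);
			return RunOutcome.SUCCESS;
		};
		// Last step: the delete-then-expunge work for each chunk would happen here.
		IJobStepWorker<SketchParams, SketchChunk, VoidModel> deleteExpunge =
			(theDetails, theSink) -> RunOutcome.SUCCESS;

		return JobDefinition.newBuilder()
			.setJobDefinitionId("delete-expunge-sketch") // assumed id
			.setJobDefinitionVersion(1)
			.setJobDescription("Illustrative delete/expunge job definition")
			.setParametersType(SketchParams.class)
			.addFirstStep("resolve-urls", "Resolve URLs into work chunks", SketchChunk.class, resolveUrls)
			.addLastStep("delete-expunge", "Process each chunk", deleteExpunge)
			.build();
	}
}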

@@ -20,14 +20,8 @@ package ca.uhn.fhir.jpa.batch;
* #L%
*/
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.processor.GoldenResourceAnnotatingProcessor;
import ca.uhn.fhir.jpa.batch.processor.PidToIBaseResourceProcessor;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -36,29 +30,6 @@ import org.springframework.context.annotation.Configuration;
public class CommonBatchJobConfig {
public static final int MINUTES_IN_FUTURE_TO_PROCESS_FROM = 1;
@Bean
public MultiUrlJobParameterValidator multiUrlProcessorParameterValidator(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
return new MultiUrlJobParameterValidator(theMatchUrlService, theDaoRegistry);
}
@Bean
@StepScope
public SqlExecutorWriter sqlExecutorWriter() {
return new SqlExecutorWriter();
}
@Bean
@StepScope
public PidReaderCounterListener pidCountRecorderListener() {
return new PidReaderCounterListener();
}
@Bean
@StepScope
public ReverseCronologicalBatchResourcePidReader reverseCronologicalBatchResourcePidReader() {
return new ReverseCronologicalBatchResourcePidReader();
}
@Bean
@StepScope
public PidToIBaseResourceProcessor pidToResourceProcessor() {

@@ -1,67 +0,0 @@
package ca.uhn.fhir.jpa.batch.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.JobParametersValidator;
/**
* This class will prevent a job from running if any of the provided URLs are not valid on this server.
*/
public class MultiUrlJobParameterValidator implements JobParametersValidator {
public static String JOB_PARAM_OPERATION_NAME = "operation-name";
private final MatchUrlService myMatchUrlService;
private final DaoRegistry myDaoRegistry;
public MultiUrlJobParameterValidator(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
myMatchUrlService = theMatchUrlService;
myDaoRegistry = theDaoRegistry;
}
@Override
public void validate(JobParameters theJobParameters) throws JobParametersInvalidException {
if (theJobParameters == null) {
throw new JobParametersInvalidException(Msg.code(1280) + "This job requires Parameters: [urlList]");
}
RequestListJson requestListJson = RequestListJson.fromJson(theJobParameters.getString(BatchConstants.JOB_PARAM_REQUEST_LIST));
for (PartitionedUrl partitionedUrl : requestListJson.getPartitionedUrls()) {
String url = partitionedUrl.getUrl();
try {
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(url, partitionedUrl.getRequestPartitionId());
String resourceName = resourceSearch.getResourceName();
if (!myDaoRegistry.isResourceTypeSupported(resourceName)) {
throw new JobParametersInvalidException(Msg.code(1281) + "The resource type " + resourceName + " is not supported on this server.");
}
} catch (UnsupportedOperationException e) {
throw new JobParametersInvalidException(Msg.code(1282) + "Failed to parse " + theJobParameters.getString(JOB_PARAM_OPERATION_NAME) + " " + BatchConstants.JOB_PARAM_REQUEST_LIST + " item " + url + ": " + e.getMessage());
}
}
}
}
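
batch2 has an analogous validation hook: an IJobParametersValidator attached to the job definition can reject bad parameters before any work starts. A hedged sketch of the same URL check in that style, reusing the assumed SketchParams payload from the configuration sketch above:

// Hedged sketch: a batch2 parameters validator performing the same check as the
// removed MultiUrlJobParameterValidator.
import ca.uhn.fhir.batch2.api.IJobParametersValidator;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;

public class UrlListValidatorSketch implements IJobParametersValidator<DeleteExpungeBatch2ConfigSketch.SketchParams> {
	private final MatchUrlService myMatchUrlService;
	private final DaoRegistry myDaoRegistry;

	public UrlListValidatorSketch(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
		myMatchUrlService = theMatchUrlService;
		myDaoRegistry = theDaoRegistry;
	}

	@Nullable
	@Override
	public List<String> validate(@Nonnull DeleteExpungeBatch2ConfigSketch.SketchParams theParameters) {
		List<String> errors = new ArrayList<>();
		String resourceName = myMatchUrlService.getResourceSearch(theParameters.myUrl).getResourceName();
		if (!myDaoRegistry.isResourceTypeSupported(resourceName)) {
			errors.add("The resource type " + resourceName + " is not supported on this server.");
		}
		return errors; // an empty list means the parameters are acceptable
	}
}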

@@ -1,75 +0,0 @@
package ca.uhn.fhir.jpa.batch.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
public class PartitionedUrlValidator {
@Autowired
MatchUrlService myMatchUrlService;
@Autowired
IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
@Autowired
FhirContext myFhirContext;
public PartitionedUrlValidator() {
}
/**
* This method will throw an exception if the user is not allowed to access the requested resource type on the partition determined by the request
*/
public RequestListJson buildRequestListJson(RequestDetails theRequest, List<String> theUrlsToProcess) {
List<PartitionedUrl> partitionedUrls = new ArrayList<>();
for (String url : theUrlsToProcess) {
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(url);
RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequest, resourceSearch.getResourceName(), resourceSearch.getSearchParameterMap(), null);
partitionedUrls.add(new PartitionedUrl(url, requestPartitionId));
}
RequestListJson retval = new RequestListJson();
retval.setPartitionedUrls(partitionedUrls);
return retval;
}
public RequestPartitionId requestPartitionIdFromRequest(RequestDetails theRequest) {
Set<String> allResourceNames = myFhirContext.getResourceTypes();
SearchParameterMap map = SearchParameterMap.newSynchronous();
// Verify that the user has access to every resource type on the server:
for (String resourceName : allResourceNames) {
myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequest, resourceName, map, null);
}
// Then return the partition for the Patient resource type. Note Patient was an arbitrary choice here.
return myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequest, "Patient", map, null);
}
}

@@ -1,61 +0,0 @@
package ca.uhn.fhir.jpa.batch.job.model;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.model.api.IModelJson;
import com.fasterxml.jackson.annotation.JsonProperty;
public class PartitionedUrl implements IModelJson {
@JsonProperty("url")
private String myUrl;
@JsonProperty("requestPartitionId")
private RequestPartitionId myRequestPartitionId;
public PartitionedUrl() {
}
public PartitionedUrl(String theUrl, RequestPartitionId theRequestPartitionId) {
myUrl = theUrl;
myRequestPartitionId = theRequestPartitionId;
}
public String getUrl() {
return myUrl;
}
public void setUrl(String theUrl) {
myUrl = theUrl;
}
public RequestPartitionId getRequestPartitionId() {
return myRequestPartitionId;
}
public void setRequestPartitionId(RequestPartitionId theRequestPartitionId) {
myRequestPartitionId = theRequestPartitionId;
}
public boolean isPartitioned() {
return myRequestPartitionId != null && !myRequestPartitionId.isDefaultPartition();
}
}

@@ -1,82 +0,0 @@
package ca.uhn.fhir.jpa.batch.job.model;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.model.api.IModelJson;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.JsonUtil;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.ArrayList;
import java.util.List;
/**
* Serialize a list of URLs and partition ids so Spring Batch can store it as a String
*/
public class RequestListJson implements IModelJson {
static final ObjectMapper ourObjectMapper = new ObjectMapper();
@JsonProperty("partitionedUrls")
private List<PartitionedUrl> myPartitionedUrls;
public static RequestListJson fromUrlStringsAndRequestPartitionIds(List<String> theUrls, List<RequestPartitionId> theRequestPartitionIds) {
assert theUrls.size() == theRequestPartitionIds.size();
RequestListJson retval = new RequestListJson();
List<PartitionedUrl> partitionedUrls = new ArrayList<>();
for (int i = 0; i < theUrls.size(); ++i) {
partitionedUrls.add(new PartitionedUrl(theUrls.get(i), theRequestPartitionIds.get(i)));
}
retval.setPartitionedUrls(partitionedUrls);
return retval;
}
public static RequestListJson fromJson(String theJson) {
try {
return ourObjectMapper.readValue(theJson, RequestListJson.class);
} catch (JsonProcessingException e) {
throw new InternalErrorException(Msg.code(1283) + "Failed to decode " + RequestListJson.class);
}
}
public String toJson() {
return JsonUtil.serializeOrInvalidRequest(this);
}
@Override
public String toString() {
return "RequestListJson{" +
"myPartitionedUrls=" + myPartitionedUrls +
'}';
}
public List<PartitionedUrl> getPartitionedUrls() {
return myPartitionedUrls;
}
public void setPartitionedUrls(List<PartitionedUrl> thePartitionedUrls) {
myPartitionedUrls = thePartitionedUrls;
}
}

@@ -1,48 +0,0 @@
package ca.uhn.fhir.jpa.batch.listener;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.annotation.AfterRead;
import org.springframework.batch.core.annotation.BeforeStep;
import java.util.List;
/**
* Add the number of pids processed to the execution context so we can track progress of the job
*/
public class PidReaderCounterListener {
public static final String RESOURCE_TOTAL_PROCESSED = "resource.total.processed";
private StepExecution myStepExecution;
private Long myTotalPidsProcessed = 0L;
@BeforeStep
public void setStepExecution(StepExecution stepExecution) {
myStepExecution = stepExecution;
}
@AfterRead
public void afterRead(List<Long> thePids) {
myTotalPidsProcessed += thePids.size();
myStepExecution.getExecutionContext().putLong(RESOURCE_TOTAL_PROCESSED, myTotalPidsProcessed);
}
}

@@ -1,35 +0,0 @@
package ca.uhn.fhir.jpa.batch.mdm.batch;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.mdm.api.IMdmBatchJobSubmitterFactory;
import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
import org.springframework.beans.factory.annotation.Autowired;
public class MdmBatchJobSubmitterFactoryImpl implements IMdmBatchJobSubmitterFactory {
@Autowired
IMdmClearJobSubmitter myMdmClearJobSubmitter;
@Override
public IMdmClearJobSubmitter getClearJobSubmitter() {
return myMdmClearJobSubmitter;
}
}

@@ -1,85 +0,0 @@
package ca.uhn.fhir.jpa.batch.mdm.batch;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.batch.mdm.batch.job.ReverseCronologicalBatchMdmLinkPidReader;
import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import javax.transaction.Transactional;
import java.util.List;
public class MdmClearJobSubmitterImpl implements IMdmClearJobSubmitter {
@Autowired
DaoConfig myDaoConfig;
@Autowired
PartitionedUrlValidator myPartitionedUrlValidator;
@Autowired
IInterceptorBroadcaster myInterceptorBroadcaster;
@Autowired
private IBatchJobSubmitter myBatchJobSubmitter;
@Autowired
@Qualifier(BatchConstants.MDM_CLEAR_JOB_NAME)
private Job myMdmClearJob;
@Override
@Transactional(Transactional.TxType.NEVER)
public JobExecution submitJob(Integer theBatchSize, List<String> theUrls, RequestDetails theRequest) throws JobParametersInvalidException {
if (theBatchSize == null) {
theBatchSize = myDaoConfig.getExpungeBatchSize();
}
if (!myDaoConfig.canDeleteExpunge()) {
throw new ForbiddenOperationException(Msg.code(1278) + "Delete Expunge not allowed: " + myDaoConfig.cannotDeleteExpungeReason());
}
RequestListJson requestListJson = myPartitionedUrlValidator.buildRequestListJson(theRequest, theUrls);
for (String url : theUrls) {
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(String.class, url);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE, params);
}
JobParameters jobParameters = ReverseCronologicalBatchMdmLinkPidReader.buildJobParameters(ProviderConstants.OPERATION_MDM_CLEAR, theBatchSize, requestListJson);
return myBatchJobSubmitter.runJob(myMdmClearJob, jobParameters);
}
}

@@ -1,124 +0,0 @@
package ca.uhn.fhir.jpa.batch.mdm.batch.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.delete.job.DeleteExpungeProcessor;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.listener.ExecutionContextPromotionListener;
import org.springframework.batch.item.support.CompositeItemProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import java.util.ArrayList;
import java.util.List;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.MDM_CLEAR_JOB_NAME;
/**
* Spring batch Job configuration file. Contains all necessary plumbing to run a
* $mdm-clear job.
*/
@Configuration
public class MdmClearJobConfig {
public static final String MDM_CLEAR_RESOURCE_LIST_STEP_NAME = "mdm-clear-resource-list-step";
@Autowired
private StepBuilderFactory myStepBuilderFactory;
@Autowired
private JobBuilderFactory myJobBuilderFactory;
@Autowired
private DeleteExpungeProcessor myDeleteExpungeProcessor;
@Autowired
@Qualifier("deleteExpungePromotionListener")
private ExecutionContextPromotionListener myDeleteExpungePromotionListener;
@Autowired
private MultiUrlJobParameterValidator myMultiUrlProcessorParameterValidator;
@Autowired
private PidReaderCounterListener myPidCountRecorderListener;
@Autowired
private SqlExecutorWriter mySqlExecutorWriter;
@Bean(name = MDM_CLEAR_JOB_NAME)
@Lazy
public Job mdmClearJob() {
return myJobBuilderFactory.get(MDM_CLEAR_JOB_NAME)
.validator(myMultiUrlProcessorParameterValidator)
.start(mdmClearUrlListStep())
.build();
}
@Bean
public Step mdmClearUrlListStep() {
return myStepBuilderFactory.get(MDM_CLEAR_RESOURCE_LIST_STEP_NAME)
.<List<Long>, List<String>>chunk(1)
.reader(reverseCronologicalBatchMdmLinkPidReader())
.processor(deleteThenExpungeCompositeProcessor())
.writer(mySqlExecutorWriter)
.listener(myPidCountRecorderListener)
.listener(myDeleteExpungePromotionListener)
.build();
}
@Bean
@StepScope
public CompositeItemProcessor<List<Long>, List<String>> deleteThenExpungeCompositeProcessor() {
CompositeItemProcessor<List<Long>, List<String>> compositeProcessor = new CompositeItemProcessor<>();
List itemProcessors = new ArrayList<>();
itemProcessors.add(mdmLinkDeleter());
itemProcessors.add(myDeleteExpungeProcessor);
compositeProcessor.setDelegates(itemProcessors);
return compositeProcessor;
}
@Bean
@StepScope
public ReverseCronologicalBatchMdmLinkPidReader reverseCronologicalBatchMdmLinkPidReader() {
return new ReverseCronologicalBatchMdmLinkPidReader();
}
@Bean
public MdmLinkDeleter mdmLinkDeleter() {
return new MdmLinkDeleter();
}
@Bean
public ExecutionContextPromotionListener mdmClearPromotionListener() {
ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();
listener.setKeys(new String[]{PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED});
return listener;
}
}

@@ -1,85 +0,0 @@
package ca.uhn.fhir.jpa.batch.mdm.batch.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
import ca.uhn.fhir.jpa.dao.expunge.PartitionRunner;
import ca.uhn.fhir.jpa.entity.MdmLink;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.stream.Collectors;
/**
* Take MdmLink pids in and output golden resource pids out
*/
public class MdmLinkDeleter implements ItemProcessor<List<Long>, List<Long>> {
public static final String PROCESS_NAME = "MdmClear";
public static final String THREAD_PREFIX = "mdmClear";
private static final Logger ourLog = LoggerFactory.getLogger(MdmLinkDeleter.class);
@Autowired
protected PlatformTransactionManager myTxManager;
@Autowired
IMdmLinkDao myMdmLinkDao;
@Autowired
DaoConfig myDaoConfig;
@Override
public List<Long> process(List<Long> thePidList) throws Exception {
ConcurrentLinkedQueue<Long> goldenPidAggregator = new ConcurrentLinkedQueue<>();
PartitionRunner partitionRunner = new PartitionRunner(PROCESS_NAME, THREAD_PREFIX, myDaoConfig.getReindexBatchSize(), myDaoConfig.getReindexThreadCount());
partitionRunner.runInPartitionedThreads(ResourcePersistentId.fromLongList(thePidList), pids -> removeLinks(pids, goldenPidAggregator));
return new ArrayList<>(goldenPidAggregator);
}
private void removeLinks(List<ResourcePersistentId> pidList, ConcurrentLinkedQueue<Long> theGoldenPidAggregator) {
TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
txTemplate.executeWithoutResult(t -> theGoldenPidAggregator.addAll(deleteMdmLinksAndReturnGoldenResourcePids(ResourcePersistentId.toLongList(pidList))));
}
public List<Long> deleteMdmLinksAndReturnGoldenResourcePids(List<Long> thePids) {
List<MdmLink> links = myMdmLinkDao.findAllById(thePids);
Set<Long> goldenResources = links.stream().map(MdmLink::getGoldenResourcePid).collect(Collectors.toSet());
//TODO GGG this is probably invalid... we are essentially looking for GOLDEN -> GOLDEN links, which are either POSSIBLE_DUPLICATE
//and REDIRECT
goldenResources.addAll(links.stream()
.filter(link -> link.getMatchResult().equals(MdmMatchResultEnum.REDIRECT)
|| link.getMatchResult().equals(MdmMatchResultEnum.POSSIBLE_DUPLICATE))
.map(MdmLink::getSourcePid).collect(Collectors.toSet()));
ourLog.info("Deleting {} MDM link records...", links.size());
myMdmLinkDao.deleteAll(links);
ourLog.info("{} MDM link records deleted", links.size());
return new ArrayList<>(goldenResources);
}
}

@@ -1,59 +0,0 @@
package ca.uhn.fhir.jpa.batch.mdm.batch.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.batch.reader.BaseReverseCronologicalBatchPidReader;
import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* This is the same as the parent class, except it operates on MdmLink entities instead of resource entities
*/
public class ReverseCronologicalBatchMdmLinkPidReader extends BaseReverseCronologicalBatchPidReader {
@Autowired
IMdmLinkDao myMdmLinkDao;
@Override
protected Set<Long> getNextPidBatch(ResourceSearch resourceSearch) {
String resourceName = resourceSearch.getResourceName();
Pageable pageable = PageRequest.of(0, getBatchSize());
RequestPartitionId requestPartitionId = resourceSearch.getRequestPartitionId();
if (requestPartitionId.isAllPartitions()){
return new HashSet<>(myMdmLinkDao.findPidByResourceNameAndThreshold(resourceName, getCurrentHighThreshold(), pageable));
}
List<Integer> partitionIds = requestPartitionId.getPartitionIds();
//Expand out the list to handle the REDIRECT/POSSIBLE DUPLICATE ones.
return new HashSet<>(myMdmLinkDao.findPidByResourceNameAndThresholdAndPartitionId(resourceName, getCurrentHighThreshold(), partitionIds, pageable));
}
@Override
protected void setDateFromPidFunction(ResourceSearch resourceSearch) {
setDateExtractorFunction(pid -> myMdmLinkDao.findById(pid).get().getCreated());
}
}

@@ -1,240 +0,0 @@
package ca.uhn.fhir.jpa.batch.reader;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.CommonBatchJobConfig;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemStream;
import org.springframework.batch.item.ItemStreamException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
/**
* This Spring Batch reader takes the following parameters:
* {@link BatchConstants#JOB_PARAM_REQUEST_LIST}: A list of URLs to search for along with the partitions those searches should be performed on
* {@link BatchConstants#JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search. If omitted, {@link DaoConfig#getExpungeBatchSize} will be used.
* {@link BatchConstants#JOB_PARAM_START_TIME}: The latest timestamp of entities to search for
* <p>
* The reader will return at most {@link BatchConstants#JOB_PARAM_BATCH_SIZE} pids every time it is called, or null
* once no more matching entities are available. It returns the resources in reverse chronological order
* and stores where it's at in the Spring Batch execution context with the key {@link BatchConstants#CURRENT_THRESHOLD_HIGH}
* appended with "." and the index number of the url list item it has gotten up to. This is to permit
* restarting jobs that use this reader so it can pick up where it left off.
*/
public abstract class BaseReverseCronologicalBatchPidReader implements ItemReader<List<Long>>, ItemStream {
private static final Logger ourLog = LoggerFactory.getLogger(ReverseCronologicalBatchResourcePidReader.class);
private final BatchDateThresholdUpdater myBatchDateThresholdUpdater = new BatchDateThresholdUpdater();
private final Map<Integer, Date> myThresholdHighByUrlIndex = new HashMap<>();
private final Map<Integer, Set<Long>> myAlreadyProcessedPidsWithHighDate = new HashMap<>();
@Autowired
private FhirContext myFhirContext;
@Autowired
private MatchUrlService myMatchUrlService;
private List<PartitionedUrl> myPartitionedUrls;
private Integer myBatchSize;
private int myUrlIndex = 0;
private Date myStartTime;
private static String highKey(int theIndex) {
return BatchConstants.CURRENT_THRESHOLD_HIGH + "." + theIndex;
}
@Nonnull
public static JobParameters buildJobParameters(String theOperationName, Integer theBatchSize, RequestListJson theRequestListJson) {
Map<String, JobParameter> map = new HashMap<>();
map.put(MultiUrlJobParameterValidator.JOB_PARAM_OPERATION_NAME, new JobParameter(theOperationName));
map.put(BatchConstants.JOB_PARAM_REQUEST_LIST, new JobParameter(theRequestListJson.toJson()));
map.put(BatchConstants.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
if (theBatchSize != null) {
map.put(BatchConstants.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
}
JobParameters parameters = new JobParameters(map);
return parameters;
}
@Autowired
public void setRequestListJson(@Value("#{jobParameters['" + BatchConstants.JOB_PARAM_REQUEST_LIST + "']}") String theRequestListJson) {
RequestListJson requestListJson = RequestListJson.fromJson(theRequestListJson);
myPartitionedUrls = requestListJson.getPartitionedUrls();
}
@Autowired
public void setStartTime(@Value("#{jobParameters['" + BatchConstants.JOB_PARAM_START_TIME + "']}") Date theStartTime) {
myStartTime = theStartTime;
}
@Override
public List<Long> read() throws Exception {
while (myUrlIndex < myPartitionedUrls.size()) {
List<Long> nextBatch = getNextBatch();
if (nextBatch.isEmpty()) {
++myUrlIndex;
continue;
}
return nextBatch;
}
return null;
}
protected List<Long> getNextBatch() {
RequestPartitionId requestPartitionId = myPartitionedUrls.get(myUrlIndex).getRequestPartitionId();
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(myPartitionedUrls.get(myUrlIndex).getUrl(), requestPartitionId);
myAlreadyProcessedPidsWithHighDate.putIfAbsent(myUrlIndex, new HashSet<>());
Set<Long> newPids = getNextPidBatch(resourceSearch);
if (ourLog.isDebugEnabled()) {
ourLog.debug("Search for {}{} returned {} results", resourceSearch.getResourceName(), resourceSearch.getSearchParameterMap().toNormalizedQueryString(myFhirContext), newPids.size());
ourLog.debug("Results: {}", newPids);
}
setDateFromPidFunction(resourceSearch);
List<Long> retval = new ArrayList<>(newPids);
Date newThreshold = myBatchDateThresholdUpdater.updateThresholdAndCache(getCurrentHighThreshold(), myAlreadyProcessedPidsWithHighDate.get(myUrlIndex), retval);
myThresholdHighByUrlIndex.put(myUrlIndex, newThreshold);
return retval;
}
protected Date getCurrentHighThreshold() {
return myThresholdHighByUrlIndex.get(myUrlIndex);
}
protected void setDateExtractorFunction(Function<Long, Date> theDateExtractorFunction) {
myBatchDateThresholdUpdater.setDateFromPid(theDateExtractorFunction);
}
protected void addDateCountAndSortToSearch(ResourceSearch resourceSearch) {
SearchParameterMap map = resourceSearch.getSearchParameterMap();
DateRangeParam rangeParam = getDateRangeParam(resourceSearch);
map.setLastUpdated(rangeParam);
map.setLoadSynchronousUpTo(myBatchSize);
map.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.DESC));
}
/**
* Evaluates the passed in {@link ResourceSearch} to see if it contains a non-null {@link DateRangeParam}.
*
* If one such {@link DateRangeParam} exists, we use that to determine the upper and lower bounds for the returned
* {@link DateRangeParam}. The {@link DateRangeParam#getUpperBound()} is compared to the
* {@link BaseReverseCronologicalBatchPidReader#getCurrentHighThreshold()}, and the lower of the two date values
* is used.
*
* If no {@link DateRangeParam} is set, we use the local {@link BaseReverseCronologicalBatchPidReader#getCurrentHighThreshold()}
* to create a {@link DateRangeParam}.
* @param resourceSearch The {@link ResourceSearch} to check.
* @return {@link DateRangeParam}
*/
private DateRangeParam getDateRangeParam(ResourceSearch resourceSearch) {
DateRangeParam rangeParam = resourceSearch.getSearchParameterMap().getLastUpdated();
if (rangeParam != null) {
if (rangeParam.getUpperBound() == null) {
rangeParam.setUpperBoundInclusive(getCurrentHighThreshold());
} else {
Date theUpperBound = (getCurrentHighThreshold() == null || rangeParam.getUpperBound().getValue().before(getCurrentHighThreshold()))
? rangeParam.getUpperBound().getValue() : getCurrentHighThreshold();
rangeParam.setUpperBoundInclusive(theUpperBound);
}
} else {
rangeParam = new DateRangeParam().setUpperBoundInclusive(getCurrentHighThreshold());
}
return rangeParam;
}
@Override
public void open(ExecutionContext executionContext) throws ItemStreamException {
if (executionContext.containsKey(BatchConstants.CURRENT_URL_INDEX)) {
myUrlIndex = new Long(executionContext.getLong(BatchConstants.CURRENT_URL_INDEX)).intValue();
}
for (int index = 0; index < myPartitionedUrls.size(); ++index) {
String key = highKey(index);
if (executionContext.containsKey(key)) {
myThresholdHighByUrlIndex.put(index, new Date(executionContext.getLong(key)));
} else {
myThresholdHighByUrlIndex.put(index, myStartTime);
}
}
}
@Override
public void update(ExecutionContext executionContext) throws ItemStreamException {
executionContext.putLong(BatchConstants.CURRENT_URL_INDEX, myUrlIndex);
for (int index = 0; index < myPartitionedUrls.size(); ++index) {
Date date = myThresholdHighByUrlIndex.get(index);
if (date != null) {
executionContext.putLong(highKey(index), date.getTime());
}
}
}
@Override
public void close() throws ItemStreamException {
}
protected Integer getBatchSize() {
return myBatchSize;
}
@Autowired
public void setBatchSize(@Value("#{jobParameters['" + BatchConstants.JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
myBatchSize = theBatchSize;
}
protected Set<Long> getAlreadySeenPids() {
return myAlreadyProcessedPidsWithHighDate.get(myUrlIndex);
}
protected abstract Set<Long> getNextPidBatch(ResourceSearch resourceSearch);
protected abstract void setDateFromPidFunction(ResourceSearch resourceSearch);
}

@@ -1,96 +0,0 @@
package ca.uhn.fhir.jpa.batch.reader;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
public class BatchDateThresholdUpdater {
private static final Logger ourLog = LoggerFactory.getLogger(BatchDateThresholdUpdater.class);
private Function<Long, Date> myDateFromPid;
public BatchDateThresholdUpdater() {
}
public BatchDateThresholdUpdater(Function<Long, Date> theDateFromPid) {
myDateFromPid = theDateFromPid;
}
/**
* This method is used by batch jobs that process resource pids by date in multiple passes. It's used to ensure
* the same resource isn't processed twice. After a pass of processing pids, it sets the threshold date
* for the next pass from the last resource in the list and collects all of the resources that share that date
* into a temporary cache so that the caller can exclude those from the next pass.
*
* @param thePrevThreshold the date threshold from the previous pass
* @param theAlreadyProcessedPidsWithThresholdDate the set to load pids into that have the new threshold
* @param theProcessedPidsOrderedByDate the pids ordered by date (can be ascending or descending)
* @return the new date threshold (can be the same as the old threshold if all pids on the list share the same date)
*/
public Date updateThresholdAndCache(Date thePrevThreshold, Set<Long> theAlreadyProcessedPidsWithThresholdDate, List<Long> theProcessedPidsOrderedByDate) {
if (theProcessedPidsOrderedByDate.isEmpty()) {
return thePrevThreshold;
}
// Adjust the low threshold to be the last resource in the batch we found
Long pidOfLatestResourceInBatch = theProcessedPidsOrderedByDate.get(theProcessedPidsOrderedByDate.size() - 1);
Date latestUpdatedDate = myDateFromPid.apply(pidOfLatestResourceInBatch);
// The latest date has changed, create a new cache to store pids with that date
if (thePrevThreshold != latestUpdatedDate) {
theAlreadyProcessedPidsWithThresholdDate.clear();
}
theAlreadyProcessedPidsWithThresholdDate.add(pidOfLatestResourceInBatch);
Date newThreshold = latestUpdatedDate;
if (theProcessedPidsOrderedByDate.size() <= 1) {
return newThreshold;
}
// There is more than one resource in this batch, add any others with the same date. Assume the list is ordered by date.
for (int index = theProcessedPidsOrderedByDate.size() - 2; index >= 0; --index) {
Long pid = theProcessedPidsOrderedByDate.get(index);
Date newDate = myDateFromPid.apply(pid);
if (!latestUpdatedDate.equals(newDate)) {
break;
}
theAlreadyProcessedPidsWithThresholdDate.add(pid);
}
return newThreshold;
}
/**
* @param theDateFromPid this is a Function to extract a date from a resource id
* @return this, to allow method chaining
*/
public BatchDateThresholdUpdater setDateFromPid(Function<Long, Date> theDateFromPid) {
myDateFromPid = theDateFromPid;
return this;
}
}
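
The threshold-plus-cache contract above is easy to misread, so a small usage sketch follows; the pid-to-date map stands in for a real lookup and is purely hypothetical:

// Usage sketch for BatchDateThresholdUpdater with a hypothetical pid->date lookup.
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class ThresholdUpdaterSketch {
	public static void main(String[] args) {
		Map<Long, Date> updatedDates = new HashMap<>();
		updatedDates.put(1L, new Date(1000));
		updatedDates.put(2L, new Date(2000));
		updatedDates.put(3L, new Date(2000)); // shares the newest date with pid 2

		BatchDateThresholdUpdater updater = new BatchDateThresholdUpdater(updatedDates::get);
		Set<Long> seenAtThreshold = new HashSet<>();
		Date threshold = updater.updateThresholdAndCache(null, seenAtThreshold, Arrays.asList(1L, 2L, 3L));

		// The new threshold is the date of the last pid in the batch (2000), and every
		// pid sharing that date (2 and 3) is cached so the next pass can skip them.
		System.out.println(threshold.getTime()); // 2000
		System.out.println(seenAtThreshold);     // contains 2 and 3
	}
}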

@@ -1,65 +0,0 @@
package ca.uhn.fhir.jpa.batch.reader;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull;
import java.util.UUID;
/**
* This service is used by batch processes to search resources
*/
public class BatchResourceSearcher {
@Autowired
private SearchBuilderFactory mySearchBuilderFactory;
@Autowired
private DaoRegistry myDaoRegistry;
public IResultIterator performSearch(ResourceSearch theResourceSearch, Integer theBatchSize) {
String resourceName = theResourceSearch.getResourceName();
RequestPartitionId requestPartitionId = theResourceSearch.getRequestPartitionId();
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(resourceName);
final ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(dao, resourceName, theResourceSearch.getResourceType());
sb.setFetchSize(theBatchSize);
SystemRequestDetails requestDetails = buildSystemRequestDetails(requestPartitionId);
SearchRuntimeDetails searchRuntimeDetails = new SearchRuntimeDetails(requestDetails, UUID.randomUUID().toString());
IResultIterator resultIter = sb.createQuery(theResourceSearch.getSearchParameterMap(), searchRuntimeDetails, requestDetails, requestPartitionId);
return resultIter;
}
@Nonnull
private SystemRequestDetails buildSystemRequestDetails(RequestPartitionId theRequestPartitionId) {
SystemRequestDetails retval = new SystemRequestDetails();
retval.setRequestPartitionId(theRequestPartitionId);
return retval;
}
}

@@ -1,163 +0,0 @@
package ca.uhn.fhir.jpa.batch.reader;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.CommonBatchJobConfig;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemStream;
import org.springframework.batch.item.ItemStreamException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* This Spring Batch reader takes 3 parameters:
* {@link #JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search.
* {@link #JOB_PARAM_START_TIME}: The latest timestamp of resources to search for
* {@link #JOB_PARAM_REQUEST_PARTITION}: (optional) The partition of resources to read
* <p>
* The reader will return at most {@link #JOB_PARAM_BATCH_SIZE} pids every time it is called, or null
once no more matching resources are available. It returns the resources in chronological order and stores
where it's at in the Spring Batch execution context with the key {@link #CURRENT_THRESHOLD_LOW}. This is to permit
* restarting jobs that use this reader so it can pick up where it left off.
*/
public class CronologicalBatchAllResourcePidReader implements ItemReader<List<Long>>, ItemStream {
public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
public static final String JOB_PARAM_START_TIME = "start-time";
public static final String JOB_PARAM_REQUEST_PARTITION = "request-partition";
public static final String CURRENT_THRESHOLD_LOW = "current.threshold-low";
private static final Logger ourLog = LoggerFactory.getLogger(CronologicalBatchAllResourcePidReader.class);
private static final Date BEGINNING_OF_TIME = new Date(0);
@Autowired
private IResourceTableDao myResourceTableDao;
@Autowired
private DaoConfig myDaoConfig;
private Integer myBatchSize;
private Date myThresholdLow = BEGINNING_OF_TIME;
private final BatchDateThresholdUpdater myBatchDateThresholdUpdater = new BatchDateThresholdUpdater(this::dateFromPid);
private final Set<Long> myAlreadyProcessedPidsWithLowDate = new HashSet<>();
private Date myStartTime;
private RequestPartitionId myRequestPartitionId;
@Autowired
public void setBatchSize(@Value("#{jobParameters['" + JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
myBatchSize = theBatchSize;
}
@Autowired
public void setStartTime(@Value("#{jobParameters['" + JOB_PARAM_START_TIME + "']}") Date theStartTime) {
myStartTime = theStartTime;
}
public static JobParameters buildJobParameters(Integer theBatchSize, RequestPartitionId theRequestPartitionId) {
Map<String, JobParameter> map = new HashMap<>();
map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_REQUEST_PARTITION, new JobParameter(theRequestPartitionId.toJson()));
map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
if (theBatchSize != null) {
map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
}
JobParameters parameters = new JobParameters(map);
return parameters;
}
@Override
public List<Long> read() throws Exception {
List<Long> nextBatch = getNextBatch();
return nextBatch.isEmpty() ? null : nextBatch;
}
private Date dateFromPid(Long thePid) {
ResourceTable entity = myResourceTableDao.findById(thePid).orElseThrow(IllegalStateException::new);
return entity.getUpdatedDate();
}
@Override
public void open(ExecutionContext executionContext) throws ItemStreamException {
if (myBatchSize == null) {
myBatchSize = myDaoConfig.getExpungeBatchSize();
}
if (executionContext.containsKey(CURRENT_THRESHOLD_LOW)) {
myThresholdLow = new Date(executionContext.getLong(CURRENT_THRESHOLD_LOW));
}
}
@Override
public void update(ExecutionContext executionContext) throws ItemStreamException {
executionContext.putLong(CURRENT_THRESHOLD_LOW, myThresholdLow.getTime());
}
@Override
public void close() throws ItemStreamException {
}
private List<Long> getNextBatch() {
PageRequest page = PageRequest.of(0, myBatchSize);
List<Long> retval = new ArrayList<>();
Slice<Long> slice;
do {
if (myRequestPartitionId == null || myRequestPartitionId.isAllPartitions()) {
slice = myResourceTableDao.findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(page, myThresholdLow, myStartTime);
} else {
slice = myResourceTableDao.findIdsOfPartitionedResourcesWithinUpdatedRangeOrderedFromOldest(page, myThresholdLow, myStartTime, myRequestPartitionId.getFirstPartitionIdOrNull());
}
retval.addAll(slice.getContent());
retval.removeAll(myAlreadyProcessedPidsWithLowDate);
page = page.next();
} while (retval.size() < myBatchSize && slice.hasNext());
if (ourLog.isDebugEnabled()) {
ourLog.debug("Results: {}", retval);
}
myThresholdLow = myBatchDateThresholdUpdater.updateThresholdAndCache(myThresholdLow, myAlreadyProcessedPidsWithLowDate, retval);
return retval;
}
@Autowired
public void setRequestPartitionId(@Value("#{jobParameters['" + JOB_PARAM_REQUEST_PARTITION + "']}") String theRequestPartitionIdJson) throws JsonProcessingException {
if (theRequestPartitionIdJson == null) {
return;
}
myRequestPartitionId = RequestPartitionId.fromJson(theRequestPartitionIdJson);
}
}
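For orientation, the low-threshold paging that this reader delegates to BatchDateThresholdUpdater can be summarized in a few lines. The sketch below is illustrative only (ThresholdPager is not a HAPI class): it assumes candidates arrive oldest-first starting at the current threshold, advances the threshold to the date of the last pid returned, and caches the pids already emitted at that date so a restart does not reprocess them.

import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Function;

class ThresholdPager {
	private Date myThresholdLow = new Date(0); // persisted via the execution context for restarts
	private final Set<Long> mySeenAtThreshold = new HashSet<>(); // pids already emitted at myThresholdLow
	private final Function<Long, Date> myDateFromPid; // e.g. a lookup of the resource's updated date

	ThresholdPager(Function<Long, Date> theDateFromPid) {
		myDateFromPid = theDateFromPid;
	}

	// theCandidates must be ordered oldest-first, starting at myThresholdLow
	List<Long> nextBatch(List<Long> theCandidates) {
		List<Long> batch = new ArrayList<>(theCandidates);
		batch.removeAll(mySeenAtThreshold); // drop pids already returned at this timestamp
		if (!batch.isEmpty()) {
			Date newThreshold = myDateFromPid.apply(batch.get(batch.size() - 1));
			if (!newThreshold.equals(myThresholdLow)) {
				mySeenAtThreshold.clear(); // we have moved past the old timestamp
				myThresholdLow = newThreshold;
			}
			for (Long pid : batch) {
				if (myDateFromPid.apply(pid).equals(myThresholdLow)) {
					mySeenAtThreshold.add(pid);
				}
			}
		}
		return batch;
	}
}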

View File

@ -1,71 +0,0 @@
package ca.uhn.fhir.jpa.batch.reader;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
public class ReverseCronologicalBatchResourcePidReader extends BaseReverseCronologicalBatchPidReader {
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private BatchResourceSearcher myBatchResourceSearcher;
@Override
protected Set<Long> getNextPidBatch(ResourceSearch resourceSearch) {
Set<Long> retval = new LinkedHashSet<>();
addDateCountAndSortToSearch(resourceSearch);
// Perform the search
Integer batchSize = getBatchSize();
IResultIterator resultIter = myBatchResourceSearcher.performSearch(resourceSearch, batchSize);
Set<Long> alreadySeenPids = getAlreadySeenPids();
do {
List<Long> pids = resultIter.getNextResultBatch(batchSize).stream().map(ResourcePersistentId::getIdAsLong).collect(Collectors.toList());
retval.addAll(pids);
retval.removeAll(alreadySeenPids);
} while (retval.size() < batchSize && resultIter.hasNext());
return retval;
}
@Override
protected void setDateFromPidFunction(ResourceSearch resourceSearch) {
final IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceSearch.getResourceName());
setDateExtractorFunction(pid -> {
IBaseResource oldestResource = dao.readByPid(new ResourcePersistentId(pid));
return oldestResource.getMeta().getLastUpdated();
});
}
}

View File

@ -1,69 +0,0 @@
package ca.uhn.fhir.jpa.batch.writer;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.annotation.BeforeStep;
import org.springframework.batch.item.ItemWriter;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import java.util.List;
/**
* This Spring Batch writer accepts a list of SQL commands and executes them.
* The total number of entities updated or deleted is stored in the execution context
* with the key {@link #ENTITY_TOTAL_UPDATED_OR_DELETED}. The entire list is committed within a
* single transaction (provided by Spring Batch).
*/
public class SqlExecutorWriter implements ItemWriter<List<String>> {
private static final Logger ourLog = LoggerFactory.getLogger(SqlExecutorWriter.class);
public static final String ENTITY_TOTAL_UPDATED_OR_DELETED = "entity.total.updated-or-deleted";
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
private EntityManager myEntityManager;
private Long totalUpdated = 0L;
private StepExecution myStepExecution;
@BeforeStep
public void setStepExecution(StepExecution stepExecution) {
myStepExecution = stepExecution;
}
@Override
public void write(List<? extends List<String>> theSqlLists) throws Exception {
// Note that since our chunk size is 1, there will always be exactly one list
for (List<String> sqlList : theSqlLists) {
ourLog.info("Executing {} sql commands", sqlList.size());
for (String sql : sqlList) {
ourLog.trace("Executing sql {}", sql);
totalUpdated += myEntityManager.createNativeQuery(sql).executeUpdate();
myStepExecution.getExecutionContext().putLong(ENTITY_TOTAL_UPDATED_OR_DELETED, totalUpdated);
}
}
ourLog.debug("{} records updated", totalUpdated);
}
}
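Because the count above is written to the step-level context, it only becomes visible on the JobExecution once an ExecutionContextPromotionListener promotes it (as in the DeleteExpungeJobConfig further down). A minimal sketch of reading it after the job completes, assuming that promotion is in place:

import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import org.springframework.batch.core.JobExecution;

final class SqlExecutorCounts {
	// Valid once the job has completed and the step-level key has been promoted
	static long entityTotalUpdatedOrDeleted(JobExecution theJobExecution) {
		return theJobExecution.getExecutionContext()
			.getLong(SqlExecutorWriter.ENTITY_TOTAL_UPDATED_OR_DELETED);
	}
}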

View File

@ -0,0 +1,52 @@
package ca.uhn.fhir.jpa.config;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
import ca.uhn.fhir.jpa.api.svc.IDeleteExpungeSvc;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.expunge.ResourceTableFKProvider;
import ca.uhn.fhir.jpa.dao.index.IJpaIdHelperService;
import ca.uhn.fhir.jpa.delete.batch2.DeleteExpungeSqlBuilder;
import ca.uhn.fhir.jpa.delete.batch2.DeleteExpungeSvcImpl;
import ca.uhn.fhir.jpa.reindex.Batch2DaoSvcImpl;
import org.springframework.context.annotation.Bean;
import javax.persistence.EntityManager;
public class Batch2SupportConfig {
@Bean
public IBatch2DaoSvc batch2DaoSvc() {
return new Batch2DaoSvcImpl();
}
@Bean
public IDeleteExpungeSvc deleteExpungeSvc(EntityManager theEntityManager, DeleteExpungeSqlBuilder theDeleteExpungeSqlBuilder) {
return new DeleteExpungeSvcImpl(theEntityManager, theDeleteExpungeSqlBuilder);
}
@Bean
DeleteExpungeSqlBuilder deleteExpungeSqlBuilder(ResourceTableFKProvider theResourceTableFKProvider, DaoConfig theDaoConfig, IJpaIdHelperService theIdHelper, IResourceLinkDao theResourceLinkDao) {
return new DeleteExpungeSqlBuilder(theResourceTableFKProvider, theDaoConfig, theIdHelper, theResourceLinkDao);
}
}

View File

@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.config;
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeJobSubmitterImpl;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
@ -9,13 +10,9 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.svc.IResourceReindexSvc;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
import ca.uhn.fhir.jpa.batch.mdm.batch.MdmClearJobSubmitterImpl;
import ca.uhn.fhir.jpa.batch.reader.BatchResourceSearcher;
import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor;
import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
@ -64,7 +61,6 @@ import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderUri;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictFinderService;
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
import ca.uhn.fhir.jpa.delete.DeleteExpungeJobSubmitterImpl;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.graphql.DaoRegistryGraphQLStorageServices;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
@ -87,15 +83,14 @@ import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider;
import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
import ca.uhn.fhir.jpa.provider.ValueSetOperationProvider;
import ca.uhn.fhir.jpa.provider.r4.MemberMatcherR4Helper;
import ca.uhn.fhir.jpa.reindex.ResourceReindexSvcImpl;
import ca.uhn.fhir.jpa.sched.AutowiringSpringBeanJobFactory;
import ca.uhn.fhir.jpa.sched.HapiSchedulerServiceImpl;
import ca.uhn.fhir.jpa.search.SearchStrategyFactory;
import ca.uhn.fhir.jpa.search.ISynchronousSearchSvc;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory;
import ca.uhn.fhir.jpa.search.PersistedJpaSearchFirstPageBundleProvider;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.search.SearchStrategyFactory;
import ca.uhn.fhir.jpa.search.SynchronousSearchSvcImpl;
import ca.uhn.fhir.jpa.search.builder.QueryStack;
import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
@ -141,14 +136,12 @@ import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.validation.JpaResourceLoader;
import ca.uhn.fhir.jpa.validation.ValidationSettings;
import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor;
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor;
import ca.uhn.fhir.rest.server.provider.DeleteExpungeProvider;
import ca.uhn.fhir.util.ThreadPoolUtil;
import org.hl7.fhir.common.hapi.validation.support.UnknownCodeSystemWarningValidationSupport;
import org.hl7.fhir.instance.model.api.IBaseResource;
@ -199,7 +192,8 @@ import java.util.Date;
BeanPostProcessorConfig.class,
BatchJobsConfig.class,
SearchParamConfig.class,
ValidationSupportConfig.class
ValidationSupportConfig.class,
Batch2SupportConfig.class
})
public class JpaConfig {
public static final String JPA_VALIDATION_SUPPORT_CHAIN = "myJpaValidationSupportChain";
@ -355,11 +349,6 @@ public class JpaConfig {
return new ResourceReindexer(theFhirContext);
}
@Bean
public BatchResourceSearcher myBatchResourceSearcher() {
return new BatchResourceSearcher();
}
@Bean
public HapiFhirHibernateJpaDialect hibernateJpaDialect(FhirContext theFhirContext) {
return new HapiFhirHibernateJpaDialect(theFhirContext.getLocalizer());
@ -426,12 +415,6 @@ public class JpaConfig {
return new MdmLinkExpandSvc();
}
@Bean
IMdmClearJobSubmitter mdmClearJobSubmitter() {
return new MdmClearJobSubmitterImpl();
}
@Bean
@Lazy
public TerminologyUploaderProvider terminologyUploaderProvider() {
@ -471,18 +454,6 @@ public class JpaConfig {
return new DeleteExpungeJobSubmitterImpl();
}
@Bean
@Lazy
public PartitionedUrlValidator partitionedUrlValidator() {
return new PartitionedUrlValidator();
}
@Bean
@Lazy
public DeleteExpungeProvider deleteExpungeProvider(FhirContext theFhirContext, IDeleteExpungeJobSubmitter theDeleteExpungeJobSubmitter) {
return new DeleteExpungeProvider(theFhirContext, theDeleteExpungeJobSubmitter);
}
@Bean
@Lazy
public IBulkDataImportSvc bulkDataImportSvc() {
@ -519,11 +490,6 @@ public class JpaConfig {
return new ResourceVersionSvcDaoImpl();
}
@Bean
public IResourceReindexSvc resourceReindexSvc() {
return new ResourceReindexSvcImpl();
}
/* **************************************************************** *
* Prototype Beans Below *
* **************************************************************** */

View File

@ -117,8 +117,6 @@ import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.transaction.PlatformTransactionManager;
@ -670,9 +668,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
List<String> urlsToDeleteExpunge = Collections.singletonList(theUrl);
try {
JobExecution jobExecution = myDeleteExpungeJobSubmitter.submitJob(getConfig().getExpungeBatchSize(), urlsToDeleteExpunge, theRequest);
return new DeleteMethodOutcome(createInfoOperationOutcome("Delete job submitted with id " + jobExecution.getId()));
} catch (JobParametersInvalidException e) {
String jobId = myDeleteExpungeJobSubmitter.submitJob(getConfig().getExpungeBatchSize(), urlsToDeleteExpunge, theRequest);
return new DeleteMethodOutcome(createInfoOperationOutcome("Delete job submitted with id " + jobId));
} catch (InvalidRequestException e) {
throw new InvalidRequestException(Msg.code(965) + "Invalid Delete Expunge Request: " + e.getMessage(), e);
}
}
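With this change, the diagnostics string returned by a delete-expunge request carries a batch2 instance id (a string) rather than a numeric Spring Batch execution id. A hedged client-side sketch, with an illustrative base URL:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.client.api.IGenericClient;

class DeleteExpungeClientSketch {
	public static void main(String[] theArgs) {
		IGenericClient client = FhirContext.forR4Cached()
			.newRestfulGenericClient("http://localhost:8000/fhir"); // illustrative base URL
		MethodOutcome outcome = client.delete()
			.resourceConditionalByUrl("Patient?active=false&_expunge=true")
			.execute();
		// Diagnostics read e.g. "Delete job submitted with id <batch2-instance-id>"
		System.out.println(outcome.getOperationOutcome());
	}
}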

View File

@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.delete.job;
package ca.uhn.fhir.jpa.delete.batch2;
/*-
* #%L
@ -23,7 +23,6 @@ package ca.uhn.fhir.jpa.delete.job;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.expunge.PartitionRunner;
import ca.uhn.fhir.jpa.dao.expunge.ResourceForeignKey;
import ca.uhn.fhir.jpa.dao.expunge.ResourceTableFKProvider;
import ca.uhn.fhir.jpa.dao.index.IJpaIdHelperService;
@ -32,51 +31,50 @@ import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;
import java.util.stream.Collectors;
/**
* Input: list of pids of resources to be deleted and expunged
* Output: list of sql statements to be executed
*/
public class DeleteExpungeProcessor implements ItemProcessor<List<Long>, List<String>> {
private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeProcessor.class);
public class DeleteExpungeSqlBuilder {
private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeSqlBuilder.class);
public static final String PROCESS_NAME = "Delete Expunging";
public static final String THREAD_PREFIX = "delete-expunge";
private final ResourceTableFKProvider myResourceTableFKProvider;
private final DaoConfig myDaoConfig;
private final IJpaIdHelperService myIdHelper;
private final IResourceLinkDao myResourceLinkDao;
@Autowired
ResourceTableFKProvider myResourceTableFKProvider;
@Autowired
DaoConfig myDaoConfig;
@Autowired
IJpaIdHelperService myIdHelper;
@Autowired
IResourceLinkDao myResourceLinkDao;
public DeleteExpungeSqlBuilder(ResourceTableFKProvider theResourceTableFKProvider, DaoConfig theDaoConfig, IJpaIdHelperService theIdHelper, IResourceLinkDao theResourceLinkDao) {
myResourceTableFKProvider = theResourceTableFKProvider;
myDaoConfig = theDaoConfig;
myIdHelper = theIdHelper;
myResourceLinkDao = theResourceLinkDao;
}
@Override
public List<String> process(List<Long> thePids) throws Exception {
validateOkToDeleteAndExpunge(thePids);
List<String> retval = new ArrayList<>();
@Nonnull
List<String> convertPidsToDeleteExpungeSql(List<ResourcePersistentId> thePersistentIds) {
List<Long> pids = ResourcePersistentId.toLongList(thePersistentIds);
String pidListString = thePids.toString().replace("[", "(").replace("]", ")");
validateOkToDeleteAndExpunge(pids);
List<String> rawSql = new ArrayList<>();
String pidListString = pids.toString().replace("[", "(").replace("]", ")");
List<ResourceForeignKey> resourceForeignKeys = myResourceTableFKProvider.getResourceForeignKeys();
for (ResourceForeignKey resourceForeignKey : resourceForeignKeys) {
retval.add(deleteRecordsByColumnSql(pidListString, resourceForeignKey));
rawSql.add(deleteRecordsByColumnSql(pidListString, resourceForeignKey));
}
// Lastly we need to delete records from the resource table that all of these other tables link to:
ResourceForeignKey resourceTablePk = new ResourceForeignKey("HFJ_RESOURCE", "RES_ID");
retval.add(deleteRecordsByColumnSql(pidListString, resourceTablePk));
return retval;
rawSql.add(deleteRecordsByColumnSql(pidListString, resourceTablePk));
return rawSql;
}
public void validateOkToDeleteAndExpunge(List<Long> thePids) {
@ -87,9 +85,7 @@ public class DeleteExpungeProcessor implements ItemProcessor<List<Long>, List<St
List<ResourcePersistentId> targetPidsAsResourceIds = ResourcePersistentId.fromLongList(thePids);
List<ResourceLink> conflictResourceLinks = Collections.synchronizedList(new ArrayList<>());
PartitionRunner partitionRunner = new PartitionRunner(PROCESS_NAME, THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount());
Consumer<List<ResourcePersistentId>> listConsumer = someTargetPids -> findResourceLinksWithTargetPidIn(targetPidsAsResourceIds, someTargetPids, conflictResourceLinks);
partitionRunner.runInPartitionedThreads(targetPidsAsResourceIds, listConsumer);
findResourceLinksWithTargetPidIn(targetPidsAsResourceIds, targetPidsAsResourceIds, conflictResourceLinks);
if (conflictResourceLinks.isEmpty()) {
return;
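For orientation, the SQL returned by convertPidsToDeleteExpungeSql is one DELETE per referencing table followed by a final DELETE against HFJ_RESOURCE itself. A hedged illustration for pids [1, 2, 3] — the real table/column pairs come from ResourceTableFKProvider; HFJ_RES_LINK is just one example:

import java.util.List;

final class DeleteExpungeSqlShape {
	static final List<String> EXAMPLE = List.of(
		"DELETE FROM HFJ_RES_LINK WHERE SRC_RESOURCE_ID IN (1,2,3)",
		// ... one statement per foreign key found by ResourceTableFKProvider ...
		"DELETE FROM HFJ_RESOURCE WHERE RES_ID IN (1,2,3)");
}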

View File

@ -0,0 +1,60 @@
package ca.uhn.fhir.jpa.delete.batch2;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.api.svc.IDeleteExpungeSvc;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.EntityManager;
import java.util.List;
@Transactional(propagation = Propagation.MANDATORY)
public class DeleteExpungeSvcImpl implements IDeleteExpungeSvc {
private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeSvcImpl.class);
private final EntityManager myEntityManager;
private final DeleteExpungeSqlBuilder myDeleteExpungeSqlBuilder;
public DeleteExpungeSvcImpl(EntityManager theEntityManager, DeleteExpungeSqlBuilder theDeleteExpungeSqlBuilder) {
myEntityManager = theEntityManager;
myDeleteExpungeSqlBuilder = theDeleteExpungeSqlBuilder;
}
@Override
public void deleteExpunge(List<ResourcePersistentId> thePersistentIds) {
List<String> sqlList = myDeleteExpungeSqlBuilder.convertPidsToDeleteExpungeSql(thePersistentIds);
ourLog.debug("Executing {} delete expunge sql commands", sqlList.size());
long totalDeleted = 0;
for (String sql : sqlList) {
ourLog.trace("Executing sql {}", sql);
totalDeleted += myEntityManager.createNativeQuery(sql).executeUpdate();
}
ourLog.info("{} records deleted", totalDeleted);
// TODO KHS instead of logging progress, produce result chunks that get aggregated into a delete expunge report
}
}
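Note the MANDATORY propagation above: the service never opens its own transaction, so callers must already be inside one. A minimal caller sketch, assuming standard Spring wiring (DeleteExpungeCaller is illustrative, not HAPI code):

import ca.uhn.fhir.jpa.api.svc.IDeleteExpungeSvc;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

import java.util.List;

class DeleteExpungeCaller {
	private final TransactionTemplate myTxTemplate;
	private final IDeleteExpungeSvc myDeleteExpungeSvc;

	DeleteExpungeCaller(PlatformTransactionManager theTxManager, IDeleteExpungeSvc theSvc) {
		myTxTemplate = new TransactionTemplate(theTxManager);
		myDeleteExpungeSvc = theSvc;
	}

	void deleteExpungeInTx(List<ResourcePersistentId> thePids) {
		// Propagation.MANDATORY in the service requires this surrounding transaction
		myTxTemplate.executeWithoutResult(theStatus -> myDeleteExpungeSvc.deleteExpunge(thePids));
	}
}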

View File

@ -1,102 +0,0 @@
package ca.uhn.fhir.jpa.delete.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.listener.ExecutionContextPromotionListener;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import java.util.List;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.DELETE_EXPUNGE_JOB_NAME;
/**
* Spring batch Job configuration file. Contains all necessary plumbing to run a
* Delete Expunge job.
*/
@Configuration
public class DeleteExpungeJobConfig {
public static final String DELETE_EXPUNGE_URL_LIST_STEP_NAME = "delete-expunge-url-list-step";
@Autowired
private StepBuilderFactory myStepBuilderFactory;
@Autowired
private JobBuilderFactory myJobBuilderFactory;
@Autowired
private MultiUrlJobParameterValidator myMultiUrlProcessorParameterValidator;
@Autowired
private PidReaderCounterListener myPidCountRecorderListener;
@Autowired
private ReverseCronologicalBatchResourcePidReader myReverseCronologicalBatchResourcePidReader;
@Autowired
private SqlExecutorWriter mySqlExecutorWriter;
@Bean(name = DELETE_EXPUNGE_JOB_NAME)
@Lazy
public Job deleteExpungeJob() {
return myJobBuilderFactory.get(DELETE_EXPUNGE_JOB_NAME)
.validator(myMultiUrlProcessorParameterValidator)
.start(deleteExpungeUrlListStep())
.build();
}
@Bean
public Step deleteExpungeUrlListStep() {
return myStepBuilderFactory.get(DELETE_EXPUNGE_URL_LIST_STEP_NAME)
.<List<Long>, List<String>>chunk(1)
.reader(myReverseCronologicalBatchResourcePidReader)
.processor(deleteExpungeProcessor())
.writer(mySqlExecutorWriter)
.listener(myPidCountRecorderListener)
.listener(deleteExpungePromotionListener())
.build();
}
@Bean
public ExecutionContextPromotionListener deleteExpungePromotionListener() {
ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();
listener.setKeys(new String[]{SqlExecutorWriter.ENTITY_TOTAL_UPDATED_OR_DELETED, PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED});
return listener;
}
@Bean
@StepScope
public DeleteExpungeProcessor deleteExpungeProcessor() {
return new DeleteExpungeProcessor();
}
}

View File

@ -29,7 +29,7 @@ import ca.uhn.fhir.jpa.api.pid.EmptyResourcePidList;
import ca.uhn.fhir.jpa.api.pid.HomogeneousResourcePidList;
import ca.uhn.fhir.jpa.api.pid.IResourcePidList;
import ca.uhn.fhir.jpa.api.pid.MixedResourcePidList;
import ca.uhn.fhir.jpa.api.svc.IResourceReindexSvc;
import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
@ -51,7 +51,7 @@ import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;
public class ResourceReindexSvcImpl implements IResourceReindexSvc {
public class Batch2DaoSvcImpl implements IBatch2DaoSvc {
@Autowired
private IResourceTableDao myResourceTableDao;

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -21,10 +21,8 @@ package ca.uhn.fhir.jpa.mdm.config;
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.batch.mdm.batch.MdmBatchJobSubmitterFactoryImpl;
import ca.uhn.fhir.jpa.dao.mdm.MdmLinkDeleteSvc;
import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptor;
import ca.uhn.fhir.mdm.api.IMdmBatchJobSubmitterFactory;
import ca.uhn.fhir.mdm.rules.config.MdmRuleValidator;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import org.springframework.context.annotation.Bean;
@ -43,10 +41,6 @@ public class MdmCommonConfig {
return new MdmSearchExpandingInterceptor();
}
@Bean
IMdmBatchJobSubmitterFactory mdmBatchJobSubmitterFactory() {
return new MdmBatchJobSubmitterFactoryImpl();
}
@Bean
MdmLinkDeleteSvc mdmLinkDeleteSvc() {

View File

@ -27,7 +27,6 @@ import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.mdm.api.IGoldenResourceMergerSvc;
import ca.uhn.fhir.mdm.api.IMdmBatchJobSubmitterFactory;
import ca.uhn.fhir.mdm.api.IMdmControllerSvc;
import ca.uhn.fhir.mdm.api.IMdmLinkCreateSvc;
import ca.uhn.fhir.mdm.api.IMdmLinkQuerySvc;
@ -81,8 +80,6 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
@Autowired
IMdmLinkCreateSvc myIMdmLinkCreateSvc;
@Autowired
IMdmBatchJobSubmitterFactory myMdmBatchJobSubmitterFactory;
@Autowired
IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
@Autowired
IJobCoordinator myJobCoordinator;

View File

@ -4,7 +4,6 @@ import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.jpa.mdm.BaseMdmR4Test;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.test.Batch2JobHelper;
import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
import ca.uhn.fhir.mdm.api.IMdmControllerSvc;
import ca.uhn.fhir.mdm.api.IMdmSubmitSvc;
import ca.uhn.fhir.mdm.provider.MdmControllerHelper;
@ -34,8 +33,6 @@ public abstract class BaseProviderR4Test extends BaseMdmR4Test {
@Autowired
private IMdmControllerSvc myMdmControllerSvc;
@Autowired
private IMdmClearJobSubmitter myMdmClearJobSubmitter;
@Autowired
private IMdmSubmitSvc myMdmSubmitSvc;
@Autowired
private MdmSettings myMdmSettings;

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -24,12 +24,17 @@ import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.api.IJobMaintenanceService;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.StatusEnum;
import org.awaitility.core.ConditionTimeoutException;
import org.hamcrest.Matchers;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Collection;
import java.util.List;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.fail;
public class Batch2JobHelper {
@ -39,11 +44,12 @@ public class Batch2JobHelper {
@Autowired
private IJobCoordinator myJobCoordinator;
public void awaitJobCompletion(String theId) {
public JobInstance awaitJobCompletion(String theId) {
await().until(() -> {
myJobMaintenanceService.runMaintenancePass();
return myJobCoordinator.getInstance(theId).getStatus();
}, equalTo(StatusEnum.COMPLETED));
return myJobCoordinator.getInstance(theId);
}
public void awaitSingleChunkJobCompletion(String theId) {
@ -79,4 +85,38 @@ public class Batch2JobHelper {
public void awaitGatedStepId(String theExpectedGatedStepId, String theInstanceId) {
await().until(() -> theExpectedGatedStepId.equals(myJobCoordinator.getInstance(theInstanceId).getCurrentGatedStepId()));
}
public long getCombinedRecordsProcessed(String theJobId) {
JobInstance job = myJobCoordinator.getInstance(theJobId);
return job.getCombinedRecordsProcessed();
}
// TODO KHS I don't think this works yet
public void awaitAllCompletions(String theJobDefinitionId) {
List<JobInstance> instances = myJobCoordinator.getInstancesbyJobDefinitionIdAndEndedStatus(theJobDefinitionId, false, 100, 0);
awaitJobCompletions(instances);
}
protected void awaitJobCompletions(Collection<JobInstance> theJobInstances) {
// This intermittently fails for unknown reasons, so we collect extra
// diagnostics here to improve what we output when it fails
for (JobInstance jobInstance : theJobInstances) {
try {
awaitJobCompletion(jobInstance.getInstanceId());
} catch (ConditionTimeoutException e) {
StringBuilder msg = new StringBuilder();
msg.append("Failed waiting for job to complete.\n");
msg.append("Error: ").append(e).append("\n");
msg.append("Statuses:");
for (JobInstance instance : theJobInstances) {
msg.append("\n * Execution ")
.append(instance.getInstanceId())
.append(" has status ")
.append(instance.getStatus());
}
fail(msg.toString());
}
}
}
}
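A typical usage sketch for the helper above, assuming a test base class that autowires the coordinator and helper beans (Batch2UsageSketch and runAndAwait are illustrative names):

import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.jpa.test.Batch2JobHelper;
import org.springframework.beans.factory.annotation.Autowired;

import static org.junit.jupiter.api.Assertions.assertEquals;

abstract class Batch2UsageSketch {
	@Autowired
	IJobCoordinator myJobCoordinator;
	@Autowired
	Batch2JobHelper myBatch2JobHelper;

	void runAndAwait(JobInstanceStartRequest theRequest) {
		String instanceId = myJobCoordinator.startInstance(theRequest);
		JobInstance completed = myBatch2JobHelper.awaitJobCompletion(instanceId);
		assertEquals(StatusEnum.COMPLETED, completed.getStatus());
	}
}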

View File

@ -1,29 +0,0 @@
package ca.uhn.fhir.jpa.batch.job;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import org.springframework.batch.core.JobParameters;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.List;
public final class MultiUrlJobParameterUtil {
private MultiUrlJobParameterUtil() {
}
@Nonnull
public static JobParameters buildJobParameters(String... theUrls) {
List<PartitionedUrl> partitionedUrls = new ArrayList<>();
for (String url : theUrls) {
partitionedUrls.add(new PartitionedUrl(url, RequestPartitionId.defaultPartition()));
}
RequestListJson requestListJson = new RequestListJson();
requestListJson.setPartitionedUrls(partitionedUrls);
return ReverseCronologicalBatchResourcePidReader.buildJobParameters(ProviderConstants.OPERATION_REINDEX, 2401, requestListJson);
}
}

View File

@ -1,64 +0,0 @@
package ca.uhn.fhir.jpa.batch.mdm.job;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.mdm.batch.job.MdmLinkDeleter;
import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.transaction.PlatformTransactionManager;
import java.util.ArrayList;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.anyList;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
public class MdmLinkDeleterTest {
@Mock
private DaoConfig myDaoConfig;
@Mock
private PlatformTransactionManager myPlatformTransactionManager;
@Mock
private IMdmLinkDao myMdmLinkDao;
@Captor
ArgumentCaptor<List<Long>> myPidListCaptor;
@InjectMocks
private MdmLinkDeleter myMdmLinkDeleter;
@Test
public void testMdmLinkDeleterSplitsPidList() throws Exception {
int threadCount = 4;
int batchSize = 5;
when(myDaoConfig.getReindexBatchSize()).thenReturn(batchSize);
when(myDaoConfig.getReindexThreadCount()).thenReturn(threadCount);
List<Long> allPidsList = new ArrayList<>();
int count = threadCount * batchSize;
for (long i = 0; i < count; ++i) {
allPidsList.add(i);
}
myMdmLinkDeleter.process(allPidsList);
verify(myMdmLinkDao, times(threadCount)).findAllById(myPidListCaptor.capture());
verify(myMdmLinkDao, times(threadCount)).deleteAll(anyList());
verifyNoMoreInteractions(myMdmLinkDao);
List<List<Long>> pidListList = myPidListCaptor.getAllValues();
assertEquals(threadCount, pidListList.size());
for (List<Long> pidList : pidListList) {
assertEquals(batchSize, pidList.size());
}
}
}

View File

@ -1,125 +0,0 @@
package ca.uhn.fhir.jpa.batch.reader;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertEquals;
@ExtendWith(MockitoExtension.class)
class BatchDateThresholdUpdaterTest {
static Date LATE_DATE = new Date();
static Date EARLY_DATE = new Date(LATE_DATE.getTime() - 1000);
static Long PID1 = 1L;
static Long PID2 = 2L;
static Long PID3 = 3L;
BatchDateThresholdUpdater mySvc = new BatchDateThresholdUpdater();
@Test
public void testEmptyList() {
Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, Collections.emptySet(), Collections.emptyList());
assertEquals(LATE_DATE, newThreshold);
}
@Test
public void oneItem() {
mySvc.setDateFromPid(pid -> LATE_DATE);
Set<Long> seenPids = new HashSet<>();
Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Collections.singletonList(PID1));
assertEquals(LATE_DATE, newThreshold);
assertThat(seenPids, contains(PID1));
}
@Test
public void twoItemsSameDate() {
mySvc.setDateFromPid(pid -> LATE_DATE);
Set<Long> seenPids = new HashSet<>();
Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2));
assertEquals(LATE_DATE, newThreshold);
assertThat(seenPids, contains(PID1, PID2));
}
@Test
public void twoItemsDiffDate() {
List<Date> dates = Arrays.asList(EARLY_DATE, LATE_DATE);
mySvc.setDateFromPid(pid -> dates.get(pid.intValue() - 1));
Set<Long> seenPids = new HashSet<>();
Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2));
assertEquals(LATE_DATE, newThreshold);
assertThat(seenPids, contains(PID2));
}
@Test
public void threeItemsSameDate() {
mySvc.setDateFromPid(pid -> LATE_DATE);
Set<Long> seenPids = new HashSet<>();
Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2, PID3));
assertEquals(LATE_DATE, newThreshold);
assertThat(seenPids, contains(PID1, PID2, PID3));
}
@Test
public void threeItemsDifferentEEL() {
List<Date> dates = Arrays.asList(EARLY_DATE, EARLY_DATE, LATE_DATE);
mySvc.setDateFromPid(pid -> dates.get(pid.intValue() - 1));
Set<Long> seenPids = new HashSet<>();
Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2, PID3));
assertEquals(LATE_DATE, newThreshold);
assertThat(seenPids, contains(PID3));
}
@Test
public void threeItemsDifferentELL() {
List<Date> dates = Arrays.asList(EARLY_DATE, LATE_DATE, LATE_DATE);
mySvc.setDateFromPid(pid -> dates.get(pid.intValue() - 1));
Set<Long> seenPids = new HashSet<>();
Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2, PID3));
assertEquals(LATE_DATE, newThreshold);
assertThat(seenPids, contains(PID2, PID3));
}
@Test
public void threeItemsDifferentLEE() {
List<Date> dates = Arrays.asList(LATE_DATE, EARLY_DATE, EARLY_DATE);
mySvc.setDateFromPid(pid -> dates.get(pid.intValue() - 1));
Set<Long> seenPids = new HashSet<>();
Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2, PID3));
assertEquals(EARLY_DATE, newThreshold);
assertThat(seenPids, contains(PID2, PID3));
}
@Test
public void threeItemsDifferentLLE() {
List<Date> dates = Arrays.asList(LATE_DATE, LATE_DATE, EARLY_DATE);
mySvc.setDateFromPid(pid -> dates.get(pid.intValue() - 1));
Set<Long> seenPids = new HashSet<>();
Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2, PID3));
assertEquals(EARLY_DATE, newThreshold);
assertThat(seenPids, contains(PID3));
}
@Test
public void oneHundredItemsSameDate() {
mySvc.setDateFromPid(pid -> LATE_DATE);
Set<Long> seenPids = new HashSet<>();
List<Long> bigList = new ArrayList<>();
for (int i = 0; i < 100; ++i) {
bigList.add((long) i);
}
Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, bigList);
assertEquals(LATE_DATE, newThreshold);
assertThat(seenPids, hasSize(100));
}
}

View File

@ -1,173 +0,0 @@
package ca.uhn.fhir.jpa.batch.reader;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.jsonldjava.shaded.com.google.common.collect.Lists;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
class ReverseCronologicalBatchResourcePidReaderTest {
private static final int BATCH_SIZE = 3;
static FhirContext ourFhirContext = FhirContext.forR4Cached();
static String URL_A = "a";
static String URL_B = "b";
static String URL_C = "c";
static Set<ResourcePersistentId> emptySet = Collections.emptySet();
static RequestPartitionId partId = RequestPartitionId.defaultPartition();
Patient myPatient;
@Mock
MatchUrlService myMatchUrlService;
@Mock
DaoRegistry myDaoRegistry;
@Mock
IFhirResourceDao<Patient> myPatientDao;
private final RequestPartitionId myDefaultPartitionId = RequestPartitionId.defaultPartition();
@Mock
private IResultIterator myResultIter;
@InjectMocks
ReverseCronologicalBatchResourcePidReader myReader = new ReverseCronologicalBatchResourcePidReader();
@Mock
private BatchResourceSearcher myBatchResourceSearcher;
@BeforeEach
public void before() throws JsonProcessingException {
RequestListJson requestListJson = new RequestListJson();
requestListJson.setPartitionedUrls(Lists.newArrayList(new PartitionedUrl(URL_A, partId), new PartitionedUrl(URL_B, partId), new PartitionedUrl(URL_C, partId)));
ObjectMapper mapper = new ObjectMapper();
String requestListJsonString = mapper.writeValueAsString(requestListJson);
myReader.setRequestListJson(requestListJsonString);
myReader.setBatchSize(BATCH_SIZE);
SearchParameterMap map = new SearchParameterMap();
RuntimeResourceDefinition patientResDef = ourFhirContext.getResourceDefinition("Patient");
when(myMatchUrlService.getResourceSearch(URL_A, myDefaultPartitionId)).thenReturn(new ResourceSearch(patientResDef, map, myDefaultPartitionId));
when(myMatchUrlService.getResourceSearch(URL_B, myDefaultPartitionId)).thenReturn(new ResourceSearch(patientResDef, map, myDefaultPartitionId));
when(myMatchUrlService.getResourceSearch(URL_C, myDefaultPartitionId)).thenReturn(new ResourceSearch(patientResDef, map, myDefaultPartitionId));
when(myDaoRegistry.getResourceDao("Patient")).thenReturn(myPatientDao);
myPatient = new Patient();
when(myPatientDao.readByPid(any())).thenReturn(myPatient);
Calendar cal = new GregorianCalendar(2021, 1, 1);
myPatient.getMeta().setLastUpdated(cal.getTime());
when(myBatchResourceSearcher.performSearch(any(), any())).thenReturn(myResultIter);
}
private Set<ResourcePersistentId> buildPidSet(Integer... thePids) {
return Arrays.stream(thePids)
.map(Long::valueOf)
.map(ResourcePersistentId::new)
.collect(Collectors.toSet());
}
@Test
public void test3x1() throws Exception {
when(myResultIter.getNextResultBatch(BATCH_SIZE))
.thenReturn(buildPidSet(1, 2, 3))
.thenReturn(emptySet)
.thenReturn(buildPidSet(4, 5, 6))
.thenReturn(emptySet)
.thenReturn(buildPidSet(7, 8))
.thenReturn(emptySet);
assertListEquals(myReader.read(), 1, 2, 3);
assertListEquals(myReader.read(), 4, 5, 6);
assertListEquals(myReader.read(), 7, 8);
assertNull(myReader.read());
}
@Test
public void testReadRepeat() throws Exception {
when(myResultIter.getNextResultBatch(BATCH_SIZE))
.thenReturn(buildPidSet(1, 2, 3))
.thenReturn(buildPidSet(1, 2, 3))
.thenReturn(buildPidSet(2, 3, 4))
.thenReturn(buildPidSet(4, 5))
.thenReturn(emptySet);
when(myResultIter.hasNext())
.thenReturn(true)
.thenReturn(true)
.thenReturn(true)
.thenReturn(true)
.thenReturn(false);
assertListEquals(myReader.read(), 1, 2, 3);
assertListEquals(myReader.read(), 4, 5);
assertNull(myReader.read());
}
@Test
public void test1x3start() throws Exception {
when(myResultIter.getNextResultBatch(BATCH_SIZE))
.thenReturn(buildPidSet(1, 2, 3))
.thenReturn(buildPidSet(4, 5, 6))
.thenReturn(buildPidSet(7, 8))
.thenReturn(emptySet)
.thenReturn(emptySet)
.thenReturn(emptySet);
assertListEquals(myReader.read(), 1, 2, 3);
assertListEquals(myReader.read(), 4, 5, 6);
assertListEquals(myReader.read(), 7, 8);
assertNull(myReader.read());
}
@Test
public void test1x3end() throws Exception {
when(myResultIter.getNextResultBatch(BATCH_SIZE))
.thenReturn(emptySet)
.thenReturn(emptySet)
.thenReturn(buildPidSet(1, 2, 3))
.thenReturn(buildPidSet(4, 5, 6))
.thenReturn(buildPidSet(7, 8))
.thenReturn(emptySet);
assertListEquals(myReader.read(), 1, 2, 3);
assertListEquals(myReader.read(), 4, 5, 6);
assertListEquals(myReader.read(), 7, 8);
assertNull(myReader.read());
}
private void assertListEquals(List<Long> theList, Integer... theValues) {
assertThat(theList, hasSize(theValues.length));
for (int i = 0; i < theList.size(); ++i) {
assertEquals(theList.get(i), Long.valueOf(theValues[i]));
}
}
}

View File

@ -1,13 +1,13 @@
package ca.uhn.fhir.jpa.dao.expunge;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
@ -21,8 +21,6 @@ import org.hl7.fhir.r4.model.Reference;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;
@ -90,7 +88,6 @@ class DeleteExpungeDaoTest extends BaseJpaR4Test {
assertEquals(Msg.code(964) + "_expunge cannot be used with _cascade", e.getMessage());
}
@Test
public void testDeleteExpungeThrowExceptionIfForeignKeyLinksExists() {
// setup
@ -104,19 +101,19 @@ class DeleteExpungeDaoTest extends BaseJpaR4Test {
// execute
DeleteMethodOutcome outcome = myOrganizationDao.deleteByUrl("Organization?" + JpaConstants.PARAM_DELETE_EXPUNGE + "=true", mySrd);
Long jobExecutionId = jobExecutionIdFromOutcome(outcome);
JobExecution job = myBatchJobHelper.awaitJobExecution(jobExecutionId);
String jobExecutionId = jobExecutionIdFromOutcome(outcome);
JobInstance job = myBatch2JobHelper.awaitJobFailure(jobExecutionId);
// validate
assertEquals(BatchStatus.FAILED, job.getStatus());
assertThat(job.getExitStatus().getExitDescription(), containsString("DELETE with _expunge=true failed. Unable to delete " + organizationId.toVersionless() + " because " + patientId.toVersionless() + " refers to it via the path Patient.managingOrganization"));
assertEquals(StatusEnum.ERRORED, job.getStatus());
assertThat(job.getErrorMessage(), containsString("DELETE with _expunge=true failed. Unable to delete " + organizationId.toVersionless() + " because " + patientId.toVersionless() + " refers to it via the path Patient.managingOrganization"));
}
private Long jobExecutionIdFromOutcome(DeleteMethodOutcome theResult) {
private String jobExecutionIdFromOutcome(DeleteMethodOutcome theResult) {
OperationOutcome operationOutcome = (OperationOutcome) theResult.getOperationOutcome();
String diagnostics = operationOutcome.getIssueFirstRep().getDiagnostics();
String[] parts = diagnostics.split("Delete job submitted with id ");
return Long.valueOf(parts[1]);
return parts[1];
}
@Test
@ -139,12 +136,12 @@ class DeleteExpungeDaoTest extends BaseJpaR4Test {
// execute
DeleteMethodOutcome outcome = myOrganizationDao.deleteByUrl("Organization?" + JpaConstants.PARAM_DELETE_EXPUNGE + "=true", mySrd);
Long jobId = jobExecutionIdFromOutcome(outcome);
JobExecution job = myBatchJobHelper.awaitJobExecution(jobId);
String jobId = jobExecutionIdFromOutcome(outcome);
JobInstance job = myBatch2JobHelper.awaitJobFailure(jobId);
// validate
assertEquals(BatchStatus.FAILED, job.getStatus());
assertThat(job.getExitStatus().getExitDescription(), containsString("DELETE with _expunge=true failed. Unable to delete "));
assertEquals(StatusEnum.ERRORED, job.getStatus());
assertThat(job.getErrorMessage(), containsString("DELETE with _expunge=true failed. Unable to delete "));
}
@Test
@ -160,15 +157,14 @@ class DeleteExpungeDaoTest extends BaseJpaR4Test {
DeleteMethodOutcome outcome = myPatientDao.deleteByUrl("Patient?" + JpaConstants.PARAM_DELETE_EXPUNGE + "=true", mySrd);
// validate
Long jobExecutionId = jobExecutionIdFromOutcome(outcome);
JobExecution job = myBatchJobHelper.awaitJobExecution(jobExecutionId);
String jobId = jobExecutionIdFromOutcome(outcome);
JobInstance job = myBatch2JobHelper.awaitJobCompletion(jobId);
// 10 / 3 rounded up = 4
assertEquals(4, myBatchJobHelper.getReadCount(jobExecutionId));
assertEquals(4, myBatchJobHelper.getWriteCount(jobExecutionId));
assertEquals(10, myBatch2JobHelper.getCombinedRecordsProcessed(jobId));
assertEquals(30, job.getExecutionContext().getLong(SqlExecutorWriter.ENTITY_TOTAL_UPDATED_OR_DELETED));
assertEquals(10, job.getExecutionContext().getLong(PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED));
// TODO KHS replace these with a report
// assertEquals(30, job.getExecutionContext().getLong(SqlExecutorWriter.ENTITY_TOTAL_UPDATED_OR_DELETED));
// assertEquals(10, job.getExecutionContext().getLong(PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED));
}
@Test
@ -183,13 +179,13 @@ class DeleteExpungeDaoTest extends BaseJpaR4Test {
DeleteMethodOutcome outcome = myPatientDao.deleteByUrl("Patient?" + JpaConstants.PARAM_DELETE_EXPUNGE + "=true", mySrd);
// validate
Long jobExecutionId = jobExecutionIdFromOutcome(outcome);
JobExecution job = myBatchJobHelper.awaitJobExecution(jobExecutionId);
assertEquals(1, myBatchJobHelper.getReadCount(jobExecutionId));
assertEquals(1, myBatchJobHelper.getWriteCount(jobExecutionId));
String jobId = jobExecutionIdFromOutcome(outcome);
JobInstance job = myBatch2JobHelper.awaitJobCompletion(jobId);
assertEquals(10, myBatch2JobHelper.getCombinedRecordsProcessed(jobId));
assertEquals(30, job.getExecutionContext().getLong(SqlExecutorWriter.ENTITY_TOTAL_UPDATED_OR_DELETED));
assertEquals(10, job.getExecutionContext().getLong(PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED));
// TODO KHS replace these with a report
// assertEquals(30, job.getExecutionContext().getLong(SqlExecutorWriter.ENTITY_TOTAL_UPDATED_OR_DELETED));
// assertEquals(10, job.getExecutionContext().getLong(PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED));
}
@Test
@ -205,15 +201,15 @@ class DeleteExpungeDaoTest extends BaseJpaR4Test {
//execute
DeleteMethodOutcome outcome = myPatientDao.deleteByUrl("Patient?" + JpaConstants.PARAM_DELETE_EXPUNGE + "=true", mySrd);
Long jobExecutionId = jobExecutionIdFromOutcome(outcome);
JobExecution job = myBatchJobHelper.awaitJobExecution(jobExecutionId);
String jobId = jobExecutionIdFromOutcome(outcome);
JobInstance job = myBatch2JobHelper.awaitJobCompletion(jobId);
// validate
assertEquals(1, myBatchJobHelper.getReadCount(jobExecutionId));
assertEquals(1, myBatchJobHelper.getWriteCount(jobExecutionId));
assertEquals(2, myBatch2JobHelper.getCombinedRecordsProcessed(jobId));
assertEquals(7, job.getExecutionContext().getLong(SqlExecutorWriter.ENTITY_TOTAL_UPDATED_OR_DELETED));
assertEquals(2, job.getExecutionContext().getLong(PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED));
// TODO KHS replace these with a report
// assertEquals(7, job.getExecutionContext().getLong(SqlExecutorWriter.ENTITY_TOTAL_UPDATED_OR_DELETED));
// assertEquals(2, job.getExecutionContext().getLong(PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED));
}
}

View File

@ -1,32 +1,27 @@
package ca.uhn.fhir.jpa.delete.job;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterUtil;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeAppCtx;
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeJobParameters;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.test.Batch2JobHelper;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.DiagnosticReport;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;
import org.junit.jupiter.api.Test;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class DeleteExpungeJobTest extends BaseJpaR4Test {
@Autowired
private IBatchJobSubmitter myBatchJobSubmitter;
private IJobCoordinator myJobCoordinator;
@Autowired
@Qualifier(BatchConstants.DELETE_EXPUNGE_JOB_NAME)
private Job myDeleteExpungeJob;
@Autowired
private BatchJobHelper myBatchJobHelper;
private Batch2JobHelper myBatch2JobHelper;
@Test
public void testDeleteExpunge() throws Exception {
@ -47,19 +42,33 @@ public class DeleteExpungeJobTest extends BaseJpaR4Test {
obsInactive.setSubject(new Reference(pDelId));
IIdType oDelId = myObservationDao.create(obsInactive).getId().toUnqualifiedVersionless();
DiagnosticReport diagActive = new DiagnosticReport();
diagActive.setSubject(new Reference(pKeepId));
IIdType dKeepId = myDiagnosticReportDao.create(diagActive).getId().toUnqualifiedVersionless();
DiagnosticReport diagInactive = new DiagnosticReport();
diagInactive.setSubject(new Reference(pDelId));
IIdType dDelId = myDiagnosticReportDao.create(diagInactive).getId().toUnqualifiedVersionless();
// validate precondition
assertEquals(2, myPatientDao.search(SearchParameterMap.newSynchronous()).size());
assertEquals(2, myObservationDao.search(SearchParameterMap.newSynchronous()).size());
assertEquals(2, myDiagnosticReportDao.search(SearchParameterMap.newSynchronous()).size());
JobParameters jobParameters = MultiUrlJobParameterUtil.buildJobParameters("Observation?subject.active=false", "Patient?active=false");
DeleteExpungeJobParameters jobParameters = new DeleteExpungeJobParameters();
jobParameters.addUrl("Observation?subject.active=false").addUrl("DiagnosticReport?subject.active=false");
JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
startRequest.setParameters(jobParameters);
startRequest.setJobDefinitionId(DeleteExpungeAppCtx.JOB_DELETE_EXPUNGE);
// execute
JobExecution jobExecution = myBatchJobSubmitter.runJob(myDeleteExpungeJob, jobParameters);
myBatchJobHelper.awaitJobCompletion(jobExecution);
String jobId = myJobCoordinator.startInstance(startRequest);
myBatch2JobHelper.awaitJobCompletion(jobId);
// validate
assertEquals(1, myPatientDao.search(SearchParameterMap.newSynchronous()).size());
assertEquals(1, myObservationDao.search(SearchParameterMap.newSynchronous()).size());
assertEquals(1, myDiagnosticReportDao.search(SearchParameterMap.newSynchronous()).size());
assertEquals(2, myPatientDao.search(SearchParameterMap.newSynchronous()).size());
}
}

View File

@ -1,73 +0,0 @@
package ca.uhn.fhir.jpa.delete.job;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterUtil;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
class MultiUrlJobParameterValidatorTest {
static final FhirContext ourFhirContext = FhirContext.forR4Cached();
@Mock
MatchUrlService myMatchUrlService;
@Mock
DaoRegistry myDaoRegistry;
MultiUrlJobParameterValidator mySvc;
@BeforeEach
public void initMocks() {
mySvc = new MultiUrlJobParameterValidator(myMatchUrlService, myDaoRegistry);
}
@Test
public void testValidate() throws JobParametersInvalidException, JsonProcessingException {
// setup
JobParameters parameters = MultiUrlJobParameterUtil.buildJobParameters("Patient?address=memory", "Patient?name=smith");
ResourceSearch resourceSearch = new ResourceSearch(ourFhirContext.getResourceDefinition("Patient"), new SearchParameterMap(), RequestPartitionId.defaultPartition());
when(myMatchUrlService.getResourceSearch(anyString(), any())).thenReturn(resourceSearch);
when(myDaoRegistry.isResourceTypeSupported("Patient")).thenReturn(true);
// execute
mySvc.validate(parameters);
// verify
verify(myMatchUrlService, times(2)).getResourceSearch(anyString(), any());
}
@Test
public void testValidateBadType() throws JobParametersInvalidException, JsonProcessingException {
JobParameters parameters = MultiUrlJobParameterUtil.buildJobParameters("Patient?address=memory");
ResourceSearch resourceSearch = new ResourceSearch(ourFhirContext.getResourceDefinition("Patient"), new SearchParameterMap(), RequestPartitionId.defaultPartition());
when(myMatchUrlService.getResourceSearch(anyString(), any())).thenReturn(resourceSearch);
when(myDaoRegistry.isResourceTypeSupported("Patient")).thenReturn(false);
try {
mySvc.validate(parameters);
fail();
} catch (JobParametersInvalidException e) {
assertEquals(Msg.code(1281) + "The resource type Patient is not supported on this server.", e.getMessage());
}
}
}
View File
@ -1,9 +1,9 @@
package ca.uhn.fhir.jpa.delete.provider;
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeProvider;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.provider.DeleteExpungeProvider;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.test.utilities.JettyUtil;
import org.apache.commons.io.IOUtils;
View File
@ -1,10 +1,11 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeProvider;
import ca.uhn.fhir.batch2.jobs.reindex.ReindexProvider;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.dao.data.IPartitionDao;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.graphql.GraphQLProvider;
import ca.uhn.fhir.jpa.provider.DiffProvider;
import ca.uhn.fhir.jpa.provider.JpaCapabilityStatementProvider;
@ -14,6 +15,7 @@ import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl;
import ca.uhn.fhir.jpa.subscription.match.config.WebsocketDispatcherConfig;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionLoader;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.narrative.DefaultThymeleafNarrativeGenerator;
import ca.uhn.fhir.parser.StrictErrorHandler;
@ -24,8 +26,6 @@ import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.interceptor.CorsInterceptor;
import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
import ca.uhn.fhir.rest.server.provider.DeleteExpungeProvider;
import ca.uhn.fhir.batch2.jobs.reindex.ReindexProvider;
import ca.uhn.fhir.test.utilities.JettyUtil;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
View File
@ -7,13 +7,11 @@ import ca.uhn.fhir.interceptor.api.IPointcut;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.delete.job.ReindexTestHelper;
import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.hapi.rest.server.helper.BatchHelperR4;
import org.hl7.fhir.r4.model.Bundle;
@ -25,7 +23,6 @@ import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.List;
@ -39,9 +36,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
public class MultitenantBatchOperationR4Test extends BaseMultitenantResourceProviderR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(MultitenantBatchOperationR4Test.class);
@Autowired
private BatchJobHelper myBatchJobHelper;
@BeforeEach
@Override
public void before() throws Exception {
@ -82,7 +76,7 @@ public class MultitenantBatchOperationR4Test extends BaseMultitenantResourceProv
assertEquals(0, getAllPatientsInTenant(DEFAULT_PARTITION_NAME).getTotal());
Parameters input = new Parameters();
input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "/Patient?active=false");
input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "Patient?active=false");
MyInterceptor interceptor = new MyInterceptor();
myInterceptorRegistry.registerAnonymousInterceptor(Pointcut.STORAGE_PARTITION_SELECTED, interceptor);
@ -97,8 +91,10 @@ public class MultitenantBatchOperationR4Test extends BaseMultitenantResourceProv
.execute();
ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(response));
myBatchJobHelper.awaitAllBulkJobCompletions(BatchConstants.DELETE_EXPUNGE_JOB_NAME);
assertThat(interceptor.requestPartitionIds, hasSize(1));
String jobId = BatchHelperR4.jobIdFromBatch2Parameters(response);
myBatch2JobHelper.awaitSingleChunkJobCompletion(jobId);
assertThat(interceptor.requestPartitionIds, hasSize(3));
RequestPartitionId partitionId = interceptor.requestPartitionIds.get(0);
assertEquals(TENANT_B_ID, partitionId.getFirstPartitionIdOrNull());
assertEquals(TENANT_B, partitionId.getFirstPartitionNameOrNull());
@ -106,10 +102,7 @@ public class MultitenantBatchOperationR4Test extends BaseMultitenantResourceProv
assertEquals("Patient", interceptor.resourceDefs.get(0).getName());
myInterceptorRegistry.unregisterInterceptor(interceptor);
Long jobId = BatchHelperR4.jobIdFromParameters(response);
assertEquals(1, myBatchJobHelper.getReadCount(jobId));
assertEquals(1, myBatchJobHelper.getWriteCount(jobId));
assertEquals(1, myBatch2JobHelper.getCombinedRecordsProcessed(jobId));
// validate only the false patient in TENANT_B is removed
assertEquals(2, getAllPatientsInTenant(TENANT_A).getTotal());
View File
@ -1,12 +1,11 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeProvider;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
import ca.uhn.fhir.jpa.rp.r4.BinaryResourceProvider;
import ca.uhn.fhir.jpa.rp.r4.DiagnosticReportResourceProvider;
@ -17,6 +16,7 @@ import ca.uhn.fhir.jpa.rp.r4.PatientResourceProvider;
import ca.uhn.fhir.jpa.rp.r4.PractitionerResourceProvider;
import ca.uhn.fhir.jpa.rp.r4.ServiceRequestResourceProvider;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.EncodingEnum;
@ -32,7 +32,6 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
import ca.uhn.fhir.rest.server.provider.DeleteExpungeProvider;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
import ca.uhn.fhir.test.utilities.JettyUtil;
@ -62,6 +61,7 @@ import org.hl7.fhir.r4.model.Bundle.BundleType;
import org.hl7.fhir.r4.model.Bundle.HTTPVerb;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.DecimalType;
import org.hl7.fhir.r4.model.DiagnosticReport;
import org.hl7.fhir.r4.model.Enumerations.AdministrativeGender;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Observation;
@ -822,13 +822,22 @@ public class SystemProviderR4Test extends BaseJpaR4Test {
obsInactive.setSubject(new Reference(pDelId.toUnqualifiedVersionless()));
IIdType obsDelId = myClient.create().resource(obsInactive).execute().getId();
DiagnosticReport diagActive = new DiagnosticReport();
diagActive.setSubject(new Reference(pKeepId.toUnqualifiedVersionless()));
IIdType dKeepId = myClient.create().resource(diagActive).execute().getId();
DiagnosticReport diagInactive = new DiagnosticReport();
diagInactive.setSubject(new Reference(pDelId.toUnqualifiedVersionless()));
IIdType diagDelId = myClient.create().resource(diagInactive).execute().getId();
// validate setup
assertEquals(14, getAllResourcesOfType("Patient").getTotal());
assertEquals(2, getAllResourcesOfType("Observation").getTotal());
assertEquals(2, getAllResourcesOfType("DiagnosticReport").getTotal());
Parameters input = new Parameters();
input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "/Observation?subject.active=false");
input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "/Patient?active=false");
input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "Observation?subject.active=false");
input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "DiagnosticReport?subject.active=false");
int batchSize = 2;
input.addParameter(ProviderConstants.OPERATION_DELETE_BATCH_SIZE, new DecimalType(batchSize));
@ -842,18 +851,13 @@ public class SystemProviderR4Test extends BaseJpaR4Test {
.execute();
ourLog.info(ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response));
myBatchJobHelper.awaitAllBulkJobCompletions(BatchConstants.DELETE_EXPUNGE_JOB_NAME);
Long jobId = BatchHelperR4.jobIdFromParameters(response);
String jobId = BatchHelperR4.jobIdFromBatch2Parameters(response);
// validate
// 1 observation
// + 12/batchSize inactive patients
// + 1 patient with id pDelId
// = 1 + 6 + 1 = 8
assertEquals(8, myBatchJobHelper.getReadCount(jobId));
assertEquals(8, myBatchJobHelper.getWriteCount(jobId));
myBatch2JobHelper.awaitJobCompletion(jobId);
assertEquals(2, myBatch2JobHelper.getCombinedRecordsProcessed(jobId));
// validate
Bundle obsBundle = getAllResourcesOfType("Observation");
@ -861,11 +865,10 @@ public class SystemProviderR4Test extends BaseJpaR4Test {
assertThat(observations, hasSize(1));
assertEquals(oKeepId, observations.get(0).getIdElement());
Bundle patientBundle = getAllResourcesOfType("Patient");
List<Patient> patients = BundleUtil.toListOfResourcesOfType(myFhirContext, patientBundle, Patient.class);
assertThat(patients, hasSize(1));
assertEquals(pKeepId, patients.get(0).getIdElement());
Bundle diagBundle = getAllResourcesOfType("DiagnosticReport");
List<DiagnosticReport> diags = BundleUtil.toListOfResourcesOfType(myFhirContext, diagBundle, DiagnosticReport.class);
assertThat(diags, hasSize(1));
assertEquals(dKeepId, diags.get(0).getIdElement());
}
private Bundle getAllResourcesOfType(String theResourceName) {
View File
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.reindex;
import ca.uhn.fhir.jpa.api.pid.IResourcePidList;
import ca.uhn.fhir.jpa.api.pid.TypedResourcePid;
import ca.uhn.fhir.jpa.api.svc.IResourceReindexSvc;
import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Test;
@ -24,7 +24,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
public class ResourceReindexSvcImplTest extends BaseJpaR4Test {
@Autowired
private IResourceReindexSvc mySvc;
private IBatch2DaoSvc mySvc;
@Test
public void testFetchResourceIdsPage_NoUrl_WithData() {
View File
@ -2,12 +2,11 @@ package ca.uhn.fhir.jpa.stresstest;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
@ -28,6 +27,7 @@ import org.hamcrest.Matchers;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.hapi.rest.server.helper.BatchHelperR4;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Bundle.BundleType;
import org.hl7.fhir.r4.model.Bundle.HTTPVerb;
@ -586,11 +586,12 @@ public class StressTestR4Test extends BaseResourceProviderR4Test {
Parameters input = new Parameters();
input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "/Patient?active=true");
input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "Patient?active=true");
int batchSize = 2;
input.addParameter(ProviderConstants.OPERATION_DELETE_BATCH_SIZE, new DecimalType(batchSize));
// execute
myCaptureQueriesListener.clear();
Parameters response = myClient
.operation()
.onServer()
@ -599,11 +600,13 @@ public class StressTestR4Test extends BaseResourceProviderR4Test {
.execute();
ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(response));
myBatchJobHelper.awaitAllBulkJobCompletions(BatchConstants.DELETE_EXPUNGE_JOB_NAME);
int deleteCount = myCaptureQueriesListener.countDeleteQueries();
String jobId = BatchHelperR4.jobIdFromBatch2Parameters(response);
myBatch2JobHelper.awaitJobCompletion(jobId);
int deleteCount = myCaptureQueriesListener.getDeleteQueries().size();
myCaptureQueriesListener.logDeleteQueries();
assertThat(deleteCount, is(equalTo(19)));
assertThat(deleteCount, is(equalTo(30)));
}
private void validateNoErrors(List<BaseTask> tasks) {
View File
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
View File
@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
View File
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
View File
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
View File
@ -20,6 +20,16 @@ package ca.uhn.fhir.rest.api.server.storage;
* #L%
*/
import ca.uhn.fhir.rest.api.server.RequestDetails;
import java.util.List;
// Tag interface for Spring auto-wiring
public interface IDeleteExpungeJobSubmitter extends IMultiUrlJobSubmitter {
public interface IDeleteExpungeJobSubmitter {
/**
* @param theBatchSize For each pass, when synchronously searching for resources, limit the number of matching resources to this number
* @param theUrlsToProcess A list of strings of the form "/Patient?active=true"
* @return The Batch2 JobId that was started to run this batch job
*/
String submitJob(Integer theBatchSize, List<String> theUrlsToProcess, RequestDetails theRequest);
}
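A minimal usage sketch of the reworked interface (illustrative only; the caller method name is hypothetical and the submitter is assumed to be Spring-injected). Passing a null batch size falls back to the server's configured expunge batch size, as DeleteExpungeJobSubmitterImpl below shows:
@Autowired
private IDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter;
// Hypothetical helper: submits a delete-expunge job and returns the batch2 instance id
public String expungeInactivePatients(RequestDetails theRequestDetails) {
	return myDeleteExpungeJobSubmitter.submitJob(null, List.of("Patient?active=false"), theRequestDetails);
}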
View File
@ -1,37 +0,0 @@
package ca.uhn.fhir.rest.api.server.storage;
/*-
* #%L
* HAPI FHIR - Server Framework
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParametersInvalidException;
import java.util.List;
public interface IMultiUrlJobSubmitter {
/**
* @param theBatchSize For each pass, when synchronously searching for resources, limit the number of matching resources to this number
* @param theUrlsToProcess A list of strings of the form "/Patient?active=true"
* @return The Spring Batch JobExecution that was started to run this batch job
* @throws JobParametersInvalidException
*/
JobExecution submitJob(Integer theBatchSize, List<String> theUrlsToProcess, RequestDetails theRequest) throws JobParametersInvalidException;
}
View File
@ -1,30 +0,0 @@
package ca.uhn.fhir.rest.api.server.storage;
/*-
* #%L
* HAPI FHIR - Server Framework
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParametersInvalidException;
// Tag interface for Spring wiring
public interface IReindexJobSubmitter extends IMultiUrlJobSubmitter {
JobExecution submitEverythingJob(Integer theBatchSize, RequestDetails theRequest) throws JobParametersInvalidException;
}
View File
@ -1,66 +0,0 @@
package ca.uhn.fhir.rest.server.provider;
/*-
* #%L
* HAPI FHIR - Server Framework
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IMultiUrlJobSubmitter;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.ParametersUtil;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParametersInvalidException;
import javax.annotation.Nullable;
import java.math.BigDecimal;
import java.util.List;
public class MultiUrlProcessor {
private final FhirContext myFhirContext;
private final IMultiUrlJobSubmitter myMultiUrlProcessorJobSubmitter;
public MultiUrlProcessor(FhirContext theFhirContext, IMultiUrlJobSubmitter theMultiUrlProcessorJobSubmitter) {
myMultiUrlProcessorJobSubmitter = theMultiUrlProcessorJobSubmitter;
myFhirContext = theFhirContext;
}
public IBaseParameters processUrls(List<String> theUrlsToProcess, Integer theBatchSize, RequestDetails theRequestDetails) {
try {
JobExecution jobExecution = myMultiUrlProcessorJobSubmitter.submitJob(theBatchSize, theUrlsToProcess, theRequestDetails);
IBaseParameters retval = ParametersUtil.newInstance(myFhirContext);
ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, jobExecution.getJobId());
return retval;
} catch (JobParametersInvalidException e) {
throw new InvalidRequestException(Msg.code(309) + "Invalid job parameters: " + e.getMessage(), e);
}
}
@Nullable
public Integer getBatchSize(IPrimitiveType<BigDecimal> theBatchSize) {
Integer batchSize = null;
if (theBatchSize != null && !theBatchSize.isEmpty()) {
batchSize = theBatchSize.getValue().intValue();
}
return batchSize;
}
}
View File
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>
View File
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>
View File
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-client-okhttp</artifactId>
View File
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-server-jersey</artifactId>
View File
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
View File
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>
View File
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
View File
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
View File
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE9-SNAPSHOT</version>
<version>6.1.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
View File
@ -20,6 +20,7 @@ package ca.uhn.fhir.batch2.jobs.config;
* #L%
*/
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeAppCtx;
import ca.uhn.fhir.batch2.jobs.imprt.BulkImportAppCtx;
import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx;
import org.springframework.context.annotation.Configuration;
@ -28,8 +29,10 @@ import org.springframework.context.annotation.Import;
// When you define a new batch job, add it here.
@Configuration
@Import({
BatchCommonCtx.class,
BulkImportAppCtx.class,
ReindexAppCtx.class
ReindexAppCtx.class,
DeleteExpungeAppCtx.class
})
public class Batch2JobsConfig {
// nothing
View File
@ -0,0 +1,13 @@
package ca.uhn.fhir.batch2.jobs.config;
import ca.uhn.fhir.batch2.jobs.parameters.UrlPartitioner;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.context.annotation.Bean;
public class BatchCommonCtx {
@Bean
UrlPartitioner urlPartitioner(MatchUrlService theMatchUrlService, IRequestPartitionHelperSvc theRequestPartitionHelperSvc) {
return new UrlPartitioner(theMatchUrlService, theRequestPartitionHelperSvc);
}
}
View File
@ -0,0 +1,99 @@
package ca.uhn.fhir.batch2.jobs.expunge;
/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.batch2.jobs.chunk.PartitionedUrlChunkRangeJson;
import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson;
import ca.uhn.fhir.batch2.jobs.parameters.UrlListValidator;
import ca.uhn.fhir.batch2.jobs.step.GenerateRangeChunksStep;
import ca.uhn.fhir.batch2.jobs.step.LoadIdsStep;
import ca.uhn.fhir.batch2.model.JobDefinition;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
import ca.uhn.fhir.jpa.api.svc.IDeleteExpungeSvc;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class DeleteExpungeAppCtx {
public static final String JOB_DELETE_EXPUNGE = "DELETE_EXPUNGE";
@Bean
public JobDefinition<DeleteExpungeJobParameters> expungeJobDefinition(
IBatch2DaoSvc theBatch2DaoSvc,
HapiTransactionService theHapiTransactionService,
IDeleteExpungeSvc theDeleteExpungeSvc
) {
return JobDefinition
.newBuilder()
.setJobDefinitionId(JOB_DELETE_EXPUNGE)
.setJobDescription("Expunge resources")
.setJobDefinitionVersion(1)
.setParametersType(DeleteExpungeJobParameters.class)
.setParametersValidator(expungeJobParametersValidator(theBatch2DaoSvc))
.gatedExecution()
.addFirstStep(
"generate-ranges",
"Generate data ranges to expunge",
PartitionedUrlChunkRangeJson.class,
expungeGenerateRangeChunksStep())
.addIntermediateStep(
"load-ids",
"Load IDs of resources to expunge",
ResourceIdListWorkChunkJson.class,
loadIdsStep(theBatch2DaoSvc))
.addLastStep("expunge",
"Perform the resource expunge",
expungeStep(theHapiTransactionService, theDeleteExpungeSvc)
)
.build();
}
@Bean
public DeleteExpungeJobParametersValidator expungeJobParametersValidator(IBatch2DaoSvc theBatch2DaoSvc) {
return new DeleteExpungeJobParametersValidator(new UrlListValidator(ProviderConstants.OPERATION_EXPUNGE, theBatch2DaoSvc));
}
@Bean
public DeleteExpungeStep expungeStep(HapiTransactionService theHapiTransactionService, IDeleteExpungeSvc theDeleteExpungeSvc) {
return new DeleteExpungeStep(theHapiTransactionService, theDeleteExpungeSvc);
}
@Bean
public GenerateRangeChunksStep expungeGenerateRangeChunksStep() {
return new GenerateRangeChunksStep();
}
@Bean
public LoadIdsStep loadIdsStep(IBatch2DaoSvc theBatch2DaoSvc) {
return new LoadIdsStep(theBatch2DaoSvc);
}
@Bean
public DeleteExpungeProvider deleteExpungeProvider(FhirContext theFhirContext, IDeleteExpungeJobSubmitter theDeleteExpungeJobSubmitter) {
return new DeleteExpungeProvider(theFhirContext, theDeleteExpungeJobSubmitter);
}
}
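For reference, a minimal sketch of starting this gated three-step job (generate-ranges, load-ids, expunge) through the batch2 coordinator, mirroring DeleteExpungeJobTest above; myJobCoordinator is assumed to be an injected IJobCoordinator:
DeleteExpungeJobParameters parameters = new DeleteExpungeJobParameters();
parameters.addUrl("Observation?subject.active=false");
JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
startRequest.setJobDefinitionId(DeleteExpungeAppCtx.JOB_DELETE_EXPUNGE);
startRequest.setParameters(parameters);
// Returns the batch2 instance id, which callers can poll for completion
String instanceId = myJobCoordinator.startInstance(startRequest);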
View File
@ -1,8 +1,8 @@
package ca.uhn.fhir.mdm.api;
package ca.uhn.fhir.batch2.jobs.expunge;
/*-
* #%L
* HAPI FHIR - Master Data Management
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
@ -20,6 +20,7 @@ package ca.uhn.fhir.mdm.api;
* #L%
*/
public interface IMdmBatchJobSubmitterFactory {
IMdmClearJobSubmitter getClearJobSubmitter();
import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrlListJobParameters;
public class DeleteExpungeJobParameters extends PartitionedUrlListJobParameters {
}
View File
@ -0,0 +1,42 @@
package ca.uhn.fhir.batch2.jobs.expunge;
/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.batch2.api.IJobParametersValidator;
import ca.uhn.fhir.batch2.jobs.parameters.UrlListValidator;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
public class DeleteExpungeJobParametersValidator implements IJobParametersValidator<DeleteExpungeJobParameters> {
private final UrlListValidator myUrlListValidator;
public DeleteExpungeJobParametersValidator(UrlListValidator theUrlListValidator) {
myUrlListValidator = theUrlListValidator;
}
@Nullable
@Override
public List<String> validate(@NotNull DeleteExpungeJobParameters theParameters) {
return myUrlListValidator.validatePartitionedUrls(theParameters.getPartitionedUrls());
}
}
View File
@ -1,8 +1,8 @@
package ca.uhn.fhir.jpa.delete;
package ca.uhn.fhir.batch2.jobs.expunge;
/*-
* #%L
* HAPI FHIR JPA Server
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
@ -20,41 +20,36 @@ package ca.uhn.fhir.jpa.delete;
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.jobs.parameters.UrlPartitioner;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import javax.transaction.Transactional;
import java.util.List;
import static ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeAppCtx.JOB_DELETE_EXPUNGE;
public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter {
@Autowired
private IBatchJobSubmitter myBatchJobSubmitter;
@Autowired
@Qualifier(BatchConstants.DELETE_EXPUNGE_JOB_NAME)
private Job myDeleteExpungeJob;
IJobCoordinator myJobCoordinator;
@Autowired
FhirContext myFhirContext;
@Autowired
@ -64,30 +59,44 @@ public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter
@Autowired
DaoConfig myDaoConfig;
@Autowired
PartitionedUrlValidator myPartitionedUrlValidator;
@Autowired
IInterceptorBroadcaster myInterceptorBroadcaster;
@Autowired
UrlPartitioner myUrlPartitioner;
@Override
@Transactional(Transactional.TxType.NEVER)
public JobExecution submitJob(Integer theBatchSize, List<String> theUrlsToDeleteExpunge, RequestDetails theRequest) throws JobParametersInvalidException {
public String submitJob(Integer theBatchSize, List<String> theUrlsToDeleteExpunge, RequestDetails theRequestDetails) {
if (theBatchSize == null) {
theBatchSize = myDaoConfig.getExpungeBatchSize();
}
RequestListJson requestListJson = myPartitionedUrlValidator.buildRequestListJson(theRequest, theUrlsToDeleteExpunge);
if (!myDaoConfig.canDeleteExpunge()) {
throw new ForbiddenOperationException(Msg.code(820) + "Delete Expunge not allowed: " + myDaoConfig.cannotDeleteExpungeReason());
}
for (String url : theUrlsToDeleteExpunge) {
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(String.class, url);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE, params);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRE_DELETE_EXPUNGE, params);
}
JobParameters jobParameters = ReverseCronologicalBatchResourcePidReader.buildJobParameters(ProviderConstants.OPERATION_DELETE_EXPUNGE, theBatchSize, requestListJson);
return myBatchJobSubmitter.runJob(myDeleteExpungeJob, jobParameters);
DeleteExpungeJobParameters deleteExpungeJobParameters = new DeleteExpungeJobParameters();
// Set the partition for each URL individually, since the resource type can determine the partition
theUrlsToDeleteExpunge.stream()
.filter(StringUtils::isNotBlank)
.map(url -> myUrlPartitioner.partitionUrl(url, theRequestDetails))
.forEach(deleteExpungeJobParameters::addPartitionedUrl);
deleteExpungeJobParameters.setBatchSize(theBatchSize);
ReadPartitionIdRequestDetails details = new ReadPartitionIdRequestDetails(null, RestOperationTypeEnum.EXTENDED_OPERATION_SERVER, null, null, null);
// Also set the top-level partition, in case no URLs were provided
RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null, details);
deleteExpungeJobParameters.setRequestPartitionId(requestPartition);
JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
startRequest.setJobDefinitionId(JOB_DELETE_EXPUNGE);
startRequest.setParameters(deleteExpungeJobParameters);
return myJobCoordinator.startInstance(startRequest);
}
}
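Note that submitJob still fires the STORAGE_PRE_DELETE_EXPUNGE pointcut once per URL, so existing interceptors keep working. A minimal sketch (hypothetical class name, not part of this commit) of an interceptor observing that pointcut:
@Interceptor
public class DeleteExpungeAuditInterceptor {
	@Hook(Pointcut.STORAGE_PRE_DELETE_EXPUNGE)
	public void preDeleteExpunge(RequestDetails theRequestDetails, String theUrl) {
		// Audit each URL, or veto the operation by throwing, before the batch2 job is submitted
	}
}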
View File
@ -1,8 +1,8 @@
package ca.uhn.fhir.rest.server.provider;
package ca.uhn.fhir.batch2.jobs.expunge;
/*-
* #%L
* HAPI FHIR - Server Framework
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
@ -27,6 +27,9 @@ import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.util.ParametersUtil;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
@ -35,10 +38,12 @@ import java.util.List;
import java.util.stream.Collectors;
public class DeleteExpungeProvider {
private final MultiUrlProcessor myMultiUrlProcessor;
private final FhirContext myFhirContext;
private final IDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter;
public DeleteExpungeProvider(FhirContext theFhirContext, IDeleteExpungeJobSubmitter theDeleteExpungeJobSubmitter) {
myMultiUrlProcessor = new MultiUrlProcessor(theFhirContext, theDeleteExpungeJobSubmitter);
myFhirContext = theFhirContext;
myDeleteExpungeJobSubmitter = theDeleteExpungeJobSubmitter;
}
@Operation(name = ProviderConstants.OPERATION_DELETE_EXPUNGE, idempotent = false)
@ -48,10 +53,21 @@ public class DeleteExpungeProvider {
RequestDetails theRequestDetails
) {
if (theUrlsToDeleteExpunge == null) {
throw new InvalidRequestException(Msg.code(1976) + "At least one `url` parameter to $delete-expunge must be provided.");
throw new InvalidRequestException(Msg.code(2101) + "At least one `url` parameter to $delete-expunge must be provided.");
}
List<String> urls = theUrlsToDeleteExpunge.stream().map(IPrimitiveType::getValue).collect(Collectors.toList());
Integer batchSize = myMultiUrlProcessor.getBatchSize(theBatchSize);
return myMultiUrlProcessor.processUrls(urls, batchSize, theRequestDetails);
List<String> urls = theUrlsToDeleteExpunge.stream()
.map(IPrimitiveType::getValue)
.filter(StringUtils::isNotBlank)
.collect(Collectors.toList());
Integer batchSize = null;
if (theBatchSize != null && theBatchSize.getValue() != null && theBatchSize.getValue().intValue() > 0) {
batchSize = theBatchSize.getValue().intValue();
}
String jobId = myDeleteExpungeJobSubmitter.submitJob(batchSize, urls, theRequestDetails);
IBaseParameters retval = ParametersUtil.newInstance(myFhirContext);
ParametersUtil.addParameterToParametersString(myFhirContext, retval, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, jobId);
return retval;
}
}
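A client-side sketch of invoking the operation, modelled on SystemProviderR4Test above (myClient is assumed to be an initialized IGenericClient):
Parameters input = new Parameters();
input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "Patient?active=false");
input.addParameter(ProviderConstants.OPERATION_DELETE_BATCH_SIZE, new DecimalType(100));
Parameters response = myClient
	.operation()
	.onServer()
	.named(ProviderConstants.OPERATION_DELETE_EXPUNGE)
	.withParameters(input)
	.execute();
// The job id now comes back as a string parameter rather than a numeric Spring Batch id
String jobId = BatchHelperR4.jobIdFromBatch2Parameters(response);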
View File
@ -0,0 +1,108 @@
package ca.uhn.fhir.batch2.jobs.expunge;
/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.IJobStepWorker;
import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.api.VoidModel;
import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson;
import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters;
import ca.uhn.fhir.jpa.api.svc.IDeleteExpungeSvc;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.util.StopWatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import javax.annotation.Nonnull;
import java.util.List;
public class DeleteExpungeStep implements IJobStepWorker<ReindexJobParameters, ResourceIdListWorkChunkJson, VoidModel> {
private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeStep.class);
private final HapiTransactionService myHapiTransactionService;
private final IDeleteExpungeSvc myDeleteExpungeSvc;
public DeleteExpungeStep(HapiTransactionService theHapiTransactionService, IDeleteExpungeSvc theDeleteExpungeSvc) {
myHapiTransactionService = theHapiTransactionService;
myDeleteExpungeSvc = theDeleteExpungeSvc;
}
@Nonnull
@Override
public RunOutcome run(@Nonnull StepExecutionDetails<ReindexJobParameters, ResourceIdListWorkChunkJson> theStepExecutionDetails, @Nonnull IJobDataSink<VoidModel> theDataSink) throws JobExecutionFailedException {
ResourceIdListWorkChunkJson data = theStepExecutionDetails.getData();
return doDeleteExpunge(data, theDataSink, theStepExecutionDetails.getInstance().getInstanceId(), theStepExecutionDetails.getChunkId());
}
@Nonnull
public RunOutcome doDeleteExpunge(ResourceIdListWorkChunkJson data, IJobDataSink<VoidModel> theDataSink, String theInstanceId, String theChunkId) {
RequestDetails requestDetails = new SystemRequestDetails();
TransactionDetails transactionDetails = new TransactionDetails();
myHapiTransactionService.execute(requestDetails, transactionDetails, new DeleteExpungeJob(data, requestDetails, transactionDetails, theDataSink, theInstanceId, theChunkId));
return new RunOutcome(data.size());
}
private class DeleteExpungeJob implements TransactionCallback<Void> {
private final ResourceIdListWorkChunkJson myData;
private final RequestDetails myRequestDetails;
private final TransactionDetails myTransactionDetails;
private final IJobDataSink<VoidModel> myDataSink;
private final String myChunkId;
private final String myInstanceId;
public DeleteExpungeJob(ResourceIdListWorkChunkJson theData, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, IJobDataSink<VoidModel> theDataSink, String theInstanceId, String theChunkId) {
myData = theData;
myRequestDetails = theRequestDetails;
myTransactionDetails = theTransactionDetails;
myDataSink = theDataSink;
myInstanceId = theInstanceId;
myChunkId = theChunkId;
}
@Override
public Void doInTransaction(@Nonnull TransactionStatus theStatus) {
List<ResourcePersistentId> persistentIds = myData.getResourcePersistentIds();
ourLog.info("Starting delete expunge work chunk with {} resources - Instance[{}] Chunk[{}]", persistentIds.size(), myInstanceId, myChunkId);
StopWatch sw = new StopWatch();
myDeleteExpungeSvc.deleteExpunge(persistentIds);
return null;
}
}
}
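For contributors writing additional batch2 steps, a minimal sketch of the IJobStepWorker contract this class implements (hypothetical no-op worker, shown with the delete-expunge parameter type):
public class NoOpStep implements IJobStepWorker<DeleteExpungeJobParameters, ResourceIdListWorkChunkJson, VoidModel> {
	@Nonnull
	@Override
	public RunOutcome run(@Nonnull StepExecutionDetails<DeleteExpungeJobParameters, ResourceIdListWorkChunkJson> theStepExecutionDetails, @Nonnull IJobDataSink<VoidModel> theDataSink) throws JobExecutionFailedException {
		// RunOutcome reports how many records this chunk processed
		return new RunOutcome(theStepExecutionDetails.getData().size());
	}
}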
View File
@ -21,11 +21,17 @@ package ca.uhn.fhir.batch2.jobs.reindex;
*/
import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.jobs.chunk.PartitionedUrlChunkRangeJson;
import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson;
import ca.uhn.fhir.batch2.jobs.parameters.UrlListValidator;
import ca.uhn.fhir.batch2.jobs.parameters.UrlPartitioner;
import ca.uhn.fhir.batch2.jobs.step.GenerateRangeChunksStep;
import ca.uhn.fhir.batch2.jobs.step.LoadIdsStep;
import ca.uhn.fhir.batch2.model.JobDefinition;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.svc.IResourceReindexSvc;
import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@ -35,25 +41,25 @@ public class ReindexAppCtx {
public static final String JOB_REINDEX = "REINDEX";
@Bean
public JobDefinition<ReindexJobParameters> reindexJobDefinition(IResourceReindexSvc theResourceReindexSvc) {
public JobDefinition<ReindexJobParameters> reindexJobDefinition(IBatch2DaoSvc theBatch2DaoSvc) {
return JobDefinition
.newBuilder()
.setJobDefinitionId(JOB_REINDEX)
.setJobDescription("Reindex resources")
.setJobDefinitionVersion(1)
.setParametersType(ReindexJobParameters.class)
.setParametersValidator(reindexJobParametersValidator())
.setParametersValidator(reindexJobParametersValidator(theBatch2DaoSvc))
.gatedExecution()
.addFirstStep(
"generate-ranges",
"Generate data ranges to reindex",
ReindexChunkRangeJson.class,
PartitionedUrlChunkRangeJson.class,
reindexGenerateRangeChunksStep())
.addIntermediateStep(
"load-ids",
"Load IDs of resources to reindex",
ResourceIdListWorkChunkJson.class,
loadIdsStep(theResourceReindexSvc))
loadIdsStep(theBatch2DaoSvc))
.addLastStep("reindex",
"Perform the resource reindex",
reindexStep()
@ -62,8 +68,8 @@ public class ReindexAppCtx {
}
@Bean
public ReindexJobParametersValidator reindexJobParametersValidator() {
return new ReindexJobParametersValidator();
public ReindexJobParametersValidator reindexJobParametersValidator(IBatch2DaoSvc theBatch2DaoSvc) {
return new ReindexJobParametersValidator(new UrlListValidator(ProviderConstants.OPERATION_REINDEX, theBatch2DaoSvc));
}
@Bean
@ -77,13 +83,13 @@ public class ReindexAppCtx {
}
@Bean
public LoadIdsStep loadIdsStep(IResourceReindexSvc theResourceReindexSvc) {
return new LoadIdsStep(theResourceReindexSvc);
public LoadIdsStep loadIdsStep(IBatch2DaoSvc theBatch2DaoSvc) {
return new LoadIdsStep(theBatch2DaoSvc);
}
@Bean
public ReindexProvider reindexProvider(FhirContext theFhirContext, IJobCoordinator theJobCoordinator, IRequestPartitionHelperSvc theRequestPartitionHelperSvc) {
return new ReindexProvider(theFhirContext, theJobCoordinator, theRequestPartitionHelperSvc);
public ReindexProvider reindexProvider(FhirContext theFhirContext, IJobCoordinator theJobCoordinator, IRequestPartitionHelperSvc theRequestPartitionHelperSvc, UrlPartitioner theUrlPartitioner) {
return new ReindexProvider(theFhirContext, theJobCoordinator, theRequestPartitionHelperSvc, theUrlPartitioner);
}
}
View File
@ -1,49 +0,0 @@
package ca.uhn.fhir.batch2.jobs.reindex;
/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.batch2.jobs.step.IIdChunkProducer;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.pid.IResourcePidList;
import ca.uhn.fhir.jpa.api.svc.IResourceReindexSvc;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Date;
public class ReindexIdChunkProducer implements IIdChunkProducer<ReindexChunkRangeJson> {
private static final Logger ourLog = LoggerFactory.getLogger(ReindexIdChunkProducer.class);
private final IResourceReindexSvc myResourceReindexSvc;
public ReindexIdChunkProducer(IResourceReindexSvc theResourceReindexSvc) {
myResourceReindexSvc = theResourceReindexSvc;
}
@Override
public IResourcePidList fetchResourceIdsPage(Date theNextStart, Date theEnd, @Nonnull Integer thePageSize, @Nullable RequestPartitionId theRequestPartitionId, ReindexChunkRangeJson theData) {
String url = theData.getUrl();
ourLog.info("Fetching resource ID chunk for URL {} - Range {} - {}", url, theNextStart, theEnd);
return myResourceReindexSvc.fetchResourceIdsPage(theNextStart, theEnd, thePageSize, theRequestPartitionId, url);
}
}
View File
@ -20,35 +20,8 @@ package ca.uhn.fhir.batch2.jobs.reindex;
* #L%
*/
import ca.uhn.fhir.batch2.jobs.parameters.PartitionedJobParameters;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.model.api.IModelJson;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.Validate;
import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrlListJobParameters;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.validation.constraints.Pattern;
import java.util.ArrayList;
import java.util.List;
public class ReindexJobParameters extends PartitionedJobParameters {
@JsonProperty("url")
@Nullable
private List<@Pattern(regexp = "^[A-Z][A-Za-z0-9]+\\?.*", message = "If populated, URL must be a search URL in the form '{resourceType}?[params]'") String> myUrl;
public List<String> getUrl() {
if (myUrl == null) {
myUrl = new ArrayList<>();
}
return myUrl;
}
public ReindexJobParameters addUrl(@Nonnull String theUrl) {
Validate.notNull(theUrl);
getUrl().add(theUrl);
return this;
}
public class ReindexJobParameters extends PartitionedUrlListJobParameters {
}
View File
@ -21,30 +21,22 @@ package ca.uhn.fhir.batch2.jobs.reindex;
*/
import ca.uhn.fhir.batch2.api.IJobParametersValidator;
import ca.uhn.fhir.jpa.api.svc.IResourceReindexSvc;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import org.springframework.beans.factory.annotation.Autowired;
import ca.uhn.fhir.batch2.jobs.parameters.UrlListValidator;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Collections;
import java.util.List;
public class ReindexJobParametersValidator implements IJobParametersValidator<ReindexJobParameters> {
private final UrlListValidator myUrlListValidator;
@Autowired
private IResourceReindexSvc myResourceReindexSvc;
public ReindexJobParametersValidator(UrlListValidator theUrlListValidator) {
myUrlListValidator = theUrlListValidator;
}
@Nullable
@Override
public List<String> validate(@Nonnull ReindexJobParameters theParameters) {
if (theParameters.getUrl().isEmpty()) {
if (!myResourceReindexSvc.isAllResourceTypeSupported()) {
return Collections.singletonList("At least one type-specific search URL must be provided for " + ProviderConstants.OPERATION_REINDEX + " on this server");
}
}
return Collections.emptyList();
public List<String> validate(@NotNull ReindexJobParameters theParameters) {
return myUrlListValidator.validatePartitionedUrls(theParameters.getPartitionedUrls());
}
}
Some files were not shown because too many files have changed in this diff.