Merge branch 'master' into 2849_add_new_mdm_param
commit d467c1c79b
@@ -4,7 +4,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-fhir</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-deployable-pom</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-deployable-pom</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -42,8 +42,9 @@ public class ReadPartitionIdRequestDetails {
       myConditionalTargetOrNull = theConditionalTargetOrNull;
    }

-   public IBaseResource getConditionalTargetOrNull() {
-      return myConditionalTargetOrNull;
+   public static ReadPartitionIdRequestDetails forRead(String theResourceType, IIdType theId, boolean theIsVread) {
+      RestOperationTypeEnum op = theIsVread ? RestOperationTypeEnum.VREAD : RestOperationTypeEnum.READ;
+      return new ReadPartitionIdRequestDetails(theResourceType, op, theId.withResourceType(theResourceType), null, null);
    }

    public String getResourceType() {

@@ -62,9 +63,8 @@ public class ReadPartitionIdRequestDetails {
       return mySearchParams;
    }

-   public static ReadPartitionIdRequestDetails forRead(String theResourceType, IIdType theId, boolean theIsVread) {
-      RestOperationTypeEnum op = theIsVread ? RestOperationTypeEnum.VREAD : RestOperationTypeEnum.READ;
-      return new ReadPartitionIdRequestDetails(theResourceType, op, theId.withResourceType(theResourceType), null, null);
+   public IBaseResource getConditionalTargetOrNull() {
+      return myConditionalTargetOrNull;
    }

    public static ReadPartitionIdRequestDetails forSearchType(String theResourceType, Object theParams, IBaseResource theConditionalOperationTargetOrNull) {

@@ -21,6 +21,7 @@ package ca.uhn.fhir.interceptor.model;
  */

 import ca.uhn.fhir.model.api.IModelJson;
+import ca.uhn.fhir.util.JsonUtil;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;

@@ -155,6 +156,10 @@ public class RequestPartitionId implements IModelJson {
          .toHashCode();
    }

+   public String toJson() {
+      return JsonUtil.serializeOrInvalidRequest(this);
+   }
+
    @Nullable
    public Integer getFirstPartitionIdOrNull() {
       if (myPartitionIds != null) {

@@ -449,6 +449,15 @@ public class BundleUtil {
       return toListOfResourcesOfType(theContext, theBundle, IBaseResource.class);
    }

+   /**
+    * Extract all of ids of all the resources from a given bundle
+    */
+   public static List<String> toListOfResourceIds(FhirContext theContext, IBaseBundle theBundle) {
+      return toListOfResourcesOfType(theContext, theBundle, IBaseResource.class).stream()
+         .map(resource -> resource.getIdElement().getIdPart())
+         .collect(Collectors.toList());
+   }
+
    /**
     * Extract all of the resources of a given type from a given bundle
     */

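Note: a minimal usage sketch for the new BundleUtil helper. The bundle source shown here is hypothetical; only toListOfResourceIds itself comes from this commit.

    // Collect the bare id part ("123", not "Patient/123") of every resource in a bundle.
    FhirContext ctx = FhirContext.forR4();
    IBaseBundle bundle = myClient.search().forResource("Patient").returnBundle(Bundle.class).execute(); // hypothetical client call
    List<String> ids = BundleUtil.toListOfResourceIds(ctx, bundle);
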
@@ -20,7 +20,10 @@ package ca.uhn.fhir.util;
  * #L%
  */

+import ca.uhn.fhir.model.api.IModelJson;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.SerializationFeature;

@@ -82,4 +85,11 @@ public class JsonUtil {
       theWriter.append(serialize(theInput));
    }

+   public static String serializeOrInvalidRequest(IModelJson theJson) {
+      try {
+         return ourMapperNonPrettyPrint.writeValueAsString(theJson);
+      } catch (JsonProcessingException e) {
+         throw new InvalidRequestException("Failed to encode " + theJson.getClass(), e);
+      }
+   }
 }

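Note: a brief usage sketch; within this commit, RequestPartitionId.toJson() and RequestListJson.toJson() both delegate to this helper.

    // Any IModelJson object can be serialized this way; a JsonProcessingException surfaces as InvalidRequestException.
    String json = JsonUtil.serializeOrInvalidRequest(RequestPartitionId.allPartitions());
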
@@ -3,16 +3,16 @@
 <modelVersion>4.0.0</modelVersion>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-bom</artifactId>
-<version>5.5.0-PRE7-SNAPSHOT</version>
+<version>5.5.0-PRE8-SNAPSHOT</version>
 <packaging>pom</packaging>
 <name>HAPI FHIR BOM</name>

-<parent>
-  <groupId>ca.uhn.hapi.fhir</groupId>
-  <artifactId>hapi-deployable-pom</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
-  <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
-</parent>
+<parent>
+  <groupId>ca.uhn.hapi.fhir</groupId>
+  <artifactId>hapi-deployable-pom</artifactId>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
+  <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
+</parent>

 <dependencyManagement>
   <dependencies>

@@ -4,7 +4,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-deployable-pom</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -6,7 +6,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-fhir-cli</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../pom.xml</relativePath>
 </parent>

@@ -6,7 +6,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-deployable-pom</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../../hapi-deployable-pom</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-fhir</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-deployable-pom</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-deployable-pom</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-deployable-pom</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-fhir</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-deployable-pom</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -0,0 +1,9 @@
---
type: security
issue: 2835
title: "Addressed the following CVE report by bumping the minor version for Jetty in the root POM:
<ul>
<li>
[CVE-2021-34429](https://github.com/advisories/GHSA-vjv5-gp2w-65vm)
</li>
</ul>"

@@ -0,0 +1,5 @@
---
type: add
issue: 2845
title: "Added new `$reindex` operation with similar syntax to `$delete-expunge` that creates a spring-batch job to reindex selected resources.
`$mark-all-resources-for-reindexing` and `$perform-reindexing-pass` are now deprecated, and will likely be removed in a future release."

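Note: a hedged sketch of submitting the new $reindex operation from a HAPI generic client. The "url" parameter name is assumed by analogy with $delete-expunge and is not spelled out in this changelog entry.

    FhirContext ctx = FhirContext.forR4();
    IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

    Parameters inParams = new Parameters();
    inParams.addParameter().setName("url").setValue(new StringType("Observation?status=final"));

    // Kicks off the spring-batch reindex job for the matching resources.
    Parameters outParams = client.operation()
       .onServer()
       .named("$reindex")
       .withParameters(inParams)
       .execute();
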
@@ -11,7 +11,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-deployable-pom</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-deployable-pom</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -6,7 +6,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-fhir</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-deployable-pom</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -97,7 +97,8 @@ public class DaoConfig {
    private static final Integer DEFAULT_MAXIMUM_SEARCH_RESULT_COUNT_IN_TRANSACTION = null;
    private static final Integer DEFAULT_MAXIMUM_TRANSACTION_BUNDLE_SIZE = null;
    private static final Logger ourLog = LoggerFactory.getLogger(DaoConfig.class);
-   private static final int DEFAULT_EXPUNGE_BATCH_SIZE = 800;
+   public static final int DEFAULT_EXPUNGE_BATCH_SIZE = 800;
+   private static final int DEFAULT_REINDEX_BATCH_SIZE = 800;
    private static final int DEFAULT_MAXIMUM_DELETE_CONFLICT_COUNT = 60;
    /**
     * Child Configurations

@@ -163,6 +164,7 @@ public class DaoConfig {
    private boolean myExpungeEnabled;
    private boolean myDeleteExpungeEnabled;
    private int myExpungeBatchSize = DEFAULT_EXPUNGE_BATCH_SIZE;
+   private int myReindexBatchSize = DEFAULT_REINDEX_BATCH_SIZE;
    private int myReindexThreadCount;
    private int myExpungeThreadCount;
    private Set<String> myBundleTypesAllowedForStorage;

@@ -217,6 +219,12 @@ public class DaoConfig {
     * @since 5.2.0
     */
    private boolean myUseLegacySearchBuilder = false;
+
+   /**
+    * @since 5.5.0
+    */
+   private boolean myReindexEnabled = true;

    /**
     * update setter javadoc if default changes
     */

@@ -1646,6 +1654,38 @@ public class DaoConfig {
       myExpungeBatchSize = theExpungeBatchSize;
    }

+   /**
+    * The reindex batch size (default 800) determines the number of records reindexed in a single transaction.
+    */
+   public int getReindexBatchSize() {
+      return myReindexBatchSize;
+   }
+
+   /**
+    * The reindex batch size (default 800) determines the number of records reindexed in a single transaction.
+    */
+   public void setReindexBatchSize(int theReindexBatchSize) {
+      myReindexBatchSize = theReindexBatchSize;
+   }
+
+
+   /**
+    * If set to <code>false</code> (default is <code>true</code>), reindexing of resources will be disabled on this
+    * server.
+    */
+   public boolean isReindexEnabled() {
+      return myReindexEnabled;
+   }
+
+   /**
+    * If set to <code>false</code> (default is <code>true</code>), reindexing of resources will be disabled on this
+    * server.
+    */
+
+   public void setReindexEnabled(boolean theReindexEnabled) {
+      myReindexEnabled = theReindexEnabled;
+   }
+
    /**
     * Should resources be marked as needing reindexing when a
     * SearchParameter resource is added or changed. This should generally

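Note: a configuration sketch for the new DaoConfig knobs introduced above; the values shown are arbitrary examples, not recommended settings.

    DaoConfig daoConfig = new DaoConfig();
    daoConfig.setReindexEnabled(false);  // reindex jobs are enabled by default
    daoConfig.setReindexBatchSize(200);  // default is 800 (DEFAULT_REINDEX_BATCH_SIZE)
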
@@ -5,7 +5,7 @@
 <parent>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-deployable-pom</artifactId>
-  <version>5.5.0-PRE7-SNAPSHOT</version>
+  <version>5.5.0-PRE8-SNAPSHOT</version>
   <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -150,7 +150,6 @@
<artifactId>hapi-fhir-jpaserver-batch</artifactId>
<version>${project.version}</version>
</dependency>

<dependency>
  <groupId>net.ttddyy</groupId>
  <artifactId>datasource-proxy</artifactId>

@@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.batch;
 import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
 import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig;
 import ca.uhn.fhir.jpa.delete.job.DeleteExpungeJobConfig;
+import ca.uhn.fhir.jpa.reindex.job.ReindexEverythingJobConfig;
+import ca.uhn.fhir.jpa.reindex.job.ReindexJobConfig;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;

@@ -36,7 +38,9 @@ import java.util.Set;
    CommonBatchJobConfig.class,
    BulkExportJobConfig.class,
    BulkImportJobConfig.class,
-   DeleteExpungeJobConfig.class
+   DeleteExpungeJobConfig.class,
+   ReindexJobConfig.class,
+   ReindexEverythingJobConfig.class
 })
 public class BatchJobsConfig {

@@ -79,4 +83,15 @@ public class BatchJobsConfig {
     * Delete Expunge
     */
    public static final String DELETE_EXPUNGE_JOB_NAME = "deleteExpungeJob";
+
+   /**
+    * Reindex
+    */
+   public static final String REINDEX_JOB_NAME = "reindexJob";
+
+   /**
+    * Reindex Everything
+    */
+   public static final String REINDEX_EVERYTHING_JOB_NAME = "reindexEverythingJob";
+
 }

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.delete.job;
+package ca.uhn.fhir.jpa.batch.job;

 /*-
  * #%L

@@ -21,11 +21,10 @@ package ca.uhn.fhir.jpa.delete.job;
  */

 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.delete.model.PartitionedUrl;
-import ca.uhn.fhir.jpa.delete.model.RequestListJson;
+import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
+import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
 import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
 import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
-import ca.uhn.fhir.rest.server.provider.ProviderConstants;
 import org.springframework.batch.core.JobParameters;
 import org.springframework.batch.core.JobParametersInvalidException;
 import org.springframework.batch.core.JobParametersValidator;

@@ -35,11 +34,12 @@ import static ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidRe
 /**
  * This class will prevent a job from running any of the provided URLs are not valid on this server.
  */
-public class DeleteExpungeJobParameterValidator implements JobParametersValidator {
+public class MultiUrlJobParameterValidator implements JobParametersValidator {
+   public static String JOB_PARAM_OPERATION_NAME = "operation-name";
    private final MatchUrlService myMatchUrlService;
    private final DaoRegistry myDaoRegistry;

-   public DeleteExpungeJobParameterValidator(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
+   public MultiUrlJobParameterValidator(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
       myMatchUrlService = theMatchUrlService;
       myDaoRegistry = theDaoRegistry;
    }

@@ -54,13 +54,13 @@ public class DeleteExpungeJobParameterValidator implements JobParametersValidato
       for (PartitionedUrl partitionedUrl : requestListJson.getPartitionedUrls()) {
          String url = partitionedUrl.getUrl();
          try {
-            ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(url);
+            ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(url, partitionedUrl.getRequestPartitionId());
             String resourceName = resourceSearch.getResourceName();
             if (!myDaoRegistry.isResourceTypeSupported(resourceName)) {
                throw new JobParametersInvalidException("The resource type " + resourceName + " is not supported on this server.");
             }
          } catch (UnsupportedOperationException e) {
-            throw new JobParametersInvalidException("Failed to parse " + ProviderConstants.OPERATION_DELETE_EXPUNGE + " " + JOB_PARAM_REQUEST_LIST + " item " + url + ": " + e.getMessage());
+            throw new JobParametersInvalidException("Failed to parse " + theJobParameters.getString(JOB_PARAM_OPERATION_NAME) + " " + JOB_PARAM_REQUEST_LIST + " item " + url + ": " + e.getMessage());
          }
       }
    }

@@ -0,0 +1,57 @@
package ca.uhn.fhir.jpa.batch.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.context.annotation.Bean;

public class MultiUrlProcessorJobConfig {
   public static final int MINUTES_IN_FUTURE_TO_PROCESS_FROM = 1;

   @Bean
   public JobParametersValidator multiUrlProcessorParameterValidator(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
      return new MultiUrlJobParameterValidator(theMatchUrlService, theDaoRegistry);
   }

   @Bean
   @StepScope
   public SqlExecutorWriter sqlExecutorWriter() {
      return new SqlExecutorWriter();
   }

   @Bean
   @StepScope
   public PidReaderCounterListener pidCountRecorderListener() {
      return new PidReaderCounterListener();
   }

   @Bean
   @StepScope
   public ReverseCronologicalBatchResourcePidReader reverseCronologicalBatchResourcePidReader() {
      return new ReverseCronologicalBatchResourcePidReader();
   }
}

@@ -0,0 +1,72 @@
package ca.uhn.fhir.jpa.batch.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

public class PartitionedUrlValidator {
   @Autowired
   MatchUrlService myMatchUrlService;
   @Autowired
   IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
   @Autowired
   FhirContext myFhirContext;

   /**
    * This method will throw an exception if the user is not allowed to access the requested resource type on the partition determined by the request
    */

   public RequestListJson buildRequestListJson(RequestDetails theRequest, List<String> theUrlsToProcess) {
      List<PartitionedUrl> partitionedUrls = new ArrayList<>();
      for (String url : theUrlsToProcess) {
         ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(url);
         RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequest, resourceSearch.getResourceName(), resourceSearch.getSearchParameterMap(), null);
         partitionedUrls.add(new PartitionedUrl(url, requestPartitionId));
      }
      RequestListJson retval = new RequestListJson();
      retval.setPartitionedUrls(partitionedUrls);
      return retval;
   }

   public RequestPartitionId requestPartitionIdFromRequest(RequestDetails theRequest) {
      Set<String> allResourceNames = myFhirContext.getResourceTypes();
      SearchParameterMap map = SearchParameterMap.newSynchronous();
      // Verify that the user has access to every resource type on the server:
      for (String resourceName : allResourceNames) {
         myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequest, resourceName, map, null);
      }
      // Then return the partition for the Patient resource type. Note Patient was an arbitrary choice here.
      return myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequest, "Patient", map, null);
   }
}

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.delete.model;
+package ca.uhn.fhir.jpa.batch.job.model;

 /*-
  * #%L

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.delete.model;
+package ca.uhn.fhir.jpa.batch.job.model;

 /*-
  * #%L

@@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.delete.model;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.model.api.IModelJson;
-import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.util.JsonUtil;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;

@@ -60,13 +60,15 @@ public class RequestListJson implements IModelJson {
       }
    }

+   public String toJson() {
+      return JsonUtil.serializeOrInvalidRequest(this);
+   }
+
    @Override
    public String toString() {
-      try {
-         return ourObjectMapper.writeValueAsString(this);
-      } catch (JsonProcessingException e) {
-         throw new InvalidRequestException("Failed to encode " + RequestListJson.class, e);
-      }
+      return "RequestListJson{" +
+         "myPartitionedUrls=" + myPartitionedUrls +
+         '}';
    }

    public List<PartitionedUrl> getPartitionedUrls() {

@@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.batch.listener;
  */

 import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.core.annotation.AfterProcess;
+import org.springframework.batch.core.annotation.AfterRead;
 import org.springframework.batch.core.annotation.BeforeStep;

 import java.util.List;

@@ -40,8 +40,8 @@ public class PidReaderCounterListener {
       myStepExecution = stepExecution;
    }

-   @AfterProcess
-   public void afterProcess(List<Long> thePids, List<String> theSqlList) {
+   @AfterRead
+   public void afterRead(List<Long> thePids) {
       myTotalPidsProcessed += thePids.size();
       myStepExecution.getExecutionContext().putLong(RESOURCE_TOTAL_PROCESSED, myTotalPidsProcessed);
    }

@@ -0,0 +1,96 @@
package ca.uhn.fhir.jpa.batch.reader;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.function.Function;

public class BatchDateThresholdUpdater {
   private static final Logger ourLog = LoggerFactory.getLogger(BatchDateThresholdUpdater.class);

   private Function<Long, Date> myDateFromPid;

   public BatchDateThresholdUpdater() {
   }

   public BatchDateThresholdUpdater(Function<Long, Date> theDateFromPid) {
      myDateFromPid = theDateFromPid;
   }

   /**
    * This method is used by batch jobs that process resource pids by date in multiple passes. It's used to ensure
    * the same resource isn't processed twice. What it does is after a pass of processing pids, it sets
    * the threshold date for the next pass from the last resource on the list and collects all of the resources that have that date into a temporary cache
    * so that the caller can exclude those from the next pass.
    *
    * @param thePrevThreshold the date threshold from the previous pass
    * @param theAlreadyProcessedPidsWithThresholdDate the set to load pids into that have the new threshold
    * @param theProcessedPidsOrderedByDate the pids ordered by date (can be ascending or descending)
    * @return the new date threshold (can be the same as the old threshold if all pids on the list share the same date)
    */

   public Date updateThresholdAndCache(Date thePrevThreshold, Set<Long> theAlreadyProcessedPidsWithThresholdDate, List<Long> theProcessedPidsOrderedByDate) {
      if (theProcessedPidsOrderedByDate.isEmpty()) {
         return thePrevThreshold;
      }

      // Adjust the low threshold to be the last resource in the batch we found
      Long pidOfLatestResourceInBatch = theProcessedPidsOrderedByDate.get(theProcessedPidsOrderedByDate.size() - 1);
      Date latestUpdatedDate = myDateFromPid.apply(pidOfLatestResourceInBatch);

      // The latest date has changed, create a new cache to store pids with that date
      if (thePrevThreshold != latestUpdatedDate) {
         theAlreadyProcessedPidsWithThresholdDate.clear();
      }
      theAlreadyProcessedPidsWithThresholdDate.add(pidOfLatestResourceInBatch);

      Date newThreshold = latestUpdatedDate;
      if (theProcessedPidsOrderedByDate.size() <= 1) {
         return newThreshold;
      }

      // There is more than one resource in this batch, add any others with the same date. Assume the list is ordered by date.
      for (int index = theProcessedPidsOrderedByDate.size() - 2; index >= 0; --index) {
         Long pid = theProcessedPidsOrderedByDate.get(index);
         Date newDate = myDateFromPid.apply(pid);
         if (!latestUpdatedDate.equals(newDate)) {
            break;
         }
         theAlreadyProcessedPidsWithThresholdDate.add(pid);
      }

      return newThreshold;
   }

   /**
    * @param theDateFromPid this is a Function to extract a date from a resource id
    * @return
    */
   public BatchDateThresholdUpdater setDateFromPid(Function<Long, Date> theDateFromPid) {
      myDateFromPid = theDateFromPid;
      return this;
   }
}

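Note: a hedged usage sketch for BatchDateThresholdUpdater; the pid-to-date map stands in for a real lookup such as CronologicalBatchAllResourcePidReader's dateFromPid.

    Map<Long, Date> updatedDateByPid = new HashMap<>(); // hypothetical source of "last updated" dates
    BatchDateThresholdUpdater updater = new BatchDateThresholdUpdater(updatedDateByPid::get);

    Set<Long> pidsSeenAtThreshold = new HashSet<>();
    List<Long> batchOrderedByDate = Arrays.asList(1L, 2L, 3L); // pids returned by the last pass, ordered by date
    Date newThreshold = updater.updateThresholdAndCache(new Date(0), pidsSeenAtThreshold, batchOrderedByDate);
    // Next pass: search from newThreshold and skip everything in pidsSeenAtThreshold.
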
@@ -0,0 +1,65 @@
package ca.uhn.fhir.jpa.batch.reader;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import org.springframework.beans.factory.annotation.Autowired;

import javax.annotation.Nonnull;
import java.util.UUID;

/**
 * This service is used by batch processes to search resources
 */
public class BatchResourceSearcher {
   @Autowired
   private SearchBuilderFactory mySearchBuilderFactory;
   @Autowired
   private DaoRegistry myDaoRegistry;

   public IResultIterator performSearch(ResourceSearch theResourceSearch, Integer theBatchSize) {
      String resourceName = theResourceSearch.getResourceName();
      RequestPartitionId requestPartitionId = theResourceSearch.getRequestPartitionId();

      IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(resourceName);
      final ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(dao, resourceName, theResourceSearch.getResourceType());
      sb.setFetchSize(theBatchSize);
      SystemRequestDetails requestDetails = buildSystemRequestDetails(requestPartitionId);
      SearchRuntimeDetails searchRuntimeDetails = new SearchRuntimeDetails(requestDetails, UUID.randomUUID().toString());
      IResultIterator resultIter = sb.createQuery(theResourceSearch.getSearchParameterMap(), searchRuntimeDetails, requestDetails, requestPartitionId);
      return resultIter;
   }

   @Nonnull
   private SystemRequestDetails buildSystemRequestDetails(RequestPartitionId theRequestPartitionId) {
      SystemRequestDetails retval = new SystemRequestDetails();
      retval.setRequestPartitionId(theRequestPartitionId);
      return retval;
   }
}

@@ -0,0 +1,163 @@
package ca.uhn.fhir.jpa.batch.reader;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemStream;
import org.springframework.batch.item.ItemStreamException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * This Spring Batch reader takes 3 parameters:
 * {@link #JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search.
 * {@link #JOB_PARAM_START_TIME}: The latest timestamp of resources to search for
 * {@link #JOB_PARAM_REQUEST_PARTITION}: (optional) The partition of resources to read
 * <p>
 * The reader will return at most {@link #JOB_PARAM_BATCH_SIZE} pids every time it is called, or null
 * once no more matching resources are available. It returns the resources in reverse chronological order
 * appended with "." and the index number of the url list item it has gotten up to. This is to permit
 * restarting jobs that use this reader so it can pick up where it left off.
 */
public class CronologicalBatchAllResourcePidReader implements ItemReader<List<Long>>, ItemStream {
   public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
   public static final String JOB_PARAM_START_TIME = "start-time";
   public static final String JOB_PARAM_REQUEST_PARTITION = "request-partition";
   public static final String CURRENT_THRESHOLD_LOW = "current.threshold-low";

   private static final Logger ourLog = LoggerFactory.getLogger(CronologicalBatchAllResourcePidReader.class);
   private static final Date BEGINNING_OF_TIME = new Date(0);

   @Autowired
   private IResourceTableDao myResourceTableDao;
   @Autowired
   private DaoConfig myDaoConfig;

   private Integer myBatchSize;
   private Date myThresholdLow = BEGINNING_OF_TIME;
   private final BatchDateThresholdUpdater myBatchDateThresholdUpdater = new BatchDateThresholdUpdater(this::dateFromPid);
   private final Set<Long> myAlreadyProcessedPidsWithLowDate = new HashSet<>();
   private Date myStartTime;
   private RequestPartitionId myRequestPartitionId;

   @Autowired
   public void setBatchSize(@Value("#{jobParameters['" + JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
      myBatchSize = theBatchSize;
   }

   @Autowired
   public void setStartTime(@Value("#{jobParameters['" + JOB_PARAM_START_TIME + "']}") Date theStartTime) {
      myStartTime = theStartTime;
   }

   public static JobParameters buildJobParameters(Integer theBatchSize, RequestPartitionId theRequestPartitionId) {
      Map<String, JobParameter> map = new HashMap<>();
      map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_REQUEST_PARTITION, new JobParameter(theRequestPartitionId.toJson()));
      map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MultiUrlProcessorJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
      if (theBatchSize != null) {
         map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
      }
      JobParameters parameters = new JobParameters(map);
      return parameters;
   }

   @Override
   public List<Long> read() throws Exception {
      List<Long> nextBatch = getNextBatch();
      return nextBatch.isEmpty() ? null : nextBatch;
   }

   private Date dateFromPid(Long thePid) {
      ResourceTable entity = myResourceTableDao.findById(thePid).orElseThrow(IllegalStateException::new);
      return entity.getUpdatedDate();
   }

   @Override
   public void open(ExecutionContext executionContext) throws ItemStreamException {
      if (myBatchSize == null) {
         myBatchSize = myDaoConfig.getExpungeBatchSize();
      }
      if (executionContext.containsKey(CURRENT_THRESHOLD_LOW)) {
         myThresholdLow = new Date(executionContext.getLong(CURRENT_THRESHOLD_LOW));
      }
   }

   @Override
   public void update(ExecutionContext executionContext) throws ItemStreamException {
      executionContext.putLong(CURRENT_THRESHOLD_LOW, myThresholdLow.getTime());
   }

   @Override
   public void close() throws ItemStreamException {
   }

   private List<Long> getNextBatch() {
      PageRequest page = PageRequest.of(0, myBatchSize);
      List<Long> retval = new ArrayList<>();
      Slice<Long> slice;
      do {
         if (myRequestPartitionId == null || myRequestPartitionId.isAllPartitions()) {
            slice = myResourceTableDao.findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(page, myThresholdLow, myStartTime);
         } else {
            slice = myResourceTableDao.findIdsOfPartitionedResourcesWithinUpdatedRangeOrderedFromOldest(page, myThresholdLow, myStartTime, myRequestPartitionId.getFirstPartitionIdOrNull());
         }
         retval.addAll(slice.getContent());
         retval.removeAll(myAlreadyProcessedPidsWithLowDate);
         page = page.next();
      } while (retval.size() < myBatchSize && slice.hasNext());

      if (ourLog.isDebugEnabled()) {
         ourLog.debug("Results: {}", retval);
      }
      myThresholdLow = myBatchDateThresholdUpdater.updateThresholdAndCache(myThresholdLow, myAlreadyProcessedPidsWithLowDate, retval);
      return retval;
   }

   @Autowired
   public void setRequestPartitionId(@Value("#{jobParameters['" + JOB_PARAM_REQUEST_PARTITION + "']}") String theRequestPartitionIdJson) throws JsonProcessingException {
      if (theRequestPartitionIdJson == null) {
         return;
      }
      myRequestPartitionId = RequestPartitionId.fromJson(theRequestPartitionIdJson);
   }
}

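Note: a hedged sketch of building parameters for a job that uses this reader; the batch size of 800 is arbitrary and RequestPartitionId.allPartitions() is standard HAPI API rather than part of this commit.

    JobParameters params = CronologicalBatchAllResourcePidReader.buildJobParameters(800, RequestPartitionId.allPartitions());
    // The parameters carry the partition (as JSON), a start time one minute in the future,
    // and the optional batch size - exactly the three job parameters the reader consumes.
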
@@ -21,12 +21,15 @@ package ca.uhn.fhir.jpa.batch.reader;
  */

 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
-import ca.uhn.fhir.jpa.delete.model.PartitionedUrl;
-import ca.uhn.fhir.jpa.delete.model.RequestListJson;
-import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
+import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
+import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
+import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
+import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
+import ca.uhn.fhir.jpa.dao.IResultIterator;
 import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
 import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;

@@ -35,9 +38,12 @@ import ca.uhn.fhir.rest.api.SortOrderEnum;
 import ca.uhn.fhir.rest.api.SortSpec;
 import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
 import ca.uhn.fhir.rest.param.DateRangeParam;
+import org.apache.commons.lang3.time.DateUtils;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.batch.core.JobParameter;
+import org.springframework.batch.core.JobParameters;
 import org.springframework.batch.item.ExecutionContext;
 import org.springframework.batch.item.ItemReader;
 import org.springframework.batch.item.ItemStream;

@@ -46,10 +52,14 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Value;

 import javax.annotation.Nonnull;
+import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;

 /**

@@ -65,6 +75,7 @@ import java.util.stream.Collectors;
  * restarting jobs that use this reader so it can pick up where it left off.
  */
 public class ReverseCronologicalBatchResourcePidReader implements ItemReader<List<Long>>, ItemStream {
+   private static final Logger ourLog = LoggerFactory.getLogger(ReverseCronologicalBatchResourcePidReader.class);

    public static final String JOB_PARAM_REQUEST_LIST = "url-list";
    public static final String JOB_PARAM_BATCH_SIZE = "batch-size";

@@ -72,7 +83,6 @@ public class ReverseCronologicalBatchResourcePidReader implements ItemReader<Lis

    public static final String CURRENT_URL_INDEX = "current.url-index";
    public static final String CURRENT_THRESHOLD_HIGH = "current.threshold-high";
-   private static final Logger ourLog = LoggerFactory.getLogger(ReverseCronologicalBatchResourcePidReader.class);

    @Autowired
    private FhirContext myFhirContext;

@@ -81,11 +91,15 @@ public class ReverseCronologicalBatchResourcePidReader implements ItemReader<Lis
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private DaoConfig myDaoConfig;
private BatchResourceSearcher myBatchResourceSearcher;

private final BatchDateThresholdUpdater myBatchDateThresholdUpdater = new BatchDateThresholdUpdater();

private List<PartitionedUrl> myPartitionedUrls;
private Integer myBatchSize;
private final Map<Integer, Date> myThresholdHighByUrlIndex = new HashMap<>();
private final Map<Integer, Set<Long>> myAlreadyProcessedPidsWithHighDate = new HashMap<>();

private int myUrlIndex = 0;
private Date myStartTime;

|
|||
@Override
|
||||
public List<Long> read() throws Exception {
|
||||
while (myUrlIndex < myPartitionedUrls.size()) {
|
||||
List<Long> nextBatch;
|
||||
nextBatch = getNextBatch();
|
||||
List<Long> nextBatch = getNextBatch();
|
||||
if (nextBatch.isEmpty()) {
|
||||
++myUrlIndex;
|
||||
continue;
|
||||
|
@@ -121,51 +134,53 @@ public class ReverseCronologicalBatchResourcePidReader implements ItemReader<Lis
    }

    private List<Long> getNextBatch() {
-      ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(myPartitionedUrls.get(myUrlIndex).getUrl());
-      SearchParameterMap map = buildSearchParameterMap(resourceSearch);
+      RequestPartitionId requestPartitionId = myPartitionedUrls.get(myUrlIndex).getRequestPartitionId();
+      ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(myPartitionedUrls.get(myUrlIndex).getUrl(), requestPartitionId);
+      addDateCountAndSortToSearch(resourceSearch);

       // Perform the search
-      IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(resourceSearch.getResourceName());
-      List<Long> retval = dao.searchForIds(map, buildSystemRequestDetails()).stream()
-         .map(ResourcePersistentId::getIdAsLong)
-         .collect(Collectors.toList());
+      IResultIterator resultIter = myBatchResourceSearcher.performSearch(resourceSearch, myBatchSize);
+      Set<Long> newPids = new LinkedHashSet<>();
+      Set<Long> alreadySeenPids = myAlreadyProcessedPidsWithHighDate.computeIfAbsent(myUrlIndex, i -> new HashSet<>());
+
+      do {
+         List<Long> pids = resultIter.getNextResultBatch(myBatchSize).stream().map(ResourcePersistentId::getIdAsLong).collect(Collectors.toList());
+         newPids.addAll(pids);
+         newPids.removeAll(alreadySeenPids);
+      } while (newPids.size() < myBatchSize && resultIter.hasNext());

       if (ourLog.isDebugEnabled()) {
-         ourLog.debug("Search for {}{} returned {} results", resourceSearch.getResourceName(), map.toNormalizedQueryString(myFhirContext), retval.size());
-         ourLog.debug("Results: {}", retval);
+         ourLog.debug("Search for {}{} returned {} results", resourceSearch.getResourceName(), resourceSearch.getSearchParameterMap().toNormalizedQueryString(myFhirContext), newPids.size());
+         ourLog.debug("Results: {}", newPids);
       }

-      if (!retval.isEmpty()) {
-         // Adjust the high threshold to be the earliest resource in the batch we found
-         Long pidOfOldestResourceInBatch = retval.get(retval.size() - 1);
-         IBaseResource earliestResource = dao.readByPid(new ResourcePersistentId(pidOfOldestResourceInBatch));
-         myThresholdHighByUrlIndex.put(myUrlIndex, earliestResource.getMeta().getLastUpdated());
-      }
+      setDateFromPidFunction(resourceSearch);
+
+      List<Long> retval = new ArrayList<>(newPids);
+      Date newThreshold = myBatchDateThresholdUpdater.updateThresholdAndCache(myThresholdHighByUrlIndex.get(myUrlIndex), myAlreadyProcessedPidsWithHighDate.get(myUrlIndex), retval);
+      myThresholdHighByUrlIndex.put(myUrlIndex, newThreshold);

       return retval;
    }

-   @Nonnull
-   private SearchParameterMap buildSearchParameterMap(ResourceSearch resourceSearch) {
+   private void setDateFromPidFunction(ResourceSearch resourceSearch) {
+      final IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceSearch.getResourceName());
+
+      myBatchDateThresholdUpdater.setDateFromPid(pid -> {
+         IBaseResource oldestResource = dao.readByPid(new ResourcePersistentId(pid));
+         return oldestResource.getMeta().getLastUpdated();
+      });
+   }
+
+   private void addDateCountAndSortToSearch(ResourceSearch resourceSearch) {
       SearchParameterMap map = resourceSearch.getSearchParameterMap();
       map.setLastUpdated(new DateRangeParam().setUpperBoundInclusive(myThresholdHighByUrlIndex.get(myUrlIndex)));
       map.setLoadSynchronousUpTo(myBatchSize);
       map.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.DESC));
-      return map;
    }

-   @Nonnull
-   private SystemRequestDetails buildSystemRequestDetails() {
-      SystemRequestDetails retval = new SystemRequestDetails();
-      retval.setRequestPartitionId(myPartitionedUrls.get(myUrlIndex).getRequestPartitionId());
-      return retval;
-   }

    @Override
    public void open(ExecutionContext executionContext) throws ItemStreamException {
       if (myBatchSize == null) {
          myBatchSize = myDaoConfig.getExpungeBatchSize();
       }
       if (executionContext.containsKey(CURRENT_URL_INDEX)) {
          myUrlIndex = new Long(executionContext.getLong(CURRENT_URL_INDEX)).intValue();
       }

@@ -197,4 +212,17 @@ public class ReverseCronologicalBatchResourcePidReader implements ItemReader<Lis
    @Override
    public void close() throws ItemStreamException {
    }
+
+   @Nonnull
+   public static JobParameters buildJobParameters(String theOperationName, Integer theBatchSize, RequestListJson theRequestListJson) {
+      Map<String, JobParameter> map = new HashMap<>();
+      map.put(MultiUrlJobParameterValidator.JOB_PARAM_OPERATION_NAME, new JobParameter(theOperationName));
+      map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST, new JobParameter(theRequestListJson.toJson()));
+      map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MultiUrlProcessorJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
+      if (theBatchSize != null) {
+         map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
+      }
+      JobParameters parameters = new JobParameters(map);
+      return parameters;
+   }
 }

@@ -54,6 +54,8 @@ public class SqlExecutorWriter implements ItemWriter<List<String>> {

    @Override
    public void write(List<? extends List<String>> theSqlLists) throws Exception {
+
+      // Note that since our chunk size is 1, there will always be exactly one list
       for (List<String> sqlList : theSqlLists) {
          ourLog.info("Executing {} sql commands", sqlList.size());
          for (String sql : sqlList) {

@@ -58,8 +58,8 @@ public class ResourceToFileWriter implements ItemWriter<List<IBaseResource>> {
    @Autowired
    private BulkExportDaoSvc myBulkExportDaoSvc;

-   private ByteArrayOutputStream myOutputStream;
-   private OutputStreamWriter myWriter;
+   private final ByteArrayOutputStream myOutputStream;
+   private final OutputStreamWriter myWriter;
    private IParser myParser;

    @Value("#{stepExecutionContext['bulkExportCollectionEntityId']}")

@@ -15,6 +15,8 @@ import ca.uhn.fhir.jpa.batch.BatchConstants;
 import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
 import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
 import ca.uhn.fhir.jpa.batch.config.NonPersistedBatchConfigurer;
+import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
+import ca.uhn.fhir.jpa.batch.reader.BatchResourceSearcher;
 import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl;
 import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;
 import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor;

@@ -38,7 +40,6 @@ import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService;
 import ca.uhn.fhir.jpa.dao.expunge.ExpungeOperation;
 import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
 import ca.uhn.fhir.jpa.dao.expunge.IResourceExpungeService;
-import ca.uhn.fhir.jpa.dao.expunge.PartitionRunner;
 import ca.uhn.fhir.jpa.dao.expunge.ResourceExpungeService;
 import ca.uhn.fhir.jpa.dao.expunge.ResourceTableFKProvider;
 import ca.uhn.fhir.jpa.dao.index.DaoResourceLinkResolver;

@@ -83,6 +84,7 @@ import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc;
 import ca.uhn.fhir.jpa.provider.DiffProvider;
 import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider;
 import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
+import ca.uhn.fhir.jpa.reindex.ReindexJobSubmitterImpl;
 import ca.uhn.fhir.jpa.sched.AutowiringSpringBeanJobFactory;
 import ca.uhn.fhir.jpa.sched.HapiSchedulerServiceImpl;
 import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;

@@ -120,6 +122,7 @@ import ca.uhn.fhir.jpa.search.cache.DatabaseSearchResultCacheSvcImpl;
 import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
 import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
 import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
+import ca.uhn.fhir.jpa.search.reindex.ResourceReindexer;
 import ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl;
 import ca.uhn.fhir.jpa.search.warm.CacheWarmingSvcImpl;
 import ca.uhn.fhir.jpa.search.warm.ICacheWarmingSvc;

@@ -135,10 +138,12 @@ import ca.uhn.fhir.jpa.validation.JpaResourceLoader;
 import ca.uhn.fhir.jpa.validation.ValidationSettings;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
+import ca.uhn.fhir.rest.api.server.storage.IReindexJobSubmitter;
 import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor;
 import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
 import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor;
 import ca.uhn.fhir.rest.server.provider.DeleteExpungeProvider;
+import ca.uhn.fhir.rest.server.provider.ReindexProvider;
 import org.hibernate.jpa.HibernatePersistenceProvider;
 import org.hl7.fhir.common.hapi.validation.support.UnknownCodeSystemWarningValidationSupport;
 import org.hl7.fhir.instance.model.api.IBaseResource;

@@ -416,6 +421,16 @@ public abstract class BaseConfig {
       return new ResourceReindexingSvcImpl();
    }

+   @Bean
+   public ResourceReindexer resourceReindexer(FhirContext theFhirContext) {
+      return new ResourceReindexer(theFhirContext);
+   }
+
+   @Bean
+   public BatchResourceSearcher myBatchResourceSearcher() {
+      return new BatchResourceSearcher();
+   }
+
    @Bean
    public IStaleSearchDeletingSvc staleSearchDeletingSvc() {
       return new StaleSearchDeletingSvcImpl();

@@ -534,16 +549,34 @@ public abstract class BaseConfig {

    @Bean
    @Lazy
-   public IDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter() {
+   public IDeleteExpungeJobSubmitter deleteExpungeJobSubmitter() {
       return new DeleteExpungeJobSubmitterImpl();
    }

+   @Bean
+   @Lazy
+   public PartitionedUrlValidator partitionedUrlValidator() {
+      return new PartitionedUrlValidator();
+   }
+
+   @Bean
+   @Lazy
+   public IReindexJobSubmitter myReindexJobSubmitter() {
+      return new ReindexJobSubmitterImpl();
+   }
+
    @Bean
    @Lazy
    public DeleteExpungeProvider deleteExpungeProvider(FhirContext theFhirContext, IDeleteExpungeJobSubmitter theDeleteExpungeJobSubmitter) {
       return new DeleteExpungeProvider(theFhirContext, theDeleteExpungeJobSubmitter);
    }

+   @Bean
+   @Lazy
+   public ReindexProvider reindexProvider(FhirContext theFhirContext, IReindexJobSubmitter theReindexJobSubmitter) {
+      return new ReindexProvider(theFhirContext, theReindexJobSubmitter);
+   }
+
    @Bean
    @Lazy
    public IBulkDataImportSvc bulkDataImportSvc() {

@ -863,11 +896,6 @@ public abstract class BaseConfig {
|
|||
return new DeleteExpungeService();
|
||||
}
|
||||
|
||||
@Bean
|
||||
public PartitionRunner partitionRunner(DaoConfig theDaoConfig) {
|
||||
return new PartitionRunner(theDaoConfig);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ResourceTableFKProvider resourceTableFKProvider() {
|
||||
return new ResourceTableFKProvider();
|
||||
|
|
|
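For orientation, a minimal sketch of how the two new lazy provider beans could be surfaced on a server. This is not part of the commit; it assumes a Spring-managed RestfulServer subclass, and the field names are illustrative.

// Hypothetical wiring example (assumption, not in this change): expose the new
// delete-expunge and reindex operations by registering the provider beans.
@Autowired
private DeleteExpungeProvider myDeleteExpungeProvider;
@Autowired
private ReindexProvider myReindexProvider;

@Override
protected void initialize() {
// RestfulServer#registerProvider makes the providers' operations visible to clients
registerProvider(myDeleteExpungeProvider);
registerProvider(myReindexProvider);
}
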
@@ -126,8 +126,8 @@ import org.springframework.transaction.support.TransactionSynchronizationAdapter
import org.springframework.transaction.support.TransactionSynchronizationManager;
import org.springframework.transaction.support.TransactionTemplate;

import javax.annotation.Nullable;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import javax.persistence.NoResultException;
import javax.persistence.TypedQuery;

@@ -588,7 +588,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

List<String> urlsToDeleteExpunge = Collections.singletonList(theUrl);
try {
JobExecution jobExecution = myDeleteExpungeJobSubmitter.submitJob(getConfig().getExpungeBatchSize(), theRequest, urlsToDeleteExpunge);
JobExecution jobExecution = myDeleteExpungeJobSubmitter.submitJob(getConfig().getExpungeBatchSize(), urlsToDeleteExpunge, theRequest);
return new DeleteMethodOutcome(createInfoOperationOutcome("Delete job submitted with id " + jobExecution.getId()));
} catch (JobParametersInvalidException e) {
throw new InvalidRequestException("Invalid Delete Expunge Request: " + e.getMessage(), e);

@@ -27,12 +27,13 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.api.server.*;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.param.ReferenceParam;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.ArrayList;

@@ -7,7 +7,6 @@ import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Optional;

@@ -32,6 +31,7 @@ import java.util.Optional;
* #L%
*/

@Deprecated
public interface IResourceReindexJobDao extends JpaRepository<ResourceReindexJobEntity, Long> {

@Modifying

@@ -54,6 +54,10 @@ public interface IResourceTableDao extends JpaRepository<ResourceTable, Long> {
@Query("SELECT t.myId FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high ORDER BY t.myUpdated ASC")
Slice<Long> findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(Pageable thePage, @Param("low") Date theLow, @Param("high") Date theHigh);

// TODO in the future, consider sorting by pid as well so batch jobs process in the same order across restarts
@Query("SELECT t.myId FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high AND t.myPartitionIdValue = :partition_id ORDER BY t.myUpdated ASC")
Slice<Long> findIdsOfPartitionedResourcesWithinUpdatedRangeOrderedFromOldest(Pageable thePage, @Param("low") Date theLow, @Param("high") Date theHigh, @Param("partition_id") Integer theRequestPartitionId);

@Query("SELECT t.myId FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high AND t.myResourceType = :restype ORDER BY t.myUpdated ASC")
Slice<Long> findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(Pageable thePage, @Param("restype") String theResourceType, @Param("low") Date theLow, @Param("high") Date theHigh);

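A minimal sketch of how the new partition-aware query might be called, assuming a Spring Data Pageable; the variable names and window are illustrative, not part of the commit.

// Hypothetical caller (assumption): page through PIDs of resources updated in the
// last 30 days, oldest first, restricted to partition 1.
Pageable page = PageRequest.of(0, 1000);
Date low = Date.from(Instant.now().minus(30, ChronoUnit.DAYS));
Date high = new Date();
Slice<Long> pids = myResourceTableDao.findIdsOfPartitionedResourcesWithinUpdatedRangeOrderedFromOldest(page, low, high, 1);
pids.forEach(pid -> ourLog.info("Found resource PID {}", pid));
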
@@ -31,8 +31,8 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.dstu3.model.Observation;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;

import javax.servlet.http.HttpServletResponse;

@@ -29,6 +29,7 @@ import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.delete.job.DeleteExpungeProcessor;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;

@@ -69,8 +70,6 @@ public class DeleteExpungeService {
@Autowired
private FhirContext myFhirContext;
@Autowired
private PartitionRunner myPartitionRunner;
@Autowired
private ResourceTableFKProvider myResourceTableFKProvider;
@Autowired
private IResourceLinkDao myResourceLinkDao;

@@ -99,7 +98,8 @@ public class DeleteExpungeService {
ourLog.info("Expunging all records linking to {} resources...", thePids.getNumber());
AtomicLong expungedEntitiesCount = new AtomicLong();
AtomicLong expungedResourcesCount = new AtomicLong();
myPartitionRunner.runInPartitionedThreads(thePids, pidChunk -> deleteInTransaction(theResourceName, pidChunk, expungedResourcesCount, expungedEntitiesCount, theRequest));
PartitionRunner partitionRunner = new PartitionRunner(DeleteExpungeProcessor.PROCESS_NAME, DeleteExpungeProcessor.THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount());
partitionRunner.runInPartitionedThreads(thePids, pidChunk -> deleteInTransaction(theResourceName, pidChunk, expungedResourcesCount, expungedEntitiesCount, theRequest));
ourLog.info("Expunged a total of {} records", expungedEntitiesCount);

IBaseOperationOutcome oo;

@@ -131,7 +131,8 @@ public class DeleteExpungeService {
}

List<ResourceLink> conflictResourceLinks = Collections.synchronizedList(new ArrayList<>());
myPartitionRunner.runInPartitionedThreads(theAllTargetPids, someTargetPids -> findResourceLinksWithTargetPidIn(theAllTargetPids.getContent(), someTargetPids, conflictResourceLinks));
PartitionRunner partitionRunner = new PartitionRunner(DeleteExpungeProcessor.PROCESS_NAME, DeleteExpungeProcessor.THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount());
partitionRunner.runInPartitionedThreads(theAllTargetPids, someTargetPids -> findResourceLinksWithTargetPidIn(theAllTargetPids.getContent(), someTargetPids, conflictResourceLinks));

if (conflictResourceLinks.isEmpty()) {
return;

@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.dao.expunge;
* #L%
*/

import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
import ca.uhn.fhir.rest.api.server.RequestDetails;

@@ -38,13 +38,13 @@ import java.util.concurrent.atomic.AtomicInteger;
@Scope("prototype")
public class ExpungeOperation implements Callable<ExpungeOutcome> {
private static final Logger ourLog = LoggerFactory.getLogger(ExpungeService.class);
public static final String PROCESS_NAME = "Expunging";
public static final String THREAD_PREFIX = "expunge";

@Autowired
private IResourceExpungeService myExpungeDaoService;
@Autowired
private PartitionRunner myPartitionRunner;
@Autowired
protected IInterceptorBroadcaster myInterceptorBroadcaster;
private DaoConfig myDaoConfig;

private final String myResourceName;
private final Long myResourceId;

@@ -113,15 +113,19 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
private void expungeOldVersions() {
Slice<Long> historicalIds = findHistoricalVersionsOfNonDeletedResources();

myPartitionRunner.runInPartitionedThreads(historicalIds, partition -> myExpungeDaoService.expungeHistoricalVersions(myRequestDetails, partition, myRemainingCount));
getPartitionRunner().runInPartitionedThreads(historicalIds, partition -> myExpungeDaoService.expungeHistoricalVersions(myRequestDetails, partition, myRemainingCount));
}

private PartitionRunner getPartitionRunner() {
return new PartitionRunner(PROCESS_NAME, THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount());
}

private void deleteCurrentVersionsOfDeletedResources(Slice<Long> theResourceIds) {
myPartitionRunner.runInPartitionedThreads(theResourceIds, partition -> myExpungeDaoService.expungeCurrentVersionOfResources(myRequestDetails, partition, myRemainingCount));
getPartitionRunner().runInPartitionedThreads(theResourceIds, partition -> myExpungeDaoService.expungeCurrentVersionOfResources(myRequestDetails, partition, myRemainingCount));
}

private void deleteHistoricalVersions(Slice<Long> theResourceIds) {
myPartitionRunner.runInPartitionedThreads(theResourceIds, partition -> myExpungeDaoService.expungeHistoricalVersionsOfIds(myRequestDetails, partition, myRemainingCount));
getPartitionRunner().runInPartitionedThreads(theResourceIds, partition -> myExpungeDaoService.expungeHistoricalVersionsOfIds(myRequestDetails, partition, myRemainingCount));
}

private ExpungeOutcome expungeOutcome() {

@@ -20,18 +20,13 @@ package ca.uhn.fhir.jpa.dao.expunge;
* #L%
*/

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.StopWatch;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Slice;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import java.util.ArrayList;
import java.util.List;

@@ -46,16 +41,20 @@ import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

@Service
public class PartitionRunner {
private static final Logger ourLog = LoggerFactory.getLogger(ExpungeService.class);
private static final Logger ourLog = LoggerFactory.getLogger(PartitionRunner.class);
private static final int MAX_POOL_SIZE = 1000;

private final DaoConfig myDaoConfig;
private final String myProcessName;
private final String myThreadPrefix;
private final int myBatchSize;
private final int myThreadCount;

@Autowired
public PartitionRunner(DaoConfig theDaoConfig) {
myDaoConfig = theDaoConfig;
public PartitionRunner(String theProcessName, String theThreadPrefix, int theBatchSize, int theThreadCount) {
myProcessName = theProcessName;
myThreadPrefix = theThreadPrefix;
myBatchSize = theBatchSize;
myThreadCount = theThreadCount;
}

public void runInPartitionedThreads(Slice<Long> theResourceIds, Consumer<List<Long>> partitionConsumer) {

@@ -70,7 +69,7 @@ public class PartitionRunner {
callableTasks.get(0).call();
return;
} catch (Exception e) {
ourLog.error("Error while expunging.", e);
ourLog.error("Error while " + myProcessName, e);
throw new InternalErrorException(e);
}
}

@@ -83,10 +82,10 @@ public class PartitionRunner {
future.get();
}
} catch (InterruptedException e) {
ourLog.error("Interrupted while expunging.", e);
ourLog.error("Interrupted while " + myProcessName, e);
Thread.currentThread().interrupt();
} catch (ExecutionException e) {
ourLog.error("Error while expunging.", e);
ourLog.error("Error while " + myProcessName, e);
throw new InternalErrorException(e);
} finally {
executorService.shutdown();

@@ -96,12 +95,13 @@ public class PartitionRunner {
private List<Callable<Void>> buildCallableTasks(Slice<Long> theResourceIds, Consumer<List<Long>> partitionConsumer) {
List<Callable<Void>> retval = new ArrayList<>();

List<List<Long>> partitions = Lists.partition(theResourceIds.getContent(), myDaoConfig.getExpungeBatchSize());
ourLog.info("Splitting batch job of {} entries into chunks of {}", theResourceIds.getContent().size(), myBatchSize);
List<List<Long>> partitions = Lists.partition(theResourceIds.getContent(), myBatchSize);

for (List<Long> nextPartition : partitions) {
if (nextPartition.size() > 0) {
Callable<Void> callableTask = () -> {
ourLog.info("Expunging any search results pointing to {} resources", nextPartition.size());
ourLog.info(myProcessName + " {} resources", nextPartition.size());
partitionConsumer.accept(nextPartition);
return null;
};

@@ -113,24 +113,24 @@ public class PartitionRunner {
}

private ExecutorService buildExecutor(int numberOfTasks) {
int threadCount = Math.min(numberOfTasks, myDaoConfig.getExpungeThreadCount());
int threadCount = Math.min(numberOfTasks, myThreadCount);
assert (threadCount > 0);

ourLog.info("Expunging with {} threads", threadCount);
ourLog.info(myProcessName + " with {} threads", threadCount);
LinkedBlockingQueue<Runnable> executorQueue = new LinkedBlockingQueue<>(MAX_POOL_SIZE);
BasicThreadFactory threadFactory = new BasicThreadFactory.Builder()
.namingPattern("expunge-%d")
.namingPattern(myThreadPrefix + "-%d")
.daemon(false)
.priority(Thread.NORM_PRIORITY)
.build();
RejectedExecutionHandler rejectedExecutionHandler = (theRunnable, theExecutor) -> {
ourLog.info("Note: Expunge executor queue is full ({} elements), waiting for a slot to become available!", executorQueue.size());
ourLog.info("Note: " + myThreadPrefix + " executor queue is full ({} elements), waiting for a slot to become available!", executorQueue.size());
StopWatch sw = new StopWatch();
try {
executorQueue.put(theRunnable);
} catch (InterruptedException e) {
throw new RejectedExecutionException("Task " + theRunnable.toString() +
" rejected from " + e.toString());
" rejected from " + e);
}
ourLog.info("Slot become available after {}ms", sw.getMillis());
};

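The refactor above turns PartitionRunner from an @Autowired service tied to the expunge settings into a plain object that each caller constructs with its own process name, thread prefix, batch size and thread count. A minimal usage sketch, with illustrative values; somePidSlice and processChunk are placeholders for the caller's own data and logic.

// Hypothetical caller (assumption, not part of this commit): split a Slice of
// resource PIDs into batches of 500 and process them on up to 4 "example-" threads.
PartitionRunner runner = new PartitionRunner("Example processing", "example", 500, 4);
runner.runInPartitionedThreads(somePidSlice, pidChunk -> processChunk(pidChunk));
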
@@ -24,17 +24,18 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.delete.job.DeleteExpungeJobConfig;
import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import org.springframework.batch.core.Job;

@@ -45,7 +46,6 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.transaction.Transactional;
import java.util.ArrayList;
import java.util.List;

public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter {

@@ -63,12 +63,17 @@ public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter
@Autowired
DaoConfig myDaoConfig;
@Autowired
PartitionedUrlValidator myPartitionedUrlValidator;
@Autowired
IInterceptorBroadcaster myInterceptorBroadcaster;

@Override
@Transactional(Transactional.TxType.NEVER)
public JobExecution submitJob(Integer theBatchSize, RequestDetails theRequest, List<String> theUrlsToDeleteExpunge) throws JobParametersInvalidException {
List<RequestPartitionId> requestPartitionIds = requestPartitionIdsFromRequestAndUrls(theRequest, theUrlsToDeleteExpunge);
public JobExecution submitJob(Integer theBatchSize, List<String> theUrlsToDeleteExpunge, RequestDetails theRequest) throws JobParametersInvalidException {
if (theBatchSize == null) {
theBatchSize = myDaoConfig.getExpungeBatchSize();
}
RequestListJson requestListJson = myPartitionedUrlValidator.buildRequestListJson(theRequest, theUrlsToDeleteExpunge);
if (!myDaoConfig.canDeleteExpunge()) {
throw new ForbiddenOperationException("Delete Expunge not allowed: " + myDaoConfig.cannotDeleteExpungeReason());
}

@@ -81,20 +86,7 @@ public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE, params);
}

JobParameters jobParameters = DeleteExpungeJobConfig.buildJobParameters(theBatchSize, theUrlsToDeleteExpunge, requestPartitionIds);
JobParameters jobParameters = ReverseCronologicalBatchResourcePidReader.buildJobParameters(ProviderConstants.OPERATION_DELETE_EXPUNGE, theBatchSize, requestListJson);
return myBatchJobSubmitter.runJob(myDeleteExpungeJob, jobParameters);
}

/**
* This method will throw an exception if the user is not allowed to add the requested resource type on the partition determined by the request
*/
private List<RequestPartitionId> requestPartitionIdsFromRequestAndUrls(RequestDetails theRequest, List<String> theUrlsToDeleteExpunge) {
List<RequestPartitionId> retval = new ArrayList<>();
for (String url : theUrlsToDeleteExpunge) {
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(url);
RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequest, resourceSearch.getResourceName(), null, null);
retval.add(requestPartitionId);
}
return retval;
}
}

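Note the signature change above: the URL list now comes before the RequestDetails, and partition resolution moves into PartitionedUrlValidator. A hedged sketch of a caller after this change; the URLs and variable names are illustrative.

// Hypothetical caller (assumption, not part of this commit): submit a delete-expunge
// job for two match URLs, letting the batch size default to DaoConfig's value.
List<String> urls = Arrays.asList("Observation?status=cancelled", "Patient?active=false");
JobExecution execution = myDeleteExpungeJobSubmitter.submitJob(null, urls, theRequestDetails);
ourLog.info("Submitted delete-expunge job {}", execution.getId());
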
@@ -20,19 +20,12 @@ package ca.uhn.fhir.jpa.delete.job;
* #L%
*/

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.delete.model.RequestListJson;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.apache.commons.lang3.time.DateUtils;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;

@@ -43,11 +36,7 @@ import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;

import javax.annotation.Nonnull;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME;

@@ -56,9 +45,8 @@ import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME;
* Delete Expunge job.
*/
@Configuration
public class DeleteExpungeJobConfig {
public class DeleteExpungeJobConfig extends MultiUrlProcessorJobConfig {
public static final String DELETE_EXPUNGE_URL_LIST_STEP_NAME = "delete-expunge-url-list-step";
private static final int MINUTES_IN_FUTURE_TO_DELETE_FROM = 1;

@Autowired
private StepBuilderFactory myStepBuilderFactory;

@@ -67,26 +55,13 @@ public class DeleteExpungeJobConfig {

@Bean(name = DELETE_EXPUNGE_JOB_NAME)
@Lazy
public Job deleteExpungeJob(FhirContext theFhirContext, MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) throws Exception {
public Job deleteExpungeJob(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
return myJobBuilderFactory.get(DELETE_EXPUNGE_JOB_NAME)
.validator(deleteExpungeJobParameterValidator(theFhirContext, theMatchUrlService, theDaoRegistry))
.validator(multiUrlProcessorParameterValidator(theMatchUrlService, theDaoRegistry))
.start(deleteExpungeUrlListStep())
.build();
}

@Nonnull
public static JobParameters buildJobParameters(Integer theBatchSize, List<String> theUrlList, List<RequestPartitionId> theRequestPartitionIds) {
Map<String, JobParameter> map = new HashMap<>();
RequestListJson requestListJson = RequestListJson.fromUrlStringsAndRequestPartitionIds(theUrlList, theRequestPartitionIds);
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST, new JobParameter(requestListJson.toString()));
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MINUTES_IN_FUTURE_TO_DELETE_FROM)));
if (theBatchSize != null) {
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
}
JobParameters parameters = new JobParameters(map);
return parameters;
}

@Bean
public Step deleteExpungeUrlListStep() {
return myStepBuilderFactory.get(DELETE_EXPUNGE_URL_LIST_STEP_NAME)

@@ -95,20 +70,17 @@ public class DeleteExpungeJobConfig {
.processor(deleteExpungeProcessor())
.writer(sqlExecutorWriter())
.listener(pidCountRecorderListener())
.listener(promotionListener())
.listener(deleteExpungePromotionListener())
.build();
}

@Bean
@StepScope
public PidReaderCounterListener pidCountRecorderListener() {
return new PidReaderCounterListener();
}
public ExecutionContextPromotionListener deleteExpungePromotionListener() {
ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();

@Bean
@StepScope
public ReverseCronologicalBatchResourcePidReader reverseCronologicalBatchResourcePidReader() {
return new ReverseCronologicalBatchResourcePidReader();
listener.setKeys(new String[]{SqlExecutorWriter.ENTITY_TOTAL_UPDATED_OR_DELETED, PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED});

return listener;
}

@Bean

@@ -116,24 +88,4 @@ public class DeleteExpungeJobConfig {
public DeleteExpungeProcessor deleteExpungeProcessor() {
return new DeleteExpungeProcessor();
}

@Bean
@StepScope
public SqlExecutorWriter sqlExecutorWriter() {
return new SqlExecutorWriter();
}

@Bean
public JobParametersValidator deleteExpungeJobParameterValidator(FhirContext theFhirContext, MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
return new DeleteExpungeJobParameterValidator(theMatchUrlService, theDaoRegistry);
}

@Bean
public ExecutionContextPromotionListener promotionListener() {
ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();

listener.setKeys(new String[]{SqlExecutorWriter.ENTITY_TOTAL_UPDATED_OR_DELETED, PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED});

return listener;
}
}

@@ -47,6 +47,9 @@ import java.util.stream.Collectors;
public class DeleteExpungeProcessor implements ItemProcessor<List<Long>, List<String>> {
private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeProcessor.class);

public static final String PROCESS_NAME = "Delete Expunging";
public static final String THREAD_PREFIX = "delete-expunge";

@Autowired
ResourceTableFKProvider myResourceTableFKProvider;
@Autowired

@@ -55,8 +58,6 @@ public class DeleteExpungeProcessor implements ItemProcessor<List<Long>, List<St
IdHelperService myIdHelper;
@Autowired
IResourceLinkDao myResourceLinkDao;
@Autowired
PartitionRunner myPartitionRunner;

@Override
public List<String> process(List<Long> thePids) throws Exception {

@@ -84,7 +85,8 @@ public class DeleteExpungeProcessor implements ItemProcessor<List<Long>, List<St
}

List<ResourceLink> conflictResourceLinks = Collections.synchronizedList(new ArrayList<>());
myPartitionRunner.runInPartitionedThreads(thePids, someTargetPids -> findResourceLinksWithTargetPidIn(thePids.getContent(), someTargetPids, conflictResourceLinks));
PartitionRunner partitionRunner = new PartitionRunner(PROCESS_NAME, THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount());
partitionRunner.runInPartitionedThreads(thePids, someTargetPids -> findResourceLinksWithTargetPidIn(thePids.getContent(), someTargetPids, conflictResourceLinks));

if (conflictResourceLinks.isEmpty()) {
return;

@@ -25,10 +25,19 @@ import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;

import javax.persistence.*;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import java.io.Serializable;
import java.util.Date;

@Deprecated
@Entity
@Table(name = "HFJ_RES_REINDEX_JOB")
public class ResourceReindexJobEntity implements Serializable {

@@ -37,12 +37,12 @@ import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionEntity;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistryController;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.SearchParameterUtil;
import com.google.common.annotations.VisibleForTesting;

@@ -56,12 +56,12 @@ import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.Identifier;
import org.hl7.fhir.utilities.npm.IPackageCacheManager;
import org.hl7.fhir.utilities.npm.NpmPackage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;
import org.hl7.fhir.utilities.npm.NpmPackage;

import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;

@@ -227,7 +227,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
create(next, theOutcome);
} catch (Exception e) {
ourLog.warn("Failed to upload resource of type {} with ID {} - Error: {}", myFhirContext.getResourceType(next), next.getIdElement().getValue(), e.toString());
throw new ImplementationGuideInstallationException(String.format("Error installing IG %s#%s: %s", name, version, e.toString()), e);
throw new ImplementationGuideInstallationException(String.format("Error installing IG %s#%s: %s", name, version, e), e);
}

}

@@ -412,9 +412,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {

List<IPrimitiveType> statusTypes = myFhirContext.newFhirPath().evaluate(theResource, "status", IPrimitiveType.class);
if (statusTypes.size() > 0) {
if (!statusTypes.get(0).getValueAsString().equals("active")) {
return false;
}
return statusTypes.get(0).getValueAsString().equals("active");
}

return true;

@@ -23,7 +23,6 @@ package ca.uhn.fhir.jpa.partition;
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;

@@ -45,7 +45,17 @@ import java.util.Date;

public class BaseJpaSystemProvider<T, MT> extends BaseJpaProvider implements IJpaSystemProvider {

/**
* @see ProviderConstants#OPERATION_REINDEX
* @deprecated
*/
@Deprecated
public static final String MARK_ALL_RESOURCES_FOR_REINDEXING = "$mark-all-resources-for-reindexing";
/**
* @see ProviderConstants#OPERATION_REINDEX
* @deprecated
*/
@Deprecated
public static final String PERFORM_REINDEXING_PASS = "$perform-reindexing-pass";

private IFhirSystemDao<T, MT> myDao;

@@ -26,13 +26,13 @@ import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.util.ParametersUtil;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;

import javax.servlet.http.HttpServletRequest;
import java.util.List;

import static org.apache.commons.lang3.StringUtils.isNotBlank;

@@ -43,6 +43,11 @@ public abstract class BaseJpaSystemProviderDstu2Plus<T, MT> extends BaseJpaSyste
@Operation(name = MARK_ALL_RESOURCES_FOR_REINDEXING, idempotent = false, returnParameters = {
@OperationParam(name = "status")
})
/**
* @deprecated
* @see ca.uhn.fhir.rest.server.provider.ReindexProvider#Reindex(List, IPrimitiveType, RequestDetails)
*/
@Deprecated
public IBaseResource markAllResourcesForReindexing(
@OperationParam(name="type", min = 0, max = 1, typeName = "code") IPrimitiveType<String> theType
) {

@@ -65,6 +70,11 @@ public abstract class BaseJpaSystemProviderDstu2Plus<T, MT> extends BaseJpaSyste
@Operation(name = PERFORM_REINDEXING_PASS, idempotent = false, returnParameters = {
@OperationParam(name = "status")
})
/**
* @deprecated
* @see ca.uhn.fhir.rest.server.provider.ReindexProvider#Reindex(List, IPrimitiveType, RequestDetails)
*/
@Deprecated
public IBaseResource performReindexingPass() {
Integer count = getResourceReindexingSvc().runReindexingPass();

@@ -0,0 +1,107 @@
package ca.uhn.fhir.jpa.reindex;

/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.batch.reader.CronologicalBatchAllResourcePidReader;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IReindexJobSubmitter;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.transaction.Transactional;
import java.util.List;

public class ReindexJobSubmitterImpl implements IReindexJobSubmitter {
@Autowired
PartitionedUrlValidator myPartitionedUrlValidator;
@Autowired
DaoConfig myDaoConfig;
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
@Autowired
private IBatchJobSubmitter myBatchJobSubmitter;
@Autowired
@Qualifier(BatchJobsConfig.REINDEX_JOB_NAME)
private Job myReindexJob;
@Autowired
@Qualifier(BatchJobsConfig.REINDEX_EVERYTHING_JOB_NAME)
private Job myReindexEverythingJob;

@Override
@Transactional(Transactional.TxType.NEVER)
public JobExecution submitJob(Integer theBatchSize, List<String> theUrlsToReindex, RequestDetails theRequest) throws JobParametersInvalidException {
if (theBatchSize == null) {
theBatchSize = myDaoConfig.getReindexBatchSize();
}
RequestListJson requestListJson = myPartitionedUrlValidator.buildRequestListJson(theRequest, theUrlsToReindex);
if (!myDaoConfig.isReindexEnabled()) {
throw new ForbiddenOperationException("Reindexing is disabled on this server.");
}

/*
* On the first time we run a particular reindex job, let's make sure we
* have the latest search parameters loaded. A common reason to
* be reindexing is that the search parameters have changed in some way, so
* this makes sure we're on the latest versions
*/
mySearchParamRegistry.forceRefresh();

JobParameters jobParameters = ReverseCronologicalBatchResourcePidReader.buildJobParameters(ProviderConstants.OPERATION_REINDEX, theBatchSize, requestListJson);
return myBatchJobSubmitter.runJob(myReindexJob, jobParameters);
}

@Override
@Transactional(Transactional.TxType.NEVER)
public JobExecution submitEverythingJob(Integer theBatchSize, RequestDetails theRequest) throws JobParametersInvalidException {
if (theBatchSize == null) {
theBatchSize = myDaoConfig.getReindexBatchSize();
}
RequestPartitionId requestPartitionId = myPartitionedUrlValidator.requestPartitionIdFromRequest(theRequest);
if (!myDaoConfig.isReindexEnabled()) {
throw new ForbiddenOperationException("Reindexing is disabled on this server.");
}

/*
* On the first time we run a particular reindex job, let's make sure we
* have the latest search parameters loaded. A common reason to
* be reindexing is that the search parameters have changed in some way, so
* this makes sure we're on the latest versions
*/
mySearchParamRegistry.forceRefresh();

JobParameters jobParameters = CronologicalBatchAllResourcePidReader.buildJobParameters(theBatchSize, requestPartitionId);
return myBatchJobSubmitter.runJob(myReindexEverythingJob, jobParameters);
}
}

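A short sketch of how the two new entry points in this submitter might be driven, assuming a caller that already has a RequestDetails; the URL and variable names are illustrative.

// Hypothetical caller (assumption, not part of this commit): reindex resources matching
// a URL, or fall back to reindexing everything visible to the request's partition.
JobExecution urlJob = myReindexJobSubmitter.submitJob(null, Collections.singletonList("Observation?"), theRequestDetails);
JobExecution everythingJob = myReindexJobSubmitter.submitEverythingJob(null, theRequestDetails);
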
@@ -0,0 +1,98 @@
package ca.uhn.fhir.jpa.reindex.job;

/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.reader.CronologicalBatchAllResourcePidReader;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.listener.ExecutionContextPromotionListener;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;

import java.util.List;

import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.REINDEX_EVERYTHING_JOB_NAME;

/**
* Spring batch Job configuration file. Contains all necessary plumbing to run a
* Reindex job.
*/
@Configuration
public class ReindexEverythingJobConfig {
public static final String REINDEX_EVERYTHING_STEP_NAME = "reindex-everything-step";

@Autowired
private StepBuilderFactory myStepBuilderFactory;
@Autowired
private JobBuilderFactory myJobBuilderFactory;

@Bean(name = REINDEX_EVERYTHING_JOB_NAME)
@Lazy
public Job reindexJob() {
return myJobBuilderFactory.get(REINDEX_EVERYTHING_JOB_NAME)
.start(reindexEverythingStep())
.build();
}

@Bean
public Step reindexEverythingStep() {
return myStepBuilderFactory.get(REINDEX_EVERYTHING_STEP_NAME)
.<List<Long>, List<Long>>chunk(1)
.reader(cronologicalBatchAllResourcePidReader())
.writer(reindexWriter())
.listener(reindexEverythingPidCountRecorderListener())
.listener(reindexEverythingPromotionListener())
.build();
}

@Bean
@StepScope
public CronologicalBatchAllResourcePidReader cronologicalBatchAllResourcePidReader() {
return new CronologicalBatchAllResourcePidReader();
}

@Bean
@StepScope
public ReindexWriter reindexWriter() {
return new ReindexWriter();
}

@Bean
@StepScope
public PidReaderCounterListener reindexEverythingPidCountRecorderListener() {
return new PidReaderCounterListener();
}

@Bean
public ExecutionContextPromotionListener reindexEverythingPromotionListener() {
ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();

listener.setKeys(new String[]{PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED});

return listener;
}
}

@@ -0,0 +1,89 @@
package ca.uhn.fhir.jpa.reindex.job;

/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.listener.ExecutionContextPromotionListener;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;

import java.util.List;

import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.REINDEX_JOB_NAME;

/**
* Spring batch Job configuration file. Contains all necessary plumbing to run a
* Reindex job.
*/
@Configuration
public class ReindexJobConfig extends MultiUrlProcessorJobConfig {
public static final String REINDEX_URL_LIST_STEP_NAME = "reindex-url-list-step";

@Autowired
private StepBuilderFactory myStepBuilderFactory;
@Autowired
private JobBuilderFactory myJobBuilderFactory;

@Bean(name = REINDEX_JOB_NAME)
@Lazy
public Job reindexJob(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
return myJobBuilderFactory.get(REINDEX_JOB_NAME)
.validator(multiUrlProcessorParameterValidator(theMatchUrlService, theDaoRegistry))
.start(reindexUrlListStep())
.build();
}

@Bean
public Step reindexUrlListStep() {
return myStepBuilderFactory.get(REINDEX_URL_LIST_STEP_NAME)
.<List<Long>, List<Long>>chunk(1)
.reader(reverseCronologicalBatchResourcePidReader())
.writer(reindexWriter())
.listener(pidCountRecorderListener())
.listener(reindexPromotionListener())
.build();
}

@Bean
@StepScope
public ReindexWriter reindexWriter() {
return new ReindexWriter();
}

@Bean
public ExecutionContextPromotionListener reindexPromotionListener() {
ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();

listener.setKeys(new String[]{PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED});

return listener;
}
}

@@ -0,0 +1,67 @@
package ca.uhn.fhir.jpa.reindex.job;

/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.dao.expunge.PartitionRunner;
import ca.uhn.fhir.jpa.search.reindex.ResourceReindexer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.SliceImpl;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

import java.util.List;

/**
* Reindex the provided list of pids of resources
*/

public class ReindexWriter implements ItemWriter<List<Long>> {
private static final Logger ourLog = LoggerFactory.getLogger(ReindexWriter.class);

public static final String PROCESS_NAME = "Reindexing";
public static final String THREAD_PREFIX = "reindex";

@Autowired
ResourceReindexer myResourceReindexer;
@Autowired
DaoConfig myDaoConfig;
@Autowired
protected PlatformTransactionManager myTxManager;

@Override
public void write(List<? extends List<Long>> thePidLists) throws Exception {
PartitionRunner partitionRunner = new PartitionRunner(PROCESS_NAME, THREAD_PREFIX, myDaoConfig.getReindexBatchSize(), myDaoConfig.getReindexThreadCount());

// Note that since our chunk size is 1, there will always be exactly one list
for (List<Long> pidList : thePidLists) {
partitionRunner.runInPartitionedThreads(new SliceImpl<>(pidList), pids -> reindexPids(pidList));
}
}

private void reindexPids(List<Long> pidList) {
TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
txTemplate.executeWithoutResult(t -> pidList.forEach(pid -> myResourceReindexer.readAndReindexResourceByPid(pid)));
}
}

@@ -57,14 +57,11 @@ import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.sql.SqlObjectFactory;
import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper;
import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.jpa.util.BaseIterator;
import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.jpa.util.SqlQueryList;
import ca.uhn.fhir.model.api.IQueryParameterType;

@@ -75,6 +72,7 @@ import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;

@@ -85,6 +83,8 @@ import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.StringUtil;
import ca.uhn.fhir.util.UrlUtil;

@@ -0,0 +1,102 @@
package ca.uhn.fhir.jpa.search.reindex;

/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import static org.apache.commons.lang3.StringUtils.isBlank;

@Service
public class ResourceReindexer {
private static final Logger ourLog = LoggerFactory.getLogger(ResourceReindexer.class);
@Autowired
private IResourceHistoryTableDao myResourceHistoryTableDao;
@Autowired
private IForcedIdDao myForcedIdDao;
@Autowired
private IResourceTableDao myResourceTableDao;
@Autowired
private DaoRegistry myDaoRegistry;

private final FhirContext myFhirContext;

public ResourceReindexer(FhirContext theFhirContext) {
myFhirContext = theFhirContext;
}

public void readAndReindexResourceByPid(Long theResourcePid) {
ResourceTable resourceTable = myResourceTableDao.findById(theResourcePid).orElseThrow(IllegalStateException::new);
reindexResourceEntity(resourceTable);
}

public void reindexResourceEntity(ResourceTable theResourceTable) {
/*
* This part is because from HAPI 1.5 - 1.6 we changed the format of forced ID to be "type/id" instead of just "id"
*/
ForcedId forcedId = theResourceTable.getForcedId();
if (forcedId != null) {
if (isBlank(forcedId.getResourceType())) {
ourLog.info("Updating resource {} forcedId type to {}", forcedId.getForcedId(), theResourceTable.getResourceType());
forcedId.setResourceType(theResourceTable.getResourceType());
myForcedIdDao.save(forcedId);
}
}

IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(theResourceTable.getResourceType());
long expectedVersion = theResourceTable.getVersion();
IBaseResource resource = dao.readByPid(new ResourcePersistentId(theResourceTable.getId()), true);

if (resource == null) {
throw new InternalErrorException("Could not find resource version " + theResourceTable.getIdDt().toUnqualified().getValue() + " in database");
}

Long actualVersion = resource.getIdElement().getVersionIdPartAsLong();
if (actualVersion < expectedVersion) {
ourLog.warn("Resource {} version {} does not exist, renumbering version {}", resource.getIdElement().toUnqualifiedVersionless().getValue(), resource.getIdElement().getVersionIdPart(), expectedVersion);
myResourceHistoryTableDao.updateVersion(theResourceTable.getId(), actualVersion, expectedVersion);
}

doReindex(theResourceTable, resource);
}

@SuppressWarnings("unchecked")
<T extends IBaseResource> void doReindex(ResourceTable theResourceTable, T theResource) {
RuntimeResourceDefinition resourceDefinition = myFhirContext.getResourceDefinition(theResource.getClass());
Class<T> resourceClass = (Class<T>) resourceDefinition.getImplementingClass();
final IFhirResourceDao<T> dao = myDaoRegistry.getResourceDao(resourceClass);
dao.reindex(theResource, theResourceTable);
}
}

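ResourceReindexer is the piece the new batch ReindexWriter delegates to. A minimal sketch of direct use, assuming the caller runs inside a transaction as ReindexWriter does; the PID value is illustrative.

// Hypothetical caller (assumption, not part of this commit): re-run indexing for a
// single resource identified by its persistent id.
TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
txTemplate.executeWithoutResult(status -> myResourceReindexer.readAndReindexResourceByPid(123L));
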
@@ -21,34 +21,28 @@ package ca.uhn.fhir.jpa.search.reindex;
*/

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceReindexJobDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.util.StopWatch;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.InstantType;
import org.jetbrains.annotations.Nullable;
import org.quartz.JobExecutionContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -80,9 +74,13 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;

import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

/**
* @see ca.uhn.fhir.jpa.reindex.job.ReindexJobConfig
* @deprecated
*/
@Deprecated
public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {

private static final Date BEGINNING_OF_TIME = new Date(0);

@@ -96,13 +94,11 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
@Autowired
private PlatformTransactionManager myTxManager;
private TransactionTemplate myTxTemplate;
private ThreadFactory myReindexingThreadFactory = new BasicThreadFactory.Builder().namingPattern("ResourceReindex-%d").build();
private final ThreadFactory myReindexingThreadFactory = new BasicThreadFactory.Builder().namingPattern("ResourceReindex-%d").build();
private ThreadPoolExecutor myTaskExecutor;
@Autowired
private IResourceTableDao myResourceTableDao;
@Autowired
private IResourceHistoryTableDao myResourceHistoryTableDao;
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private IForcedIdDao myForcedIdDao;

@@ -114,47 +110,19 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
private ISearchParamRegistry mySearchParamRegistry;
@Autowired
private ISchedulerService mySchedulerService;

@VisibleForTesting
void setReindexJobDaoForUnitTest(IResourceReindexJobDao theReindexJobDao) {
myReindexJobDao = theReindexJobDao;
}
@Autowired
private ResourceReindexer myResourceReindexer;
|
||||
|
||||
@VisibleForTesting
|
||||
void setDaoConfigForUnitTest(DaoConfig theDaoConfig) {
|
||||
myDaoConfig = theDaoConfig;
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
void setTxManagerForUnitTest(PlatformTransactionManager theTxManager) {
|
||||
myTxManager = theTxManager;
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
void setResourceTableDaoForUnitTest(IResourceTableDao theResourceTableDao) {
|
||||
myResourceTableDao = theResourceTableDao;
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
void setDaoRegistryForUnitTest(DaoRegistry theDaoRegistry) {
|
||||
myDaoRegistry = theDaoRegistry;
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
void setForcedIdDaoForUnitTest(IForcedIdDao theForcedIdDao) {
|
||||
myForcedIdDao = theForcedIdDao;
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
void setContextForUnitTest(FhirContext theContext) {
|
||||
myContext = theContext;
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
void setSchedulerServiceForUnitTest(ISchedulerService theSchedulerService) {
|
||||
mySchedulerService = theSchedulerService;
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
public void start() {
|
||||
myTxTemplate = new TransactionTemplate(myTxManager);
|
||||
|
@ -173,6 +141,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
|
|||
rejectHandler
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* A handler for rejected tasks that will have the caller block until space is available.
|
||||
* This was borrowed from old Hibernate Search (5.x), as it has been removed in HS6. We can probably come up with a better solution, though.
|
||||
|
@ -189,8 +158,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
|
|||
public void rejectedExecution(Runnable r, ThreadPoolExecutor e) {
|
||||
try {
|
||||
e.getQueue().put( r );
|
||||
}
|
||||
catch (InterruptedException e1) {
|
||||
} catch (InterruptedException e1) {
|
||||
ourLog.error("Interrupted Execption for task: {}",r, e1 );
|
||||
Thread.currentThread().interrupt();
|
||||
}
|
||||
|
@ -289,15 +257,15 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
|
|||
ourLog.info("Cancelling and purging all resource reindexing jobs");
|
||||
myIndexingLock.lock();
|
||||
try {
|
||||
myTxTemplate.execute(t -> {
|
||||
myReindexJobDao.markAllOfTypeAsDeleted();
|
||||
return null;
|
||||
});
|
||||
myTxTemplate.execute(t -> {
|
||||
myReindexJobDao.markAllOfTypeAsDeleted();
|
||||
return null;
|
||||
});
|
||||
|
||||
myTaskExecutor.shutdown();
|
||||
initExecutor();
|
||||
myTaskExecutor.shutdown();
|
||||
initExecutor();
|
||||
|
||||
expungeJobsMarkedAsDeleted();
|
||||
expungeJobsMarkedAsDeleted();
|
||||
} finally {
|
||||
myIndexingLock.unlock();
|
||||
}
|
||||
|
@ -346,8 +314,8 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
|
|||
}
|
||||
|
||||
@VisibleForTesting
|
||||
void setSearchParamRegistryForUnitTest(ISearchParamRegistry theSearchParamRegistry) {
|
||||
mySearchParamRegistry = theSearchParamRegistry;
|
||||
public void setResourceReindexerForUnitTest(ResourceReindexer theResourceReindexer) {
|
||||
myResourceReindexer = theResourceReindexer;
|
||||
}
|
||||
|
||||
private int runReindexJob(ResourceReindexJobEntity theJob) {
|
||||
|
@ -387,7 +355,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
|
|||
});
|
||||
Validate.notNull(range);
|
||||
int count = range.getNumberOfElements();
|
||||
ourLog.info("Loaded {} resources for reindexing in {}", count, pageSw.toString());
|
||||
ourLog.info("Loaded {} resources for reindexing in {}", count, pageSw);
|
||||
|
||||
// If we didn't find any results at all, mark as deleted
|
||||
if (count == 0) {
|
||||
|
@ -446,7 +414,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
|
|||
return null;
|
||||
});
|
||||
|
||||
ourLog.info("Completed pass of reindex JOB[{}] - Indexed {} resources in {} ({} / sec) - Have indexed until: {}", theJob.getId(), count, sw.toString(), sw.formatThroughput(count, TimeUnit.SECONDS), new InstantType(newLow));
|
||||
ourLog.info("Completed pass of reindex JOB[{}] - Indexed {} resources in {} ({} / sec) - Have indexed until: {}", theJob.getId(), count, sw, sw.formatThroughput(count, TimeUnit.SECONDS), new InstantType(newLow));
|
||||
return counter.get();
|
||||
}
|
||||
|
||||
|
@ -465,7 +433,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
|
|||
TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
|
||||
txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
|
||||
txTemplate.execute((TransactionCallback<Void>) theStatus -> {
|
||||
ourLog.info("Marking resource with PID {} as indexing_failed", new Object[]{theId});
|
||||
ourLog.info("Marking resource with PID {} as indexing_failed", theId);
|
||||
|
||||
myResourceTableDao.updateIndexStatus(theId, BaseHapiFhirDao.INDEX_STATUS_INDEXING_FAILED);
|
||||
|
||||
|
@ -527,63 +495,12 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
|
|||
myCounter = theCounter;
|
||||
}
|
||||
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private <T extends IBaseResource> void doReindex(ResourceTable theResourceTable, T theResource) {
|
||||
RuntimeResourceDefinition resourceDefinition = myContext.getResourceDefinition(theResource.getClass());
|
||||
Class<T> resourceClass = (Class<T>) resourceDefinition.getImplementingClass();
|
||||
final IFhirResourceDao<T> dao = myDaoRegistry.getResourceDao(resourceClass);
|
||||
dao.reindex(theResource, theResourceTable);
|
||||
|
||||
myCounter.incrementAndGet();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Date call() {
|
||||
Throwable reindexFailure;
|
||||
|
||||
try {
|
||||
reindexFailure = myTxTemplate.execute(t -> {
|
||||
ResourceTable resourceTable = myResourceTableDao.findById(myNextId).orElseThrow(IllegalStateException::new);
|
||||
myUpdated = resourceTable.getUpdatedDate();
|
||||
|
||||
try {
|
||||
/*
|
||||
* This part is because from HAPI 1.5 - 1.6 we changed the format of forced ID to be "type/id" instead of just "id"
|
||||
*/
|
||||
ForcedId forcedId = resourceTable.getForcedId();
|
||||
if (forcedId != null) {
|
||||
if (isBlank(forcedId.getResourceType())) {
|
||||
ourLog.info("Updating resource {} forcedId type to {}", forcedId.getForcedId(), resourceTable.getResourceType());
|
||||
forcedId.setResourceType(resourceTable.getResourceType());
|
||||
myForcedIdDao.save(forcedId);
|
||||
}
|
||||
}
|
||||
|
||||
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(resourceTable.getResourceType());
|
||||
long expectedVersion = resourceTable.getVersion();
|
||||
IBaseResource resource = dao.readByPid(new ResourcePersistentId(resourceTable.getId()), true);
|
||||
|
||||
if (resource == null) {
|
||||
throw new InternalErrorException("Could not find resource version " + resourceTable.getIdDt().toUnqualified().getValue() + " in database");
|
||||
}
|
||||
|
||||
Long actualVersion = resource.getIdElement().getVersionIdPartAsLong();
|
||||
if (actualVersion < expectedVersion) {
|
||||
ourLog.warn("Resource {} version {} does not exist, renumbering version {}", resource.getIdElement().toUnqualifiedVersionless().getValue(), resource.getIdElement().getVersionIdPart(), expectedVersion);
|
||||
myResourceHistoryTableDao.updateVersion(resourceTable.getId(), actualVersion, expectedVersion);
|
||||
}
|
||||
|
||||
doReindex(resourceTable, resource);
|
||||
|
||||
return null;
|
||||
|
||||
} catch (Exception e) {
|
||||
ourLog.error("Failed to index resource {}: {}", resourceTable.getIdDt(), e.toString(), e);
|
||||
t.setRollbackOnly();
|
||||
return e;
|
||||
}
|
||||
});
|
||||
|
||||
reindexFailure = readResourceAndReindex();
|
||||
} catch (ResourceVersionConflictException e) {
|
||||
/*
|
||||
* We reindex in multiple threads, so it's technically possible that two threads try
|
||||
|
@ -603,5 +520,26 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
|
|||
|
||||
return myUpdated;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private Throwable readResourceAndReindex() {
|
||||
Throwable reindexFailure;
|
||||
reindexFailure = myTxTemplate.execute(t -> {
|
||||
ResourceTable resourceTable = myResourceTableDao.findById(myNextId).orElseThrow(IllegalStateException::new);
|
||||
myUpdated = resourceTable.getUpdatedDate();
|
||||
|
||||
try {
|
||||
myResourceReindexer.reindexResourceEntity(resourceTable);
|
||||
myCounter.incrementAndGet();
|
||||
return null;
|
||||
|
||||
} catch (Exception e) {
|
||||
ourLog.error("Failed to index resource {}: {}", resourceTable.getIdDt(), e, e);
|
||||
t.setRollbackOnly();
|
||||
return e;
|
||||
}
|
||||
});
|
||||
return reindexFailure;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,29 @@
package ca.uhn.fhir.jpa.batch.job;

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import org.springframework.batch.core.JobParameters;

import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.List;

public final class MultiUrlJobParameterUtil {
    private MultiUrlJobParameterUtil() {
    }

    @Nonnull
    public static JobParameters buildJobParameters(String... theUrls) {
        List<PartitionedUrl> partitionedUrls = new ArrayList<>();
        for (String url : theUrls) {
            partitionedUrls.add(new PartitionedUrl(url, RequestPartitionId.defaultPartition()));
        }

        RequestListJson requestListJson = new RequestListJson();
        requestListJson.setPartitionedUrls(partitionedUrls);
        return ReverseCronologicalBatchResourcePidReader.buildJobParameters(ProviderConstants.OPERATION_REINDEX, 2401, requestListJson);
    }
}
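A usage sketch for MultiUrlJobParameterUtil above, mirroring how the tests later in this changeset drive it (the submitter, job, and helper beans are assumed to be wired exactly as in DeleteExpungeJobTest and ReindexJobTest; the batch size is fixed at 2401 by the helper):

// Sketch: build multi-URL job parameters on the default partition and run the job.
// myBatchJobSubmitter, myReindexJob and myBatchJobHelper are assumed to be autowired
// as in the tests below; this uses no API beyond what the changeset already exercises.
JobParameters params = MultiUrlJobParameterUtil.buildJobParameters(
    "Observation?status=final",
    "Patient?active=false");
JobExecution execution = myBatchJobSubmitter.runJob(myReindexJob, params);
myBatchJobHelper.awaitJobCompletion(execution);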
@ -0,0 +1,125 @@
package ca.uhn.fhir.jpa.batch.reader;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertEquals;

@ExtendWith(MockitoExtension.class)
class BatchDateThresholdUpdaterTest {
    static Date LATE_DATE = new Date();
    static Date EARLY_DATE = new Date(LATE_DATE.getTime() - 1000);
    static Long PID1 = 1L;
    static Long PID2 = 2L;
    static Long PID3 = 3L;
    BatchDateThresholdUpdater mySvc = new BatchDateThresholdUpdater();

    @Test
    public void testEmptyList() {
        Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, Collections.emptySet(), Collections.emptyList());
        assertEquals(LATE_DATE, newThreshold);
    }

    @Test
    public void oneItem() {
        mySvc.setDateFromPid(pid -> LATE_DATE);
        Set<Long> seenPids = new HashSet<>();
        Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Collections.singletonList(PID1));
        assertEquals(LATE_DATE, newThreshold);
        assertThat(seenPids, contains(PID1));
    }

    @Test
    public void twoItemsSameDate() {
        mySvc.setDateFromPid(pid -> LATE_DATE);
        Set<Long> seenPids = new HashSet<>();
        Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2));
        assertEquals(LATE_DATE, newThreshold);
        assertThat(seenPids, contains(PID1, PID2));
    }

    @Test
    public void twoItemsDiffDate() {
        List<Date> dates = Arrays.asList(EARLY_DATE, LATE_DATE);
        mySvc.setDateFromPid(pid -> dates.get(pid.intValue() - 1));
        Set<Long> seenPids = new HashSet<>();
        Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2));
        assertEquals(LATE_DATE, newThreshold);
        assertThat(seenPids, contains(PID2));
    }

    @Test
    public void threeItemsSameDate() {
        mySvc.setDateFromPid(pid -> LATE_DATE);
        Set<Long> seenPids = new HashSet<>();
        Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2, PID3));
        assertEquals(LATE_DATE, newThreshold);
        assertThat(seenPids, contains(PID1, PID2, PID3));
    }

    @Test
    public void threeItemsDifferentEEL() {
        List<Date> dates = Arrays.asList(EARLY_DATE, EARLY_DATE, LATE_DATE);
        mySvc.setDateFromPid(pid -> dates.get(pid.intValue() - 1));
        Set<Long> seenPids = new HashSet<>();
        Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2, PID3));
        assertEquals(LATE_DATE, newThreshold);
        assertThat(seenPids, contains(PID3));
    }

    @Test
    public void threeItemsDifferentELL() {
        List<Date> dates = Arrays.asList(EARLY_DATE, LATE_DATE, LATE_DATE);
        mySvc.setDateFromPid(pid -> dates.get(pid.intValue() - 1));
        Set<Long> seenPids = new HashSet<>();
        Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2, PID3));
        assertEquals(LATE_DATE, newThreshold);
        assertThat(seenPids, contains(PID2, PID3));
    }

    @Test
    public void threeItemsDifferentLEE() {
        List<Date> dates = Arrays.asList(LATE_DATE, EARLY_DATE, EARLY_DATE);
        mySvc.setDateFromPid(pid -> dates.get(pid.intValue() - 1));
        Set<Long> seenPids = new HashSet<>();
        Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2, PID3));
        assertEquals(EARLY_DATE, newThreshold);
        assertThat(seenPids, contains(PID2, PID3));
    }

    @Test
    public void threeItemsDifferentLLE() {
        List<Date> dates = Arrays.asList(LATE_DATE, LATE_DATE, EARLY_DATE);
        mySvc.setDateFromPid(pid -> dates.get(pid.intValue() - 1));
        Set<Long> seenPids = new HashSet<>();
        Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2, PID3));
        assertEquals(EARLY_DATE, newThreshold);
        assertThat(seenPids, contains(PID3));
    }

    @Test
    public void oneHundredItemsSameDate() {
        mySvc.setDateFromPid(pid -> LATE_DATE);
        Set<Long> seenPids = new HashSet<>();
        List<Long> bigList = new ArrayList<>();
        for (int i = 0; i < 100; ++i) {
            bigList.add((long) i);
        }
        Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, bigList);
        assertEquals(LATE_DATE, newThreshold);
        assertThat(seenPids, hasSize(100));
    }
}
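The tests above pin down the expected contract of BatchDateThresholdUpdater without showing the class itself: the new threshold is the date of the last PID in the processed page, and the seen-PID cache ends up holding exactly the PIDs that share that date. A rough sketch consistent with those assertions (not the real implementation, which may maintain the cache differently):

// Sketch inferred from the tests above; the actual BatchDateThresholdUpdater may differ.
class ThresholdSketch {
    private java.util.function.Function<Long, Date> myDateFromPid;

    void setDateFromPid(java.util.function.Function<Long, Date> theDateFromPid) {
        myDateFromPid = theDateFromPid;
    }

    Date updateThresholdAndCache(Date thePreviousThreshold, Set<Long> theSeenPids, List<Long> thePids) {
        if (thePids.isEmpty()) {
            return thePreviousThreshold;
        }
        // The new threshold is the date of the last PID processed in this page
        Date newThreshold = myDateFromPid.apply(thePids.get(thePids.size() - 1));
        // Cache the PIDs that share the new threshold so the next page can skip them
        theSeenPids.clear();
        for (Long pid : thePids) {
            if (myDateFromPid.apply(pid).equals(newThreshold)) {
                theSeenPids.add(pid);
            }
        }
        return newThreshold;
    }
}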
@ -5,8 +5,9 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition;
|
|||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.delete.model.PartitionedUrl;
|
||||
import ca.uhn.fhir.jpa.delete.model.RequestListJson;
|
||||
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
|
||||
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
|
||||
import ca.uhn.fhir.jpa.dao.IResultIterator;
|
||||
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
||||
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
|
@ -39,6 +40,7 @@ import static org.mockito.Mockito.when;
|
|||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class ReverseCronologicalBatchResourcePidReaderTest {
|
||||
private static final int BATCH_SIZE = 3;
|
||||
static FhirContext ourFhirContext = FhirContext.forR4Cached();
|
||||
static String URL_A = "a";
|
||||
static String URL_B = "b";
|
||||
|
@ -54,9 +56,14 @@ class ReverseCronologicalBatchResourcePidReaderTest {
|
|||
DaoRegistry myDaoRegistry;
|
||||
@Mock
|
||||
IFhirResourceDao<Patient> myPatientDao;
|
||||
private final RequestPartitionId myDefaultPartitionId = RequestPartitionId.defaultPartition();
|
||||
@Mock
|
||||
private IResultIterator myResultIter;
|
||||
|
||||
@InjectMocks
|
||||
ReverseCronologicalBatchResourcePidReader myReader = new ReverseCronologicalBatchResourcePidReader();
|
||||
@Mock
|
||||
private BatchResourceSearcher myBatchResourceSearcher;
|
||||
|
||||
@BeforeEach
|
||||
public void before() throws JsonProcessingException {
|
||||
|
@ -65,17 +72,20 @@ class ReverseCronologicalBatchResourcePidReaderTest {
|
|||
ObjectMapper mapper = new ObjectMapper();
|
||||
String requestListJsonString = mapper.writeValueAsString(requestListJson);
|
||||
myReader.setRequestListJson(requestListJsonString);
|
||||
myReader.setBatchSize(BATCH_SIZE);
|
||||
|
||||
SearchParameterMap map = new SearchParameterMap();
|
||||
RuntimeResourceDefinition patientResDef = ourFhirContext.getResourceDefinition("Patient");
|
||||
when(myMatchUrlService.getResourceSearch(URL_A)).thenReturn(new ResourceSearch(patientResDef, map));
|
||||
when(myMatchUrlService.getResourceSearch(URL_B)).thenReturn(new ResourceSearch(patientResDef, map));
|
||||
when(myMatchUrlService.getResourceSearch(URL_C)).thenReturn(new ResourceSearch(patientResDef, map));
|
||||
when(myMatchUrlService.getResourceSearch(URL_A, myDefaultPartitionId)).thenReturn(new ResourceSearch(patientResDef, map, myDefaultPartitionId));
|
||||
when(myMatchUrlService.getResourceSearch(URL_B, myDefaultPartitionId)).thenReturn(new ResourceSearch(patientResDef, map, myDefaultPartitionId));
|
||||
when(myMatchUrlService.getResourceSearch(URL_C, myDefaultPartitionId)).thenReturn(new ResourceSearch(patientResDef, map, myDefaultPartitionId));
|
||||
when(myDaoRegistry.getResourceDao("Patient")).thenReturn(myPatientDao);
|
||||
myPatient = new Patient();
|
||||
when(myPatientDao.readByPid(any())).thenReturn(myPatient);
|
||||
Calendar cal = new GregorianCalendar(2021, 1, 1);
|
||||
myPatient.getMeta().setLastUpdated(cal.getTime());
|
||||
|
||||
when(myBatchResourceSearcher.performSearch(any(), any())).thenReturn(myResultIter);
|
||||
}
|
||||
|
||||
private Set<ResourcePersistentId> buildPidSet(Integer... thePids) {
|
||||
|
@ -87,7 +97,7 @@ class ReverseCronologicalBatchResourcePidReaderTest {
|
|||
|
||||
@Test
|
||||
public void test3x1() throws Exception {
|
||||
when(myPatientDao.searchForIds(any(), any()))
|
||||
when(myResultIter.getNextResultBatch(BATCH_SIZE))
|
||||
.thenReturn(buildPidSet(1, 2, 3))
|
||||
.thenReturn(emptySet)
|
||||
.thenReturn(buildPidSet(4, 5, 6))
|
||||
|
@ -101,10 +111,30 @@ class ReverseCronologicalBatchResourcePidReaderTest {
|
|||
assertNull(myReader.read());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testReadRepeat() throws Exception {
|
||||
when(myResultIter.getNextResultBatch(BATCH_SIZE))
|
||||
.thenReturn(buildPidSet(1, 2, 3))
|
||||
.thenReturn(buildPidSet(1, 2, 3))
|
||||
.thenReturn(buildPidSet(2, 3, 4))
|
||||
.thenReturn(buildPidSet(4, 5))
|
||||
.thenReturn(emptySet);
|
||||
|
||||
when(myResultIter.hasNext())
|
||||
.thenReturn(true)
|
||||
.thenReturn(true)
|
||||
.thenReturn(true)
|
||||
.thenReturn(true)
|
||||
.thenReturn(false);
|
||||
|
||||
assertListEquals(myReader.read(), 1, 2, 3);
|
||||
assertListEquals(myReader.read(), 4, 5);
|
||||
assertNull(myReader.read());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void test1x3start() throws Exception {
|
||||
when(myPatientDao.searchForIds(any(), any()))
|
||||
when(myResultIter.getNextResultBatch(BATCH_SIZE))
|
||||
.thenReturn(buildPidSet(1, 2, 3))
|
||||
.thenReturn(buildPidSet(4, 5, 6))
|
||||
.thenReturn(buildPidSet(7, 8))
|
||||
|
@ -120,7 +150,7 @@ class ReverseCronologicalBatchResourcePidReaderTest {
|
|||
|
||||
@Test
|
||||
public void test1x3end() throws Exception {
|
||||
when(myPatientDao.searchForIds(any(), any()))
|
||||
when(myResultIter.getNextResultBatch(BATCH_SIZE))
|
||||
.thenReturn(emptySet)
|
||||
.thenReturn(emptySet)
|
||||
.thenReturn(buildPidSet(1, 2, 3))
|
||||
|
@ -140,6 +170,4 @@ class ReverseCronologicalBatchResourcePidReaderTest {
|
|||
assertEquals(theList.get(i), Long.valueOf(theValues[i]));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -35,7 +35,6 @@ import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
|
|||
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
|
||||
import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
|
||||
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
|
||||
import ca.uhn.fhir.jpa.stresstest.GiantTransactionPerfTest;
|
||||
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionLoader;
|
||||
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
|
||||
import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener;
|
||||
|
@ -110,6 +109,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
|
|||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.lenient;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@TestPropertySource(properties = {
|
||||
|
@ -240,8 +240,9 @@ public abstract class BaseJpaTest extends BaseTest {
|
|||
when(mySrd.getInterceptorBroadcaster()).thenReturn(mySrdInterceptorService);
|
||||
when(mySrd.getUserData()).thenReturn(new HashMap<>());
|
||||
when(mySrd.getHeaders(eq(JpaConstants.HEADER_META_SNAPSHOT_MODE))).thenReturn(new ArrayList<>());
|
||||
when(mySrd.getServer().getDefaultPageSize()).thenReturn(null);
|
||||
when(mySrd.getServer().getMaximumPageSize()).thenReturn(null);
|
||||
// TODO enforce strict mocking everywhere
|
||||
lenient().when(mySrd.getServer().getDefaultPageSize()).thenReturn(null);
|
||||
lenient().when(mySrd.getServer().getMaximumPageSize()).thenReturn(null);
|
||||
}
|
||||
|
||||
protected CountDownLatch registerLatchHookInterceptor(int theCount, Pointcut theLatchPointcut) {
|
||||
|
|
|
@ -2,20 +2,15 @@ package ca.uhn.fhir.jpa.dao.expunge;
|
|||
|
||||
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.config.TestDstu3Config;
|
||||
import ca.uhn.test.concurrency.PointcutLatch;
|
||||
import com.google.common.collect.Sets;
|
||||
import org.apache.commons.lang3.builder.ToStringBuilder;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.domain.Slice;
|
||||
import org.springframework.data.domain.SliceImpl;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit.jupiter.SpringExtension;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
@ -27,24 +22,15 @@ import static org.hamcrest.Matchers.isOneOf;
|
|||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotEquals;
|
||||
|
||||
@ExtendWith(SpringExtension.class)
|
||||
@ContextConfiguration(classes = {TestDstu3Config.class})
|
||||
public class PartitionRunnerTest {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(PartitionRunnerTest.class);
|
||||
private static final String EXPUNGE_THREADNAME_1 = "expunge-1";
|
||||
private static final String EXPUNGE_THREADNAME_2 = "expunge-2";
|
||||
private static final String TEST_THREADNAME_1 = "test-1";
|
||||
private static final String TEST_THREADNAME_2 = "test-2";
|
||||
|
||||
@Autowired
|
||||
private PartitionRunner myPartitionRunner;
|
||||
|
||||
@Autowired
|
||||
private DaoConfig myDaoConfig;
|
||||
private PointcutLatch myLatch = new PointcutLatch("partition call");
|
||||
private final PointcutLatch myLatch = new PointcutLatch("partition call");
|
||||
|
||||
@AfterEach
|
||||
public void before() {
|
||||
myDaoConfig.setExpungeThreadCount(new DaoConfig().getExpungeThreadCount());
|
||||
myDaoConfig.setExpungeBatchSize(new DaoConfig().getExpungeBatchSize());
|
||||
myLatch.clear();
|
||||
}
|
||||
|
||||
|
@ -53,10 +39,23 @@ public class PartitionRunnerTest {
|
|||
Slice<Long> resourceIds = buildSlice(0);
|
||||
Consumer<List<Long>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
myLatch.setExpectedCount(0);
|
||||
myPartitionRunner.runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
|
||||
getPartitionRunner().runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
myLatch.clear();
|
||||
}
|
||||
|
||||
private PartitionRunner getPartitionRunner() {
|
||||
return getPartitionRunner(DaoConfig.DEFAULT_EXPUNGE_BATCH_SIZE);
|
||||
}
|
||||
|
||||
private PartitionRunner getPartitionRunner(int theBatchSize) {
|
||||
return getPartitionRunner(theBatchSize, Runtime.getRuntime().availableProcessors());
|
||||
}
|
||||
|
||||
private PartitionRunner getPartitionRunner(int theBatchSize, int theThreadCount) {
|
||||
return new PartitionRunner("TEST", "test", theBatchSize, theThreadCount);
|
||||
}
|
||||
|
||||
private Slice<Long> buildSlice(int size) {
|
||||
List<Long> list = new ArrayList<>();
|
||||
for (long i = 0; i < size; ++i) {
|
||||
|
@ -71,7 +70,7 @@ public class PartitionRunnerTest {
|
|||
|
||||
Consumer<List<Long>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
myLatch.setExpectedCount(1);
|
||||
myPartitionRunner.runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
getPartitionRunner().runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
PartitionCall partitionCall = (PartitionCall) PointcutLatch.getLatchInvocationParameter(myLatch.awaitExpected());
|
||||
assertEquals("main", partitionCall.threadName);
|
||||
assertEquals(1, partitionCall.size);
|
||||
|
@ -84,7 +83,7 @@ public class PartitionRunnerTest {
|
|||
|
||||
Consumer<List<Long>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
myLatch.setExpectedCount(1);
|
||||
myPartitionRunner.runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
getPartitionRunner().runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
PartitionCall partitionCall = (PartitionCall) PointcutLatch.getLatchInvocationParameter(myLatch.awaitExpected());
|
||||
assertEquals("main", partitionCall.threadName);
|
||||
assertEquals(2, partitionCall.size);
|
||||
|
@ -93,17 +92,16 @@ public class PartitionRunnerTest {
|
|||
@Test
|
||||
public void tenItemsBatch5() throws InterruptedException {
|
||||
Slice<Long> resourceIds = buildSlice(10);
|
||||
myDaoConfig.setExpungeBatchSize(5);
|
||||
|
||||
Consumer<List<Long>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
myLatch.setExpectedCount(2);
|
||||
myPartitionRunner.runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
getPartitionRunner(5).runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
List<HookParams> calls = myLatch.awaitExpected();
|
||||
PartitionCall partitionCall1 = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 0);
|
||||
assertThat(partitionCall1.threadName, isOneOf(EXPUNGE_THREADNAME_1, EXPUNGE_THREADNAME_2));
|
||||
assertThat(partitionCall1.threadName, isOneOf(TEST_THREADNAME_1, TEST_THREADNAME_2));
|
||||
assertEquals(5, partitionCall1.size);
|
||||
PartitionCall partitionCall2 = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 1);
|
||||
assertThat(partitionCall2.threadName, isOneOf(EXPUNGE_THREADNAME_1, EXPUNGE_THREADNAME_2));
|
||||
assertThat(partitionCall2.threadName, isOneOf(TEST_THREADNAME_1, TEST_THREADNAME_2));
|
||||
assertEquals(5, partitionCall2.size);
|
||||
assertNotEquals(partitionCall1.threadName, partitionCall2.threadName);
|
||||
}
|
||||
|
@ -111,7 +109,6 @@ public class PartitionRunnerTest {
|
|||
@Test
|
||||
public void nineItemsBatch5() throws InterruptedException {
|
||||
Slice<Long> resourceIds = buildSlice(9);
|
||||
myDaoConfig.setExpungeBatchSize(5);
|
||||
|
||||
// We don't care in which order, but one partition size should be
|
||||
// 5 and one should be 4
|
||||
|
@ -119,13 +116,13 @@ public class PartitionRunnerTest {
|
|||
|
||||
Consumer<List<Long>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
myLatch.setExpectedCount(2);
|
||||
myPartitionRunner.runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
getPartitionRunner(5).runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
List<HookParams> calls = myLatch.awaitExpected();
|
||||
PartitionCall partitionCall1 = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 0);
|
||||
assertThat(partitionCall1.threadName, isOneOf(EXPUNGE_THREADNAME_1, EXPUNGE_THREADNAME_2));
|
||||
assertThat(partitionCall1.threadName, isOneOf(TEST_THREADNAME_1, TEST_THREADNAME_2));
|
||||
assertEquals(true, nums.remove(partitionCall1.size));
|
||||
PartitionCall partitionCall2 = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 1);
|
||||
assertThat(partitionCall2.threadName, isOneOf(EXPUNGE_THREADNAME_1, EXPUNGE_THREADNAME_2));
|
||||
assertThat(partitionCall2.threadName, isOneOf(TEST_THREADNAME_1, TEST_THREADNAME_2));
|
||||
assertEquals(true, nums.remove(partitionCall2.size));
|
||||
assertNotEquals(partitionCall1.threadName, partitionCall2.threadName);
|
||||
}
|
||||
|
@ -133,21 +130,19 @@ public class PartitionRunnerTest {
|
|||
@Test
|
||||
public void tenItemsOneThread() throws InterruptedException {
|
||||
Slice<Long> resourceIds = buildSlice(10);
|
||||
myDaoConfig.setExpungeBatchSize(5);
|
||||
myDaoConfig.setExpungeThreadCount(1);
|
||||
|
||||
Consumer<List<Long>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
myLatch.setExpectedCount(2);
|
||||
myPartitionRunner.runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
getPartitionRunner(5, 1).runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
List<HookParams> calls = myLatch.awaitExpected();
|
||||
{
|
||||
PartitionCall partitionCall = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 0);
|
||||
assertEquals(EXPUNGE_THREADNAME_1, partitionCall.threadName);
|
||||
assertEquals(TEST_THREADNAME_1, partitionCall.threadName);
|
||||
assertEquals(5, partitionCall.size);
|
||||
}
|
||||
{
|
||||
PartitionCall partitionCall = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 1);
|
||||
assertEquals(EXPUNGE_THREADNAME_1, partitionCall.threadName);
|
||||
assertEquals(TEST_THREADNAME_1, partitionCall.threadName);
|
||||
assertEquals(5, partitionCall.size);
|
||||
}
|
||||
}
|
||||
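The refactor above replaces the Spring-wired PartitionRunner, previously configured through DaoConfig expunge settings, with direct construction. A short sketch of the pattern these tests now use; the roles of the first two constructor arguments (a process name and a thread-name prefix) are inferred from the TEST_THREADNAME_* constants rather than documented here:

// Sketch: batch size and thread count are passed explicitly instead of via DaoConfig.
int batchSize = 5;     // PIDs handed to each partition callback
int threadCount = 2;   // worker threads; names come out as "test-1", "test-2", ...
PartitionRunner runner = new PartitionRunner("TEST", "test", batchSize, threadCount);
Slice<Long> pids = new SliceImpl<>(java.util.Arrays.asList(1L, 2L, 3L, 4L, 5L, 6L));
runner.runInPartitionedThreads(pids, chunk -> ourLog.info("Processing {} pids", chunk.size()));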
|
|
|
@ -675,7 +675,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
|||
Throwable t = next.get();
|
||||
if (t != null) {
|
||||
String stackTrace = ExceptionUtils.getStackTrace(t);
|
||||
fail(t.toString() + "\n" + stackTrace);
|
||||
fail(t + "\n" + stackTrace);
|
||||
}
|
||||
}
|
||||
executor.shutdownNow();
|
||||
|
|
|
@ -1,23 +0,0 @@
package ca.uhn.fhir.jpa.delete.job;

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import com.github.jsonldjava.shaded.com.google.common.collect.Lists;
import org.springframework.batch.core.JobParameters;

import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.List;

public final class DeleteExpungeJobParameterUtil {
    private DeleteExpungeJobParameterUtil() {
    }

    @Nonnull
    public static JobParameters buildJobParameters(String... theUrls) {
        List<RequestPartitionId> requestPartitionIds = new ArrayList<>();
        for (int i = 0; i < theUrls.length; ++i) {
            requestPartitionIds.add(RequestPartitionId.defaultPartition());
        }
        return DeleteExpungeJobConfig.buildJobParameters(2401, Lists.newArrayList(theUrls), requestPartitionIds);
    }
}
@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.delete.job;
|
|||
|
||||
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
|
||||
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
|
||||
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterUtil;
|
||||
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.test.utilities.BatchJobHelper;
|
||||
|
@ -50,7 +51,7 @@ public class DeleteExpungeJobTest extends BaseJpaR4Test {
|
|||
assertEquals(2, myPatientDao.search(SearchParameterMap.newSynchronous()).size());
|
||||
assertEquals(2, myObservationDao.search(SearchParameterMap.newSynchronous()).size());
|
||||
|
||||
JobParameters jobParameters = DeleteExpungeJobParameterUtil.buildJobParameters("Observation?subject.active=false", "Patient?active=false");
|
||||
JobParameters jobParameters = MultiUrlJobParameterUtil.buildJobParameters("Observation?subject.active=false", "Patient?active=false");
|
||||
|
||||
// execute
|
||||
JobExecution jobExecution = myBatchJobSubmitter.runJob(myDeleteExpungeJob, jobParameters);
|
||||
|
|
|
@ -1,7 +1,10 @@
|
|||
package ca.uhn.fhir.jpa.delete.job;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterUtil;
|
||||
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
|
||||
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
||||
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
|
@ -16,13 +19,14 @@ import org.springframework.batch.core.JobParametersInvalidException;
|
|||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.anyString;
|
||||
import static org.mockito.Mockito.times;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class DeleteExpungeJobParameterValidatorTest {
|
||||
class MultiUrlJobParameterValidatorTest {
|
||||
static final FhirContext ourFhirContext = FhirContext.forR4Cached();
|
||||
|
||||
@Mock
|
||||
|
@ -30,32 +34,32 @@ class DeleteExpungeJobParameterValidatorTest {
|
|||
@Mock
|
||||
DaoRegistry myDaoRegistry;
|
||||
|
||||
DeleteExpungeJobParameterValidator mySvc;
|
||||
MultiUrlJobParameterValidator mySvc;
|
||||
|
||||
@BeforeEach
|
||||
public void initMocks() {
|
||||
mySvc = new DeleteExpungeJobParameterValidator(myMatchUrlService, myDaoRegistry);
|
||||
mySvc = new MultiUrlJobParameterValidator(myMatchUrlService, myDaoRegistry);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testValidate() throws JobParametersInvalidException, JsonProcessingException {
|
||||
// setup
|
||||
JobParameters parameters = DeleteExpungeJobParameterUtil.buildJobParameters("Patient?address=memory", "Patient?name=smith");
|
||||
ResourceSearch resourceSearch = new ResourceSearch(ourFhirContext.getResourceDefinition("Patient"), new SearchParameterMap());
|
||||
when(myMatchUrlService.getResourceSearch(anyString())).thenReturn(resourceSearch);
|
||||
JobParameters parameters = MultiUrlJobParameterUtil.buildJobParameters("Patient?address=memory", "Patient?name=smith");
|
||||
ResourceSearch resourceSearch = new ResourceSearch(ourFhirContext.getResourceDefinition("Patient"), new SearchParameterMap(), RequestPartitionId.defaultPartition());
|
||||
when(myMatchUrlService.getResourceSearch(anyString(), any())).thenReturn(resourceSearch);
|
||||
when(myDaoRegistry.isResourceTypeSupported("Patient")).thenReturn(true);
|
||||
|
||||
// execute
|
||||
mySvc.validate(parameters);
|
||||
// verify
|
||||
verify(myMatchUrlService, times(2)).getResourceSearch(anyString());
|
||||
verify(myMatchUrlService, times(2)).getResourceSearch(anyString(), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testValidateBadType() throws JobParametersInvalidException, JsonProcessingException {
|
||||
JobParameters parameters = DeleteExpungeJobParameterUtil.buildJobParameters("Patient?address=memory");
|
||||
ResourceSearch resourceSearch = new ResourceSearch(ourFhirContext.getResourceDefinition("Patient"), new SearchParameterMap());
|
||||
when(myMatchUrlService.getResourceSearch(anyString())).thenReturn(resourceSearch);
|
||||
JobParameters parameters = MultiUrlJobParameterUtil.buildJobParameters("Patient?address=memory");
|
||||
ResourceSearch resourceSearch = new ResourceSearch(ourFhirContext.getResourceDefinition("Patient"), new SearchParameterMap(), RequestPartitionId.defaultPartition());
|
||||
when(myMatchUrlService.getResourceSearch(anyString(), any())).thenReturn(resourceSearch);
|
||||
when(myDaoRegistry.isResourceTypeSupported("Patient")).thenReturn(false);
|
||||
|
||||
try {
|
|
@ -0,0 +1,121 @@
|
|||
package ca.uhn.fhir.jpa.delete.job;
|
||||
|
||||
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
|
||||
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
|
||||
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterUtil;
|
||||
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
|
||||
import ca.uhn.fhir.jpa.batch.reader.CronologicalBatchAllResourcePidReader;
|
||||
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.test.utilities.BatchJobHelper;
|
||||
import org.apache.commons.lang3.time.DateUtils;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.batch.core.Job;
|
||||
import org.springframework.batch.core.JobExecution;
|
||||
import org.springframework.batch.core.JobParameter;
|
||||
import org.springframework.batch.core.JobParameters;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
|
||||
import javax.annotation.PostConstruct;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
public class ReindexJobTest extends BaseJpaR4Test {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(ReindexJobTest.class);
|
||||
|
||||
@Autowired
|
||||
private IBatchJobSubmitter myBatchJobSubmitter;
|
||||
@Autowired
|
||||
@Qualifier(BatchJobsConfig.REINDEX_JOB_NAME)
|
||||
private Job myReindexJob;
|
||||
@Autowired
|
||||
@Qualifier(BatchJobsConfig.REINDEX_EVERYTHING_JOB_NAME)
|
||||
private Job myReindexEverythingJob;
|
||||
@Autowired
|
||||
private BatchJobHelper myBatchJobHelper;
|
||||
|
||||
private ReindexTestHelper myReindexTestHelper;
|
||||
|
||||
@PostConstruct
|
||||
public void postConstruct() {
|
||||
myReindexTestHelper = new ReindexTestHelper(myFhirCtx, myDaoRegistry, mySearchParamRegistry);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testReindexJob() throws Exception {
|
||||
// setup
|
||||
|
||||
IIdType obsFinalId = myReindexTestHelper.createObservationWithAlleleExtension(Observation.ObservationStatus.FINAL);
|
||||
IIdType obsCancelledId = myReindexTestHelper.createObservationWithAlleleExtension(Observation.ObservationStatus.CANCELLED);
|
||||
|
||||
myReindexTestHelper.createAlleleSearchParameter();
|
||||
|
||||
assertEquals(2, myObservationDao.search(SearchParameterMap.newSynchronous()).size());
|
||||
// The searchparam value is on the observation, but it hasn't been indexed yet
|
||||
assertThat(myReindexTestHelper.getAlleleObservationIds(), hasSize(0));
|
||||
|
||||
// Only reindex one of them
|
||||
JobParameters jobParameters = MultiUrlJobParameterUtil.buildJobParameters("Observation?status=final");
|
||||
|
||||
// execute
|
||||
JobExecution jobExecution = myBatchJobSubmitter.runJob(myReindexJob, jobParameters);
|
||||
|
||||
myBatchJobHelper.awaitJobCompletion(jobExecution);
|
||||
|
||||
// validate
|
||||
assertEquals(2, myObservationDao.search(SearchParameterMap.newSynchronous()).size());
|
||||
// Now one of them should be indexed
|
||||
List<String> alleleObservationIds = myReindexTestHelper.getAlleleObservationIds();
|
||||
assertThat(alleleObservationIds, hasSize(1));
|
||||
assertEquals(obsFinalId.getIdPart(), alleleObservationIds.get(0));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testReindexEverythingJob() throws Exception {
|
||||
// setup
|
||||
|
||||
for (int i = 0; i < 50; ++i) {
|
||||
myReindexTestHelper.createObservationWithAlleleExtension(Observation.ObservationStatus.FINAL);
|
||||
}
|
||||
|
||||
myReindexTestHelper.createAlleleSearchParameter();
|
||||
mySearchParamRegistry.forceRefresh();
|
||||
|
||||
assertEquals(50, myObservationDao.search(SearchParameterMap.newSynchronous()).size());
|
||||
// The searchparam value is on the observation, but it hasn't been indexed yet
|
||||
assertThat(myReindexTestHelper.getAlleleObservationIds(), hasSize(0));
|
||||
|
||||
JobParameters jobParameters = buildEverythingJobParameters(3L);
|
||||
|
||||
// execute
|
||||
JobExecution jobExecution = myBatchJobSubmitter.runJob(myReindexEverythingJob, jobParameters);
|
||||
|
||||
myBatchJobHelper.awaitJobCompletion(jobExecution);
|
||||
|
||||
// validate
|
||||
assertEquals(50, myObservationDao.search(SearchParameterMap.newSynchronous()).size());
|
||||
// Now all of them should be indexed
|
||||
assertThat(myReindexTestHelper.getAlleleObservationIds(), hasSize(50));
|
||||
}
|
||||
|
||||
private JobParameters buildEverythingJobParameters(Long theBatchSize) {
|
||||
Map<String, JobParameter> map = new HashMap<>();
|
||||
map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MultiUrlProcessorJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
|
||||
map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
|
||||
JobParameters parameters = new JobParameters(map);
|
||||
return parameters;
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,105 @@
|
|||
package ca.uhn.fhir.jpa.delete.job;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.rest.api.CacheControlDirective;
|
||||
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||
import ca.uhn.fhir.rest.client.api.IGenericClient;
|
||||
import ca.uhn.fhir.rest.gclient.StringClientParam;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.util.BundleUtil;
|
||||
import org.hl7.fhir.instance.model.api.IBaseBundle;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.Enumerations;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.SearchParameter;
|
||||
import org.hl7.fhir.r4.model.StringType;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.util.List;
|
||||
|
||||
public class ReindexTestHelper {
|
||||
public static final String ALLELE_EXTENSION_URL = "http://hl7.org/fhir/StructureDefinition/observation-geneticsAlleleName";
|
||||
public static final String ALLELE_SP_CODE = "alleleName";
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(ReindexTestHelper.class);
|
||||
private static final String TEST_ALLELE_VALUE = "HERC";
|
||||
|
||||
private final FhirContext myFhirContext;
|
||||
private final DaoRegistry myDaoRegistry;
|
||||
private final ISearchParamRegistry mySearchParamRegistry;
|
||||
private final IFhirResourceDao<SearchParameter> mySearchParameterDao;
|
||||
private final IFhirResourceDao<Observation> myObservationDao;
|
||||
|
||||
public ReindexTestHelper(FhirContext theFhirContext, DaoRegistry theDaoRegistry, ISearchParamRegistry theSearchParamRegistry) {
|
||||
myFhirContext = theFhirContext;
|
||||
myDaoRegistry = theDaoRegistry;
|
||||
mySearchParamRegistry = theSearchParamRegistry;
|
||||
mySearchParameterDao = myDaoRegistry.getResourceDao(SearchParameter.class);
|
||||
myObservationDao = myDaoRegistry.getResourceDao(Observation.class);
|
||||
}
|
||||
|
||||
public void createAlleleSearchParameter() {
|
||||
createAlleleSearchParameter(ALLELE_SP_CODE);
|
||||
}
|
||||
|
||||
public void createAlleleSearchParameter(String theCode) {
|
||||
SearchParameter alleleName = new SearchParameter();
|
||||
alleleName.setId("SearchParameter/alleleName");
|
||||
alleleName.setStatus(Enumerations.PublicationStatus.ACTIVE);
|
||||
alleleName.addBase("Observation");
|
||||
alleleName.setCode(theCode);
|
||||
alleleName.setType(Enumerations.SearchParamType.TOKEN);
|
||||
alleleName.setTitle("AlleleName");
|
||||
alleleName.setExpression("Observation.extension('" + ALLELE_EXTENSION_URL + "')");
|
||||
alleleName.setXpathUsage(SearchParameter.XPathUsageType.NORMAL);
|
||||
mySearchParameterDao.create(alleleName);
|
||||
mySearchParamRegistry.forceRefresh();
|
||||
}
|
||||
|
||||
public IIdType createObservationWithAlleleExtension(Observation.ObservationStatus theStatus) {
|
||||
Observation observation = buildObservationWithAlleleExtension(theStatus);
|
||||
return myObservationDao.create(observation).getId();
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public Observation buildObservationWithAlleleExtension(Observation.ObservationStatus theStatus) {
|
||||
Observation observation = new Observation();
|
||||
observation.addExtension(ALLELE_EXTENSION_URL, new StringType(TEST_ALLELE_VALUE));
|
||||
observation.setStatus(theStatus);
|
||||
return observation;
|
||||
}
|
||||
|
||||
public List<String> getAlleleObservationIds() {
|
||||
return getAlleleObservationIds(ALLELE_SP_CODE, null);
|
||||
}
|
||||
|
||||
public List<String> getAlleleObservationIds(String theCode, String theIdentifier) {
|
||||
SearchParameterMap map = SearchParameterMap.newSynchronous();
|
||||
map.add(theCode, new TokenParam(TEST_ALLELE_VALUE));
|
||||
if (theIdentifier != null) {
|
||||
map.add(Observation.SP_IDENTIFIER, new TokenParam(theIdentifier));
|
||||
}
|
||||
ourLog.info("Searching with url {}", map.toNormalizedQueryString(myFhirContext));
|
||||
IBundleProvider result = myObservationDao.search(map);
|
||||
return result.getAllResourceIds();
|
||||
}
|
||||
|
||||
public IBaseResource buildObservationWithAlleleExtension() {
|
||||
return buildObservationWithAlleleExtension(Observation.ObservationStatus.FINAL);
|
||||
}
|
||||
|
||||
public List<String> getAlleleObservationIds(IGenericClient theClient) {
|
||||
IBaseBundle result = theClient.search()
|
||||
.forResource("Observation")
|
||||
.where(new StringClientParam(ALLELE_SP_CODE).matches().value(TEST_ALLELE_VALUE))
|
||||
.cacheControl(new CacheControlDirective().setNoCache(true))
|
||||
.execute();
|
||||
return BundleUtil.toListOfResourceIds(myFhirContext, result);
|
||||
}
|
||||
}
|
|
@ -17,7 +17,6 @@ import ca.uhn.fhir.rest.param.TokenParam;
|
|||
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
|
||||
import com.google.common.collect.ListMultimap;
|
||||
import com.google.common.collect.Multimap;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.Bundle;
|
||||
import org.hl7.fhir.r4.model.Encounter;
|
||||
import org.hl7.fhir.r4.model.Enumerations;
|
||||
|
@ -32,7 +31,6 @@ import org.junit.jupiter.api.Test;
|
|||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.stream.Collectors;
|
||||
|
@ -347,7 +345,7 @@ public class PatientIdPartitionInterceptorTest extends BaseJpaR4SystemTest {
|
|||
|
||||
Multimap<String, Integer> resourcesByType = runInTransaction(() -> {
|
||||
logAllResources();
|
||||
return myResourceTableDao.findAll().stream().collect(MultimapCollector.toMultimap(t->t.getResourceType(), t->t.getPartitionId().getPartitionId()));
|
||||
return myResourceTableDao.findAll().stream().collect(MultimapCollector.toMultimap(t -> t.getResourceType(), t -> t.getPartitionId().getPartitionId()));
|
||||
});
|
||||
|
||||
assertThat(resourcesByType.get("Patient"), contains(4267));
|
||||
|
@ -382,7 +380,7 @@ public class PatientIdPartitionInterceptorTest extends BaseJpaR4SystemTest {
|
|||
|
||||
Multimap<String, Integer> resourcesByType = runInTransaction(() -> {
|
||||
logAllResources();
|
||||
return myResourceTableDao.findAll().stream().collect(MultimapCollector.toMultimap(t->t.getResourceType(), t->t.getPartitionId().getPartitionId()));
|
||||
return myResourceTableDao.findAll().stream().collect(MultimapCollector.toMultimap(t -> t.getResourceType(), t -> t.getPartitionId().getPartitionId()));
|
||||
});
|
||||
|
||||
assertThat(resourcesByType.get("Patient"), contains(4267));
|
||||
|
@ -430,7 +428,7 @@ public class PatientIdPartitionInterceptorTest extends BaseJpaR4SystemTest {
|
|||
|
||||
Multimap<String, Integer> resourcesByType = runInTransaction(() -> {
|
||||
logAllResources();
|
||||
return myResourceTableDao.findAll().stream().collect(MultimapCollector.toMultimap(t->t.getResourceType(), t->t.getPartitionId().getPartitionId()));
|
||||
return myResourceTableDao.findAll().stream().collect(MultimapCollector.toMultimap(t -> t.getResourceType(), t -> t.getPartitionId().getPartitionId()));
|
||||
});
|
||||
|
||||
assertThat(resourcesByType.get("Patient"), contains(4267));
|
||||
|
|
|
@ -24,6 +24,7 @@ import ca.uhn.fhir.rest.server.RestfulServer;
|
|||
import ca.uhn.fhir.rest.server.interceptor.CorsInterceptor;
|
||||
import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
|
||||
import ca.uhn.fhir.rest.server.provider.DeleteExpungeProvider;
|
||||
import ca.uhn.fhir.rest.server.provider.ReindexProvider;
|
||||
import ca.uhn.fhir.test.utilities.JettyUtil;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
|
@ -77,6 +78,8 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {
|
|||
protected IPartitionDao myPartitionDao;
|
||||
@Autowired
|
||||
private DeleteExpungeProvider myDeleteExpungeProvider;
|
||||
@Autowired
|
||||
private ReindexProvider myReindexProvider;
|
||||
|
||||
ResourceCountCache myResourceCountsCache;
|
||||
private TerminologyUploaderProvider myTerminologyUploaderProvider;
|
||||
|
@ -109,7 +112,7 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {
|
|||
myTerminologyUploaderProvider = myAppCtx.getBean(TerminologyUploaderProvider.class);
|
||||
myDaoRegistry = myAppCtx.getBean(DaoRegistry.class);
|
||||
|
||||
ourRestServer.registerProviders(mySystemProvider, myTerminologyUploaderProvider, myDeleteExpungeProvider);
|
||||
ourRestServer.registerProviders(mySystemProvider, myTerminologyUploaderProvider, myDeleteExpungeProvider, myReindexProvider);
|
||||
ourRestServer.registerProvider(myAppCtx.getBean(GraphQLProvider.class));
|
||||
ourRestServer.registerProvider(myAppCtx.getBean(DiffProvider.class));
|
||||
|
||||
|
|
|
@ -0,0 +1,230 @@
|
|||
package ca.uhn.fhir.jpa.provider.r4;
|
||||
|
||||
import ca.uhn.fhir.context.RuntimeResourceDefinition;
|
||||
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
|
||||
import ca.uhn.fhir.interceptor.api.IPointcut;
|
||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
|
||||
import ca.uhn.fhir.jpa.delete.job.ReindexTestHelper;
|
||||
import ca.uhn.fhir.rest.api.CacheControlDirective;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import ca.uhn.fhir.test.utilities.BatchJobHelper;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.BooleanType;
|
||||
import org.hl7.fhir.r4.model.Bundle;
|
||||
import org.hl7.fhir.r4.model.DecimalType;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.Parameters;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static ca.uhn.fhir.jpa.model.util.JpaConstants.DEFAULT_PARTITION_NAME;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.isA;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
public class MultitenantBatchOperationR4Test extends BaseMultitenantResourceProviderR4Test {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(MultitenantBatchOperationR4Test.class);
|
||||
|
||||
@Autowired
|
||||
private BatchJobHelper myBatchJobHelper;
|
||||
|
||||
@BeforeEach
|
||||
@Override
|
||||
public void before() throws Exception {
|
||||
super.before();
|
||||
myDaoConfig.setAllowMultipleDelete(true);
|
||||
myDaoConfig.setExpungeEnabled(true);
|
||||
myDaoConfig.setDeleteExpungeEnabled(true);
|
||||
}
|
||||
|
||||
@AfterEach
|
||||
@Override
|
||||
public void after() throws Exception {
|
||||
myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete());
|
||||
myDaoConfig.setExpungeEnabled(new DaoConfig().isExpungeEnabled());
|
||||
myDaoConfig.setDeleteExpungeEnabled(new DaoConfig().isDeleteExpungeEnabled());
|
||||
super.after();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDeleteExpungeOperation() {
|
||||
// Create patients
|
||||
|
||||
IIdType idAT = createPatient(withTenant(TENANT_A), withActiveTrue());
|
||||
IIdType idAF = createPatient(withTenant(TENANT_A), withActiveFalse());
|
||||
IIdType idBT = createPatient(withTenant(TENANT_B), withActiveTrue());
|
||||
IIdType idBF = createPatient(withTenant(TENANT_B), withActiveFalse());
|
||||
|
||||
// validate setup
|
||||
assertEquals(2, getAllPatientsInTenant(TENANT_A).getTotal());
|
||||
assertEquals(2, getAllPatientsInTenant(TENANT_B).getTotal());
|
||||
assertEquals(0, getAllPatientsInTenant(DEFAULT_PARTITION_NAME).getTotal());
|
||||
|
||||
Parameters input = new Parameters();
|
||||
input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "/Patient?active=false");
|
||||
|
||||
MyInterceptor interceptor = new MyInterceptor();
|
||||
myInterceptorRegistry.registerAnonymousInterceptor(Pointcut.STORAGE_PARTITION_SELECTED, interceptor);
|
||||
// execute
|
||||
|
||||
myTenantClientInterceptor.setTenantId(TENANT_B);
|
||||
Parameters response = myClient
|
||||
.operation()
|
||||
.onServer()
|
||||
.named(ProviderConstants.OPERATION_DELETE_EXPUNGE)
|
||||
.withParameters(input)
|
||||
.execute();
|
||||
|
||||
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response));
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME);
|
||||
assertThat(interceptor.requestPartitionIds, hasSize(1));
|
||||
RequestPartitionId partitionId = interceptor.requestPartitionIds.get(0);
|
||||
assertEquals(TENANT_B_ID, partitionId.getFirstPartitionIdOrNull());
|
||||
assertEquals(TENANT_B, partitionId.getFirstPartitionNameOrNull());
|
||||
assertThat(interceptor.requestDetails.get(0), isA(ServletRequestDetails.class));
|
||||
assertEquals("Patient", interceptor.resourceDefs.get(0).getName());
|
||||
myInterceptorRegistry.unregisterInterceptor(interceptor);
|
||||
|
||||
DecimalType jobIdPrimitive = (DecimalType) response.getParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID);
|
||||
Long jobId = jobIdPrimitive.getValue().longValue();
|
||||
|
||||
assertEquals(1, myBatchJobHelper.getReadCount(jobId));
|
||||
assertEquals(1, myBatchJobHelper.getWriteCount(jobId));
|
||||
|
||||
// validate only the false patient in TENANT_B is removed
|
||||
assertEquals(2, getAllPatientsInTenant(TENANT_A).getTotal());
|
||||
assertEquals(1, getAllPatientsInTenant(TENANT_B).getTotal());
|
||||
assertEquals(0, getAllPatientsInTenant(DEFAULT_PARTITION_NAME).getTotal());
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testReindexEverything() {
|
||||
ReindexTestHelper reindexTestHelper = new ReindexTestHelper(myFhirCtx, myDaoRegistry, mySearchParamRegistry);
|
||||
myTenantClientInterceptor.setTenantId(TENANT_A);
|
||||
IIdType obsFinalA = doCreateResource(reindexTestHelper.buildObservationWithAlleleExtension());
|
||||
|
||||
myTenantClientInterceptor.setTenantId(TENANT_B);
|
||||
IIdType obsFinalB = doCreateResource(reindexTestHelper.buildObservationWithAlleleExtension());
|
||||
|
||||
reindexTestHelper.createAlleleSearchParameter();
|
||||
|
||||
// The searchparam value is on the observation, but it hasn't been indexed yet
|
||||
myTenantClientInterceptor.setTenantId(TENANT_A);
|
||||
assertThat(reindexTestHelper.getAlleleObservationIds(myClient), hasSize(0));
|
||||
myTenantClientInterceptor.setTenantId(TENANT_B);
|
||||
assertThat(reindexTestHelper.getAlleleObservationIds(myClient), hasSize(0));
|
||||
// setup
|
||||
Parameters input = new Parameters();
|
||||
Integer batchSize = 2401;
|
||||
input.addParameter(ProviderConstants.OPERATION_REINDEX_PARAM_BATCH_SIZE, new DecimalType(batchSize));
|
||||
input.addParameter(ProviderConstants.OPERATION_REINDEX_PARAM_EVERYTHING, new BooleanType(true));
|
||||
|
||||
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input));
|
||||
|
||||
// reindex all of Tenant A
|
||||
myTenantClientInterceptor.setTenantId(TENANT_A);
|
||||
Parameters response = myClient
|
||||
.operation()
|
||||
.onServer()
|
||||
.named(ProviderConstants.OPERATION_REINDEX)
|
||||
.withParameters(input)
|
||||
.execute();
|
||||
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response));
|
||||
DecimalType jobId = (DecimalType) response.getParameter(ProviderConstants.OPERATION_REINDEX_RESPONSE_JOB_ID);
|
||||
|
||||
myBatchJobHelper.awaitJobExecution(jobId.getValueAsNumber().longValue());
|
||||
|
||||
// validate
|
||||
List<String> alleleObservationIds = reindexTestHelper.getAlleleObservationIds(myClient);
|
||||
// Only the one in the first tenant should be indexed
|
||||
myTenantClientInterceptor.setTenantId(TENANT_A);
|
||||
assertThat(reindexTestHelper.getAlleleObservationIds(myClient), hasSize(1));
|
||||
assertEquals(obsFinalA.getIdPart(), alleleObservationIds.get(0));
|
||||
myTenantClientInterceptor.setTenantId(TENANT_B);
|
||||
assertThat(reindexTestHelper.getAlleleObservationIds(myClient), hasSize(0));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testReindexByUrl() {
|
||||
ReindexTestHelper reindexTestHelper = new ReindexTestHelper(myFhirCtx, myDaoRegistry, mySearchParamRegistry);
|
||||
myTenantClientInterceptor.setTenantId(TENANT_A);
|
||||
IIdType obsFinalA = doCreateResource(reindexTestHelper.buildObservationWithAlleleExtension(Observation.ObservationStatus.FINAL));
|
||||
IIdType obsCancelledA = doCreateResource(reindexTestHelper.buildObservationWithAlleleExtension(Observation.ObservationStatus.CANCELLED));
|
||||
|
||||
myTenantClientInterceptor.setTenantId(TENANT_B);
|
||||
IIdType obsFinalB = doCreateResource(reindexTestHelper.buildObservationWithAlleleExtension(Observation.ObservationStatus.FINAL));
|
||||
IIdType obsCancelledB = doCreateResource(reindexTestHelper.buildObservationWithAlleleExtension(Observation.ObservationStatus.CANCELLED));
|
||||
|
||||
reindexTestHelper.createAlleleSearchParameter();
|
||||
|
||||
// The searchparam value is on the observation, but it hasn't been indexed yet
|
||||
myTenantClientInterceptor.setTenantId(TENANT_A);
|
||||
assertThat(reindexTestHelper.getAlleleObservationIds(myClient), hasSize(0));
|
||||
myTenantClientInterceptor.setTenantId(TENANT_B);
|
||||
assertThat(reindexTestHelper.getAlleleObservationIds(myClient), hasSize(0));
|
||||
|
||||
// setup
|
||||
Parameters input = new Parameters();
|
||||
Integer batchSize = 2401;
|
||||
input.addParameter(ProviderConstants.OPERATION_REINDEX_PARAM_BATCH_SIZE, new DecimalType(batchSize));
|
||||
input.addParameter(ProviderConstants.OPERATION_REINDEX_PARAM_URL, "Observation?status=final");
|
||||
|
||||
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input));
|
||||
|
||||
// Reindex Tenant A by query url
|
||||
myTenantClientInterceptor.setTenantId(TENANT_A);
|
||||
Parameters response = myClient
|
||||
.operation()
|
||||
.onServer()
|
||||
.named(ProviderConstants.OPERATION_REINDEX)
|
||||
.withParameters(input)
|
||||
.execute();
|
||||
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response));
|
||||
DecimalType jobId = (DecimalType) response.getParameter(ProviderConstants.OPERATION_REINDEX_RESPONSE_JOB_ID);
|
||||
|
||||
myBatchJobHelper.awaitJobExecution(jobId.getValueAsNumber().longValue());
|
||||
|
||||
// validate
|
||||
List<String> alleleObservationIds = reindexTestHelper.getAlleleObservationIds(myClient);
|
||||
// Only the one in the first tenant should be indexed
|
||||
myTenantClientInterceptor.setTenantId(TENANT_A);
|
||||
assertThat(reindexTestHelper.getAlleleObservationIds(myClient), hasSize(1));
|
||||
assertEquals(obsFinalA.getIdPart(), alleleObservationIds.get(0));
|
||||
myTenantClientInterceptor.setTenantId(TENANT_B);
|
||||
assertThat(reindexTestHelper.getAlleleObservationIds(myClient), hasSize(0));
|
||||
}
|
||||
|
||||
private Bundle getAllPatientsInTenant(String theTenantId) {
|
||||
myTenantClientInterceptor.setTenantId(theTenantId);
|
||||
|
||||
return myClient.search().forResource("Patient").cacheControl(new CacheControlDirective().setNoCache(true)).returnBundle(Bundle.class).execute();
|
||||
}
|
||||
|
||||
private static class MyInterceptor implements IAnonymousInterceptor {
|
||||
public List<RequestPartitionId> requestPartitionIds = new ArrayList<>();
|
||||
public List<RequestDetails> requestDetails = new ArrayList<>();
|
||||
public List<RuntimeResourceDefinition> resourceDefs = new ArrayList<>();
|
||||
|
||||
@Override
|
||||
public void invoke(IPointcut thePointcut, HookParams theArgs) {
|
||||
requestPartitionIds.add(theArgs.get(RequestPartitionId.class));
|
||||
requestDetails.add(theArgs.get(RequestDetails.class));
|
||||
resourceDefs.add(theArgs.get(RuntimeResourceDefinition.class));
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,134 +0,0 @@
|
|||
package ca.uhn.fhir.jpa.provider.r4;
|
||||
|
||||
import ca.uhn.fhir.context.RuntimeResourceDefinition;
|
||||
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
|
||||
import ca.uhn.fhir.interceptor.api.IPointcut;
|
||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
|
||||
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
|
||||
import ca.uhn.fhir.rest.api.CacheControlDirective;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import ca.uhn.fhir.test.utilities.BatchJobHelper;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.Bundle;
|
||||
import org.hl7.fhir.r4.model.DecimalType;
|
||||
import org.hl7.fhir.r4.model.Parameters;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static ca.uhn.fhir.jpa.model.util.JpaConstants.DEFAULT_PARTITION_NAME;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.isA;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
public class MultitenantDeleteExpungeR4Test extends BaseMultitenantResourceProviderR4Test {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(MultitenantDeleteExpungeR4Test.class);
|
||||
|
||||
@Autowired
|
||||
private BatchJobHelper myBatchJobHelper;
|
||||
|
||||
@BeforeEach
|
||||
@Override
|
||||
public void before() throws Exception {
|
||||
super.before();
|
||||
myDaoConfig.setAllowMultipleDelete(true);
|
||||
myDaoConfig.setExpungeEnabled(true);
|
||||
myDaoConfig.setDeleteExpungeEnabled(true);
|
||||
}
|
||||
|
||||
@AfterEach
|
||||
@Override
|
||||
public void after() throws Exception {
|
||||
myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete());
|
||||
myDaoConfig.setExpungeEnabled(new DaoConfig().isExpungeEnabled());
|
||||
myDaoConfig.setDeleteExpungeEnabled(new DaoConfig().isDeleteExpungeEnabled());
|
||||
super.after();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDeleteExpungeOperation() {
|
||||
// Create patients
|
||||
|
||||
IIdType idAT = createPatient(withTenant(TENANT_A), withActiveTrue());
|
||||
IIdType idAF = createPatient(withTenant(TENANT_A), withActiveFalse());
|
||||
IIdType idBT = createPatient(withTenant(TENANT_B), withActiveTrue());
|
||||
IIdType idBF = createPatient(withTenant(TENANT_B), withActiveFalse());
|
||||
|
||||
// validate setup
|
||||
assertEquals(2, getAllPatientsInTenant(TENANT_A).getTotal());
|
||||
assertEquals(2, getAllPatientsInTenant(TENANT_B).getTotal());
|
||||
assertEquals(0, getAllPatientsInTenant(DEFAULT_PARTITION_NAME).getTotal());
|
||||
|
||||
Parameters input = new Parameters();
|
||||
input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "/Patient?active=false");
|
||||
|
||||
MyInterceptor interceptor = new MyInterceptor();
|
||||
myInterceptorRegistry.registerAnonymousInterceptor(Pointcut.STORAGE_PARTITION_SELECTED, interceptor);
|
||||
// execute
|
||||
|
||||
myTenantClientInterceptor.setTenantId(TENANT_B);
|
||||
Parameters response = myClient
|
||||
.operation()
|
||||
.onServer()
|
||||
.named(ProviderConstants.OPERATION_DELETE_EXPUNGE)
|
||||
.withParameters(input)
|
||||
.execute();
|
||||
|
||||
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response));
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME);
|
||||
assertThat(interceptor.requestPartitionIds, hasSize(3));
|
||||
interceptor.requestPartitionIds.forEach(id -> assertEquals(TENANT_B_ID, id.getFirstPartitionIdOrNull()));
|
||||
interceptor.requestPartitionIds.forEach(id -> assertEquals(TENANT_B, id.getFirstPartitionNameOrNull()));
|
||||
assertThat(interceptor.requestDetails.get(0), isA(ServletRequestDetails.class));
|
||||
assertThat(interceptor.requestDetails.get(1), isA(SystemRequestDetails.class));
|
||||
assertThat(interceptor.requestDetails.get(2), isA(SystemRequestDetails.class));
|
||||
assertEquals("Patient", interceptor.resourceDefs.get(0).getName());
|
||||
assertEquals("Patient", interceptor.resourceDefs.get(1).getName());
|
||||
assertEquals("Patient", interceptor.resourceDefs.get(2).getName());
|
||||
myInterceptorRegistry.unregisterInterceptor(interceptor);
|
||||
|
||||
DecimalType jobIdPrimitive = (DecimalType) response.getParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID);
|
||||
Long jobId = jobIdPrimitive.getValue().longValue();
|
||||
|
||||
assertEquals(1, myBatchJobHelper.getReadCount(jobId));
|
||||
assertEquals(1, myBatchJobHelper.getWriteCount(jobId));
|
||||
|
||||
// validate only the false patient in TENANT_B is removed
|
||||
assertEquals(2, getAllPatientsInTenant(TENANT_A).getTotal());
|
||||
assertEquals(1, getAllPatientsInTenant(TENANT_B).getTotal());
|
||||
assertEquals(0, getAllPatientsInTenant(DEFAULT_PARTITION_NAME).getTotal());
|
||||
|
||||
}
|
||||
|
||||
private Bundle getAllPatientsInTenant(String theTenantId) {
|
||||
myTenantClientInterceptor.setTenantId(theTenantId);
|
||||
|
||||
return myClient.search().forResource("Patient").cacheControl(new CacheControlDirective().setNoCache(true)).returnBundle(Bundle.class).execute();
|
||||
}
|
||||
|
||||
private static class MyInterceptor implements IAnonymousInterceptor {
|
||||
public List<RequestPartitionId> requestPartitionIds = new ArrayList<>();
|
||||
public List<RequestDetails> requestDetails = new ArrayList<>();
|
||||
public List<RuntimeResourceDefinition> resourceDefs = new ArrayList<>();
|
||||
|
||||
@Override
|
||||
public void invoke(IPointcut thePointcut, HookParams theArgs) {
|
||||
requestPartitionIds.add(theArgs.get(RequestPartitionId.class));
|
||||
requestDetails.add(theArgs.get(RequestDetails.class));
|
||||
resourceDefs.add(theArgs.get(RuntimeResourceDefinition.class));
|
||||
}
|
||||
}
|
||||
}
|
|
@ -13,17 +13,20 @@ import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
|
|||
import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
|
||||
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
||||
import org.apache.commons.lang3.time.DateUtils;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.Captor;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.SliceImpl;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
|
@ -50,16 +53,15 @@ import static org.mockito.Mockito.verify;
|
|||
import static org.mockito.Mockito.verifyNoMoreInteractions;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
public class ResourceReindexingSvcImplTest extends BaseJpaTest {
|
||||
|
||||
private static FhirContext ourCtx = FhirContext.forCached(FhirVersionEnum.R4);
|
||||
private static final FhirContext ourCtx = FhirContext.forCached(FhirVersionEnum.R4);
|
||||
|
||||
@Mock
|
||||
private PlatformTransactionManager myTxManager;
|
||||
|
||||
private ResourceReindexingSvcImpl mySvc;
|
||||
private DaoConfig myDaoConfig;
|
||||
private final DaoConfig myDaoConfig = new DaoConfig();
|
||||
|
||||
@Mock
|
||||
private DaoRegistry myDaoRegistry;
|
||||
|
@ -88,6 +90,10 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest {
|
|||
private TransactionStatus myTxStatus;
|
||||
@Mock
|
||||
private ISchedulerService mySchedulerService;
|
||||
@InjectMocks
|
||||
private final ResourceReindexer myResourceReindexer = new ResourceReindexer(ourCtx);
|
||||
@InjectMocks
|
||||
private final ResourceReindexingSvcImpl mySvc = new ResourceReindexingSvcImpl();
|
||||
|
||||
@Override
|
||||
protected FhirContext getContext() {
|
||||
|
@ -101,22 +107,12 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest {
|
|||
|
||||
@BeforeEach
|
||||
public void before() {
|
||||
myDaoConfig = new DaoConfig();
|
||||
myDaoConfig.setReindexThreadCount(2);
|
||||
|
||||
mySvc = new ResourceReindexingSvcImpl();
|
||||
mySvc.setContextForUnitTest(ourCtx);
|
||||
mySvc.setDaoConfigForUnitTest(myDaoConfig);
|
||||
mySvc.setDaoRegistryForUnitTest(myDaoRegistry);
|
||||
mySvc.setForcedIdDaoForUnitTest(myForcedIdDao);
|
||||
mySvc.setReindexJobDaoForUnitTest(myReindexJobDao);
|
||||
mySvc.setResourceTableDaoForUnitTest(myResourceTableDao);
|
||||
mySvc.setTxManagerForUnitTest(myTxManager);
|
||||
mySvc.setSearchParamRegistryForUnitTest(mySearchParamRegistry);
|
||||
mySvc.setSchedulerServiceForUnitTest(mySchedulerService);
|
||||
mySvc.setResourceReindexerForUnitTest(myResourceReindexer);
|
||||
mySvc.start();
|
||||
|
||||
when(myTxManager.getTransaction(any())).thenReturn(myTxStatus);
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -157,7 +153,6 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest {
|
|||
public void testMarkAsDeletedIfNothingIndexed() {
|
||||
mockNothingToExpunge();
|
||||
mockSingleReindexingJob(null);
|
||||
mockFetchFourResources();
|
||||
// Mock resource fetch
|
||||
List<Long> values = Collections.emptyList();
|
||||
when(myResourceTableDao.findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(any(), any(), any())).thenReturn(new SliceImpl<>(values));
|
||||
|
@ -197,6 +192,8 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest {
|
|||
mockSingleReindexingJob(null);
|
||||
mockFourResourcesNeedReindexing();
|
||||
mockFetchFourResources();
|
||||
when(myDaoRegistry.getResourceDao(eq("Patient"))).thenReturn(myResourceDao);
|
||||
when(myDaoRegistry.getResourceDao(eq(Patient.class))).thenReturn(myResourceDao);
|
||||
|
||||
int count = mySvc.forceReindexingPass();
|
||||
assertEquals(4, count);
|
||||
|
@ -248,8 +245,6 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest {
|
|||
mockWhenResourceTableFindById(updatedTimes, resourceTypes);
|
||||
when(myDaoRegistry.getResourceDao(eq("Patient"))).thenReturn(myResourceDao);
|
||||
when(myDaoRegistry.getResourceDao(eq(Patient.class))).thenReturn(myResourceDao);
|
||||
when(myDaoRegistry.getResourceDao(eq("Observation"))).thenReturn(myResourceDao);
|
||||
when(myDaoRegistry.getResourceDao(eq(Observation.class))).thenReturn(myResourceDao);
|
||||
when(myResourceDao.readByPid(any(), anyBoolean())).thenAnswer(t->{
|
||||
int idx = t.getArgument(0, ResourcePersistentId.class).getIdAsLong().intValue();
|
||||
return resources.get(idx);
|
||||
|
@ -277,6 +272,8 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest {
|
|||
|
||||
@Test
|
||||
public void testReindexDeletedResource() {
|
||||
// setup
|
||||
when(myTxManager.getTransaction(any())).thenReturn(myTxStatus);
|
||||
mockNothingToExpunge();
|
||||
mockSingleReindexingJob("Patient");
|
||||
// Mock resource fetch
|
||||
|
@ -294,15 +291,13 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest {
|
|||
);
|
||||
mockWhenResourceTableFindById(updatedTimes, resourceTypes);
|
||||
when(myDaoRegistry.getResourceDao(eq("Patient"))).thenReturn(myResourceDao);
|
||||
when(myDaoRegistry.getResourceDao(eq(Patient.class))).thenReturn(myResourceDao);
|
||||
when(myDaoRegistry.getResourceDao(eq("Observation"))).thenReturn(myResourceDao);
|
||||
when(myDaoRegistry.getResourceDao(eq(Observation.class))).thenReturn(myResourceDao);
|
||||
when(myResourceDao.readByPid(any(), anyBoolean())).thenReturn(null);
|
||||
|
||||
|
||||
// execute
|
||||
int count = mySvc.forceReindexingPass();
|
||||
assertEquals(0, count);
|
||||
|
||||
// verify
|
||||
assertEquals(0, count);
|
||||
verify(myResourceTableDao, times(1)).updateIndexStatus(eq(0L), eq(BaseHapiFhirDao.INDEX_STATUS_INDEXING_FAILED));
|
||||
}
|
||||
|
||||
|
@ -356,8 +351,6 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest {
|
|||
new Observation().setId("Observation/3/_history/1")
|
||||
);
|
||||
mockWhenResourceTableFindById(updatedTimes, resourceTypes);
|
||||
when(myDaoRegistry.getResourceDao(eq("Patient"))).thenReturn(myResourceDao);
|
||||
when(myDaoRegistry.getResourceDao(eq(Patient.class))).thenReturn(myResourceDao);
|
||||
when(myDaoRegistry.getResourceDao(eq("Observation"))).thenReturn(myResourceDao);
|
||||
when(myDaoRegistry.getResourceDao(eq(Observation.class))).thenReturn(myResourceDao);
|
||||
when(myResourceDao.readByPid(any(), anyBoolean())).thenAnswer(t->{
|
||||
|
|
|
@ -3,7 +3,6 @@ package ca.uhn.fhir.jpa.util;
|
|||
import com.google.common.collect.ArrayListMultimap;
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import com.google.common.collect.ListMultimap;
|
||||
import com.google.common.collect.Multimap;
|
||||
|
||||
import java.util.Set;
|
||||
import java.util.function.BiConsumer;
|
||||
|
@ -18,47 +17,47 @@ import java.util.stream.Collector;
|
|||
public class MultimapCollector<T, K, V> implements
|
||||
Collector<T, ListMultimap<K, V>, ListMultimap<K, V>> {
|
||||
|
||||
private final Function<T, K> keyGetter;
|
||||
private final Function<T, V> valueGetter;
|
||||
private final Function<T, K> keyGetter;
|
||||
private final Function<T, V> valueGetter;
|
||||
|
||||
public MultimapCollector(Function<T, K> keyGetter, Function<T, V> valueGetter) {
|
||||
this.keyGetter = keyGetter;
|
||||
this.valueGetter = valueGetter;
|
||||
}
|
||||
public MultimapCollector(Function<T, K> keyGetter, Function<T, V> valueGetter) {
|
||||
this.keyGetter = keyGetter;
|
||||
this.valueGetter = valueGetter;
|
||||
}
|
||||
|
||||
public static <T, K, V> MultimapCollector<T, K, V> toMultimap(Function<T, K> keyGetter, Function<T, V> valueGetter) {
|
||||
return new MultimapCollector<>(keyGetter, valueGetter);
|
||||
}
|
||||
public static <T, K, V> MultimapCollector<T, K, V> toMultimap(Function<T, K> keyGetter, Function<T, V> valueGetter) {
|
||||
return new MultimapCollector<>(keyGetter, valueGetter);
|
||||
}
|
||||
|
||||
public static <T, K, V> MultimapCollector<T, K, T> toMultimap(Function<T, K> keyGetter) {
|
||||
return new MultimapCollector<>(keyGetter, v -> v);
|
||||
}
|
||||
public static <T, K, V> MultimapCollector<T, K, T> toMultimap(Function<T, K> keyGetter) {
|
||||
return new MultimapCollector<>(keyGetter, v -> v);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Supplier<ListMultimap<K, V>> supplier() {
|
||||
return ArrayListMultimap::create;
|
||||
}
|
||||
@Override
|
||||
public Supplier<ListMultimap<K, V>> supplier() {
|
||||
return ArrayListMultimap::create;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BiConsumer<ListMultimap<K, V>, T> accumulator() {
|
||||
return (map, element) -> map.put(keyGetter.apply(element), valueGetter.apply(element));
|
||||
}
|
||||
@Override
|
||||
public BiConsumer<ListMultimap<K, V>, T> accumulator() {
|
||||
return (map, element) -> map.put(keyGetter.apply(element), valueGetter.apply(element));
|
||||
}
|
||||
|
||||
@Override
|
||||
public BinaryOperator<ListMultimap<K, V>> combiner() {
|
||||
return (map1, map2) -> {
|
||||
map1.putAll(map2);
|
||||
return map1;
|
||||
};
|
||||
}
|
||||
@Override
|
||||
public BinaryOperator<ListMultimap<K, V>> combiner() {
|
||||
return (map1, map2) -> {
|
||||
map1.putAll(map2);
|
||||
return map1;
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public Function<ListMultimap<K, V>, ListMultimap<K, V>> finisher() {
|
||||
return map -> map;
|
||||
}
|
||||
@Override
|
||||
public Function<ListMultimap<K, V>, ListMultimap<K, V>> finisher() {
|
||||
return map -> map;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<Characteristics> characteristics() {
|
||||
return ImmutableSet.of(Characteristics.IDENTITY_FINISH);
|
||||
}
|
||||
@Override
|
||||
public Set<Characteristics> characteristics() {
|
||||
return ImmutableSet.of(Characteristics.IDENTITY_FINISH);
|
||||
}
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
|
@ -6,7 +6,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.5.0-PRE7-SNAPSHOT</version>
|
||||
<version>5.5.0-PRE8-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.5.0-PRE7-SNAPSHOT</version>
|
||||
<version>5.5.0-PRE8-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.5.0-PRE7-SNAPSHOT</version>
|
||||
<version>5.5.0-PRE8-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.5.0-PRE7-SNAPSHOT</version>
|
||||
<version>5.5.0-PRE8-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.5.0-PRE7-SNAPSHOT</version>
|
||||
<version>5.5.0-PRE8-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.5.0-PRE7-SNAPSHOT</version>
|
||||
<version>5.5.0-PRE8-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.searchparam;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
import ca.uhn.fhir.model.api.IQueryParameterAnd;

@@ -161,11 +162,15 @@ public class MatchUrlService {
	return ReflectionUtil.newInstance(clazz);
}

public ResourceSearch getResourceSearch(String theUrl) {
public ResourceSearch getResourceSearch(String theUrl, RequestPartitionId theRequestPartitionId) {
	RuntimeResourceDefinition resourceDefinition;
	resourceDefinition = UrlUtil.parseUrlResourceType(myFhirContext, theUrl);
	SearchParameterMap searchParameterMap = translateMatchUrl(theUrl, resourceDefinition);
	return new ResourceSearch(resourceDefinition, searchParameterMap);
	return new ResourceSearch(resourceDefinition, searchParameterMap, theRequestPartitionId);
}

public ResourceSearch getResourceSearch(String theUrl) {
	return getResourceSearch(theUrl, null);
}

public abstract static class Flag {
|
|
@@ -21,17 +21,21 @@ package ca.uhn.fhir.jpa.searchparam;
 */

import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import org.hl7.fhir.instance.model.api.IBaseResource;

/**
 * A resource type along with a search parameter map. Everything you need to perform a search!
 * A resource type along with a search parameter map and partition id. Everything you need to perform a search!
 */
public class ResourceSearch {
	private final RuntimeResourceDefinition myRuntimeResourceDefinition;
	private final SearchParameterMap mySearchParameterMap;
	private final RequestPartitionId myRequestPartitionId;

	public ResourceSearch(RuntimeResourceDefinition theRuntimeResourceDefinition, SearchParameterMap theSearchParameterMap) {
	public ResourceSearch(RuntimeResourceDefinition theRuntimeResourceDefinition, SearchParameterMap theSearchParameterMap, RequestPartitionId theRequestPartitionId) {
		myRuntimeResourceDefinition = theRuntimeResourceDefinition;
		mySearchParameterMap = theSearchParameterMap;
		myRequestPartitionId = theRequestPartitionId;
	}

	public RuntimeResourceDefinition getRuntimeResourceDefinition() {

@@ -49,4 +53,12 @@ public class ResourceSearch {
	public boolean isDeleteExpunge() {
		return mySearchParameterMap.isDeleteExpunge();
	}

	public Class<? extends IBaseResource> getResourceType() {
		return myRuntimeResourceDefinition.getImplementingClass();
	}

	public RequestPartitionId getRequestPartitionId() {
		return myRequestPartitionId;
	}
}
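A minimal usage sketch of the partition-aware ResourceSearch above (not part of this change): it assumes a MatchUrlService instance is supplied by the caller, and the tenant name and match URL are illustrative values only.

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;

public class ResourceSearchSketch {
	// Parse a match URL into a ResourceSearch that carries the partition it should run against.
	public static String describe(MatchUrlService theMatchUrlService) {
		RequestPartitionId partitionId = RequestPartitionId.fromPartitionName("TENANT-A");
		ResourceSearch search = theMatchUrlService.getResourceSearch("Patient?active=false", partitionId);
		return search.getRuntimeResourceDefinition().getName()
			+ " search scoped to partition " + search.getRequestPartitionId().getFirstPartitionNameOrNull();
	}
}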
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.5.0-PRE7-SNAPSHOT</version>
|
||||
<version>5.5.0-PRE8-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.5.0-PRE7-SNAPSHOT</version>
|
||||
<version>5.5.0-PRE8-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>5.5.0-PRE7-SNAPSHOT</version>
|
||||
<version>5.5.0-PRE8-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.5.0-PRE7-SNAPSHOT</version>
|
||||
<version>5.5.0-PRE8-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.5.0-PRE7-SNAPSHOT</version>
|
||||
<version>5.5.0-PRE8-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.5.0-PRE7-SNAPSHOT</version>
|
||||
<version>5.5.0-PRE8-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@@ -10,6 +10,7 @@ import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;

/*
 * #%L

@@ -206,7 +207,7 @@ public interface IBundleProvider {
	}

	/**
	 * Returns the value of {@link #size()} and throws a {@link NullPointerException} of it is null
	 * @return the value of {@link #size()} and throws a {@link NullPointerException} of it is null
	 */
	default int sizeOrThrowNpe() {
		Integer retVal = size();

@@ -214,4 +215,10 @@ public interface IBundleProvider {
		return retVal;
	}

	/**
	 * @return the list of ids of all resources in the bundle
	 */
	default List<String> getAllResourceIds() {
		return getAllResources().stream().map(resource -> resource.getIdElement().getIdPart()).collect(Collectors.toList());
	}
}
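An illustrative caller for the new getAllResourceIds() default method; the helper class below is not part of HAPI FHIR. Because the default implementation delegates to getAllResources(), it assumes the provider can materialize its full result list.

import ca.uhn.fhir.rest.api.server.IBundleProvider;

import java.util.List;

public class BundleProviderIdSketch {
	// Collect the unqualified id part of every resource held by a search result and print it.
	public static void printIds(IBundleProvider theSearchResult) {
		List<String> ids = theSearchResult.getAllResourceIds();
		ids.forEach(System.out::println);
	}
}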
|
|
@@ -20,19 +20,6 @@ package ca.uhn.fhir.rest.api.server.storage;
 * #L%
 */

import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParametersInvalidException;

import java.util.List;

public interface IDeleteExpungeJobSubmitter {
	/**
	 * @param theBatchSize For each pass, when synchronously searching for resources, limit the number of matching resources to this number
	 * @param theTenantId The tenant to perform the searches on
	 * @param theUrlsToDeleteExpunge A list of strings of the form "/Patient?active=true"
	 * @return The Spring Batch JobExecution that was started to run this batch job
	 * @throws JobParametersInvalidException
	 */
	JobExecution submitJob(Integer theBatchSize, RequestDetails theRequest, List<String> theUrlsToDeleteExpunge) throws JobParametersInvalidException;
// Tag interface for Spring auto-wiring
public interface IDeleteExpungeJobSubmitter extends IMultiUrlJobSubmitter {
}
|
|
@@ -0,0 +1,37 @@
package ca.uhn.fhir.rest.api.server.storage;

/*-
 * #%L
 * HAPI FHIR - Server Framework
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParametersInvalidException;

import java.util.List;

public interface IMultiUrlJobSubmitter {
	/**
	 * @param theBatchSize For each pass, when synchronously searching for resources, limit the number of matching resources to this number
	 * @param theUrlsToProcess A list of strings of the form "/Patient?active=true"
	 * @return The Spring Batch JobExecution that was started to run this batch job
	 * @throws JobParametersInvalidException
	 */
	JobExecution submitJob(Integer theBatchSize, List<String> theUrlsToProcess, RequestDetails theRequest) throws JobParametersInvalidException;
}
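A hedged sketch of calling the new interface from application code; the class name, batch size, and match URLs below are illustrative, and the concrete submitter implementations are wired up elsewhere in HAPI FHIR's JPA and batch modules.

import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IMultiUrlJobSubmitter;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParametersInvalidException;

import java.util.Arrays;

public class MultiUrlJobSketch {
	// Submit two match URLs as a single batch job and return the Spring Batch job id.
	public static Long submit(IMultiUrlJobSubmitter theSubmitter, RequestDetails theRequest) throws JobParametersInvalidException {
		JobExecution execution = theSubmitter.submitJob(100, Arrays.asList("/Patient?active=false", "/Observation?status=cancelled"), theRequest);
		return execution.getJobId();
	}
}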
|
@@ -0,0 +1,30 @@
package ca.uhn.fhir.rest.api.server.storage;

/*-
 * #%L
 * HAPI FHIR - Server Framework
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParametersInvalidException;

// Tag interface for Spring wiring
public interface IReindexJobSubmitter extends IMultiUrlJobSubmitter {
	JobExecution submitEverythingJob(Integer theBatchSize, RequestDetails theRequest) throws JobParametersInvalidException;
}
|
@@ -0,0 +1,65 @@
package ca.uhn.fhir.rest.server.provider;

/*-
 * #%L
 * HAPI FHIR - Server Framework
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IMultiUrlJobSubmitter;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.ParametersUtil;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.jetbrains.annotations.Nullable;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParametersInvalidException;

import java.math.BigDecimal;
import java.util.List;

public class BaseMultiUrlProcessor {
	protected final FhirContext myFhirContext;
	private final IMultiUrlJobSubmitter myMultiUrlProcessorJobSubmitter;

	public BaseMultiUrlProcessor(FhirContext theFhirContext, IMultiUrlJobSubmitter theMultiUrlProcessorJobSubmitter) {
		myMultiUrlProcessorJobSubmitter = theMultiUrlProcessorJobSubmitter;
		myFhirContext = theFhirContext;
	}

	protected IBaseParameters processUrls(List<String> theUrlsToProcess, Integer theBatchSize, RequestDetails theRequestDetails) {
		try {
			JobExecution jobExecution = myMultiUrlProcessorJobSubmitter.submitJob(theBatchSize, theUrlsToProcess, theRequestDetails);
			IBaseParameters retval = ParametersUtil.newInstance(myFhirContext);
			ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID, jobExecution.getJobId());
			return retval;
		} catch (JobParametersInvalidException e) {
			throw new InvalidRequestException("Invalid job parameters: " + e.getMessage(), e);
		}
	}

	@Nullable
	protected Integer getBatchSize(IPrimitiveType<BigDecimal> theBatchSize) {
		Integer batchSize = null;
		if (theBatchSize != null && !theBatchSize.isEmpty()) {
			batchSize = theBatchSize.getValue().intValue();
		}
		return batchSize;
	}
}
|
@@ -25,25 +25,16 @@ import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.ParametersUtil;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParametersInvalidException;

import java.math.BigDecimal;
import java.util.List;
import java.util.stream.Collectors;

public class DeleteExpungeProvider {
	private final IDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter;

	private final FhirContext myFhirContext;

public class DeleteExpungeProvider extends BaseMultiUrlProcessor {
	public DeleteExpungeProvider(FhirContext theFhirContext, IDeleteExpungeJobSubmitter theDeleteExpungeJobSubmitter) {
		myDeleteExpungeJobSubmitter = theDeleteExpungeJobSubmitter;
		myFhirContext = theFhirContext;
		super(theFhirContext, theDeleteExpungeJobSubmitter);
	}

	@Operation(name = ProviderConstants.OPERATION_DELETE_EXPUNGE, idempotent = false)

@@ -52,18 +43,7 @@ public class DeleteExpungeProvider {
	@OperationParam(name = ProviderConstants.OPERATION_DELETE_BATCH_SIZE, typeName = "decimal", min = 0, max = 1) IPrimitiveType<BigDecimal> theBatchSize,
	RequestDetails theRequestDetails
	) {
		try {
			List<String> urls = theUrlsToDeleteExpunge.stream().map(IPrimitiveType::getValue).collect(Collectors.toList());
			Integer batchSize = null;
			if (theBatchSize != null && !theBatchSize.isEmpty()) {
				batchSize = theBatchSize.getValue().intValue();
			}
			JobExecution jobExecution = myDeleteExpungeJobSubmitter.submitJob(batchSize, theRequestDetails, urls);
			IBaseParameters retval = ParametersUtil.newInstance(myFhirContext);
			ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID, jobExecution.getJobId());
			return retval;
		} catch (JobParametersInvalidException e) {
			throw new InvalidRequestException("Invalid job parameters: " + e.getMessage(), e);
		}
		List<String> urls = theUrlsToDeleteExpunge.stream().map(IPrimitiveType::getValue).collect(Collectors.toList());
		return super.processUrls(urls, getBatchSize(theBatchSize), theRequestDetails);
	}
}
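For context, a client-side sketch of invoking the refactored provider's $delete-expunge operation, mirroring the multitenant test earlier in this diff; the server base URL, match URL, and batch size are illustrative values only.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import org.hl7.fhir.r4.model.DecimalType;
import org.hl7.fhir.r4.model.Parameters;

public class DeleteExpungeClientSketch {
	public static void main(String[] args) {
		IGenericClient client = FhirContext.forR4().newRestfulGenericClient("http://localhost:8000/fhir");

		// Ask the server to delete and expunge everything matching the URL, in batches of 100.
		Parameters input = new Parameters();
		input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "/Patient?active=false");
		input.addParameter(ProviderConstants.OPERATION_DELETE_BATCH_SIZE, new DecimalType(100));

		Parameters response = client.operation()
			.onServer()
			.named(ProviderConstants.OPERATION_DELETE_EXPUNGE)
			.withParameters(input)
			.execute();

		// The provider responds with the id of the Spring Batch job it started.
		DecimalType jobId = (DecimalType) response.getParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID);
		System.out.println("Started delete-expunge job " + jobId.getValue());
	}
}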
Some files were not shown because too many files have changed in this diff.