Delete expunge with cascade (#4931)
* Delete expunge with cascade
* Work
* Working
* Version bump hibernate
* Start working on delete cascade
* Work on delete expunge
* Test fixes
* Test fixes
* Add changelog
* Work on cascade
* Fixes
* Test work
* Test fixes
Parent: f8548c5d7b
Commit: 46857711c9
@@ -111,7 +111,6 @@
 <classpathDependencyExcludes>
 <dependencyExclude>org.slf4j:slf4j-android</dependencyExclude>
 </classpathDependencyExcludes>
-<redirectTestOutputToFile>true</redirectTestOutputToFile>
 </configuration>
 <executions>
 <execution>
@@ -2122,7 +2122,7 @@ public enum Pointcut implements IPointcut {
 * only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
 * </li>
 * <li>
-* ca.uhn.fhir.context.RuntimeResourceDefinition - the resource type being accessed
+* ca.uhn.fhir.context.RuntimeResourceDefinition - The resource type being accessed, or {@literal null} if no specific type is associated with the request.
 * </li>
 * </ul>
 * <p>
@@ -263,7 +263,9 @@ public class Constants {
 public static final String PARAM_SEARCH_TOTAL_MODE = "_total";
 public static final String CAPABILITYSTATEMENT_WEBSOCKET_URL = "http://hl7.org/fhir/StructureDefinition/capabilitystatement-websocket";
 public static final String PARAMETER_CASCADE_DELETE = "_cascade";
+public static final String PARAMETER_CASCADE_DELETE_MAX_ROUNDS = "_maxRounds";
 public static final String HEADER_CASCADE = "X-Cascade";
+public static final String HEADER_CASCADE_MAX_ROUNDS = "max-rounds";
 public static final String CASCADE_DELETE = "delete";
 public static final int MAX_RESOURCE_NAME_LENGTH = 100;
 public static final String CACHE_CONTROL_PRIVATE = "private";
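Taken together with the pre-existing `_cascade`/`X-Cascade` pair, the new constants let a client cap how many rounds of reference-chasing a cascaded delete may perform. As a hedged sketch inferred only from these constants (the exact URL and header syntax is not shown in this diff):

DELETE [base]/Patient/123?_cascade=delete&_maxRounds=5

DELETE [base]/Patient/123
X-Cascade: delete; max-rounds=5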
@@ -0,0 +1,6 @@
+---
+type: add
+issue: 4931
+title: "The `$delete-expunge` operation has a new parameter `cascade` that can be used to
+  request that resources with indexed references to other resources being deleted should also
+  be deleted."
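A hedged sketch of invoking the operation from the HAPI generic client. The changelog confirms only the `cascade` parameter; the `cascadeMaxRounds` name below is an assumption based on the `_maxRounds` constant added in this commit, and the base URL is illustrative:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;

public class DeleteExpungeCascadeExample {
	public static void main(String[] args) {
		IGenericClient client = FhirContext.forR4()
			.newRestfulGenericClient("http://localhost:8080/fhir"); // illustrative base URL

		Parameters inParams = new Parameters();
		inParams.addParameter().setName("url").setValue(new StringType("Patient?active=false"));
		inParams.addParameter().setName("cascade").setValue(new BooleanType(true));
		inParams.addParameter().setName("cascadeMaxRounds").setValue(new IntegerType(5)); // assumed parameter name

		client.operation()
			.onServer()
			.named("$delete-expunge")
			.withParameters(inParams)
			.execute();
	}
}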
@@ -1123,7 +1123,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 JobInstanceStartRequest request = new JobInstanceStartRequest();
 request.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);
 request.setParameters(params);
-myJobCoordinator.startInstance(request);
+myJobCoordinator.startInstance(theRequestDetails, request);

 ourLog.debug("Started reindex job with parameters {}", params);
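This is the first of many hunks making the same mechanical change: job starts now carry the originating request so downstream steps can see request context such as tenant and partition. A hedged sketch of the two call-site patterns (package names are from memory of the HAPI codebase; only the startInstance(requestDetails, request) shape is confirmed by the hunks in this commit):

import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;

class JobStartPatterns {
	private final IJobCoordinator myJobCoordinator;

	JobStartPatterns(IJobCoordinator theJobCoordinator) {
		myJobCoordinator = theJobCoordinator;
	}

	// User-initiated work: forward the incoming request so tenant/partition
	// context travels with the job.
	void startForUser(RequestDetails theRequestDetails, JobInstanceStartRequest theRequest) {
		myJobCoordinator.startInstance(theRequestDetails, theRequest);
	}

	// System-initiated work (no incoming request): synthesize a system-level
	// request, as the TermDeferredStorageSvcImpl hunks later in this commit do.
	void startForSystem(JobInstanceStartRequest theRequest) {
		myJobCoordinator.startInstance(new SystemRequestDetails(), theRequest);
	}
}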
@@ -34,7 +34,9 @@ import org.slf4j.LoggerFactory;
 import javax.annotation.Nonnull;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 import java.util.stream.Collectors;

 public class DeleteExpungeSqlBuilder {
@@ -53,10 +55,10 @@ public class DeleteExpungeSqlBuilder {

 @Nonnull
-List<String> convertPidsToDeleteExpungeSql(List<JpaPid> theJpaPids) {
-List<Long> pids = JpaPid.toLongList(theJpaPids);
-
-validateOkToDeleteAndExpunge(pids);
+DeleteExpungeSqlResult convertPidsToDeleteExpungeSql(List<JpaPid> theJpaPids, boolean theCascade, Integer theCascadeMaxRounds) {
+Set<Long> pids = JpaPid.toLongSet(theJpaPids);
+validateOkToDeleteAndExpunge(pids, theCascade, theCascadeMaxRounds);

 List<String> rawSql = new ArrayList<>();
@@ -70,10 +72,10 @@ public class DeleteExpungeSqlBuilder {
 // Lastly we need to delete records from the resource table all of these other tables link to:
 ResourceForeignKey resourceTablePk = new ResourceForeignKey("HFJ_RESOURCE", "RES_ID");
 rawSql.add(deleteRecordsByColumnSql(pidListString, resourceTablePk));
-return rawSql;
+return new DeleteExpungeSqlResult(rawSql, pids.size());
 }

-public void validateOkToDeleteAndExpunge(List<Long> thePids) {
+public void validateOkToDeleteAndExpunge(Set<Long> thePids, boolean theCascade, Integer theCascadeMaxRounds) {
 if (!myStorageSettings.isEnforceReferentialIntegrityOnDelete()) {
 ourLog.info("Referential integrity on delete disabled. Skipping referential integrity check.");
 return;
@@ -87,6 +89,40 @@ public class DeleteExpungeSqlBuilder {
 return;
 }

+if (theCascade) {
+int cascadeMaxRounds = Integer.MAX_VALUE;
+if (theCascadeMaxRounds != null) {
+cascadeMaxRounds = theCascadeMaxRounds;
+}
+if (myStorageSettings.getMaximumDeleteConflictQueryCount() != null) {
+if (myStorageSettings.getMaximumDeleteConflictQueryCount() < cascadeMaxRounds) {
+cascadeMaxRounds = myStorageSettings.getMaximumDeleteConflictQueryCount();
+}
+}
+
+while (true) {
+List<JpaPid> addedThisRound = new ArrayList<>();
+for (ResourceLink next : conflictResourceLinks) {
+Long nextPid = next.getSourceResourcePid();
+if (thePids.add(nextPid)) {
+addedThisRound.add(JpaPid.fromId(nextPid));
+}
+}
+
+if (addedThisRound.isEmpty()) {
+return;
+}
+
+if (--cascadeMaxRounds > 0) {
+conflictResourceLinks = Collections.synchronizedList(new ArrayList<>());
+findResourceLinksWithTargetPidIn(addedThisRound, addedThisRound, conflictResourceLinks);
+} else {
+// We'll proceed to below where we throw an exception
+break;
+}
+}
+}
+
 ResourceLink firstConflict = conflictResourceLinks.get(0);

 //NB-GGG: We previously instantiated these ID values from firstConflict.getSourceResource().getIdDt(), but in a situation where we
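The added block computes, in bounded rounds, the set of resources that transitively reference the resources being deleted: each round promotes conflicting source resources into the delete set, and the loop stops at a fixed point or when the round budget runs out (at which point the existing conflict exception is raised). A minimal, self-contained sketch of the same fixed-point expansion (all names hypothetical; HAPI's version reads ResourceLink rows rather than an in-memory map):

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

class CascadeClosureSketch {
	// Expand an initial set of resource PIDs to include everything that
	// references them, transitively, in at most maxRounds rounds.
	// inboundRefs maps a target PID to the PIDs of resources that point at it.
	static Set<Long> expand(Set<Long> pids, Map<Long, List<Long>> inboundRefs, int maxRounds) {
		Set<Long> frontier = new HashSet<>(pids);
		while (maxRounds-- > 0) {
			Set<Long> addedThisRound = new HashSet<>();
			for (Long target : frontier) {
				for (Long source : inboundRefs.getOrDefault(target, List.of())) {
					if (pids.add(source)) {
						addedThisRound.add(source);
					}
				}
			}
			if (addedThisRound.isEmpty()) {
				return pids; // fixed point: nothing outside the set references it
			}
			frontier = addedThisRound; // only newly added PIDs need re-checking
		}
		// Mirrors the real code's behavior of surfacing a conflict when the
		// round budget is exhausted (exception type is illustrative).
		throw new IllegalStateException("References remain after max rounds");
	}
}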
@@ -119,4 +155,26 @@ public class DeleteExpungeSqlBuilder {
 private String deleteRecordsByColumnSql(String thePidListString, ResourceForeignKey theResourceForeignKey) {
 return "DELETE FROM " + theResourceForeignKey.table + " WHERE " + theResourceForeignKey.key + " IN " + thePidListString;
 }
+
+public static class DeleteExpungeSqlResult {
+
+private final List<String> mySqlStatements;
+private final int myRecordCount;
+
+public DeleteExpungeSqlResult(List<String> theSqlStatments, int theRecordCount) {
+mySqlStatements = theSqlStatments;
+myRecordCount = theRecordCount;
+}
+
+public List<String> getSqlStatements() {
+return mySqlStatements;
+}
+
+public int getRecordCount() {
+return myRecordCount;
+}
+}
+
 }
@@ -26,14 +26,11 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.transaction.annotation.Propagation;
-import org.springframework.transaction.annotation.Transactional;

 import javax.persistence.EntityManager;
 import java.util.List;
 import java.util.stream.Collectors;

-@Transactional(propagation = Propagation.MANDATORY)
 public class DeleteExpungeSvcImpl implements IDeleteExpungeSvc<JpaPid> {
 private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeSvcImpl.class);
@@ -48,8 +45,9 @@ public class DeleteExpungeSvcImpl implements IDeleteExpungeSvc<JpaPid> {
 }

 @Override
-public void deleteExpunge(List<JpaPid> theJpaPids) {
-List<String> sqlList = myDeleteExpungeSqlBuilder.convertPidsToDeleteExpungeSql(theJpaPids);
+public int deleteExpunge(List<JpaPid> theJpaPids, boolean theCascade, Integer theCascadeMaxRounds) {
+DeleteExpungeSqlBuilder.DeleteExpungeSqlResult sqlResult = myDeleteExpungeSqlBuilder.convertPidsToDeleteExpungeSql(theJpaPids, theCascade, theCascadeMaxRounds);
+List<String> sqlList = sqlResult.getSqlStatements();

 ourLog.debug("Executing {} delete expunge sql commands", sqlList.size());
 long totalDeleted = 0;
@@ -62,6 +60,12 @@ public class DeleteExpungeSvcImpl implements IDeleteExpungeSvc<JpaPid> {
 clearHibernateSearchIndex(theJpaPids);

 // TODO KHS instead of logging progress, produce result chunks that get aggregated into a delete expunge report
+return sqlResult.getRecordCount();
+}
+
+@Override
+public boolean isCascadeSupported() {
+return true;
 }

 /**
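For context, the @Override annotations above imply the storage-agnostic service interface now looks roughly like this (a hedged reconstruction, not the actual HAPI source; the real generic bounds and package may differ):

import java.util.List;

interface IDeleteExpungeSvcSketch<T> {
	// Returns the number of resource records deleted, now surfaced to the batch job.
	int deleteExpunge(List<T> thePids, boolean theCascade, Integer theCascadeMaxRounds);

	// Lets callers probe whether this backend can honor a cascade request.
	boolean isCascadeSupported();
}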
@@ -177,6 +177,6 @@ public class CascadingDeleteInterceptor {
 */
 @Nonnull
 protected DeleteCascadeModeEnum shouldCascade(@Nullable RequestDetails theRequest) {
-return RestfulServerUtils.extractDeleteCascadeParameter(theRequest);
+return RestfulServerUtils.extractDeleteCascadeParameter(theRequest).getMode();
 }
 }
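The new ".getMode()" call implies extractDeleteCascadeParameter now returns a small holder rather than the bare enum, presumably so the max-rounds value can travel alongside the mode. A hedged reconstruction (field and accessor names are assumptions):

import ca.uhn.fhir.rest.api.DeleteCascadeModeEnum;

class DeleteCascadeDetailsSketch {
	private final DeleteCascadeModeEnum myMode;
	private final Integer myMaxRounds; // parsed from _maxRounds or the X-Cascade header

	DeleteCascadeDetailsSketch(DeleteCascadeModeEnum theMode, Integer theMaxRounds) {
		myMode = theMode;
		myMaxRounds = theMaxRounds;
	}

	public DeleteCascadeModeEnum getMode() {
		return myMode;
	}

	public Integer getMaxRounds() {
		return myMaxRounds;
	}
}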
@@ -30,6 +30,7 @@ import ca.uhn.fhir.jpa.api.pid.IResourcePidList;
 import ca.uhn.fhir.jpa.api.pid.MixedResourcePidList;
 import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
 import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
+import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
 import ca.uhn.fhir.jpa.model.dao.JpaPid;
 import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
@@ -66,20 +67,26 @@ public class Batch2DaoSvcImpl implements IBatch2DaoSvc {
 @Autowired
 private FhirContext myFhirContext;

+@Autowired
+private IHapiTransactionService myTransactionService;
+
 @Override
 public boolean isAllResourceTypeSupported() {
 return true;
 }

 @Override
-@Transactional
 public IResourcePidList fetchResourceIdsPage(Date theStart, Date theEnd, @Nonnull Integer thePageSize, @Nullable RequestPartitionId theRequestPartitionId, @Nullable String theUrl) {
+return myTransactionService
+.withSystemRequest()
+.withRequestPartitionId(theRequestPartitionId)
+.execute(()->{
 if (theUrl == null) {
 return fetchResourceIdsPageNoUrl(theStart, theEnd, thePageSize, theRequestPartitionId);
 } else {
 return fetchResourceIdsPageWithUrl(theStart, theEnd, thePageSize, theUrl, theRequestPartitionId);
 }
+});
 }

 private IResourcePidList fetchResourceIdsPageWithUrl(Date theStart, Date theEnd, int thePageSize, String theUrl, RequestPartitionId theRequestPartitionId) {
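The @Transactional annotation is replaced with HAPI's programmatic transaction builder, which lets the partition be bound per call instead of being fixed at annotation time. A self-contained model of the fluent pattern adopted above (all names hypothetical except the chained calls visible in the hunk):

import java.util.function.Supplier;

class TxBuilderSketch {
	private Object myPartitionId;

	TxBuilderSketch withRequestPartitionId(Object thePartitionId) {
		myPartitionId = thePartitionId;
		return this; // fluent style, mirroring withSystemRequest()/withRequestPartitionId()
	}

	<T> T execute(Supplier<T> theWork) {
		// begin a transaction pinned to myPartitionId (elided in this sketch)
		try {
			return theWork.get();
		} finally {
			// commit on success, roll back on exception (elided in this sketch)
		}
	}
}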
@@ -104,7 +111,7 @@ public class Batch2DaoSvcImpl implements IBatch2DaoSvc {
 lastDate = dao.readByPid(lastResourcePersistentId, true).getMeta().getLastUpdated();
 }

-return new HomogeneousResourcePidList(resourceType, ids, lastDate);
+return new HomogeneousResourcePidList(resourceType, ids, lastDate, theRequestPartitionId);
 }

 @Nonnull
@@ -136,6 +143,6 @@ public class Batch2DaoSvcImpl implements IBatch2DaoSvc {

 Date lastDate = (Date) content.get(content.size() - 1)[2];

-return new MixedResourcePidList(types, ids, lastDate);
+return new MixedResourcePidList(types, ids, lastDate, theRequestPartitionId);
 }
 }
@@ -41,6 +41,7 @@ import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
 import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
 import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteJobParameters;
 import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteVersionJobParameters;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import ca.uhn.fhir.util.StopWatch;
 import ca.uhn.fhir.util.TimeoutManager;
 import com.google.common.annotations.VisibleForTesting;
@@ -372,7 +373,7 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas
 parameters.setCodeSystemVersionPid(theCodeSystemVersionPid);
 request.setParameters(parameters);

-Batch2JobStartResponse response = myJobCoordinator.startInstance(request);
+Batch2JobStartResponse response = myJobCoordinator.startInstance(new SystemRequestDetails(), request);
 myJobExecutions.add(response.getInstanceId());
 }
@@ -382,7 +383,7 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas
 JobInstanceStartRequest request = new JobInstanceStartRequest();
 request.setParameters(parameters);
 request.setJobDefinitionId(TERM_CODE_SYSTEM_DELETE_JOB_NAME);
-Batch2JobStartResponse response = myJobCoordinator.startInstance(request);
+Batch2JobStartResponse response = myJobCoordinator.startInstance(new SystemRequestDetails(), request);
 myJobExecutions.add(response.getInstanceId());
 }
@@ -212,16 +212,6 @@
 <plugin>
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-failsafe-plugin</artifactId>
-<configuration>
-<!--<useManifestOnlyJar>false</useManifestOnlyJar>-->
-<forkCount>1</forkCount>
-<reuseForks>false</reuseForks>
-<runOrder>alphabetical</runOrder>
-<includes>
-<include>**/*IT.java</include>
-</includes>
-<useModulePath>false</useModulePath>
-</configuration>
 <executions>
 <execution>
 <goals>

@@ -242,11 +232,7 @@
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-surefire-plugin</artifactId>
 <configuration>
-<runOrder>alphabetical</runOrder>
 <argLine>@{argLine} ${surefire_jvm_args}</argLine>
-<forkCount>0.6C</forkCount>
-<excludes>*StressTest*</excludes>
-<skip>${skipFailsafe}</skip>
 </configuration>
 </plugin>
 <plugin>

@@ -283,9 +269,6 @@
 <plugin>
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-surefire-plugin</artifactId>
-<configuration>
-<runOrder>alphabetical</runOrder>
-</configuration>
 </plugin>
 </plugins>
 </build>
@@ -83,7 +83,7 @@ public class BulkGroupExportWithIndexedSearchParametersTest extends BaseJpaTest
 }

 private BulkExportJobResults getBulkExportJobResults(BulkDataExportOptions theOptions) {
-Batch2JobStartResponse startResponse = myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(theOptions));
+Batch2JobStartResponse startResponse = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(theOptions));

 assertNotNull(startResponse);
@@ -95,15 +95,6 @@
 <plugin>
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-failsafe-plugin</artifactId>
-<configuration>
-<forkCount>1</forkCount>
-<reuseForks>false</reuseForks>
-<runOrder>alphabetical</runOrder>
-<includes>
-<include>**/*IT.java</include>
-</includes>
-<useModulePath>false</useModulePath>
-</configuration>
 <executions>
 <execution>
 <goals>
@@ -83,6 +83,6 @@ public class GoldenResourceSearchSvcImpl implements IGoldenResourceSearchSvc {
 lastDate = dao.readByPid(ids.get(ids.size() - 1)).getMeta().getLastUpdated();
 }

-return new HomogeneousResourcePidList(theResourceType, ids, lastDate);
+return new HomogeneousResourcePidList(theResourceType, ids, lastDate, theRequestPartitionId);
 }
 }
@@ -220,7 +220,7 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
 JobInstanceStartRequest request = new JobInstanceStartRequest();
 request.setJobDefinitionId(MdmClearAppCtx.JOB_MDM_CLEAR);
 request.setParameters(params);
-Batch2JobStartResponse response = myJobCoordinator.startInstance(request);
+Batch2JobStartResponse response = myJobCoordinator.startInstance(theRequestDetails, request);
 String id = response.getInstanceId();

 IBaseParameters retVal = ParametersUtil.newInstance(myFhirContext);
@@ -244,7 +244,7 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
 request.setParameters(params);
 request.setJobDefinitionId(MdmSubmitAppCtx.MDM_SUBMIT_JOB);

-Batch2JobStartResponse batch2JobStartResponse = myJobCoordinator.startInstance(request);
+Batch2JobStartResponse batch2JobStartResponse = myJobCoordinator.startInstance(theRequestDetails, request);
 String id = batch2JobStartResponse.getInstanceId();

 IBaseParameters retVal = ParametersUtil.newInstance(myFhirContext);
@@ -23,8 +23,10 @@ import ca.uhn.fhir.rest.api.server.storage.BaseResourcePersistentId;

 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Objects;
+import java.util.Set;

 /**
 * JPA implementation of IResourcePersistentId. JPA uses a Long as the primary key. This class should be used in any
@@ -61,7 +63,15 @@ public class JpaPid extends BaseResourcePersistentId<Long> {
 return retVal;
 }

-public static List<JpaPid> fromLongList(List<Long> theResultList) {
+public static Set<Long> toLongSet(Collection<JpaPid> thePids) {
+Set<Long> retVal = new HashSet<>(thePids.size());
+for (JpaPid next : thePids) {
+retVal.add(next.getId());
+}
+return retVal;
+}
+
+public static List<JpaPid> fromLongList(Collection<Long> theResultList) {
 List<JpaPid> retVal = new ArrayList<>(theResultList.size());
 for (Long next : theResultList) {
 retVal.add(fromId(next));
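A quick usage sketch of the new helpers: toLongSet gives the de-duplicated raw PIDs the SQL builder needs, and fromLongList now accepts any Collection<Long>, so a Set round-trips without first copying into a List:

import ca.uhn.fhir.jpa.model.dao.JpaPid;
import java.util.List;
import java.util.Set;

class JpaPidConversionSketch {
	static void demo() {
		Set<Long> raw = JpaPid.toLongSet(List.of(JpaPid.fromId(1L), JpaPid.fromId(2L)));
		List<JpaPid> wrapped = JpaPid.fromLongList(raw);
		System.out.println(wrapped.size()); // 2
	}
}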
@@ -185,10 +185,7 @@
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-surefire-plugin</artifactId>
 <configuration>
-<runOrder>alphabetical</runOrder>
 <argLine>@{argLine} ${surefire_jvm_args}</argLine>
-<forkCount>0.6C</forkCount>
-<excludes>*StressTest*</excludes>
 </configuration>
 </plugin>
 </plugins>
@@ -35,16 +35,6 @@
 <plugin>
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-failsafe-plugin</artifactId>
-<configuration>
-<!--<useManifestOnlyJar>false</useManifestOnlyJar>-->
-<forkCount>1</forkCount>
-<reuseForks>false</reuseForks>
-<runOrder>alphabetical</runOrder>
-<includes>
-<include>**/*IT.java</include>
-</includes>
-<useModulePath>false</useModulePath>
-</configuration>
 <executions>
 <execution>
 <goals>

@@ -65,11 +55,7 @@
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-surefire-plugin</artifactId>
 <configuration>
-<runOrder>alphabetical</runOrder>
 <argLine>@{argLine} ${surefire_jvm_args}</argLine>
-<forkCount>0.6C</forkCount>
-<excludes>*StressTest*</excludes>
-<skip>${skipFailsafe}</skip>
 </configuration>
 </plugin>
 <plugin>

@@ -106,9 +92,6 @@
 <plugin>
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-surefire-plugin</artifactId>
-<configuration>
-<runOrder>alphabetical</runOrder>
-</configuration>
 </plugin>
 </plugins>
 </build>
@@ -122,28 +122,6 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
 myStorageSettings.setHistoryCountMode(JpaStorageSettings.DEFAULT_HISTORY_COUNT_MODE);
 }

-private void assertGone(IIdType theId) {
-try {
-assertNotGone(theId);
-fail();
-} catch (ResourceGoneException e) {
-// good
-}
-}
-
-/**
- * This gets called from assertGone too! Careful about exceptions...
- */
-private void assertNotGone(IIdType theId) {
-if ("Patient".equals(theId.getResourceType())) {
-myPatientDao.read(theId, mySrd);
-} else if ("Organization".equals(theId.getResourceType())) {
-myOrganizationDao.read(theId, mySrd);
-} else {
-fail("No type");
-}
-}
-
 @BeforeEach
 public void beforeDisableResultReuse() {
 myStorageSettings.setReuseCachedSearchResultsForMillis(null);
@@ -41,17 +41,8 @@ public class ResourceProviderExpungeDstu2Test extends BaseResourceProviderDstu2T
 }
 }

-private void assertGone(IIdType theId) {
-try {
-getDao(theId).read(theId);
-fail();
-} catch (ResourceGoneException e) {
-// good
-}
-}
-
 private void assertStillThere(IIdType theId) {
-getDao(theId).read(theId);
+assertNotGone(theId);
 }

 @Override
@@ -35,16 +35,6 @@
 <plugin>
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-failsafe-plugin</artifactId>
-<configuration>
-<!--<useManifestOnlyJar>false</useManifestOnlyJar>-->
-<forkCount>1</forkCount>
-<reuseForks>false</reuseForks>
-<runOrder>alphabetical</runOrder>
-<includes>
-<include>**/*IT.java</include>
-</includes>
-<useModulePath>false</useModulePath>
-</configuration>
 <executions>
 <execution>
 <goals>

@@ -65,11 +55,7 @@
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-surefire-plugin</artifactId>
 <configuration>
-<runOrder>alphabetical</runOrder>
 <argLine>@{argLine} ${surefire_jvm_args}</argLine>
-<forkCount>0.6C</forkCount>
-<excludes>*StressTest*</excludes>
-<skip>${skipFailsafe}</skip>
 </configuration>
 </plugin>
 <plugin>

@@ -106,9 +92,6 @@
 <plugin>
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-surefire-plugin</artifactId>
-<configuration>
-<runOrder>alphabetical</runOrder>
-</configuration>
 </plugin>
 </plugins>
 </build>
@@ -140,31 +140,6 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test {
 myStorageSettings.setHistoryCountMode(JpaStorageSettings.DEFAULT_HISTORY_COUNT_MODE);
 }

-private void assertGone(IIdType theId) {
-try {
-assertNotGone(theId);
-fail();
-} catch (ResourceGoneException e) {
-// good
-}
-}
-
-/**
- * This gets called from assertGone too! Careful about exceptions...
- */
-private void assertNotGone(IIdType theId) {
-if ("Patient".equals(theId.getResourceType())) {
-myPatientDao.read(theId, mySrd);
-} else if ("Organization".equals(theId.getResourceType())) {
-myOrganizationDao.read(theId, mySrd);
-} else if ("CodeSystem".equals(theId.getResourceType())) {
-myCodeSystemDao.read(theId, mySrd);
-} else {
-fail("Can't handle type: " + theId.getResourceType());
-}
-}
-
 @BeforeEach
 public void beforeDisableResultReuse() {
 myStorageSettings.setReuseCachedSearchResultsForMillis(null);
@@ -50,17 +50,8 @@ public class ResourceProviderExpungeDstu3Test extends BaseResourceProviderDstu3T
 }
 }

-private void assertGone(IIdType theId) {
-try {
-getDao(theId).read(theId);
-fail();
-} catch (ResourceGoneException e) {
-// good
-}
-}
-
 private void assertStillThere(IIdType theId) {
-getDao(theId).read(theId);
+assertNotGone(theId);
 }

 @Override
@@ -62,17 +62,6 @@
 <plugin>
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-failsafe-plugin</artifactId>
-<configuration>
-<!--<useManifestOnlyJar>false</useManifestOnlyJar>-->
-<forkCount>1</forkCount>
-<redirectTestOutputToFile>true</redirectTestOutputToFile>
-<reuseForks>false</reuseForks>
-<runOrder>alphabetical</runOrder>
-<includes>
-<include>**/*IT.java</include>
-</includes>
-<useModulePath>false</useModulePath>
-</configuration>
 <executions>
 <execution>
 <goals>

@@ -93,11 +82,7 @@
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-surefire-plugin</artifactId>
 <configuration>
-<runOrder>alphabetical</runOrder>
-<argLine>@{argLine} ${surefire_jvm_args} -XX:+HeapDumpOnOutOfMemoryError</argLine>
-<forkCount>0.6C</forkCount>
-<excludes>*StressTest*</excludes>
-<skip>${skipFailsafe}</skip>
+<argLine>@{argLine} ${surefire_jvm_args}</argLine>
 </configuration>
 </plugin>
 <plugin>

@@ -128,20 +113,6 @@
 </plugins>
 </build>
 </profile>
-<profile>
-<id>CI</id>
-<build>
-<plugins>
-<plugin>
-<groupId>org.apache.maven.plugins</groupId>
-<artifactId>maven-surefire-plugin</artifactId>
-<configuration>
-<runOrder>alphabetical</runOrder>
-</configuration>
-</plugin>
-</plugins>
-</build>
-</profile>
 </profiles>

 </project>
@@ -240,7 +240,7 @@ public class BulkDataErrorAbuseTest extends BaseResourceProviderR4Test {
 private String startJob(BulkDataExportOptions theOptions) {
 BulkExportParameters startRequest = BulkExportUtils.createBulkExportJobParametersFromExportOptions(theOptions);
 startRequest.setUseExistingJobsFirst(false);
-Batch2JobStartResponse startResponse = myJobRunner.startNewJob(startRequest);
+Batch2JobStartResponse startResponse = myJobRunner.startNewJob(null, startRequest);
 assertNotNull(startResponse);
 return startResponse.getInstanceId();
 }
@@ -79,6 +79,7 @@ import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.ArgumentMatchers.isNotNull;
 import static org.mockito.Mockito.eq;
 import static org.mockito.Mockito.lenient;
 import static org.mockito.Mockito.mock;

@@ -160,7 +161,7 @@ public class BulkDataExportProviderTest {

 private BulkExportParameters verifyJobStart() {
 ArgumentCaptor<Batch2BaseJobParameters> startJobCaptor = ArgumentCaptor.forClass(Batch2BaseJobParameters.class);
-verify(myJobRunner).startNewJob(startJobCaptor.capture());
+verify(myJobRunner).startNewJob(isNotNull(), startJobCaptor.capture());
 Batch2BaseJobParameters sp = startJobCaptor.getValue();
 assertTrue(sp instanceof BulkExportParameters);
 return (BulkExportParameters) sp;

@@ -197,7 +198,7 @@ public class BulkDataExportProviderTest {
 String practitionerResource = "Practitioner";
 String filter = "Patient?identifier=foo";
 String postFetchFilter = "Patient?_tag=foo";
-when(myJobRunner.startNewJob(any()))
+when(myJobRunner.startNewJob(isNotNull(), any()))
 .thenReturn(createJobStartResponse());

 InstantType now = InstantType.now();

@@ -249,7 +250,7 @@ public class BulkDataExportProviderTest {

 @Test
 public void testOmittingOutputFormatDefaultsToNdjson() throws IOException {
-when(myJobRunner.startNewJob(any()))
+when(myJobRunner.startNewJob(isNotNull(), any()))
 .thenReturn(createJobStartResponse());

 Parameters input = new Parameters();

@@ -270,7 +271,7 @@ public class BulkDataExportProviderTest {
 @ParameterizedTest
 @MethodSource("paramsProvider")
 public void testSuccessfulInitiateBulkRequest_GetWithPartitioning(boolean partitioningEnabled) throws IOException {
-when(myJobRunner.startNewJob(any())).thenReturn(createJobStartResponse());
+when(myJobRunner.startNewJob(isNotNull(), any())).thenReturn(createJobStartResponse());

 InstantType now = InstantType.now();

@@ -307,7 +308,7 @@ public class BulkDataExportProviderTest {

 @Test
 public void testSuccessfulInitiateBulkRequest_Get_MultipleTypeFilters() throws IOException {
-when(myJobRunner.startNewJob(any()))
+when(myJobRunner.startNewJob(isNotNull(), any()))
 .thenReturn(createJobStartResponse());

 String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT

@@ -583,7 +584,7 @@ public class BulkDataExportProviderTest {
 @Test
 public void testSuccessfulInitiateGroupBulkRequest_Post() throws IOException {
 // when
-when(myJobRunner.startNewJob(any()))
+when(myJobRunner.startNewJob(isNotNull(), any()))
 .thenReturn(createJobStartResponse(G_JOB_ID));

 InstantType now = InstantType.now();

@@ -624,7 +625,7 @@ public class BulkDataExportProviderTest {
 @Test
 public void testSuccessfulInitiateGroupBulkRequest_Get() throws IOException {
 // when
-when(myJobRunner.startNewJob(any())).thenReturn(createJobStartResponse(G_JOB_ID));
+when(myJobRunner.startNewJob(isNotNull(), any())).thenReturn(createJobStartResponse(G_JOB_ID));

 InstantType now = InstantType.now();

@@ -713,7 +714,7 @@ public class BulkDataExportProviderTest {
 @Test
 public void testInitiateGroupExportWithNoResourceTypes() throws IOException {
 // when
-when(myJobRunner.startNewJob(any(Batch2BaseJobParameters.class)))
+when(myJobRunner.startNewJob(isNotNull(), any(Batch2BaseJobParameters.class)))
 .thenReturn(createJobStartResponse());

 // test

@@ -739,7 +740,7 @@ public class BulkDataExportProviderTest {
 @Test
 public void testInitiateWithPostAndMultipleTypeFilters() throws IOException {
 // when
-when(myJobRunner.startNewJob(any())).thenReturn(createJobStartResponse());
+when(myJobRunner.startNewJob(isNotNull(), any())).thenReturn(createJobStartResponse());

 Parameters input = new Parameters();
 input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON));

@@ -771,7 +772,7 @@ public class BulkDataExportProviderTest {
 @Test
 public void testInitiateBulkExportOnPatient_noTypeParam_addsTypeBeforeBulkExport() throws IOException {
 // when
-when(myJobRunner.startNewJob(any()))
+when(myJobRunner.startNewJob(isNotNull(), any()))
 .thenReturn(createJobStartResponse());

 Parameters input = new Parameters();

@@ -797,7 +798,7 @@ public class BulkDataExportProviderTest {
 @Test
 public void testInitiatePatientExportRequest() throws IOException {
 // when
-when(myJobRunner.startNewJob(any()))
+when(myJobRunner.startNewJob(isNotNull(), any()))
 .thenReturn(createJobStartResponse());

 InstantType now = InstantType.now();

@@ -836,7 +837,7 @@ public class BulkDataExportProviderTest {
 startResponse.setUsesCachedResult(true);

 // when
-when(myJobRunner.startNewJob(any(Batch2BaseJobParameters.class)))
+when(myJobRunner.startNewJob(isNotNull(), any(Batch2BaseJobParameters.class)))
 .thenReturn(startResponse);

 Parameters input = new Parameters();

@@ -870,7 +871,7 @@ public class BulkDataExportProviderTest {
 myStorageSettings.setEnableBulkExportJobReuse(false);

 // when
-when(myJobRunner.startNewJob(any(Batch2BaseJobParameters.class)))
+when(myJobRunner.startNewJob(isNotNull(), any(Batch2BaseJobParameters.class)))
 .thenReturn(startResponse);

 Parameters input = new Parameters();

@@ -900,7 +901,7 @@ public class BulkDataExportProviderTest {
 Batch2JobStartResponse startResponse = createJobStartResponse();
 startResponse.setUsesCachedResult(true);
 startResponse.setInstanceId(A_JOB_ID);
-when(myJobRunner.startNewJob(any(Batch2BaseJobParameters.class)))
+when(myJobRunner.startNewJob(isNotNull(), any(Batch2BaseJobParameters.class)))
 .thenReturn(startResponse);

 // when

@@ -994,7 +995,7 @@ public class BulkDataExportProviderTest {
 @Test
 public void testGetBulkExport_outputFormat_FhirNdJson_inHeader() throws IOException {
 // when
-when(myJobRunner.startNewJob(any()))
+when(myJobRunner.startNewJob(isNotNull(), any()))
 .thenReturn(createJobStartResponse());

 // call

@@ -1017,7 +1018,7 @@ public class BulkDataExportProviderTest {
 @Test
 public void testGetBulkExport_outputFormat_FhirNdJson_inUrl() throws IOException {
 // when
-when(myJobRunner.startNewJob(any()))
+when(myJobRunner.startNewJob(isNotNull(), any()))
 .thenReturn(createJobStartResponse());

 // call
@@ -711,7 +711,7 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
 }

 private JobInstance verifyBulkExportResults(BulkDataExportOptions theOptions, List<String> theContainedList, List<String> theExcludedList) {
-Batch2JobStartResponse startResponse = myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(theOptions));
+Batch2JobStartResponse startResponse = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(theOptions));

 assertNotNull(startResponse);
 assertFalse(startResponse.isUsesCachedResult());

@@ -781,7 +781,7 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {

 // Test
 try {
-myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
 fail();
 } catch (InvalidRequestException e) {

@@ -800,7 +800,7 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {

 // Test
 try {
-myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
 fail();
 } catch (InvalidRequestException e) {

@@ -819,7 +819,7 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {

 // Test
 try {
-myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
 fail();
 } catch (InvalidRequestException e) {

@@ -838,7 +838,7 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {

 // Test
 try {
-myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
 fail();
 } catch (InvalidRequestException e) {

@@ -857,7 +857,7 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {

 // Test
 try {
-myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
 fail();
 } catch (InvalidRequestException e) {
@@ -447,7 +447,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {

 myCaptureQueriesListener.clear();

-Batch2JobStartResponse startResponse = myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+Batch2JobStartResponse startResponse = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));

 assertNotNull(startResponse);

@@ -567,7 +567,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
 options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
 options.setOutputFormat(Constants.CT_FHIR_NDJSON);

-Batch2JobStartResponse job = myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+Batch2JobStartResponse job = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
 myBatch2JobHelper.awaitJobCompletion(job.getInstanceId(), 60);
 ourLog.debug("Job status after awaiting - {}", myJobRunner.getJobInfo(job.getInstanceId()).getStatus());
 await()

@@ -1468,7 +1468,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
 options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
 options.setOutputFormat(Constants.CT_FHIR_NDJSON);

-Batch2JobStartResponse startResponse = myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+Batch2JobStartResponse startResponse = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));

 assertNotNull(startResponse);
@@ -44,6 +44,7 @@ import java.util.List;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.junit.jupiter.api.Assertions.fail;
+import static org.mockito.ArgumentMatchers.isNotNull;
 import static org.mockito.Mockito.when;

 @ExtendWith(MockitoExtension.class)

@@ -207,7 +208,7 @@ class BaseHapiFhirResourceDaoTest {
 mySvc.requestReindexForRelatedResources(false, base, new ServletRequestDetails());

 ArgumentCaptor<JobInstanceStartRequest> requestCaptor = ArgumentCaptor.forClass(JobInstanceStartRequest.class);
-Mockito.verify(myJobCoordinator).startInstance(requestCaptor.capture());
+Mockito.verify(myJobCoordinator).startInstance(isNotNull(), requestCaptor.capture());

 JobInstanceStartRequest actualRequest = requestCaptor.getValue();
 assertNotNull(actualRequest);

@@ -228,7 +229,7 @@ class BaseHapiFhirResourceDaoTest {
 mySvc.requestReindexForRelatedResources(false, base, new ServletRequestDetails());

 ArgumentCaptor<JobInstanceStartRequest> requestCaptor = ArgumentCaptor.forClass(JobInstanceStartRequest.class);
-Mockito.verify(myJobCoordinator).startInstance(requestCaptor.capture());
+Mockito.verify(myJobCoordinator).startInstance(isNotNull(), requestCaptor.capture());

 JobInstanceStartRequest actualRequest = requestCaptor.getValue();
 assertNotNull(actualRequest);
@@ -6,9 +6,10 @@ import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
 import ca.uhn.fhir.jpa.model.util.JpaConstants;
-import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
 import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
 import ca.uhn.fhir.util.BundleBuilder;
 import org.hl7.fhir.instance.model.api.IIdType;

@@ -20,9 +21,9 @@ import org.hl7.fhir.r4.model.Reference;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
-import org.springframework.beans.factory.annotation.Autowired;

 import java.util.List;
+import java.util.Map;

 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.containsString;
@ -52,39 +53,73 @@ class DeleteExpungeDaoTest extends BaseJpaR4Test {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testDeleteCascadeExpungeReturns400() {
|
public void testCascade_MultiLevel_Success() {
|
||||||
// Create new organization
|
// Setup
|
||||||
Organization organization = new Organization();
|
|
||||||
organization.setName("FOO");
|
|
||||||
IIdType organizationId = myOrganizationDao.create(organization).getId().toUnqualifiedVersionless();
|
|
||||||
|
|
||||||
Patient patient = new Patient();
|
// Create a chain of dependent references
|
||||||
patient.setManagingOrganization(new Reference(organizationId));
|
IIdType p1 = createPatient(withActiveTrue());
|
||||||
IIdType patientId = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
|
IIdType o1 = createObservation(withSubject(p1));
|
||||||
|
IIdType o1b = createObservation(withReference("hasMember", o1));
|
||||||
|
IIdType o1c = createObservation(withReference("hasMember", o1b));
|
||||||
|
|
||||||
// Try to delete _cascade and _expunge on the organization
|
// validate precondition
|
||||||
BaseServerResponseException e = assertThrows(BaseServerResponseException.class, () -> {
|
assertEquals(1, myPatientDao.search(SearchParameterMap.newSynchronous()).size());
|
||||||
myOrganizationDao
|
assertEquals(3, myObservationDao.search(SearchParameterMap.newSynchronous()).size());
|
||||||
.deleteByUrl("Organization?" + "_cascade=delete&" + JpaConstants.PARAM_DELETE_EXPUNGE + "=true", mySrd);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Get not implemented HTTP 400 error
|
// execute
|
||||||
assertEquals(Constants.STATUS_HTTP_400_BAD_REQUEST, e.getStatusCode());
|
String url = "Patient?" +
|
||||||
assertEquals(Msg.code(964) + "_expunge cannot be used with _cascade", e.getMessage());
|
JpaConstants.PARAM_DELETE_EXPUNGE + "=true";
|
||||||
|
when(mySrd.getParameters()).thenReturn(Map.of(
|
||||||
|
Constants.PARAMETER_CASCADE_DELETE, new String[]{Constants.CASCADE_DELETE},
|
||||||
|
JpaConstants.PARAM_DELETE_EXPUNGE, new String[]{"true"},
|
||||||
|
Constants.PARAMETER_CASCADE_DELETE_MAX_ROUNDS, new String[]{"10"}
|
||||||
|
));
|
||||||
|
DeleteMethodOutcome outcome = myOrganizationDao.deleteByUrl(url, mySrd);
|
||||||
|
String jobId = jobExecutionIdFromOutcome(outcome);
|
||||||
|
JobInstance job = myBatch2JobHelper.awaitJobCompletion(jobId);
|
||||||
|
|
||||||
|
// Validate
|
||||||
// Try to delete with header 'X-Cascade' = delete
|
assertEquals(4, job.getCombinedRecordsProcessed());
|
||||||
when(mySrd.getHeader(Constants.HEADER_CASCADE)).thenReturn(Constants.CASCADE_DELETE);
|
assertDoesntExist(p1);
|
||||||
e = assertThrows(BaseServerResponseException.class, () -> {
|
assertDoesntExist(o1);
|
||||||
myOrganizationDao
|
assertDoesntExist(o1b);
|
||||||
.deleteByUrl("Organization?" + JpaConstants.PARAM_DELETE_EXPUNGE + "=true", mySrd);
|
assertDoesntExist(o1c);
|
||||||
});
|
|
||||||
|
|
||||||
// Get not implemented HTTP 400 error
|
|
||||||
assertEquals(Constants.STATUS_HTTP_400_BAD_REQUEST, e.getStatusCode());
|
|
||||||
assertEquals(Msg.code(964) + "_expunge cannot be used with _cascade", e.getMessage());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testCascade_MultiLevel_NotEnoughRounds() {
|
||||||
|
// Setup
|
||||||
|
|
||||||
|
// Create a chain of dependent references
|
||||||
|
IIdType p1 = createPatient(withActiveTrue());
|
||||||
|
IIdType o1 = createObservation(withSubject(p1));
|
||||||
|
IIdType o1b = createObservation(withReference("hasMember", o1));
|
||||||
|
IIdType o1c = createObservation(withReference("hasMember", o1b));
|
||||||
|
|
||||||
|
// validate precondition
|
||||||
|
assertEquals(1, myPatientDao.search(SearchParameterMap.newSynchronous()).size());
|
||||||
|
assertEquals(3, myObservationDao.search(SearchParameterMap.newSynchronous()).size());
|
||||||
|
|
||||||
|
String url = "Patient?" +
|
||||||
|
JpaConstants.PARAM_DELETE_EXPUNGE + "=true";
|
||||||
|
when(mySrd.getParameters()).thenReturn(Map.of(
|
||||||
|
Constants.PARAMETER_CASCADE_DELETE, new String[]{Constants.CASCADE_DELETE},
|
||||||
|
JpaConstants.PARAM_DELETE_EXPUNGE, new String[]{"true"},
|
||||||
|
Constants.PARAMETER_CASCADE_DELETE_MAX_ROUNDS, new String[]{"2"}
|
||||||
|
));
|
||||||
|
DeleteMethodOutcome outcome = myOrganizationDao.deleteByUrl(url, mySrd);
|
||||||
|
String jobId = jobExecutionIdFromOutcome(outcome);
|
||||||
|
JobInstance job = myBatch2JobHelper.awaitJobFailure(jobId);
|
||||||
|
|
||||||
|
// Validate
|
||||||
|
assertThat(job.getErrorMessage(), containsString("Unable to delete"));
|
||||||
|
assertNotGone(p1);
|
||||||
|
assertNotGone(o1);
|
||||||
|
assertNotGone(o1b);
|
||||||
|
assertNotGone(o1c);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testDeleteExpungeThrowExceptionIfForeignKeyLinksExists() {
|
public void testDeleteExpungeThrowExceptionIfForeignKeyLinksExists() {
|
||||||
// setup
|
// setup
|
||||||
|
|
|
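Note on the request shape exercised above: the test drives cascading through mocked request parameters rather than a real HTTP call. At the HTTP layer the equivalent request would look roughly like the sketch below, assuming the usual literals behind the constants used in the test (JpaConstants.PARAM_DELETE_EXPUNGE as "_expunge", Constants.PARAMETER_CASCADE_DELETE as "_cascade", Constants.PARAMETER_CASCADE_DELETE_MAX_ROUNDS as "_maxRounds"); treat the exact spellings as assumptions, not something this diff asserts:

	DELETE [base]/Patient?_expunge=true&_cascade=delete&_maxRounds=10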
@@ -86,7 +86,6 @@ import org.springframework.data.domain.Slice;
 import org.springframework.util.comparator.ComparableComparator;

 import javax.annotation.Nonnull;
-import javax.persistence.Id;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -841,7 +840,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
 		// Test
 		myCaptureQueriesListener.clear();
-		RunOutcome outcome = myDeleteExpungeStep.doDeleteExpunge(new ResourceIdListWorkChunkJson(pids), sink, "instance-id", "chunk-id");
+		RunOutcome outcome = myDeleteExpungeStep.doDeleteExpunge(new ResourceIdListWorkChunkJson(pids, null), sink, "instance-id", "chunk-id", false, null);

 		// Verify
 		assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
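The two extra trailing arguments in the updated doDeleteExpunge(...) call above (false, null) track the new cascade support. A plausible reading of the widened step signature, with argument names invented here purely for illustration:

	// doDeleteExpunge(ResourceIdListWorkChunkJson theChunk, IJobDataSink<?> theSink,
	//                 String theInstanceId, String theChunkId,
	//                 boolean theCascade, Integer theCascadeMaxRounds)  // names assumed, not from this diff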
@@ -178,30 +178,6 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
 		myStorageSettings.setMassIngestionMode(false);
 	}

-	private void assertGone(IIdType theId) {
-		try {
-			assertNotGone(theId);
-			fail();
-		} catch (ResourceGoneException e) {
-			// good
-		}
-	}
-
-	/**
-	 * This gets called from assertGone too! Careful about exceptions...
-	 */
-	private void assertNotGone(IIdType theId) {
-		if ("Patient".equals(theId.getResourceType())) {
-			myPatientDao.read(theId, mySrd);
-		} else if ("Organization".equals(theId.getResourceType())) {
-			myOrganizationDao.read(theId, mySrd);
-		} else if ("CodeSystem".equals(theId.getResourceType())) {
-			myCodeSystemDao.read(theId, mySrd);
-		} else {
-			fail("Can't handle type: " + theId.getResourceType());
-		}
-	}
-
 	@BeforeEach
 	public void beforeDisableResultReuse() {
 		myStorageSettings.setReuseCachedSearchResultsForMillis(null);
@@ -67,9 +67,17 @@ public class JpaHistoryR4Test extends BaseJpaR4SystemTest {

 	@Test
 	public void testTypeHistory_CountAccurate() {
+		runInTransaction(()->{
+			assertEquals(0, myResourceHistoryTableDao.count());
+		});
+
 		myStorageSettings.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);
 		create20Patients();

+		runInTransaction(()->{
+			assertEquals(20, myResourceHistoryTableDao.count());
+		});
+
 		/*
 		 * Perform initial history
 		 */
@@ -1,5 +1,10 @@
 package ca.uhn.fhir.jpa.dao.r4;

+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeAppCtx;
+import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeJobParameters;
+import ca.uhn.fhir.batch2.model.JobInstance;
+import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.interceptor.api.HookParams;
@@ -7,6 +12,7 @@ import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
 import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
 import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.entity.ForcedId;
@@ -67,6 +73,7 @@ import org.junit.jupiter.api.Test;
 import org.mockito.ArgumentCaptor;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;

 import java.io.IOException;
 import java.time.LocalDate;
@@ -104,6 +111,9 @@ import static org.mockito.Mockito.verify;
 public class PartitioningSqlR4Test extends BasePartitioningR4Test {
 	private static final Logger ourLog = LoggerFactory.getLogger(PartitioningSqlR4Test.class);

+	@Autowired
+	private IJobCoordinator myJobCoordinator;
+
 	@BeforeEach
 	public void disableAdvanceIndexing() {
 		myStorageSettings.setAdvancedHSearchIndexing(false);
@@ -679,6 +689,56 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
 		assertPersistedPartitionIdMatches(patientId);
 	}

+	@Test
+	public void testDeleteExpunge_Cascade() {
+		myPartitionSettings.setPartitioningEnabled(true);
+
+		addCreatePartition(myPartitionId, myPartitionDate);
+		addCreatePartition(myPartitionId, myPartitionDate);
+		IIdType p1 = createPatient(withActiveTrue());
+		IIdType o1 = createObservation(withSubject(p1));
+
+		addCreatePartition(myPartitionId2, myPartitionDate);
+		addCreatePartition(myPartitionId2, myPartitionDate);
+		IIdType p2 = createPatient(withActiveTrue());
+		IIdType o2 = createObservation(withSubject(p2));
+
+		// validate precondition
+		addReadAllPartitions();
+		addReadAllPartitions();
+		assertEquals(2, myPatientDao.search(SearchParameterMap.newSynchronous(), mySrd).size());
+		assertEquals(2, myObservationDao.search(SearchParameterMap.newSynchronous(), mySrd).size());
+		addReadPartition(myPartitionId);
+		addReadPartition(myPartitionId);
+		assertEquals(1, myPatientDao.search(SearchParameterMap.newSynchronous(), mySrd).size());
+		assertEquals(1, myObservationDao.search(SearchParameterMap.newSynchronous(), mySrd).size());
+
+		DeleteExpungeJobParameters jobParameters = new DeleteExpungeJobParameters();
+		jobParameters.addUrl("Patient?_id=" + p1.getIdPart() + "," + p2.getIdPart());
+		jobParameters.setRequestPartitionId(RequestPartitionId.fromPartitionId(myPartitionId));
+		jobParameters.setCascade(true);
+
+		JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+		startRequest.setParameters(jobParameters);
+		startRequest.setJobDefinitionId(DeleteExpungeAppCtx.JOB_DELETE_EXPUNGE);
+
+		// execute
+		Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(startRequest);
+
+		// Validate
+		JobInstance outcome = myBatch2JobHelper.awaitJobCompletion(startResponse);
+		assertEquals(2, outcome.getCombinedRecordsProcessed());
+		addReadAllPartitions();
+		assertDoesntExist(p1);
+		addReadAllPartitions();
+		assertDoesntExist(o1);
+		addReadAllPartitions();
+		assertNotGone(p2);
+		addReadAllPartitions();
+		assertNotGone(o2);
+	}
+
 	private void assertPersistedPartitionIdMatches(Long patientId) {
 		runInTransaction(() -> {
 			// HFJ_RESOURCE
@@ -10,6 +10,7 @@ import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
 import ca.uhn.fhir.jpa.test.Batch2JobHelper;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.r4.model.DiagnosticReport;
 import org.hl7.fhir.r4.model.Observation;
@@ -18,7 +19,10 @@ import org.hl7.fhir.r4.model.Reference;
 import org.junit.jupiter.api.Test;
 import org.springframework.beans.factory.annotation.Autowired;

+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsString;
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;

 public class DeleteExpungeJobTest extends BaseJpaR4Test {
 	@Autowired
@@ -27,7 +31,7 @@ public class DeleteExpungeJobTest extends BaseJpaR4Test {
 	private Batch2JobHelper myBatch2JobHelper;

 	@Test
-	public void testDeleteExpunge() throws Exception {
+	public void testDeleteExpunge() {
 		// setup
 		Patient patientActive = new Patient();
 		patientActive.setActive(true);
@@ -80,6 +84,159 @@ public class DeleteExpungeJobTest extends BaseJpaR4Test {
 		assertDocumentCountMatchesResourceCount(myPatientDao);
 	}

+	@Test
+	public void testCascade_FailIfNotEnabled() {
+		IIdType p1 = createPatient(withActiveTrue());
+		IIdType o1 = createObservation(withSubject(p1));
+		IIdType p2 = createPatient(withActiveTrue());
+		IIdType o2 = createObservation(withSubject(p2));
+
+		// validate precondition
+		assertEquals(2, myPatientDao.search(SearchParameterMap.newSynchronous()).size());
+		assertEquals(2, myObservationDao.search(SearchParameterMap.newSynchronous()).size());
+
+		DeleteExpungeJobParameters jobParameters = new DeleteExpungeJobParameters();
+		jobParameters.addUrl("Patient?_id=" + p1.getIdPart());
+
+		JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+		startRequest.setParameters(jobParameters);
+		startRequest.setJobDefinitionId(DeleteExpungeAppCtx.JOB_DELETE_EXPUNGE);
+
+		// execute
+		Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(startRequest);
+
+		// Validate
+		JobInstance failure = myBatch2JobHelper.awaitJobFailure(startResponse);
+		assertThat(failure.getErrorMessage(), containsString("Unable to delete " + p1.getValue() + " because " + o1.getValue() + " refers to it"));
+	}
+
+	@Test
+	public void testCascade() {
+		// Setup
+
+		IIdType p1 = createPatient(withActiveTrue());
+		IIdType o1 = createObservation(withSubject(p1));
+		IIdType p2 = createPatient(withActiveTrue());
+		IIdType o2 = createObservation(withSubject(p2));
+
+		// validate precondition
+		assertEquals(2, myPatientDao.search(SearchParameterMap.newSynchronous()).size());
+		assertEquals(2, myObservationDao.search(SearchParameterMap.newSynchronous()).size());
+
+		DeleteExpungeJobParameters jobParameters = new DeleteExpungeJobParameters();
+		jobParameters.addUrl("Patient?_id=" + p1.getIdPart());
+		jobParameters.setCascade(true);
+
+		JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+		startRequest.setParameters(jobParameters);
+		startRequest.setJobDefinitionId(DeleteExpungeAppCtx.JOB_DELETE_EXPUNGE);
+
+		// execute
+		Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(startRequest);
+
+		// Validate
+		JobInstance outcome = myBatch2JobHelper.awaitJobCompletion(startResponse);
+		assertEquals(2, outcome.getCombinedRecordsProcessed());
+		assertDoesntExist(p1);
+		assertDoesntExist(o1);
+		assertNotGone(p2);
+		assertNotGone(o2);
+	}
+
+	@Test
+	public void testCascade_MultiLevel_Success() {
+		// Setup
+
+		// Create a chain of dependent references
+		IIdType p1 = createPatient(withActiveTrue());
+		IIdType o1 = createObservation(withSubject(p1));
+		IIdType o1b = createObservation(withReference("hasMember", o1));
+		IIdType o1c = createObservation(withReference("hasMember", o1b));
+
+		// validate precondition
+		assertEquals(1, myPatientDao.search(SearchParameterMap.newSynchronous()).size());
+		assertEquals(3, myObservationDao.search(SearchParameterMap.newSynchronous()).size());
+
+		DeleteExpungeJobParameters jobParameters = new DeleteExpungeJobParameters();
+		jobParameters.addUrl("Patient?_id=" + p1.getIdPart());
+		jobParameters.setCascade(true);
+		jobParameters.setCascadeMaxRounds(4);
+
+		JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+		startRequest.setParameters(jobParameters);
+		startRequest.setJobDefinitionId(DeleteExpungeAppCtx.JOB_DELETE_EXPUNGE);
+
+		// execute
+		Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(startRequest);
+
+		// Validate
+		JobInstance outcome = myBatch2JobHelper.awaitJobCompletion(startResponse);
+		assertEquals(4, outcome.getCombinedRecordsProcessed());
+		assertDoesntExist(p1);
+		assertDoesntExist(o1);
+		assertDoesntExist(o1b);
+		assertDoesntExist(o1c);
+	}
+
+	@Test
+	public void testCascade_MultiLevel_NotEnoughRounds() {
+		// Setup
+
+		// Create a chain of dependent references
+		IIdType p1 = createPatient(withActiveTrue());
+		IIdType o1 = createObservation(withSubject(p1));
+		IIdType o1b = createObservation(withReference("hasMember", o1));
+		IIdType o1c = createObservation(withReference("hasMember", o1b));
+
+		// validate precondition
+		assertEquals(1, myPatientDao.search(SearchParameterMap.newSynchronous()).size());
+		assertEquals(3, myObservationDao.search(SearchParameterMap.newSynchronous()).size());
+
+		DeleteExpungeJobParameters jobParameters = new DeleteExpungeJobParameters();
+		jobParameters.addUrl("Patient?_id=" + p1.getIdPart());
+		jobParameters.setCascade(true);
+		jobParameters.setCascadeMaxRounds(1);
+
+		JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+		startRequest.setParameters(jobParameters);
+		startRequest.setJobDefinitionId(DeleteExpungeAppCtx.JOB_DELETE_EXPUNGE);
+
+		// execute
+		Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(startRequest);
+
+		// Validate
+		JobInstance outcome = myBatch2JobHelper.awaitJobFailure(startResponse);
+		assertThat(outcome.getErrorMessage(), containsString("refers to it via the path"));
+		assertNotGone(p1);
+		assertNotGone(o1);
+		assertNotGone(o1b);
+		assertNotGone(o1c);
+	}
+
+	@Test
+	public void testInvalidParams_NoSearchParams() {
+		// Setup
+		DeleteExpungeJobParameters jobParameters = new DeleteExpungeJobParameters();
+		jobParameters.addUrl("Patient/123");
+
+		JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+		startRequest.setParameters(jobParameters);
+		startRequest.setJobDefinitionId(DeleteExpungeAppCtx.JOB_DELETE_EXPUNGE);
+
+		// execute
+		try {
+			myJobCoordinator.startInstance(startRequest);
+			fail();
+		} catch (InvalidRequestException e) {
+
+			// validate
+			assertThat(e.getMessage(), containsString("Delete expunge URLs must be in the format"));
+		}
+	}
+
 	public void assertDocumentCountMatchesResourceCount(IFhirResourceDao dao) {
 		String resourceType = myFhirContext.getResourceType(dao.getResourceType());
 		long resourceCount = dao.search(new SearchParameterMap().setLoadSynchronous(true)).size();
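For completeness, a hedged sketch of driving the same job through the server-level $delete-expunge operation with a HAPI generic client. ProviderConstants.OPERATION_DELETE_EXPUNGE_URL appears elsewhere in this changeset; the "cascade" and "cascadeMaxRounds" parameter names below are assumptions for illustration, not names confirmed by this diff:

	// Sketch only - cascade parameter names are assumed
	Parameters input = new Parameters();
	input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, new StringType("Patient?active=false"));
	input.addParameter("cascade", new BooleanType(true));        // assumed name
	input.addParameter("cascadeMaxRounds", new IntegerType(4));  // assumed name

	Parameters response = client
		.operation()
		.onServer()
		.named(ProviderConstants.OPERATION_DELETE_EXPUNGE)
		.withParameters(input)
		.execute();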
@@ -204,7 +204,7 @@ public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceP
 		options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
 		options.setOutputFormat(Constants.CT_FHIR_NDJSON);

-		Batch2JobStartResponse startResponse = myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+		Batch2JobStartResponse startResponse = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));

 		assertNotNull(startResponse);
@@ -117,7 +117,9 @@ public class NpmR4Test extends BaseJpaR4Test {

 		int port = JettyUtil.getPortForStartedServer(myServer);
 		jpaPackageCache.getPackageServers().clear();
-		jpaPackageCache.addPackageServer(new PackageServer("http://localhost:" + port));
+		String url = "http://localhost:" + port;
+		ourLog.info("Package server is at base: {}", url);
+		jpaPackageCache.addPackageServer(new PackageServer(url));

 		myFakeNpmServlet.responses.clear();
 	}
@@ -119,15 +119,6 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
 		}
 	}

-	private void assertGone(IIdType theId) {
-		try {
-			getDao(theId).read(theId);
-			fail();
-		} catch (ResourceGoneException e) {
-			// good
-		}
-	}
-
 	private void assertStillThere(IIdType theId) {
 		getDao(theId).read(theId);
 	}
@@ -108,7 +108,7 @@ public class MultitenantBatchOperationR4Test extends BaseMultitenantResourceProv
 		String jobId = BatchHelperR4.jobIdFromBatch2Parameters(response);
 		myBatch2JobHelper.awaitJobCompletion(jobId);

-		assertThat(interceptor.requestPartitionIds, hasSize(3));
+		assertThat(interceptor.requestPartitionIds, hasSize(5));
 		RequestPartitionId partitionId = interceptor.requestPartitionIds.get(0);
 		assertEquals(TENANT_B_ID, partitionId.getFirstPartitionIdOrNull());
 		assertEquals(TENANT_B, partitionId.getFirstPartitionNameOrNull());
@@ -69,6 +69,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.fail;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.ArgumentMatchers.isNotNull;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.when;
@@ -672,7 +673,7 @@ public class MultitenantServerR4Test extends BaseMultitenantResourceProviderR4Te

 		Batch2JobStartResponse startResponse = new Batch2JobStartResponse();
 		startResponse.setInstanceId(jobId);
-		when(myJobRunner.startNewJob(any()))
+		when(myJobRunner.startNewJob(isNotNull(), any()))
 			.thenReturn(startResponse);
 		when(myJobRunner.getJobInfo(anyString()))
 			.thenReturn(jobInfo);
@@ -48,17 +48,8 @@ public class ResourceProviderExpungeR4Test extends BaseResourceProviderR4Test {
 		}
 	}

-	private void assertGone(IIdType theId) {
-		try {
-			getDao(theId).read(theId);
-			fail();
-		} catch (ResourceGoneException e) {
-			// good
-		}
-	}
-
 	private void assertStillThere(IIdType theId) {
-		getDao(theId).read(theId);
+		assertNotGone(theId);
 	}

 	@Override
@@ -72,6 +72,7 @@ import org.hl7.fhir.r4.model.DecimalType;
 import org.hl7.fhir.r4.model.DiagnosticReport;
 import org.hl7.fhir.r4.model.Enumerations.AdministrativeGender;
 import org.hl7.fhir.r4.model.IdType;
+import org.hl7.fhir.r4.model.IntegerType;
 import org.hl7.fhir.r4.model.Observation;
 import org.hl7.fhir.r4.model.OperationDefinition;
 import org.hl7.fhir.r4.model.OperationOutcome;
@@ -956,7 +957,7 @@ public class SystemProviderR4Test extends BaseJpaR4Test {
 		input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "Observation?subject.active=false");
 		input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "DiagnosticReport?subject.active=false");
 		int batchSize = 2;
-		input.addParameter(ProviderConstants.OPERATION_DELETE_BATCH_SIZE, new DecimalType(batchSize));
+		input.addParameter(ProviderConstants.OPERATION_DELETE_BATCH_SIZE, new IntegerType(batchSize));

 		// execute
@@ -36,16 +36,6 @@
 			<plugin>
 				<groupId>org.apache.maven.plugins</groupId>
 				<artifactId>maven-failsafe-plugin</artifactId>
-				<configuration>
-					<!--<useManifestOnlyJar>false</useManifestOnlyJar>-->
-					<forkCount>1</forkCount>
-					<reuseForks>false</reuseForks>
-					<runOrder>alphabetical</runOrder>
-					<includes>
-						<include>**/*IT.java</include>
-					</includes>
-					<useModulePath>false</useModulePath>
-				</configuration>
 				<executions>
 					<execution>
 						<goals>
@@ -66,11 +56,7 @@
 				<groupId>org.apache.maven.plugins</groupId>
 				<artifactId>maven-surefire-plugin</artifactId>
 				<configuration>
-					<runOrder>alphabetical</runOrder>
 					<argLine>@{argLine} ${surefire_jvm_args}</argLine>
-					<forkCount>0.6C</forkCount>
-					<excludes>*StressTest*</excludes>
-					<skip>${skipFailsafe}</skip>
 				</configuration>
 			</plugin>
 			<plugin>
@@ -100,20 +86,6 @@
 				</plugins>
 			</build>
 		</profile>
-		<profile>
-			<id>CI</id>
-			<build>
-				<plugins>
-					<plugin>
-						<groupId>org.apache.maven.plugins</groupId>
-						<artifactId>maven-surefire-plugin</artifactId>
-						<configuration>
-							<runOrder>alphabetical</runOrder>
-						</configuration>
-					</plugin>
-				</plugins>
-			</build>
-		</profile>
 	</profiles>

 </project>
@@ -41,16 +41,6 @@
 			<plugin>
 				<groupId>org.apache.maven.plugins</groupId>
 				<artifactId>maven-failsafe-plugin</artifactId>
-				<configuration>
-					<!--<useManifestOnlyJar>false</useManifestOnlyJar>-->
-					<forkCount>1</forkCount>
-					<reuseForks>false</reuseForks>
-					<runOrder>alphabetical</runOrder>
-					<includes>
-						<include>**/*IT.java</include>
-					</includes>
-					<useModulePath>false</useModulePath>
-				</configuration>
 				<executions>
 					<execution>
 						<goals>
@@ -71,11 +61,7 @@
 				<groupId>org.apache.maven.plugins</groupId>
 				<artifactId>maven-surefire-plugin</artifactId>
 				<configuration>
-					<runOrder>alphabetical</runOrder>
 					<argLine>@{argLine} ${surefire_jvm_args}</argLine>
-					<forkCount>0.6C</forkCount>
-					<excludes>*StressTest*</excludes>
-					<skip>${skipFailsafe}</skip>
 				</configuration>
 			</plugin>
 			<plugin>
@@ -105,20 +91,6 @@
 				</plugins>
 			</build>
 		</profile>
-		<profile>
-			<id>CI</id>
-			<build>
-				<plugins>
-					<plugin>
-						<groupId>org.apache.maven.plugins</groupId>
-						<artifactId>maven-surefire-plugin</artifactId>
-						<configuration>
-							<runOrder>alphabetical</runOrder>
-						</configuration>
-					</plugin>
-				</plugins>
-			</build>
-		</profile>
 	</profiles>

 </project>
@@ -262,16 +262,6 @@
 			<plugin>
 				<groupId>org.apache.maven.plugins</groupId>
 				<artifactId>maven-failsafe-plugin</artifactId>
-				<configuration>
-					<!--<useManifestOnlyJar>false</useManifestOnlyJar>-->
-					<forkCount>1</forkCount>
-					<reuseForks>false</reuseForks>
-					<runOrder>alphabetical</runOrder>
-					<includes>
-						<include>**/*IT.java</include>
-					</includes>
-					<useModulePath>false</useModulePath>
-				</configuration>
 				<executions>
 					<execution>
 						<goals>
@@ -326,20 +316,6 @@
 				</plugins>
 			</build>
 		</profile>
-		<profile>
-			<id>CI</id>
-			<build>
-				<plugins>
-					<plugin>
-						<groupId>org.apache.maven.plugins</groupId>
-						<artifactId>maven-surefire-plugin</artifactId>
-						<configuration>
-							<runOrder>alphabetical</runOrder>
-						</configuration>
-					</plugin>
-				</plugins>
-			</build>
-		</profile>
 	</profiles>

 </project>
@@ -26,6 +26,8 @@ import ca.uhn.fhir.interceptor.api.IInterceptorService;
 import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.interceptor.executor.InterceptorService;
 import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
 import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
@@ -34,41 +36,9 @@ import ca.uhn.fhir.jpa.config.JpaConfig;
 import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
 import ca.uhn.fhir.jpa.dao.JpaPersistedResourceValidationSupport;
-import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
-import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
-import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboTokensNonUniqueDao;
-import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamCoordsDao;
-import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamDateDao;
-import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamNumberDao;
-import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamStringDao;
-import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamTokenDao;
-import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamUriDao;
-import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
-import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
-import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
-import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
-import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
-import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
-import ca.uhn.fhir.jpa.dao.data.ITermConceptDesignationDao;
-import ca.uhn.fhir.jpa.dao.data.ITermConceptPropertyDao;
-import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao;
-import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao;
-import ca.uhn.fhir.jpa.entity.TermConcept;
-import ca.uhn.fhir.jpa.entity.TermConceptDesignation;
-import ca.uhn.fhir.jpa.entity.TermConceptProperty;
-import ca.uhn.fhir.jpa.entity.TermValueSet;
-import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
-import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation;
-import ca.uhn.fhir.jpa.model.entity.ForcedId;
-import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
-import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboTokenNonUnique;
-import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamCoords;
-import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
-import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamNumber;
-import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
-import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
-import ca.uhn.fhir.jpa.model.entity.ResourceLink;
-import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.dao.data.*;
+import ca.uhn.fhir.jpa.entity.*;
+import ca.uhn.fhir.jpa.model.entity.*;
 import ca.uhn.fhir.jpa.model.util.JpaConstants;
 import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
 import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
@@ -82,6 +52,7 @@ import ca.uhn.fhir.jpa.util.MemoryCacheService;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
 import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
 import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
 import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
@@ -91,11 +62,7 @@ import ca.uhn.fhir.test.utilities.LoggingExtension;
 import ca.uhn.fhir.test.utilities.ProxyUtil;
 import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor;
 import ca.uhn.fhir.test.utilities.server.SpringContextGrabbingTestExecutionListener;
-import ca.uhn.fhir.util.BundleUtil;
-import ca.uhn.fhir.util.ClasspathUtil;
-import ca.uhn.fhir.util.FhirVersionIndependentConcept;
-import ca.uhn.fhir.util.StopWatch;
-import ca.uhn.fhir.util.TestUtil;
+import ca.uhn.fhir.util.*;
 import org.hibernate.HibernateException;
 import org.hibernate.Session;
 import org.hibernate.SessionFactory;
@@ -131,13 +98,7 @@ import javax.persistence.EntityManager;
 import java.io.IOException;
 import java.time.Duration;
 import java.time.temporal.ChronoUnit;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Optional;
-import java.util.Set;
+import java.util.*;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -147,9 +108,7 @@ import java.util.stream.Stream;
 import static ca.uhn.fhir.util.TestUtil.doRandomizeLocaleAndTimezone;
 import static java.util.stream.Collectors.joining;
 import static org.awaitility.Awaitility.await;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.junit.jupiter.api.Assertions.fail;
+import static org.junit.jupiter.api.Assertions.*;
 import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.lenient;
 import static org.mockito.Mockito.when;
@@ -253,8 +212,104 @@ public abstract class BaseJpaTest extends BaseTest {
 	private IResourceHistoryTableDao myResourceHistoryTableDao;
 	@Autowired
 	private IForcedIdDao myForcedIdDao;
+	@Autowired
+	private DaoRegistry myDaoRegistry;
 	private List<Object> myRegisteredInterceptors = new ArrayList<>(1);

+	@SuppressWarnings("BusyWait")
+	public static void waitForSize(int theTarget, List<?> theList) {
+		StopWatch sw = new StopWatch();
+		while (theList.size() != theTarget && sw.getMillis() <= 16000) {
+			try {
+				Thread.sleep(50);
+			} catch (InterruptedException theE) {
+				throw new Error(theE);
+			}
+		}
+		if (sw.getMillis() >= 16000 || theList.size() > theTarget) {
+			String describeResults = theList
+				.stream()
+				.map(t -> {
+					if (t == null) {
+						return "null";
+					}
+					if (t instanceof IBaseResource) {
+						return ((IBaseResource) t).getIdElement().getValue();
+					}
+					return t.toString();
+				})
+				.collect(Collectors.joining(", "));
+			fail("Size " + theList.size() + " is != target " + theTarget + " - Got: " + describeResults);
+		}
+	}
+
+	@BeforeAll
+	public static void beforeClassRandomizeLocale() {
+		doRandomizeLocaleAndTimezone();
+	}
+
+	@SuppressWarnings("BusyWait")
+	protected static void purgeDatabase(JpaStorageSettings theStorageSettings, IFhirSystemDao<?, ?> theSystemDao, IResourceReindexingSvc theResourceReindexingSvc, ISearchCoordinatorSvc theSearchCoordinatorSvc, ISearchParamRegistry theSearchParamRegistry, IBulkDataExportJobSchedulingHelper theBulkDataJobActivator) {
+		theSearchCoordinatorSvc.cancelAllActiveSearches();
+		theResourceReindexingSvc.cancelAndPurgeAllJobs();
+		theBulkDataJobActivator.cancelAndPurgeAllJobs();
+
+		boolean expungeEnabled = theStorageSettings.isExpungeEnabled();
+		boolean multiDeleteEnabled = theStorageSettings.isAllowMultipleDelete();
+		theStorageSettings.setExpungeEnabled(true);
+		theStorageSettings.setAllowMultipleDelete(true);
+
+		for (int count = 0; ; count++) {
+			try {
+				theSystemDao.expunge(new ExpungeOptions().setExpungeEverything(true), new SystemRequestDetails());
+				break;
+			} catch (Exception e) {
+				if (count >= 3) {
+					ourLog.error("Failed during expunge", e);
+					fail(e.toString());
+				} else {
+					try {
+						Thread.sleep(1000);
+					} catch (InterruptedException e2) {
+						fail(e2.toString());
+					}
+				}
+			}
+		}
+		theStorageSettings.setExpungeEnabled(expungeEnabled);
+		theStorageSettings.setAllowMultipleDelete(multiDeleteEnabled);
+
+		theSearchParamRegistry.forceRefresh();
+	}
+
+	protected static Set<String> toCodes(Set<TermConcept> theConcepts) {
+		HashSet<String> retVal = new HashSet<>();
+		for (TermConcept next : theConcepts) {
+			retVal.add(next.getCode());
+		}
+		return retVal;
+	}
+
+	protected static Set<String> toCodes(List<FhirVersionIndependentConcept> theConcepts) {
+		HashSet<String> retVal = new HashSet<>();
+		for (FhirVersionIndependentConcept next : theConcepts) {
+			retVal.add(next.getCode());
+		}
+		return retVal;
+	}
+
+	public static void waitForSize(int theTarget, Callable<Number> theCallable, Callable<String> theFailureMessage) throws Exception {
+		waitForSize(theTarget, 10000, theCallable, theFailureMessage);
+	}
+
+	@SuppressWarnings("BusyWait")
+	public static void waitForSize(int theTarget, int theTimeoutMillis, Callable<Number> theCallable, Callable<String> theFailureMessage) throws Exception {
+		await()
+			.alias("Waiting for size " + theTarget + ". Current size is " + theCallable.call().intValue() + ": " + theFailureMessage.call())
+			.atMost(Duration.of(theTimeoutMillis, ChronoUnit.MILLIS))
+			.until(() -> theCallable.call().intValue() == theTarget);
+	}
+
 	protected <T extends IBaseResource> T loadResourceFromClasspath(Class<T> type, String resourceName) throws IOException {
 		return ClasspathUtil.loadResource(myFhirContext, type, resourceName);
 	}
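A quick usage sketch for the relocated list-polling helper: it busy-waits up to roughly 16 seconds for the list to reach the target size, then fails with a description of the actual contents. The field name here is hypothetical, for illustration only:

	// e.g. in a subscription test, after triggering two deliveries:
	waitForSize(2, ourReceivedResources);  // 'ourReceivedResources' is an assumed test field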
@@ -360,7 +415,6 @@ public abstract class BaseJpaTest extends BaseTest {
 		});
 	}

-
 	protected void logAllResourceLinks() {
 		runInTransaction(() -> {
 			ourLog.info("Resource Links:\n * {}", myResourceLinkDao.findAll().stream().map(ResourceLink::toString).collect(Collectors.joining("\n * ")));
@@ -751,97 +805,35 @@ public abstract class BaseJpaTest extends BaseTest {
 		myRegisteredInterceptors.clear();
 	}

-	@SuppressWarnings("BusyWait")
-	public static void waitForSize(int theTarget, List<?> theList) {
-		StopWatch sw = new StopWatch();
-		while (theList.size() != theTarget && sw.getMillis() <= 16000) {
-			try {
-				Thread.sleep(50);
-			} catch (InterruptedException theE) {
-				throw new Error(theE);
-			}
-		}
-		if (sw.getMillis() >= 16000 || theList.size() > theTarget) {
-			String describeResults = theList
-				.stream()
-				.map(t -> {
-					if (t == null) {
-						return "null";
-					}
-					if (t instanceof IBaseResource) {
-						return ((IBaseResource) t).getIdElement().getValue();
-					}
-					return t.toString();
-				})
-				.collect(Collectors.joining(", "));
-			fail("Size " + theList.size() + " is != target " + theTarget + " - Got: " + describeResults);
-		}
-	}
-
-	@BeforeAll
-	public static void beforeClassRandomizeLocale() {
-		doRandomizeLocaleAndTimezone();
-	}
-
-	@SuppressWarnings("BusyWait")
-	protected static void purgeDatabase(JpaStorageSettings theStorageSettings, IFhirSystemDao<?, ?> theSystemDao, IResourceReindexingSvc theResourceReindexingSvc, ISearchCoordinatorSvc theSearchCoordinatorSvc, ISearchParamRegistry theSearchParamRegistry, IBulkDataExportJobSchedulingHelper theBulkDataJobActivator) {
-		theSearchCoordinatorSvc.cancelAllActiveSearches();
-		theResourceReindexingSvc.cancelAndPurgeAllJobs();
-		theBulkDataJobActivator.cancelAndPurgeAllJobs();
-
-		boolean expungeEnabled = theStorageSettings.isExpungeEnabled();
-		boolean multiDeleteEnabled = theStorageSettings.isAllowMultipleDelete();
-		theStorageSettings.setExpungeEnabled(true);
-		theStorageSettings.setAllowMultipleDelete(true);
-
-		for (int count = 0; ; count++) {
-			try {
-				theSystemDao.expunge(new ExpungeOptions().setExpungeEverything(true), new SystemRequestDetails());
-				break;
-			} catch (Exception e) {
-				if (count >= 3) {
-					ourLog.error("Failed during expunge", e);
-					fail(e.toString());
-				} else {
-					try {
-						Thread.sleep(1000);
-					} catch (InterruptedException e2) {
-						fail(e2.toString());
-					}
-				}
-			}
-		}
-		theStorageSettings.setExpungeEnabled(expungeEnabled);
-		theStorageSettings.setAllowMultipleDelete(multiDeleteEnabled);
-
-		theSearchParamRegistry.forceRefresh();
-	}
-
-	protected static Set<String> toCodes(Set<TermConcept> theConcepts) {
-		HashSet<String> retVal = new HashSet<>();
-		for (TermConcept next : theConcepts) {
-			retVal.add(next.getCode());
-		}
-		return retVal;
-	}
-
-	protected static Set<String> toCodes(List<FhirVersionIndependentConcept> theConcepts) {
-		HashSet<String> retVal = new HashSet<>();
-		for (FhirVersionIndependentConcept next : theConcepts) {
-			retVal.add(next.getCode());
-		}
-		return retVal;
-	}
-
-	public static void waitForSize(int theTarget, Callable<Number> theCallable, Callable<String> theFailureMessage) throws Exception {
-		waitForSize(theTarget, 10000, theCallable, theFailureMessage);
-	}
-
-	@SuppressWarnings("BusyWait")
-	public static void waitForSize(int theTarget, int theTimeoutMillis, Callable<Number> theCallable, Callable<String> theFailureMessage) throws Exception {
-		await()
-			.alias("Waiting for size " + theTarget + ". Current size is " + theCallable.call().intValue() + ": " + theFailureMessage.call())
-			.atMost(Duration.of(theTimeoutMillis, ChronoUnit.MILLIS))
-			.until(() -> theCallable.call().intValue() == theTarget);
-	}
+	/**
+	 * Asserts that the resource with {@literal theId} is deleted
+	 */
+	protected void assertGone(IIdType theId) {
+		IFhirResourceDao dao = myDaoRegistry.getResourceDao(theId.getResourceType());
+		IBaseResource result = dao.read(theId, mySrd, true);
+		assertTrue(result.isDeleted());
+	}
+
+	/**
+	 * Asserts that the resource with {@literal theId} exists and is not deleted
+	 */
+	protected void assertNotGone(IIdType theId) {
+		IFhirResourceDao dao = myDaoRegistry.getResourceDao(theId.getResourceType());
+		assertNotNull(dao.read(theId, mySrd));
+	}
+
+	/**
+	 * Asserts that the resource with {@literal theId} does not exist (i.e. not that
+	 * it exists but that it was deleted, but rather that the ID doesn't exist at all).
+	 * This can be used to test that a resource was expunged.
+	 */
+	protected void assertDoesntExist(IIdType theId) {
+		IFhirResourceDao dao = myDaoRegistry.getResourceDao(theId.getResourceType());
+		try {
+			dao.read(theId, mySrd);
+			fail();
+		} catch (ResourceNotFoundException e) {
+			// good
+		}
+	}
 }
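The three assertions above distinguish soft-deleted from physically expunged storage states. An illustrative (not normative) usage sequence:

	IIdType id = createPatient(withActiveTrue());
	myPatientDao.delete(id, mySrd);
	assertGone(id);          // deleted, but read(theId, mySrd, true) still returns the deleted stub
	// ... after a successful $delete-expunge of the same resource:
	assertDoesntExist(id);   // read() now throws ResourceNotFoundException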
@ -92,7 +92,21 @@ public class Batch2JobHelper
 		try {
 			await()
 				.atMost(theSecondsToWait, TimeUnit.SECONDS)
-				.until(() -> checkStatusWithMaintenancePass(theBatchJobId, theExpectedStatus));
+				.until(() -> {
+					boolean inFinalStatus = false;
+					if (ArrayUtils.contains(theExpectedStatus, StatusEnum.COMPLETED) && !ArrayUtils.contains(theExpectedStatus, StatusEnum.FAILED)) {
+						inFinalStatus = hasStatus(theBatchJobId, StatusEnum.FAILED);
+					}
+					if (ArrayUtils.contains(theExpectedStatus, StatusEnum.FAILED) && !ArrayUtils.contains(theExpectedStatus, StatusEnum.COMPLETED)) {
+						inFinalStatus = hasStatus(theBatchJobId, StatusEnum.COMPLETED);
+					}
+					boolean retVal = checkStatusWithMaintenancePass(theBatchJobId, theExpectedStatus);
+					if (!retVal && inFinalStatus) {
+						// Fail fast - if we hit one of these terminal statuses and it's not the one we want, abort
+						throw new ConditionTimeoutException("Already in failed/completed status");
+					}
+					return retVal;
+				});
 		} catch (ConditionTimeoutException e) {
 			String statuses = myJobPersistence.fetchInstances(100, 0)
 				.stream()

@ -130,7 +144,7 @@ public class Batch2JobHelper
 		return hasStatus(theBatchJobId, theExpectedStatuses);
 	}

-	private boolean hasStatus(String theBatchJobId, StatusEnum[] theExpectedStatuses) {
+	private boolean hasStatus(String theBatchJobId, StatusEnum... theExpectedStatuses) {
 		StatusEnum status = getStatus(theBatchJobId);
 		ourLog.debug("Checking status of {} in {}: is {}", theBatchJobId, theExpectedStatuses, status);
 		return ArrayUtils.contains(theExpectedStatuses, status);
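The guard above aborts the poll by throwing ConditionTimeoutException from inside the polled lambda once the job lands in the wrong terminal status. A minimal alternative sketch, assuming Awaitility 4.1+ and the helper's existing hasStatus()/checkStatusWithMaintenancePass() methods, would be Awaitility's built-in failFast hook:

    // Sketch only: abort early when the job reaches a terminal status we did not expect
    await()
        .atMost(theSecondsToWait, TimeUnit.SECONDS)
        .failFast("Job reached an unexpected terminal status",
            () -> !ArrayUtils.contains(theExpectedStatus, StatusEnum.FAILED)
                && hasStatus(theBatchJobId, StatusEnum.FAILED))
        .until(() -> checkStatusWithMaintenancePass(theBatchJobId, theExpectedStatus));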
@ -1,76 +0,0 @@ (DeleteExpungeProviderTest deleted)
-package ca.uhn.fhir.jpa.delete.provider;
-
-import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeProvider;
-import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
-import ca.uhn.fhir.rest.server.RestfulServer;
-import ca.uhn.fhir.rest.server.provider.ProviderConstants;
-import ca.uhn.fhir.test.utilities.JettyUtil;
-import org.apache.commons.io.IOUtils;
-import org.apache.http.client.methods.CloseableHttpResponse;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.servlet.ServletHandler;
-import org.eclipse.jetty.servlet.ServletHolder;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.mockito.Mock;
-import org.mockito.junit.jupiter.MockitoExtension;
-
-import java.io.IOException;
-import java.nio.charset.Charset;
-import java.util.concurrent.TimeUnit;
-
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
-
-@ExtendWith(MockitoExtension.class)
-class DeleteExpungeProviderTest {
-
-	@Mock
-	private IDeleteExpungeJobSubmitter myJobSubmitter;
-	private Server myServer;
-	private FhirContext myCtx;
-	private int myPort;
-	private CloseableHttpClient myClient;
-
-	@BeforeEach
-	public void start() throws Exception {
-		myCtx = FhirContext.forR4Cached();
-		myServer = new Server(0);
-
-		DeleteExpungeProvider provider = new DeleteExpungeProvider(myCtx, myJobSubmitter);
-
-		ServletHandler proxyHandler = new ServletHandler();
-		RestfulServer servlet = new RestfulServer(myCtx);
-		servlet.registerProvider(provider);
-		ServletHolder servletHolder = new ServletHolder(servlet);
-		proxyHandler.addServletWithMapping(servletHolder, "/*");
-		myServer.setHandler(proxyHandler);
-		JettyUtil.startServer(myServer);
-		myPort = JettyUtil.getPortForStartedServer(myServer);
-
-		PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS);
-		HttpClientBuilder builder = HttpClientBuilder.create();
-		builder.setConnectionManager(connectionManager);
-		myClient = builder.build();
-	}
-
-	@Test
-	public void testSupplyingNoUrlsProvidesValidErrorMessage() throws IOException {
-		HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + ProviderConstants.OPERATION_DELETE_EXPUNGE);
-		try (CloseableHttpResponse execute = myClient.execute(post)) {
-			String body = IOUtils.toString(execute.getEntity().getContent(), Charset.defaultCharset());
-			assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(400)));
-			assertThat(body, is(containsString("At least one `url` parameter to $delete-expunge must be provided.")));
-		}
-	}
-}
@ -40,7 +40,7 @@ class Batch2JobHelperTest

 	@Test
 	void awaitJobCompletion_inProgress_callsMaintenance() {
-		when(myJobCoordinator.getInstance(JOB_ID)).thenReturn(ourIncompleteInstance, ourCompleteInstance);
+		when(myJobCoordinator.getInstance(JOB_ID)).thenReturn(ourIncompleteInstance, ourIncompleteInstance, ourIncompleteInstance, ourCompleteInstance);

 		myBatch2JobHelper.awaitJobCompletion(JOB_ID);
 		verify(myJobMaintenanceService, times(1)).runMaintenancePass();
@ -0,0 +1,14 @@ (new logback configuration file)
+<configuration>
+
+	<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+		<encoder>
+			<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} [%file:%line] - %msg%n
+			</pattern>
+		</encoder>
+	</appender>
+
+	<root level="info">
+		<appender-ref ref="STDOUT" />
+	</root>
+
+</configuration>
@ -15,7 +15,6 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.event.ContextRefreshedEvent;
 import org.springframework.context.event.EventListener;

-import javax.annotation.PostConstruct;
 import java.util.Date;
 import java.util.List;

@ -48,7 +47,7 @@ public class OldAuditEventPurgeService

 		ourLog.info("Submitting an AuditEvent purge job with URL: {}", url);

-		myDeleteExpungeSubmitter.submitJob(1000, List.of(url), new SystemRequestDetails());
+		myDeleteExpungeSubmitter.submitJob(1000, List.of(url), false, null, new SystemRequestDetails());
 	}

 	public static class OldAuditEventPurgeServiceJob implements HapiJob {
@ -126,15 +126,6 @@
 			<plugin>
 				<groupId>org.apache.maven.plugins</groupId>
 				<artifactId>maven-failsafe-plugin</artifactId>
-				<configuration>
-					<forkCount>1</forkCount>
-					<reuseForks>false</reuseForks>
-					<runOrder>alphabetical</runOrder>
-					<includes>
-						<include>**/*IT.java</include>
-					</includes>
-					<useModulePath>false</useModulePath>
-				</configuration>
 				<executions>
 					<execution>
 						<goals>
@ -30,5 +30,5 @@ public interface IDeleteExpungeJobSubmitter
 	 * @param theUrlsToProcess A list of strings of the form "/Patient?active=true"
 	 * @return The Batch2 JobId that was started to run this batch job
 	 */
-	String submitJob(Integer theBatchSize, List<String> theUrlsToProcess, RequestDetails theRequest);
+	String submitJob(Integer theBatchSize, List<String> theUrlsToProcess, boolean theCascade, Integer theCascadeMaxRounds, RequestDetails theRequest);
}
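A caller now supplies the cascade flag and the optional round cap directly; a minimal sketch (the batch size, URL, and round cap here are illustrative):

    // Cascade enabled with at most 5 rounds; pass false/null to keep the old behavior
    String jobId = myDeleteExpungeJobSubmitter.submitJob(
        1000,                             // theBatchSize (null falls back to the configured default)
        List.of("Patient?active=false"),  // theUrlsToProcess
        true,                             // theCascade
        5,                                // theCascadeMaxRounds (null = no explicit cap)
        new SystemRequestDetails());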
@ -29,16 +29,7 @@ import ca.uhn.fhir.model.api.Include;
 import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
 import ca.uhn.fhir.model.primitive.InstantDt;
 import ca.uhn.fhir.parser.IParser;
-import ca.uhn.fhir.rest.api.BundleLinks;
-import ca.uhn.fhir.rest.api.Constants;
-import ca.uhn.fhir.rest.api.DeleteCascadeModeEnum;
-import ca.uhn.fhir.rest.api.EncodingEnum;
-import ca.uhn.fhir.rest.api.PreferHandlingEnum;
-import ca.uhn.fhir.rest.api.PreferHeader;
-import ca.uhn.fhir.rest.api.PreferReturnEnum;
-import ca.uhn.fhir.rest.api.RequestTypeEnum;
-import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
-import ca.uhn.fhir.rest.api.SummaryEnum;
+import ca.uhn.fhir.rest.api.*;
 import ca.uhn.fhir.rest.api.server.IRestfulResponse;
 import ca.uhn.fhir.rest.api.server.IRestfulServer;
 import ca.uhn.fhir.rest.api.server.RequestDetails;

@ -49,17 +40,11 @@ import ca.uhn.fhir.rest.server.method.SummaryEnumParameter;
 import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
 import ca.uhn.fhir.util.BinaryUtil;
 import ca.uhn.fhir.util.DateUtils;
-import ca.uhn.fhir.util.IoUtil;
 import ca.uhn.fhir.util.UrlUtil;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-import org.hl7.fhir.instance.model.api.IAnyResource;
-import org.hl7.fhir.instance.model.api.IBaseBinary;
-import org.hl7.fhir.instance.model.api.IBaseReference;
-import org.hl7.fhir.instance.model.api.IBaseResource;
-import org.hl7.fhir.instance.model.api.IDomainResource;
-import org.hl7.fhir.instance.model.api.IIdType;
-import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import org.apache.commons.lang3.math.NumberUtils;
+import org.hl7.fhir.instance.model.api.*;

 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;

@ -67,28 +52,12 @@ import javax.servlet.http.HttpServletRequest;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.io.Writer;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Date;
-import java.util.EnumSet;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-import java.util.StringTokenizer;
-import java.util.TreeSet;
+import java.util.*;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;

-import static org.apache.commons.lang3.StringUtils.isBlank;
-import static org.apache.commons.lang3.StringUtils.isNotBlank;
-import static org.apache.commons.lang3.StringUtils.replace;
-import static org.apache.commons.lang3.StringUtils.trim;
+import static org.apache.commons.lang3.StringUtils.*;

 public class RestfulServerUtils {
 	static final Pattern ACCEPT_HEADER_PATTERN = Pattern.compile("\\s*([a-zA-Z0-9+.*/-]+)\\s*(;\\s*([a-zA-Z]+)\\s*=\\s*([a-zA-Z0-9.]+)\\s*)?(,?)");

@ -99,63 +68,6 @@ public class RestfulServerUtils {
 	private static Map<FhirVersionEnum, FhirContext> myFhirContextMap = Collections.synchronizedMap(new HashMap<>());
 	private static EnumSet<RestOperationTypeEnum> ourOperationsWhichAllowPreferHeader = EnumSet.of(RestOperationTypeEnum.CREATE, RestOperationTypeEnum.UPDATE, RestOperationTypeEnum.PATCH);

-	private enum NarrativeModeEnum {
-		NORMAL, ONLY, SUPPRESS;
-
-		public static NarrativeModeEnum valueOfCaseInsensitive(String theCode) {
-			return valueOf(NarrativeModeEnum.class, theCode.toUpperCase());
-		}
-	}
-
-	/**
-	 * Return type for {@link RestfulServerUtils#determineRequestEncodingNoDefault(RequestDetails)}
-	 */
-	public static class ResponseEncoding {
-		private final String myContentType;
-		private final EncodingEnum myEncoding;
-		private final Boolean myNonLegacy;
-
-		public ResponseEncoding(FhirContext theCtx, EncodingEnum theEncoding, String theContentType) {
-			super();
-			myEncoding = theEncoding;
-			myContentType = theContentType;
-			if (theContentType != null) {
-				FhirVersionEnum ctxtEnum = theCtx.getVersion().getVersion();
-				if (theContentType.equals(EncodingEnum.JSON_PLAIN_STRING) || theContentType.equals(EncodingEnum.XML_PLAIN_STRING)) {
-					myNonLegacy = ctxtEnum.isNewerThan(FhirVersionEnum.DSTU2_1);
-				} else {
-					myNonLegacy = ctxtEnum.isNewerThan(FhirVersionEnum.DSTU2_1) && !EncodingEnum.isLegacy(theContentType);
-				}
-			} else {
-				FhirVersionEnum ctxtEnum = theCtx.getVersion().getVersion();
-				if (ctxtEnum.isOlderThan(FhirVersionEnum.DSTU3)) {
-					myNonLegacy = null;
-				} else {
-					myNonLegacy = Boolean.TRUE;
-				}
-			}
-		}
-
-		public String getContentType() {
-			return myContentType;
-		}
-
-		public EncodingEnum getEncoding() {
-			return myEncoding;
-		}
-
-		public String getResourceContentType() {
-			if (Boolean.TRUE.equals(isNonLegacy())) {
-				return getEncoding().getResourceContentTypeNonLegacy();
-			}
-			return getEncoding().getResourceContentType();
-		}
-
-		Boolean isNonLegacy() {
-			return myNonLegacy;
-		}
-	}
-
 	@SuppressWarnings("EnumSwitchStatementWhichMissesCases")
 	public static void configureResponseParser(RequestDetails theRequestDetails, IParser parser) {
 		// Pretty print

@ -251,7 +163,6 @@ public class RestfulServerUtils {
 		}
 	}

-
 	public static String createLinkSelf(String theServerBase, RequestDetails theRequest) {
 		return createLinkSelfWithoutGivenParameters(theServerBase, theRequest, null);
 	}

@ -866,7 +777,6 @@ public class RestfulServerUtils {
 		return value;
 	}

-
 	public static boolean prettyPrintResponse(IRestfulServerDefaults theServer, RequestDetails theRequest) {
 		Map<String, String[]> requestParams = theRequest.getParameters();
 		String[] pretty = requestParams.get(Constants.PARAM_PRETTY);

@ -1065,7 +975,7 @@ public class RestfulServerUtils {
 	 * - If the binary was externalized and has not been reinflated upstream, return false.
 	 * - If they request octet-stream, return true;
 	 * - If the content-type happens to be a match, return true.
-	 *
+	 * <p>
 	 * - Construct an EncodingEnum out of the contentType. If this matches the responseEncoding, return true.
 	 * - Otherwise, return false.
 	 *

@ -1102,7 +1012,7 @@ public class RestfulServerUtils {
 		try {
 			return Integer.parseInt(retVal[0]);
 		} catch (NumberFormatException e) {
-			ourLog.debug("Failed to parse {} value '{}': {}", new Object[]{theParamName, retVal[0], e});
+			ourLog.debug("Failed to parse {} value '{}': {}", theParamName, retVal[0], e.toString());
 			return null;
 		}
 	}

@ -1113,23 +1023,136 @@ public class RestfulServerUtils {
 		}
 	}

 	/**
 	 * @since 5.0.0
 	 */
-	public static DeleteCascadeModeEnum extractDeleteCascadeParameter(RequestDetails theRequest) {
+	public static DeleteCascadeDetails extractDeleteCascadeParameter(RequestDetails theRequest) {
+		DeleteCascadeModeEnum mode = null;
+		Integer maxRounds = null;
 		if (theRequest != null) {
 			String[] cascadeParameters = theRequest.getParameters().get(Constants.PARAMETER_CASCADE_DELETE);
 			if (cascadeParameters != null && Arrays.asList(cascadeParameters).contains(Constants.CASCADE_DELETE)) {
-				return DeleteCascadeModeEnum.DELETE;
+				mode = DeleteCascadeModeEnum.DELETE;
+				String[] maxRoundsValues = theRequest.getParameters().get(Constants.PARAMETER_CASCADE_DELETE_MAX_ROUNDS);
+				if (maxRoundsValues != null && maxRoundsValues.length > 0) {
+					String maxRoundsString = maxRoundsValues[0];
+					maxRounds = parseMaxRoundsString(maxRoundsString);
+				}
 			}
-			String cascadeHeader = theRequest.getHeader(Constants.HEADER_CASCADE);
-			if (Constants.CASCADE_DELETE.equals(cascadeHeader)) {
-				return DeleteCascadeModeEnum.DELETE;
+
+			if (mode == null) {
+				String cascadeHeader = theRequest.getHeader(Constants.HEADER_CASCADE);
+				if (isNotBlank(cascadeHeader)) {
+					if (Constants.CASCADE_DELETE.equals(cascadeHeader) || cascadeHeader.startsWith(Constants.CASCADE_DELETE + ";") || cascadeHeader.startsWith(Constants.CASCADE_DELETE + " ")) {
+						mode = DeleteCascadeModeEnum.DELETE;
+
+						if (cascadeHeader.contains(";")) {
+							String remainder = cascadeHeader.substring(cascadeHeader.indexOf(';') + 1);
+							remainder = trim(remainder);
+							if (remainder.startsWith(Constants.HEADER_CASCADE_MAX_ROUNDS + "=")) {
+								String maxRoundsString = remainder.substring(Constants.HEADER_CASCADE_MAX_ROUNDS.length() + 1);
+								maxRounds = parseMaxRoundsString(maxRoundsString);
+							}
+						}
+					}
+				}
 			}
 		}
-		return DeleteCascadeModeEnum.NONE;
+
+		if (mode == null) {
+			mode = DeleteCascadeModeEnum.NONE;
+		}
+
+		return new DeleteCascadeDetails(mode, maxRounds);
 	}
+
+	@Nullable
+	private static Integer parseMaxRoundsString(String theMaxRoundsString) {
+		Integer maxRounds;
+		if (isBlank(theMaxRoundsString)) {
+			maxRounds = null;
+		} else if (NumberUtils.isDigits(theMaxRoundsString)) {
+			maxRounds = Integer.parseInt(theMaxRoundsString);
+		} else {
+			throw new InvalidRequestException(Msg.code(2349) + "Invalid value for " + Constants.PARAMETER_CASCADE_DELETE_MAX_ROUNDS + " parameter");
+		}
+		return maxRounds;
+	}
+
+	// ... the NarrativeModeEnum and ResponseEncoding declarations removed above are
+	// re-declared here unchanged, having moved down from the top of the class ...
+
+	public static class DeleteCascadeDetails {
+
+		private final DeleteCascadeModeEnum myMode;
+		private final Integer myMaxRounds;
+
+		public DeleteCascadeDetails(DeleteCascadeModeEnum theMode, Integer theMaxRounds) {
+			myMode = theMode;
+			myMaxRounds = theMaxRounds;
+		}
+
+		public DeleteCascadeModeEnum getMode() {
+			return myMode;
+		}
+
+		public Integer getMaxRounds() {
+			return myMaxRounds;
+		}
+	}
+
 }
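The parser accepts the cascade instruction either as request parameters (_cascade, _maxRounds) or via the X-Cascade header, where an optional max-rounds argument follows a semicolon. A sketch of the two equivalent request shapes and how a caller reads the parsed result (the resource and values here are illustrative):

    // Both of these parse to mode=DELETE, maxRounds=5:
    //   DELETE [base]/Patient/123?_cascade=delete&_maxRounds=5
    //   DELETE [base]/Patient/123   with header   X-Cascade: delete; max-rounds=5
    RestfulServerUtils.DeleteCascadeDetails cascade =
        RestfulServerUtils.extractDeleteCascadeParameter(theRequestDetails);
    if (cascade.getMode() == DeleteCascadeModeEnum.DELETE) {
        Integer maxRounds = cascade.getMaxRounds(); // null when the client set no cap
    }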
@ -251,7 +251,7 @@ public class OperationParameter implements IParameter {
 	@SuppressWarnings("unchecked")
 	@Override
 	public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException {
-		List<Object> matchingParamValues = new ArrayList<Object>();
+		List<Object> matchingParamValues = new ArrayList<>();

 		OperationMethodBinding method = (OperationMethodBinding) theMethodBinding;
@ -161,6 +161,14 @@ public class ProviderConstants
 	 * Number of resources to delete at a time for the $delete-expunge operation
 	 */
 	public static final String OPERATION_DELETE_BATCH_SIZE = "batchSize";
+
+	/**
+	 * Should the $delete-expunge operation cascade to resources that reference the resources being deleted
+	 */
+	public static final String OPERATION_DELETE_CASCADE = "cascade";
+
+	/**
+	 * Maximum number of cascade rounds for the $delete-expunge operation
+	 */
+	public static final String OPERATION_DELETE_CASCADE_MAX_ROUNDS = "cascadeMaxRounds";

 	/**
 	 * The Spring Batch job id of the delete expunge job created by a $delete-expunge operation
@ -1,10 +1,15 @@
 package ca.uhn.fhir.rest.server;

-import ca.uhn.fhir.rest.api.PreferHandlingEnum;
-import ca.uhn.fhir.rest.api.PreferHeader;
-import ca.uhn.fhir.rest.api.PreferReturnEnum;
+import ca.uhn.fhir.rest.api.*;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;

 import java.util.Arrays;
 import java.util.HashMap;

@ -12,15 +17,19 @@ import java.util.List;
 import java.util.Map;

 import static ca.uhn.fhir.rest.api.RequestTypeEnum.GET;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.containsString;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.mockito.Mockito.when;

+@ExtendWith(MockitoExtension.class)
 public class RestfulServerUtilsTest {

+	@Mock
+	private RequestDetails myRequestDetails;
+
 	@Test
 	public void testParsePreferReturn() {
 		PreferHeader header = RestfulServerUtils.parsePreferHeader(null, "return=representation");

@ -66,6 +75,49 @@ public class RestfulServerUtilsTest
 		assertEquals(PreferHandlingEnum.LENIENT, header.getHanding());
 	}

+	@ParameterizedTest
+	@CsvSource({
+		"       ,     ,                        , NONE   ,",
+		"foo    ,     ,                        , NONE   ,",
+		"delete ,     ,                        , DELETE ,",
+		"delete , 10  ,                        , DELETE , 10",
+		"delete , abc ,                        , DELETE , -1", // -1 means an InvalidRequestException is expected
+		"       ,     , delete                 , DELETE ,",
+		"       ,     , delete;                , DELETE ,",
+		"       ,     , delete; max-rounds=    , DELETE ,",
+		"       ,     , delete; max-rounds     , DELETE ,",
+		"       ,     , delete; max-rounds=10  , DELETE , 10",
+		"       ,     , delete; max-rounds=10  , DELETE , 10",
+	})
+	public void testParseCascade(String theCascadeParam, String theCascadeMaxRoundsParam, String theCascadeHeader, DeleteCascadeModeEnum theExpectedMode, Integer theExpectedMaxRounds) {
+		HashMap<String, String[]> params = new HashMap<>();
+		when(myRequestDetails.getParameters()).thenReturn(params);
+
+		if (isNotBlank(theCascadeParam)) {
+			params.put(Constants.PARAMETER_CASCADE_DELETE, new String[]{theCascadeParam.trim()});
+		}
+		if (isNotBlank(theCascadeMaxRoundsParam)) {
+			params.put(Constants.PARAMETER_CASCADE_DELETE_MAX_ROUNDS, new String[]{theCascadeMaxRoundsParam.trim()});
+		}
+
+		if (isNotBlank(theCascadeHeader)) {
+			when(myRequestDetails.getHeader(Constants.HEADER_CASCADE)).thenReturn(theCascadeHeader);
+		}
+
+		if (theExpectedMaxRounds != null && theExpectedMaxRounds == -1) {
+			try {
+				RestfulServerUtils.extractDeleteCascadeParameter(myRequestDetails);
+				fail();
+			} catch (InvalidRequestException e) {
+				// expected
+			}
+		} else {
+			RestfulServerUtils.DeleteCascadeDetails outcome = RestfulServerUtils.extractDeleteCascadeParameter(myRequestDetails);
+			assertEquals(theExpectedMode, outcome.getMode());
+			assertEquals(theExpectedMaxRounds, outcome.getMaxRounds());
+		}
+	}
+
 	@Test
 	public void testCreateSelfLinks() {
@ -135,9 +135,6 @@
 			<plugin>
 				<groupId>org.apache.maven.plugins</groupId>
 				<artifactId>maven-failsafe-plugin</artifactId>
-				<configuration>
-					<redirectTestOutputToFile>true</redirectTestOutputToFile>
-				</configuration>
 				<executions>
 					<execution>
 						<goals>
@ -27,6 +27,7 @@ import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
 import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
 import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher;
 import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import org.apache.commons.lang3.StringUtils;

@ -54,7 +55,7 @@ public class BulkExportJobParametersValidator implements IJobParametersValidator

 	@Nullable
 	@Override
-	public List<String> validate(@Nonnull BulkExportJobParameters theParameters) {
+	public List<String> validate(RequestDetails theRequestDetails, @Nonnull BulkExportJobParameters theParameters) {
 		List<String> errorMsgs = new ArrayList<>();

 		// initial validation
@ -30,6 +30,7 @@ import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
 import ca.uhn.fhir.jpa.api.svc.IDeleteExpungeSvc;
 import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
 import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
 import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
 import ca.uhn.fhir.rest.server.provider.ProviderConstants;
 import org.springframework.context.annotation.Bean;

@ -46,14 +47,15 @@ public class DeleteExpungeAppCtx
 		IBatch2DaoSvc theBatch2DaoSvc,
 		HapiTransactionService theHapiTransactionService,
 		IDeleteExpungeSvc theDeleteExpungeSvc,
-		IIdHelperService theIdHelperService) {
+		IIdHelperService theIdHelperService,
+		IRequestPartitionHelperSvc theRequestPartitionHelperSvc) {
 		return JobDefinition
 			.newBuilder()
 			.setJobDefinitionId(JOB_DELETE_EXPUNGE)
 			.setJobDescription("Expunge resources")
 			.setJobDefinitionVersion(1)
 			.setParametersType(DeleteExpungeJobParameters.class)
-			.setParametersValidator(expungeJobParametersValidator(theBatch2DaoSvc))
+			.setParametersValidator(expungeJobParametersValidator(theBatch2DaoSvc, theDeleteExpungeSvc, theRequestPartitionHelperSvc))
 			.gatedExecution()
 			.addFirstStep(
 				"generate-ranges",

@ -73,8 +75,8 @@ public class DeleteExpungeAppCtx
 	}

 	@Bean
-	public DeleteExpungeJobParametersValidator expungeJobParametersValidator(IBatch2DaoSvc theBatch2DaoSvc) {
-		return new DeleteExpungeJobParametersValidator(new UrlListValidator(ProviderConstants.OPERATION_EXPUNGE, theBatch2DaoSvc));
+	public DeleteExpungeJobParametersValidator expungeJobParametersValidator(IBatch2DaoSvc theBatch2DaoSvc, IDeleteExpungeSvc theDeleteExpungeSvc, IRequestPartitionHelperSvc theRequestPartitionHelperSvc) {
+		return new DeleteExpungeJobParametersValidator(new UrlListValidator(ProviderConstants.OPERATION_EXPUNGE, theBatch2DaoSvc), theDeleteExpungeSvc, theRequestPartitionHelperSvc);
 	}

 	@Bean
@ -20,6 +20,34 @@
 package ca.uhn.fhir.batch2.jobs.expunge;

 import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrlListJobParameters;
+import com.fasterxml.jackson.annotation.JsonProperty;

 public class DeleteExpungeJobParameters extends PartitionedUrlListJobParameters {
+	@JsonProperty("cascade")
+	private boolean myCascade;
+	@JsonProperty("cascadeMaxRounds")
+	private Integer myCascadeMaxRounds;
+
+	/**
+	 * Constructor
+	 */
+	public DeleteExpungeJobParameters() {
+		super();
+	}
+
+	public Integer getCascadeMaxRounds() {
+		return myCascadeMaxRounds;
+	}
+
+	public void setCascadeMaxRounds(Integer theCascadeMaxRounds) {
+		myCascadeMaxRounds = theCascadeMaxRounds;
+	}
+
+	public boolean isCascade() {
+		return myCascade;
+	}
+
+	public void setCascade(boolean theCascade) {
+		myCascade = theCascade;
+	}
 }
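The two new fields serialize alongside the inherited URL and partition parameters. A construction sketch, assuming the addUrl() helper inherited from PartitionedUrlListJobParameters (the URL is illustrative):

    // Jackson writes these as "cascade" and "cascadeMaxRounds" in the stored job parameters
    DeleteExpungeJobParameters parameters = new DeleteExpungeJobParameters();
    parameters.addUrl("Observation?status=cancelled"); // inherited helper
    parameters.setCascade(true);
    parameters.setCascadeMaxRounds(3);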
@ -20,22 +20,47 @@
 package ca.uhn.fhir.batch2.jobs.expunge;

 import ca.uhn.fhir.batch2.api.IJobParametersValidator;
-import ca.uhn.fhir.batch2.jobs.parameters.UrlListValidator;
+import ca.uhn.fhir.batch2.jobs.parameters.IUrlListValidator;
+import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrl;
+import ca.uhn.fhir.jpa.api.svc.IDeleteExpungeSvc;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.util.ValidateUtil;

 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 import java.util.List;

 public class DeleteExpungeJobParametersValidator implements IJobParametersValidator<DeleteExpungeJobParameters> {
-	private final UrlListValidator myUrlListValidator;
+	private final IUrlListValidator myUrlListValidator;
+	private final IDeleteExpungeSvc<?> myDeleteExpungeSvc;
+	private final IRequestPartitionHelperSvc myRequestPartitionHelperSvc;

-	public DeleteExpungeJobParametersValidator(UrlListValidator theUrlListValidator) {
+	public DeleteExpungeJobParametersValidator(IUrlListValidator theUrlListValidator, IDeleteExpungeSvc<?> theDeleteExpungeSvc, IRequestPartitionHelperSvc theRequestPartitionHelperSvc) {
 		myUrlListValidator = theUrlListValidator;
+		myDeleteExpungeSvc = theDeleteExpungeSvc;
+		myRequestPartitionHelperSvc = theRequestPartitionHelperSvc;
 	}

 	@Nullable
 	@Override
-	public List<String> validate(@Nonnull DeleteExpungeJobParameters theParameters) {
+	public List<String> validate(RequestDetails theRequestDetails, @Nonnull DeleteExpungeJobParameters theParameters) {
+
+		// Make sure cascade is supported if requested
+		if (theParameters.isCascade() && !myDeleteExpungeSvc.isCascadeSupported()) {
+			return List.of("Cascading delete is not supported on this server");
+		}
+
+		// Verify that the user has access to all requested partitions
+		myRequestPartitionHelperSvc.validateHasPartitionPermissions(theRequestDetails, null, theParameters.getRequestPartitionId());
+		for (PartitionedUrl partitionedUrl : theParameters.getPartitionedUrls()) {
+			String url = partitionedUrl.getUrl();
+			ValidateUtil.isTrueOrThrowInvalidRequest(url.matches("[a-zA-Z]+\\?.*"), "Delete expunge URLs must be in the format [resourceType]?[parameters]");
+			if (partitionedUrl.getRequestPartitionId() != null) {
+				myRequestPartitionHelperSvc.validateHasPartitionPermissions(theRequestDetails, null, partitionedUrl.getRequestPartitionId());
+			}
+		}
 		return myUrlListValidator.validatePartitionedUrls(theParameters.getPartitionedUrls());
 	}
 }
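Most rejections surface as returned error strings rather than exceptions (malformed URLs are the exception). A sketch of the cascade check, assuming a backend whose isCascadeSupported() returns false:

    // Sketch only: validation short-circuits before the job is ever started
    DeleteExpungeJobParameters parameters = new DeleteExpungeJobParameters();
    parameters.setCascade(true);
    List<String> errors = validator.validate(new SystemRequestDetails(), parameters);
    // errors contains "Cascading delete is not supported on this server"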
@ -66,7 +66,7 @@ public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter

 	@Override
 	@Transactional(propagation = Propagation.NEVER)
-	public String submitJob(Integer theBatchSize, List<String> theUrlsToDeleteExpunge, RequestDetails theRequestDetails) {
+	public String submitJob(Integer theBatchSize, List<String> theUrlsToDeleteExpunge, boolean theCascade, Integer theCascadeMaxRounds, RequestDetails theRequestDetails) {
 		if (theBatchSize == null) {
 			theBatchSize = myStorageSettings.getExpungeBatchSize();
 		}

@ -94,11 +94,13 @@ public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter
 		// Also set toplevel partition in case there are no urls
 		RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details);
 		deleteExpungeJobParameters.setRequestPartitionId(requestPartition);
+		deleteExpungeJobParameters.setCascade(theCascade);
+		deleteExpungeJobParameters.setCascadeMaxRounds(theCascadeMaxRounds);

 		JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
 		startRequest.setJobDefinitionId(JOB_DELETE_EXPUNGE);
 		startRequest.setParameters(deleteExpungeJobParameters);
-		Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(startRequest);
+		Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(theRequestDetails, startRequest);
 		return startResponse.getInstanceId();
 	}
 }
@ -48,7 +48,9 @@ public class DeleteExpungeProvider
 	@Operation(name = ProviderConstants.OPERATION_DELETE_EXPUNGE, idempotent = false)
 	public IBaseParameters deleteExpunge(
 		@OperationParam(name = ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, typeName = "string", min = 1) List<IPrimitiveType<String>> theUrlsToDeleteExpunge,
-		@OperationParam(name = ProviderConstants.OPERATION_DELETE_BATCH_SIZE, typeName = "decimal", min = 0, max = 1) IPrimitiveType<BigDecimal> theBatchSize,
+		@OperationParam(name = ProviderConstants.OPERATION_DELETE_BATCH_SIZE, typeName = "integer", min = 0, max = 1) IPrimitiveType<Integer> theBatchSize,
+		@OperationParam(name = ProviderConstants.OPERATION_DELETE_CASCADE, typeName = "boolean", min = 0, max = 1) IPrimitiveType<Boolean> theCascade,
+		@OperationParam(name = ProviderConstants.OPERATION_DELETE_CASCADE_MAX_ROUNDS, typeName = "integer", min = 0, max = 1) IPrimitiveType<Integer> theCascadeMaxRounds,
 		RequestDetails theRequestDetails
 	) {
 		if (theUrlsToDeleteExpunge == null) {

@ -60,10 +62,21 @@ public class DeleteExpungeProvider
 			.collect(Collectors.toList());

 		Integer batchSize = null;
-		if (theBatchSize != null && theBatchSize.getValue() != null && theBatchSize.getValue().intValue() > 0) {
-			batchSize = theBatchSize.getValue().intValue();
+		if (theBatchSize != null && theBatchSize.getValue() != null && theBatchSize.getValue() > 0) {
+			batchSize = theBatchSize.getValue();
 		}
-		String jobId = myDeleteExpungeJobSubmitter.submitJob(batchSize, urls, theRequestDetails);
+
+		boolean cascade = false;
+		if (theCascade != null && theCascade.hasValue()) {
+			cascade = theCascade.getValue();
+		}
+
+		Integer cascadeMaxRounds = null;
+		if (theCascadeMaxRounds != null) {
+			cascadeMaxRounds = theCascadeMaxRounds.getValue();
+		}
+
+		String jobId = myDeleteExpungeJobSubmitter.submitJob(batchSize, urls, cascade, cascadeMaxRounds, theRequestDetails);

 		IBaseParameters retval = ParametersUtil.newInstance(myFhirContext);
 		ParametersUtil.addParameterToParametersString(myFhirContext, retval, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, jobId);
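From a client's point of view the new operation parameters travel in the Parameters body of $delete-expunge. A sketch using the HAPI generic client (the server URL and search expression are illustrative):

    FhirContext ctx = FhirContext.forR4();
    IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8000/fhir");

    Parameters input = new Parameters();
    input.addParameter().setName(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL).setValue(new StringType("Patient?active=false"));
    input.addParameter().setName(ProviderConstants.OPERATION_DELETE_CASCADE).setValue(new BooleanType(true));
    input.addParameter().setName(ProviderConstants.OPERATION_DELETE_CASCADE_MAX_ROUNDS).setValue(new IntegerType(4));

    Parameters output = client
        .operation()
        .onServer()
        .named(ProviderConstants.OPERATION_DELETE_EXPUNGE)
        .withParameters(input)
        .execute();
    // the response carries the Batch2 job id under OPERATION_BATCH_RESPONSE_JOB_ID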
@ -19,20 +19,14 @@
 */
 package ca.uhn.fhir.batch2.jobs.expunge;

-import ca.uhn.fhir.batch2.api.IJobDataSink;
-import ca.uhn.fhir.batch2.api.IJobStepWorker;
-import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
-import ca.uhn.fhir.batch2.api.RunOutcome;
-import ca.uhn.fhir.batch2.api.StepExecutionDetails;
-import ca.uhn.fhir.batch2.api.VoidModel;
+import ca.uhn.fhir.batch2.api.*;
 import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson;
-import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters;
 import ca.uhn.fhir.jpa.api.svc.IDeleteExpungeSvc;
 import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
 import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
 import ca.uhn.fhir.jpa.model.dao.JpaPid;
-import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@ -42,7 +36,7 @@ import org.springframework.transaction.support.TransactionCallback;
 import javax.annotation.Nonnull;
 import java.util.List;

-public class DeleteExpungeStep implements IJobStepWorker<ReindexJobParameters, ResourceIdListWorkChunkJson, VoidModel> {
+public class DeleteExpungeStep implements IJobStepWorker<DeleteExpungeJobParameters, ResourceIdListWorkChunkJson, VoidModel> {

 	private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeStep.class);
 	private final HapiTransactionService myHapiTransactionService;

@ -57,20 +51,27 @@ public class DeleteExpungeStep

 	@Nonnull
 	@Override
-	public RunOutcome run(@Nonnull StepExecutionDetails<ReindexJobParameters, ResourceIdListWorkChunkJson> theStepExecutionDetails, @Nonnull IJobDataSink<VoidModel> theDataSink) throws JobExecutionFailedException {
+	public RunOutcome run(@Nonnull StepExecutionDetails<DeleteExpungeJobParameters, ResourceIdListWorkChunkJson> theStepExecutionDetails, @Nonnull IJobDataSink<VoidModel> theDataSink) throws JobExecutionFailedException {

 		ResourceIdListWorkChunkJson data = theStepExecutionDetails.getData();

-		return doDeleteExpunge(data, theDataSink, theStepExecutionDetails.getInstance().getInstanceId(), theStepExecutionDetails.getChunkId());
+		boolean cascade = theStepExecutionDetails.getParameters().isCascade();
+		Integer cascadeMaxRounds = theStepExecutionDetails.getParameters().getCascadeMaxRounds();
+		return doDeleteExpunge(data, theDataSink, theStepExecutionDetails.getInstance().getInstanceId(), theStepExecutionDetails.getChunkId(), cascade, cascadeMaxRounds);
 	}

 	@Nonnull
-	public RunOutcome doDeleteExpunge(ResourceIdListWorkChunkJson data, IJobDataSink<VoidModel> theDataSink, String theInstanceId, String theChunkId) {
+	public RunOutcome doDeleteExpunge(ResourceIdListWorkChunkJson theData, IJobDataSink<VoidModel> theDataSink, String theInstanceId, String theChunkId, boolean theCascade, Integer theCascadeMaxRounds) {
 		RequestDetails requestDetails = new SystemRequestDetails();
 		TransactionDetails transactionDetails = new TransactionDetails();
-		myHapiTransactionService.execute(requestDetails, transactionDetails, new DeleteExpungeJob(data, requestDetails, transactionDetails, theDataSink, theInstanceId, theChunkId));
+		DeleteExpungeJob job = new DeleteExpungeJob(theData, requestDetails, transactionDetails, theDataSink, theInstanceId, theChunkId, theCascade, theCascadeMaxRounds);
+		myHapiTransactionService
+			.withRequest(requestDetails)
+			.withTransactionDetails(transactionDetails)
+			.withRequestPartitionId(theData.getRequestPartitionId())
+			.execute(job);

-		return new RunOutcome(data.size());
+		return new RunOutcome(job.getRecordCount());
 	}

 	private class DeleteExpungeJob implements TransactionCallback<Void> {

@ -80,14 +81,23 @@ public class DeleteExpungeStep
 		private final IJobDataSink<VoidModel> myDataSink;
 		private final String myChunkId;
 		private final String myInstanceId;
+		private final boolean myCascade;
+		private final Integer myCascadeMaxRounds;
+		private int myRecordCount;

-		public DeleteExpungeJob(ResourceIdListWorkChunkJson theData, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, IJobDataSink<VoidModel> theDataSink, String theInstanceId, String theChunkId) {
+		public DeleteExpungeJob(ResourceIdListWorkChunkJson theData, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, IJobDataSink<VoidModel> theDataSink, String theInstanceId, String theChunkId, boolean theCascade, Integer theCascadeMaxRounds) {
 			myData = theData;
 			myRequestDetails = theRequestDetails;
 			myTransactionDetails = theTransactionDetails;
 			myDataSink = theDataSink;
 			myInstanceId = theInstanceId;
 			myChunkId = theChunkId;
+			myCascade = theCascade;
+			myCascadeMaxRounds = theCascadeMaxRounds;
+		}
+
+		public int getRecordCount() {
+			return myRecordCount;
 		}

 		@Override

@ -100,15 +110,13 @@ public class DeleteExpungeStep
 				return null;
 			}

 			ourLog.info("Starting delete expunge work chunk with {} resources - Instance[{}] Chunk[{}]", persistentIds.size(), myInstanceId, myChunkId);

-			myDeleteExpungeSvc.deleteExpunge(persistentIds);
+			myRecordCount = myDeleteExpungeSvc.deleteExpunge(persistentIds, myCascade, myCascadeMaxRounds);

 			return null;
 		}
 	}
 }
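Conceptually, a cascade round deletes the current batch, collects the resources whose indexed references blocked those deletes, and repeats until nothing new turns up or the round cap is reached. A conceptual sketch only, with hypothetical helpers; the real logic lives behind IDeleteExpungeSvc.deleteExpunge(ids, cascade, maxRounds):

    // Hypothetical helpers deleteBatch() and findReferencingResources() stand in
    // for the JPA internals; a null cap means "no explicit limit" on extra rounds.
    int deleted = deleteBatch(persistentIds);
    Set<JpaPid> blockers = findReferencingResources(persistentIds);
    for (int round = 0; !blockers.isEmpty()
            && (theCascadeMaxRounds == null || round < theCascadeMaxRounds); round++) {
        deleted += deleteBatch(blockers);
        blockers = findReferencingResources(blockers);
    }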
@ -23,6 +23,7 @@ import ca.uhn.fhir.batch2.api.IJobParametersValidator;
 import ca.uhn.fhir.batch2.importpull.models.Batch2BulkImportPullJobParameters;
 import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
 import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;

@ -44,7 +45,7 @@ public class BulkImportParameterValidator implements IJobParametersValidator<Bat

 	@Nullable
 	@Override
-	public List<String> validate(@Nonnull Batch2BulkImportPullJobParameters theParameters) {
+	public List<String> validate(RequestDetails theRequestDetails, @Nonnull Batch2BulkImportPullJobParameters theParameters) {
 		ourLog.info("BulkImportPull parameter validation begin");

 		ArrayList<String> errors = new ArrayList<>();
@ -179,7 +179,7 @@ public class BulkDataImportProvider

 		ourLog.info("Requesting Bulk Import Job ($import by Manifest) with {} urls", typeAndUrls.size());

-		Batch2JobStartResponse jobStartResponse = myJobCoordinator.startInstance(request);
+		Batch2JobStartResponse jobStartResponse = myJobCoordinator.startInstance(theRequestDetails, request);
 		String jobId = jobStartResponse.getInstanceId();

 		IBaseOperationOutcome response = OperationOutcomeUtil.newInstance(myFhirCtx);
@ -22,6 +22,7 @@ package ca.uhn.fhir.batch2.jobs.reindex;
 import ca.uhn.fhir.batch2.api.IJobParametersValidator;
 import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrl;
 import ca.uhn.fhir.batch2.jobs.parameters.UrlListValidator;
+import ca.uhn.fhir.rest.api.server.RequestDetails;

 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;

@ -38,7 +39,7 @@ public class ReindexJobParametersValidator implements IJobParametersValidator<Re

 	@Nullable
 	@Override
-	public List<String> validate(@Nonnull ReindexJobParameters theParameters) {
+	public List<String> validate(RequestDetails theRequestDetails, @Nonnull ReindexJobParameters theParameters) {
 		List<String> errors = myUrlListValidator.validatePartitionedUrls(theParameters.getPartitionedUrls());

 		if (errors == null || errors.isEmpty()) {
@@ -118,7 +118,7 @@ public class ReindexProvider {
 JobInstanceStartRequest request = new JobInstanceStartRequest();
 request.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);
 request.setParameters(params);
-Batch2JobStartResponse response = myJobCoordinator.startInstance(request);
+Batch2JobStartResponse response = myJobCoordinator.startInstance(theRequestDetails, request);

 IBaseParameters retVal = ParametersUtil.newInstance(myFhirContext);
 ParametersUtil.addParameterToParametersString(myFhirContext, retVal, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, response.getInstanceId());
@@ -33,6 +33,7 @@ import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
 import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
 import ca.uhn.fhir.jpa.batch.models.Batch2BaseJobParameters;
 import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
 import org.slf4j.Logger;

@@ -54,11 +55,11 @@ public class Batch2JobRunnerImpl implements IBatch2JobRunner {
 }

 @Override
-public Batch2JobStartResponse startNewJob(Batch2BaseJobParameters theParameters) {
+public Batch2JobStartResponse startNewJob(RequestDetails theRequestDetails, Batch2BaseJobParameters theParameters) {
 switch (theParameters.getJobDefinitionId()) {
 case Batch2JobDefinitionConstants.BULK_EXPORT:
 if (theParameters instanceof BulkExportParameters) {
-return startBatch2BulkExportJob((BulkExportParameters) theParameters);
+return startBatch2BulkExportJob(theRequestDetails, (BulkExportParameters) theParameters);
 }
 else {
 ourLog.error("Invalid parameters for " + Batch2JobDefinitionConstants.BULK_EXPORT);

@@ -119,11 +120,11 @@ public class Batch2JobRunnerImpl implements IBatch2JobRunner {
 return info;
 }

-private Batch2JobStartResponse startBatch2BulkExportJob(BulkExportParameters theParameters) {
+private Batch2JobStartResponse startBatch2BulkExportJob(RequestDetails theRequestDetails, BulkExportParameters theParameters) {
 JobInstanceStartRequest request = createStartRequest(theParameters);
 BulkExportJobParameters parameters = BulkExportJobParameters.createFromExportJobParameters(theParameters);
 request.setParameters(parameters);
-return myJobCoordinator.startInstance(request);
+return myJobCoordinator.startInstance(theRequestDetails, request);
 }

 private JobInstanceStartRequest createStartRequest(Batch2BaseJobParameters theParameters) {
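Callers of IBatch2JobRunner now supply the RequestDetails alongside the parameters; code running outside an HTTP request can pass a SystemRequestDetails, as the updated tests later in this diff do. A sketch (jobRunner and theParameters are assumed names for an injected runner and a prepared parameters object):

    // Sketch: system-initiated job start; theParameters would typically be a
    // BulkExportParameters instance carrying the export options.
    Batch2JobStartResponse response = jobRunner.startNewJob(new SystemRequestDetails(), theParameters);
    String instanceId = response.getInstanceId();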
@@ -21,6 +21,7 @@ package ca.uhn.fhir.batch2.jobs.termcodesystem.codesystemdelete;

 import ca.uhn.fhir.batch2.api.IJobParametersValidator;
 import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteJobParameters;
+import ca.uhn.fhir.rest.api.server.RequestDetails;

 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;

@@ -31,7 +32,7 @@ public class TermCodeSystemDeleteJobParametersValidator implements IJobParameter
 @Nullable
 @Override
-public List<String> validate(@Nonnull TermCodeSystemDeleteJobParameters theParameters) {
+public List<String> validate(RequestDetails theRequestDetails, @Nonnull TermCodeSystemDeleteJobParameters theParameters) {
 List<String> errors = new ArrayList<>();
 if (theParameters.getTermPid() <= 0) {
 errors.add("Invalid Term Code System PID " + theParameters.getTermPid());
@@ -21,6 +21,7 @@ package ca.uhn.fhir.batch2.jobs.termcodesystem.codesystemversiondelete;

 import ca.uhn.fhir.batch2.api.IJobParametersValidator;
 import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteVersionJobParameters;
+import ca.uhn.fhir.rest.api.server.RequestDetails;

 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;

@@ -31,7 +32,7 @@ public class DeleteCodeSystemVersionParameterValidator implements IJobParameters
 @Nullable
 @Override
-public List<String> validate(@Nonnull TermCodeSystemDeleteVersionJobParameters theParameters) {
+public List<String> validate(RequestDetails theRequestDetails, @Nonnull TermCodeSystemDeleteVersionJobParameters theParameters) {
 ArrayList<String> errors = new ArrayList<>();
 long versionPID = theParameters.getCodeSystemVersionPid();
@@ -53,7 +53,7 @@ public class BulkExportJobParametersValidatorTest {
 .thenReturn(true);

 // test
-List<String> result = myValidator.validate(parameters);
+List<String> result = myValidator.validate(null, parameters);

 // verify
 assertNotNull(result);

@@ -69,7 +69,7 @@ public class BulkExportJobParametersValidatorTest {
 when(myDaoRegistry.isResourceTypeSupported(anyString()))
 .thenReturn(true);
 when(myIBinaryStorageSvc.isValidBlobId(any())).thenReturn(false);
-List<String> errors = myValidator.validate(parameters);
+List<String> errors = myValidator.validate(null, parameters);

 // verify
 assertNotNull(errors);

@@ -86,7 +86,7 @@ public class BulkExportJobParametersValidatorTest {
 .thenReturn(true);

 when(myIBinaryStorageSvc.isValidBlobId(any())).thenReturn(true);
-List<String> errors = myValidator.validate(parameters);
+List<String> errors = myValidator.validate(null, parameters);

 // verify
 assertNotNull(errors);

@@ -103,7 +103,7 @@ public class BulkExportJobParametersValidatorTest {
 .thenReturn(true);

 // test
-List<String> result = myValidator.validate(parameters);
+List<String> result = myValidator.validate(null, parameters);

 // verify
 assertNotNull(result);

@@ -119,7 +119,7 @@ public class BulkExportJobParametersValidatorTest {
 parameters.setResourceTypes(Collections.singletonList(resourceType));

 // test
-List<String> result = myValidator.validate(parameters);
+List<String> result = myValidator.validate(null, parameters);

 // verify
 assertNotNull(result);

@@ -140,7 +140,7 @@ public class BulkExportJobParametersValidatorTest {
 .thenReturn(true);

 // test
-List<String> result = myValidator.validate(parameters);
+List<String> result = myValidator.validate(null, parameters);

 // verify
 assertNotNull(result);

@@ -154,7 +154,7 @@ public class BulkExportJobParametersValidatorTest {
 parameters.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);

 // test
-List<String> result = myValidator.validate(parameters);
+List<String> result = myValidator.validate(null, parameters);

 // verify
 assertNotNull(result);

@@ -169,7 +169,7 @@ public class BulkExportJobParametersValidatorTest {
 parameters.setResourceTypes(null);

 // test
-List<String> results = myValidator.validate(parameters);
+List<String> results = myValidator.validate(null, parameters);

 // verify
 assertNotNull(results);

@@ -185,7 +185,7 @@ public class BulkExportJobParametersValidatorTest {
 parameters.setOutputFormat(Constants.CT_FHIR_NDJSON);

 // test
-List<String> errors = myValidator.validate(parameters);
+List<String> errors = myValidator.validate(null, parameters);

 // validate
 assertNotNull(errors);

@@ -201,7 +201,7 @@ public class BulkExportJobParametersValidatorTest {
 parameters.setOutputFormat("json");

 // test
-List<String> errors = myValidator.validate(parameters);
+List<String> errors = myValidator.validate(null, parameters);

 // validate
 assertNotNull(errors);
@@ -0,0 +1,64 @@
+package ca.uhn.fhir.batch2.jobs.expunge;
+
+import ca.uhn.fhir.batch2.jobs.parameters.IUrlListValidator;
+import ca.uhn.fhir.jpa.api.svc.IDeleteExpungeSvc;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import java.util.List;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.empty;
+import static org.mockito.Mockito.when;
+
+@ExtendWith(MockitoExtension.class)
+public class DeleteExpungeJobParametersValidatorTest {
+
+@Mock
+private IDeleteExpungeSvc<?> myDeleteExpungeSvc;
+@Mock
+private IUrlListValidator myUrlListValidator;
+@Mock
+private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
+@InjectMocks
+private DeleteExpungeJobParametersValidator mySvc;
+
+@Test
+public void testRejectCascadeIfNotSupported() {
+// Setup
+when(myDeleteExpungeSvc.isCascadeSupported()).thenReturn(false);
+
+DeleteExpungeJobParameters parameters = new DeleteExpungeJobParameters();
+parameters.addUrl("Patient?active=true");
+parameters.setCascade(true);
+
+// Test
+List<String> outcome = mySvc.validate(new SystemRequestDetails(), parameters);
+
+// Verify
+assertThat(outcome.toString(), outcome, contains("Cascading delete is not supported on this server"));
+}
+
+@Test
+public void testValidateSuccess() {
+// Setup
+when(myDeleteExpungeSvc.isCascadeSupported()).thenReturn(true);
+
+DeleteExpungeJobParameters parameters = new DeleteExpungeJobParameters();
+parameters.addUrl("Patient?active=true");
+parameters.setCascade(true);
+
+// Test
+List<String> outcome = mySvc.validate(new SystemRequestDetails(), parameters);
+
+// Verify
+assertThat(outcome, empty());
+}
+
+}
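The cascade settings ride on the job parameters object itself. A sketch of building parameters for a cascading delete-expunge (setCascade appears in the test above; setCascadeMaxRounds is an assumed companion setter mirroring the new _maxRounds request parameter):

    DeleteExpungeJobParameters parameters = new DeleteExpungeJobParameters();
    parameters.addUrl("Patient?active=true"); // match URL selecting the resources to expunge
    parameters.setCascade(true);              // also delete resources holding references to the matches
    parameters.setCascadeMaxRounds(5);        // assumed setter: bound on cascade iterations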
@@ -1,80 +1,112 @@
 package ca.uhn.fhir.batch2.jobs.expunge;

-import ca.uhn.fhir.batch2.jobs.BaseR4ServerTest;
-import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
 import ca.uhn.fhir.rest.server.provider.ProviderConstants;
+import ca.uhn.fhir.test.utilities.HttpClientExtension;
+import ca.uhn.fhir.test.utilities.server.RestfulServerExtension;
+import org.apache.commons.io.IOUtils;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpPost;
 import org.hl7.fhir.r4.hapi.rest.server.helper.BatchHelperR4;
+import org.hl7.fhir.r4.model.BooleanType;
 import org.hl7.fhir.r4.model.DecimalType;
+import org.hl7.fhir.r4.model.IntegerType;
 import org.hl7.fhir.r4.model.Parameters;
+import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.extension.RegisterExtension;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import java.io.IOException;
+import java.nio.charset.Charset;
 import java.util.List;

 import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
 import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.mockito.ArgumentMatchers.*;
+import static org.mockito.Mockito.*;

-public class DeleteExpungeProviderTest extends BaseR4ServerTest {
+@ExtendWith(MockitoExtension.class)
+public class DeleteExpungeProviderTest {
 public static final String TEST_JOB_ID = "test-job-id";
 private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeProviderTest.class);

-private Parameters myReturnParameters;
-private MyDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter = new MyDeleteExpungeJobSubmitter();
+private static final FhirContext ourCtx = FhirContext.forR4Cached();
+
+@RegisterExtension
+public static RestfulServerExtension myServer = new RestfulServerExtension(ourCtx);
+@RegisterExtension
+private final HttpClientExtension myClient = new HttpClientExtension();
+
+@Mock
+private IDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter;
+private DeleteExpungeProvider myProvider;

 @BeforeEach
-public void reset() {
-myReturnParameters = new Parameters();
-myReturnParameters.addParameter("success", true);
+public void beforeEach() {
+myProvider = new DeleteExpungeProvider(ourCtx, myDeleteExpungeJobSubmitter);
+myServer.registerProvider(myProvider);
+}
+
+@AfterEach
+public void afterEach() {
+myServer.unregisterProvider(myProvider);
 }

 @Test
-public void testDeleteExpunge() throws Exception {
+public void testSupplyingNoUrlsProvidesValidErrorMessage() throws IOException {
+HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + ProviderConstants.OPERATION_DELETE_EXPUNGE);
+try(CloseableHttpResponse execute = myClient.execute(post)) {
+String body = IOUtils.toString(execute.getEntity().getContent(), Charset.defaultCharset());
+assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(400)));
+assertThat(body, is(containsString("At least one `url` parameter to $delete-expunge must be provided.")));
+}
+}
+
+@Test
+public void testDeleteExpunge() {
 // setup
 Parameters input = new Parameters();
 String url1 = "Observation?status=active";
 String url2 = "Patient?active=false";
-Integer batchSize = 2401;
+int batchSize = 2401;
 input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, url1);
 input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, url2);
-input.addParameter(ProviderConstants.OPERATION_DELETE_BATCH_SIZE, new DecimalType(batchSize));
+input.addParameter(ProviderConstants.OPERATION_DELETE_CASCADE, new BooleanType(true));
+input.addParameter(ProviderConstants.OPERATION_DELETE_CASCADE_MAX_ROUNDS, new IntegerType(44));
+input.addParameter(ProviderConstants.OPERATION_DELETE_BATCH_SIZE, new IntegerType(batchSize));

-ourLog.debug(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input));
+when(myDeleteExpungeJobSubmitter.submitJob(any(), any(), anyBoolean(), any(), any())).thenReturn(TEST_JOB_ID);

-DeleteExpungeProvider provider = new DeleteExpungeProvider(myCtx, myDeleteExpungeJobSubmitter);
-startServer(provider);
-
-Parameters response = myClient
+// Test
+Parameters response = myServer
+.getFhirClient()
 .operation()
 .onServer()
 .named(ProviderConstants.OPERATION_DELETE_EXPUNGE)
 .withParameters(input)
 .execute();

-ourLog.debug(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response));
+// Verify
+ourLog.debug(ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response));
 assertEquals(TEST_JOB_ID, BatchHelperR4.jobIdFromBatch2Parameters(response));
-assertThat(myDeleteExpungeJobSubmitter.calledWithUrls, hasSize(2));
-assertEquals(url1, myDeleteExpungeJobSubmitter.calledWithUrls.get(0));
-assertEquals(url2, myDeleteExpungeJobSubmitter.calledWithUrls.get(1));
-assertEquals(batchSize, myDeleteExpungeJobSubmitter.calledWithBatchSize);
-assertNotNull(myDeleteExpungeJobSubmitter.calledWithRequestDetails);
+
+verify(myDeleteExpungeJobSubmitter, times(1)).submitJob(
+eq(2401),
+eq(List.of(url1, url2)),
+eq(true),
+eq(44),
+any()
+);
 }
-
-private class MyDeleteExpungeJobSubmitter implements IDeleteExpungeJobSubmitter {
-Integer calledWithBatchSize;
-List<String> calledWithUrls;
-RequestDetails calledWithRequestDetails;
-
-@Override
-public String submitJob(Integer theBatchSize, List<String> theUrlsToProcess, RequestDetails theRequest) {
-calledWithBatchSize = theBatchSize;
-calledWithUrls = theUrlsToProcess;
-calledWithRequestDetails = theRequest;
-return TEST_JOB_ID;
-}
-}
 }
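Against a live server the same operation can be driven with a generic client. A sketch, assuming a server at a placeholder base URL; all constants used here appear in the hunks above:

    // Sketch: invoking $delete-expunge with cascading enabled.
    FhirContext ctx = FhirContext.forR4Cached();
    IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir"); // assumed base URL
    Parameters input = new Parameters();
    input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "Patient?active=false");
    input.addParameter(ProviderConstants.OPERATION_DELETE_CASCADE, new BooleanType(true));
    input.addParameter(ProviderConstants.OPERATION_DELETE_CASCADE_MAX_ROUNDS, new IntegerType(44));
    Parameters response = client.operation()
        .onServer()
        .named(ProviderConstants.OPERATION_DELETE_EXPUNGE)
        .withParameters(input)
        .execute();
    String jobId = BatchHelperR4.jobIdFromBatch2Parameters(response);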
@@ -64,6 +64,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.ArgumentMatchers.isNotNull;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;

@@ -118,7 +119,7 @@ public class BulkDataImportProviderTest {
 String jobId = UUID.randomUUID().toString();
 Batch2JobStartResponse startResponse = new Batch2JobStartResponse();
 startResponse.setInstanceId(jobId);
-when(myJobCoordinator.startInstance(any()))
+when(myJobCoordinator.startInstance(isNotNull(), any()))
 .thenReturn(startResponse);

 String requestUrl;

@@ -149,7 +150,7 @@ public class BulkDataImportProviderTest {
 assertEquals("Use the following URL to poll for job status: " + requestUrl + "$import-poll-status?_jobId=" + jobId, oo.getIssue().get(1).getDiagnostics());
 }

-verify(myJobCoordinator, times(1)).startInstance(myStartRequestCaptor.capture());
+verify(myJobCoordinator, times(1)).startInstance(isNotNull(), myStartRequestCaptor.capture());

 JobInstanceStartRequest startRequest = myStartRequestCaptor.getValue();
 ourLog.info("Parameters: {}", startRequest.getParameters());

@@ -407,6 +408,7 @@ public class BulkDataImportProviderTest {
 }
 }

+@Override
 public void validateHasPartitionPermissions(RequestDetails theRequest, String theResourceType, RequestPartitionId theRequestPartitionId) {
 if (!myPartitionName.equals(theRequest.getTenantId()) && theRequest.getTenantId() != null) {
 throw new ForbiddenOperationException("User does not have access to resources on the requested partition");
@@ -1,14 +1,12 @@
 package ca.uhn.fhir.batch2.jobs.reindex;

 import ca.uhn.fhir.batch2.jobs.parameters.UrlListValidator;
-import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.ValueSource;
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.junit.jupiter.MockitoExtension;
-import org.springframework.beans.factory.annotation.Value;

 import java.util.List;

@@ -40,7 +38,7 @@ public class ReindexJobParametersValidatorTest {
 parameters.addUrl(theUrl);

 // test
-List<String> errors = myValidator.validate(parameters);
+List<String> errors = myValidator.validate(null, parameters);

 return errors;
 }
@@ -38,6 +38,7 @@ import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.ArgumentMatchers.isNotNull;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;

@@ -71,7 +72,7 @@ public class ReindexProviderTest {
 public void beforeEach() {
 myServerExtension.registerProvider(mySvc);

-when(myJobCoordinator.startInstance(any()))
+when(myJobCoordinator.startInstance(isNotNull(), any()))
 .thenReturn(createJobStartResponse());
 when(myRequestPartitionHelperSvc.determineReadPartitionForRequest(any(), any())).thenReturn(RequestPartitionId.allPartitions());
 }

@@ -119,7 +120,7 @@ public class ReindexProviderTest {
 StringType jobId = (StringType) response.getParameterValue(ProviderConstants.OPERATION_REINDEX_RESPONSE_JOB_ID);
 assertEquals(TEST_JOB_ID, jobId.getValue());

-verify(myJobCoordinator, times(1)).startInstance(myStartRequestCaptor.capture());
+verify(myJobCoordinator, times(1)).startInstance(isNotNull(), myStartRequestCaptor.capture());
 ReindexJobParameters params = myStartRequestCaptor.getValue().getParameters(ReindexJobParameters.class);
 assertThat(params.getPartitionedUrls(), hasSize(1));
 assertEquals(url, params.getPartitionedUrls().get(0).getUrl());

@@ -155,7 +156,7 @@ public class ReindexProviderTest {
 StringType jobId = (StringType) response.getParameterValue(ProviderConstants.OPERATION_REINDEX_RESPONSE_JOB_ID);
 assertEquals(TEST_JOB_ID, jobId.getValue());

-verify(myJobCoordinator, times(1)).startInstance(myStartRequestCaptor.capture());
+verify(myJobCoordinator, times(1)).startInstance(isNotNull(), myStartRequestCaptor.capture());
 ReindexJobParameters params = myStartRequestCaptor.getValue().getParameters(ReindexJobParameters.class);
 assertThat(params.getPartitionedUrls(), empty());
 // Non-default values
@@ -11,6 +11,7 @@ import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
 import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
 import ca.uhn.fhir.jpa.batch.models.Batch2BaseJobParameters;
 import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
 import ch.qos.logback.classic.Level;
 import ch.qos.logback.classic.Logger;

@@ -35,6 +36,7 @@ import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.ArgumentMatchers.isNotNull;
 import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;

@@ -70,7 +72,7 @@ public class Batch2JobRunnerImplTest {
 ourLog.setLevel(Level.ERROR);

 // test
-myJobRunner.startNewJob(new Batch2BaseJobParameters(jobId));
+myJobRunner.startNewJob(new SystemRequestDetails(), new Batch2BaseJobParameters(jobId));

 // verify
 ArgumentCaptor<ILoggingEvent> captor = ArgumentCaptor.forClass(ILoggingEvent.class);

@@ -87,7 +89,7 @@ public class Batch2JobRunnerImplTest {
 ourLog.setLevel(Level.ERROR);

 // test
-myJobRunner.startNewJob(new Batch2BaseJobParameters(Batch2JobDefinitionConstants.BULK_EXPORT));
+myJobRunner.startNewJob(new SystemRequestDetails(), new Batch2BaseJobParameters(Batch2JobDefinitionConstants.BULK_EXPORT));

 // verify
 ArgumentCaptor<ILoggingEvent> captor = ArgumentCaptor.forClass(ILoggingEvent.class);

@@ -115,12 +117,12 @@ public class Batch2JobRunnerImplTest {
 when(myJobCoordinator.getInstance(eq(jobInstanceId))).thenReturn(mockJobInstance);

 // test
-myJobRunner.startNewJob(parameters);
+myJobRunner.startNewJob(new SystemRequestDetails(), parameters);

 // verify
 ArgumentCaptor<JobInstanceStartRequest> captor = ArgumentCaptor.forClass(JobInstanceStartRequest.class);
 verify(myJobCoordinator)
-.startInstance(captor.capture());
+.startInstance(isNotNull(), captor.capture());
 JobInstanceStartRequest val = captor.getValue();
 // we need to verify something in the parameters
 ourLog.info(val.getParameters());

@@ -175,12 +177,12 @@ public class Batch2JobRunnerImplTest {
 when(myJobCoordinator.getInstance(eq(jobInstanceId))).thenReturn(mockJobInstance);

 // test
-myJobRunner.startNewJob(parameters);
+myJobRunner.startNewJob(new SystemRequestDetails(), parameters);

 // verify
 ArgumentCaptor<JobInstanceStartRequest> captor = ArgumentCaptor.forClass(JobInstanceStartRequest.class);
 verify(myJobCoordinator)
-.startInstance(captor.capture());
+.startInstance(isNotNull(), captor.capture());
 JobInstanceStartRequest val = captor.getValue();
 // we need to verify something in the parameters
 ourLog.info(val.getParameters());
@@ -24,6 +24,7 @@ import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
 import ca.uhn.fhir.batch2.model.StatusEnum;
 import ca.uhn.fhir.batch2.models.JobInstanceFetchRequest;
 import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import org.springframework.data.domain.Page;

@@ -40,8 +41,24 @@ public interface IJobCoordinator {
 * @param theStartRequest The request, containing the job type and parameters
 * @return Returns a unique ID for this job execution
 * @throws InvalidRequestException If the request is invalid (incorrect/missing parameters, etc)
+* @deprecated Use {@link #startInstance(RequestDetails, JobInstanceStartRequest)}
 */
-Batch2JobStartResponse startInstance(JobInstanceStartRequest theStartRequest) throws InvalidRequestException;
+@Deprecated(since = "6.8.0", forRemoval = true)
+default Batch2JobStartResponse startInstance(JobInstanceStartRequest theStartRequest) throws InvalidRequestException {
+return startInstance(null, theStartRequest);
+}
+
+/**
+* Starts a new job instance
+*
+* @param theRequestDetails The request details associated with the request. This will get used to validate that the
+*                          request is appropriate for the given user, so if at all possible it should be the
+*                          original RequestDetails from the server request.
+* @param theStartRequest The request, containing the job type and parameters
+* @return Returns a unique ID for this job execution
+* @throws InvalidRequestException If the request is invalid (incorrect/missing parameters, etc)
+*/
+Batch2JobStartResponse startInstance(RequestDetails theRequestDetails, JobInstanceStartRequest theStartRequest) throws InvalidRequestException;

 /**
 * Fetch details about a job instance
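A sketch of starting a job through the new overload, forwarding the server's RequestDetails (the job definition ID and parameters object are illustrative; setJobDefinitionId and setParameters appear elsewhere in this commit):

    JobInstanceStartRequest request = new JobInstanceStartRequest();
    request.setJobDefinitionId("MY_JOB_DEFINITION"); // illustrative ID
    request.setParameters(myJobParameters);          // an IModelJson parameters object
    Batch2JobStartResponse response = myJobCoordinator.startInstance(theRequestDetails, request);
    String instanceId = response.getInstanceId();

Note that the old single-argument overload still compiles but simply delegates with a null RequestDetails, so validators that depend on the request will see nothing through it.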
@@ -20,6 +20,7 @@
 package ca.uhn.fhir.batch2.api;

 import ca.uhn.fhir.model.api.IModelJson;
+import ca.uhn.fhir.rest.api.server.RequestDetails;

 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;

@@ -44,10 +45,11 @@ public interface IJobParametersValidator<T extends IModelJson> {
 /**
 * Validate the given job parameters.
 *
+* @param theRequestDetails The request details associated with the start request
 * @param theParameters The parameters object to validate
 * @return Any strings returned by this method are treated as validation failures and returned to the client initiating the job. Return <code>null</code> or an empty list to indicate that no validation failures occurred.
 */
 @Nullable
-List<String> validate(@Nonnull T theParameters);
+List<String> validate(RequestDetails theRequestDetails, @Nonnull T theParameters);

 }
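Implementations now receive the originating request, which is what lets the delete-expunge validator check cascade support and partition permissions per caller. A minimal sketch of the updated contract (MyJobParameters and its accessor are illustrative placeholders):

    import ca.uhn.fhir.batch2.api.IJobParametersValidator;
    import ca.uhn.fhir.rest.api.server.RequestDetails;

    import javax.annotation.Nonnull;
    import javax.annotation.Nullable;
    import java.util.ArrayList;
    import java.util.List;

    public class MyJobParametersValidator implements IJobParametersValidator<MyJobParameters> {
        @Nullable
        @Override
        public List<String> validate(RequestDetails theRequestDetails, @Nonnull MyJobParameters theParameters) {
            List<String> errors = new ArrayList<>();
            if (theParameters.getUrls().isEmpty()) { // illustrative check on an illustrative accessor
                errors.add("At least one URL must be provided");
            }
            return errors; // empty (or null) means validation passed
        }
    }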
@@ -35,6 +35,7 @@ import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
 import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
 import ca.uhn.fhir.jpa.subscription.channel.api.IChannelReceiver;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import ca.uhn.fhir.util.Logs;

@@ -90,7 +91,7 @@ public class JobCoordinatorImpl implements IJobCoordinator {
 }

 @Override
-public Batch2JobStartResponse startInstance(JobInstanceStartRequest theStartRequest) {
+public Batch2JobStartResponse startInstance(RequestDetails theRequestDetails, JobInstanceStartRequest theStartRequest) {
 String paramsString = theStartRequest.getParameters();
 if (isBlank(paramsString)) {
 throw new InvalidRequestException(Msg.code(2065) + "No parameters supplied");

@@ -119,7 +120,7 @@ public class JobCoordinatorImpl implements IJobCoordinator {
 JobDefinition<?> jobDefinition = myJobDefinitionRegistry
 .getLatestJobDefinition(theStartRequest.getJobDefinitionId()).orElseThrow(() -> new IllegalArgumentException(Msg.code(2063) + "Unknown job definition ID: " + theStartRequest.getJobDefinitionId()));

-myJobParameterJsonValidator.validateJobParameters(theStartRequest, jobDefinition);
+myJobParameterJsonValidator.validateJobParameters(theRequestDetails, theStartRequest, jobDefinition);

 IJobPersistence.CreateResult instanceAndFirstChunk =
 myTransactionService.withSystemRequest().execute(() ->
@@ -24,6 +24,7 @@ import ca.uhn.fhir.batch2.model.JobDefinition;
 import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.model.api.IModelJson;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;

 import javax.annotation.Nonnull;

@@ -41,7 +42,7 @@ import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
 class JobParameterJsonValidator {
 private final ValidatorFactory myValidatorFactory = Validation.buildDefaultValidatorFactory();

-<PT extends IModelJson> void validateJobParameters(@Nonnull JobInstanceStartRequest theStartRequest, @Nonnull JobDefinition<PT> theJobDefinition) {
+<PT extends IModelJson> void validateJobParameters(RequestDetails theRequestDetails, @Nonnull JobInstanceStartRequest theStartRequest, @Nonnull JobDefinition<PT> theJobDefinition) {

 // JSR 380
 Validator validator = myValidatorFactory.getValidator();

@@ -52,7 +53,7 @@ class JobParameterJsonValidator {
 // Programmatic Validator
 IJobParametersValidator<PT> parametersValidator = theJobDefinition.getParametersValidator();
 if (parametersValidator != null) {
-List<String> outcome = parametersValidator.validate(parameters);
+List<String> outcome = parametersValidator.validate(theRequestDetails, parameters);
 outcome = defaultIfNull(outcome, Collections.emptyList());
 errorStrings.addAll(outcome);
 }
@@ -19,6 +19,7 @@
 */
 package ca.uhn.fhir.batch2.jobs.chunk;

+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
 import ca.uhn.fhir.model.api.IModelJson;
 import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;

@@ -34,13 +35,29 @@ import java.util.stream.Collectors;

 public class ResourceIdListWorkChunkJson implements IModelJson {

+@JsonProperty("requestPartitionId")
+private RequestPartitionId myRequestPartitionId;
 @JsonProperty("ids")
 private List<TypedPidJson> myTypedPids;

-public ResourceIdListWorkChunkJson() {}
+/**
+* Constructor
+*/
+public ResourceIdListWorkChunkJson() {
+super();
+}

-public ResourceIdListWorkChunkJson(Collection<TypedPidJson> theTypedPids) {
+/**
+* Constructor
+*/
+public ResourceIdListWorkChunkJson(Collection<TypedPidJson> theTypedPids, RequestPartitionId theRequestPartitionId) {
+this();
 getTypedPids().addAll(theTypedPids);
+myRequestPartitionId = theRequestPartitionId;
+}
+
+public RequestPartitionId getRequestPartitionId() {
+return myRequestPartitionId;
 }

 private List<TypedPidJson> getTypedPids() {
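Work chunks now remember the partition their IDs were read from, so downstream steps can act against the right partition. A sketch of round-tripping one (the TypedPidJson constructor shape is an assumption):

    List<TypedPidJson> pids = List.of(new TypedPidJson("Patient", "123")); // assumed (type, pid) constructor
    ResourceIdListWorkChunkJson chunk = new ResourceIdListWorkChunkJson(pids, RequestPartitionId.defaultPartition());
    RequestPartitionId partition = chunk.getRequestPartitionId(); // carried along with the IDs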
@@ -0,0 +1,13 @@
+package ca.uhn.fhir.batch2.jobs.parameters;
+
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import java.util.List;
+
+public interface IUrlListValidator {
+@Nullable
+List<String> validateUrls(@Nonnull List<String> theUrls);
+
+@Nullable
+List<String> validatePartitionedUrls(@Nonnull List<PartitionedUrl> thePartitionedUrls);
+}
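Extracting this interface makes the URL checks mockable, which the new DeleteExpungeJobParametersValidatorTest above leans on. A sketch of a parameters validator delegating to it (the class name is illustrative):

    import ca.uhn.fhir.batch2.api.IJobParametersValidator;
    import ca.uhn.fhir.batch2.jobs.parameters.IUrlListValidator;
    import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrlListJobParameters;
    import ca.uhn.fhir.rest.api.server.RequestDetails;

    import javax.annotation.Nonnull;
    import javax.annotation.Nullable;
    import java.util.List;

    public class MyUrlListJobParametersValidator implements IJobParametersValidator<PartitionedUrlListJobParameters> {
        private final IUrlListValidator myUrlListValidator;

        public MyUrlListJobParametersValidator(IUrlListValidator theUrlListValidator) {
            myUrlListValidator = theUrlListValidator;
        }

        @Nullable
        @Override
        public List<String> validate(RequestDetails theRequestDetails, @Nonnull PartitionedUrlListJobParameters theParameters) {
            // Delegate the URL checks; a null or empty result means the URLs are acceptable.
            return myUrlListValidator.validatePartitionedUrls(theParameters.getPartitionedUrls());
        }
    }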
@@ -49,7 +49,6 @@ public class PartitionedUrlListJobParameters extends PartitionedJobParameters {
 public PartitionedUrlListJobParameters addUrl(@Nonnull String theUrl) {
 PartitionedUrl partitionedUrl = new PartitionedUrl();
 partitionedUrl.setUrl(theUrl);
-partitionedUrl.setRequestPartitionId(RequestPartitionId.defaultPartition());
 return addPartitionedUrl(partitionedUrl);
 }
 }
@@ -27,7 +27,7 @@ import java.util.Collections;
 import java.util.List;
 import java.util.stream.Collectors;

-public class UrlListValidator {
+public class UrlListValidator implements IUrlListValidator {
 private final String myOperationName;
 private final IBatch2DaoSvc myBatch2DaoSvc;

@@ -38,6 +38,7 @@ public class UrlListValidator {

 @Nullable
+@Override
 public List<String> validateUrls(@Nonnull List<String> theUrls) {
 if (theUrls.isEmpty()) {
 if (!myBatch2DaoSvc.isAllResourceTypeSupported()) {

@@ -48,6 +49,7 @@ public class UrlListValidator {
 }

 @Nullable
+@Override
 public List<String> validatePartitionedUrls(@Nonnull List<PartitionedUrl> thePartitionedUrls) {
 List<String> urls = thePartitionedUrls.stream().map(PartitionedUrl::getUrl).collect(Collectors.toList());
 return validateUrls(urls);
@@ -48,7 +48,11 @@ public class PartitionedUrlListIdChunkProducer implements IIdChunkProducer<Parti
 return myBatch2DaoSvc.fetchResourceIdsPage(theNextStart, theEnd, thePageSize, theRequestPartitionId, null);
 } else {
 ourLog.info("Fetching resource ID chunk for URL {} - Range {} - {}", partitionedUrl.getUrl(), theNextStart, theEnd);
-return myBatch2DaoSvc.fetchResourceIdsPage(theNextStart, theEnd, thePageSize, partitionedUrl.getRequestPartitionId(), partitionedUrl.getUrl());
+RequestPartitionId requestPartitionId = partitionedUrl.getRequestPartitionId();
+if (requestPartitionId == null) {
+requestPartitionId = theRequestPartitionId;
+}
+return myBatch2DaoSvc.fetchResourceIdsPage(theNextStart, theEnd, thePageSize, requestPartitionId, partitionedUrl.getUrl());
 }
 }
 }
@@ -117,24 +117,24 @@ public class ResourceIdListStep<PT extends PartitionedJobParameters, IT extends

 totalIdsFound += submissionIds.size();
 chunkCount++;
-submitWorkChunk(submissionIds, theDataSink);
+submitWorkChunk(submissionIds, nextChunk.getRequestPartitionId(), theDataSink);
 }
 }

 totalIdsFound += idBuffer.size();
 chunkCount++;
-submitWorkChunk(idBuffer, theDataSink);
+submitWorkChunk(idBuffer, requestPartitionId, theDataSink);

 ourLog.info("Submitted {} chunks with {} resource IDs", chunkCount, totalIdsFound);
 return RunOutcome.SUCCESS;
 }

-private void submitWorkChunk(Collection<TypedPidJson> theTypedPids, IJobDataSink<ResourceIdListWorkChunkJson> theDataSink) {
+private void submitWorkChunk(Collection<TypedPidJson> theTypedPids, RequestPartitionId theRequestPartitionId, IJobDataSink<ResourceIdListWorkChunkJson> theDataSink) {
 if (theTypedPids.isEmpty()) {
 return;
 }
 ourLog.info("Submitting work chunk with {} IDs", theTypedPids.size());
-ResourceIdListWorkChunkJson data = new ResourceIdListWorkChunkJson(theTypedPids);
+ResourceIdListWorkChunkJson data = new ResourceIdListWorkChunkJson(theTypedPids, theRequestPartitionId);
 ourLog.debug("IDs are: {}", data);
 theDataSink.accept(data);
 }
@@ -521,7 +521,7 @@ public class JobCoordinatorImplTest extends BaseBatch2Test {

 // Setup

-IJobParametersValidator<TestJobParameters> v = p -> {
+IJobParametersValidator<TestJobParameters> v = (theRequestDetails, p) -> {
 if (p.getParam1().equals("bad")) {
 return Lists.newArrayList("Bad Parameter Value", "Bad Parameter Value 2");
 }
@@ -109,7 +109,7 @@ public class LoadIdsStepTest {
 for (long i = idLow; i < idHigh; i++) {
 ids.add(JpaPid.fromId(i));
 }
-IResourcePidList chunk = new HomogeneousResourcePidList("Patient", ids, lastDate);
+IResourcePidList chunk = new HomogeneousResourcePidList("Patient", ids, lastDate, null);
 return chunk;
 }
@@ -22,6 +22,7 @@ package ca.uhn.fhir.mdm.batch2.clear;
 import ca.uhn.fhir.batch2.api.IJobParametersValidator;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.mdm.api.IMdmSettings;
+import ca.uhn.fhir.rest.api.server.RequestDetails;

 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;

@@ -41,7 +42,7 @@ public class MdmClearJobParametersValidator implements IJobParametersValidator<M
 @Nullable
 @Override
-public List<String> validate(@Nonnull MdmClearJobParameters theParameters) {
+public List<String> validate(RequestDetails theRequestDetails, @Nonnull MdmClearJobParameters theParameters) {
 if (myMdmSettings == null || !myMdmSettings.isEnabled()) {
 return Collections.singletonList("Mdm is not enabled on this server");
 }
@ -23,14 +23,12 @@ import ca.uhn.fhir.batch2.api.IJobParametersValidator;
import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrl;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
-import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.mdm.api.IMdmSettings;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
-import org.springframework.beans.factory.annotation.Autowired;

import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
@ -49,7 +47,7 @@ public class MdmSubmitJobParametersValidator implements IJobParametersValidator<

	@Nonnull
	@Override
-	public List<String> validate(@Nonnull MdmSubmitJobParameters theParameters) {
+	public List<String> validate(RequestDetails theRequestDetails, @Nonnull MdmSubmitJobParameters theParameters) {
		List<String> errorMsgs = new ArrayList<>();
		for (PartitionedUrl partitionedUrl : theParameters.getPartitionedUrls()) {
			String url = partitionedUrl.getUrl();
@ -38,7 +38,7 @@ class MdmClearJobParametersValidatorTest {
	MdmClearJobParameters parameters = new MdmClearJobParameters();

	// execute
-	List<String> result = myMdmClearJobParametersValidator.validate(parameters);
+	List<String> result = myMdmClearJobParametersValidator.validate(null, parameters);

	// verify
	assertThat(result, hasSize(1));
@ -52,7 +52,7 @@ class MdmClearJobParametersValidatorTest {
	when(myMdmSettings.isEnabled()).thenReturn(true);

	// execute
-	List<String> result = myMdmClearJobParametersValidator.validate(parameters);
+	List<String> result = myMdmClearJobParametersValidator.validate(null, parameters);

	// verify
	assertThat(result, hasSize(1));
@ -69,7 +69,7 @@ class MdmClearJobParametersValidatorTest {
	when(myMdmSettings.getMdmRules()).thenReturn(rules);

	// execute
-	List<String> result = myMdmClearJobParametersValidator.validate(parameters);
+	List<String> result = myMdmClearJobParametersValidator.validate(null, parameters);

	// verify
	assertThat(result, hasSize(2));
@ -88,7 +88,7 @@ class MdmClearJobParametersValidatorTest {
	when(myDaoRegistry.isResourceTypeSupported("Patient")).thenReturn(true);

	// execute
-	List<String> result = myMdmClearJobParametersValidator.validate(parameters);
+	List<String> result = myMdmClearJobParametersValidator.validate(null, parameters);

	// verify
	assertThat(result, hasSize(0));
@ -1,10 +1,8 @@
package ca.uhn.fhir.mdm.batch2.submit;

import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.mdm.api.IMdmSettings;
-import ca.uhn.fhir.mdm.rules.json.MdmRulesJson;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@ -48,7 +46,7 @@ class MdmSubmitJobParametersValidatorTest {

	MdmSubmitJobParameters parameters = new MdmSubmitJobParameters();
	parameters.addUrl("Practitioner?name=foo");
-	List<String> errors = myValidator.validate(parameters);
+	List<String> errors = myValidator.validate(null, parameters);
	assertThat(errors, hasSize(1));
	assertThat(errors.get(0), is(equalTo("Resource type Practitioner is not supported by MDM. Check your MDM settings")));
}
@ -59,7 +57,7 @@ class MdmSubmitJobParametersValidatorTest {
	when(myMatchUrlService.translateMatchUrl(anyString(), any())).thenThrow(new InvalidRequestException("Can't find death-date!"));
	MdmSubmitJobParameters parameters = new MdmSubmitJobParameters();
	parameters.addUrl("Practitioner?death-date=foo");
-	List<String> errors = myValidator.validate(parameters);
+	List<String> errors = myValidator.validate(null, parameters);
	assertThat(errors, hasSize(1));
	assertThat(errors.get(0), is(equalTo("Invalid request detected: Can't find death-date!")));
}
@ -1823,6 +1823,7 @@ public class JpaStorageSettings extends StorageSettings {
	/**
	 * <p>
	 * This determines the maximum number of conflicts that should be fetched and handled while retrying a delete of a resource.
+	 * This can also be thought of as the maximum number of rounds of cascading deletion.
	 * </p>
	 * <p>
	 * The default value for this setting is {@code 60}.
@ -1837,6 +1838,7 @@ public class JpaStorageSettings extends StorageSettings {
	/**
	 * <p>
	 * This determines the maximum number of conflicts that should be fetched and handled while retrying a delete of a resource.
+	 * This can also be thought of as the maximum number of rounds of cascading deletion.
	 * </p>
	 * <p>
	 * The default value for this setting is {@code 60}.
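A hedged configuration sketch for the setting documented above; the setter name setMaximumDeleteConflictQueryCount is an assumption inferred from the Javadoc, not confirmed by this diff:

    JpaStorageSettings storageSettings = new JpaStorageSettings();
    // Assumed setter name; the Javadoc above gives the default as 60.
    // Each delete-conflict retry round can cascade one level deeper, so this
    // value effectively bounds the depth of a cascading delete.
    storageSettings.setMaximumDeleteConflictQueryCount(10);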
@ -19,15 +19,12 @@
 */
package ca.uhn.fhir.jpa.api.pid;

+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Date;
-import java.util.List;
+import java.util.*;

abstract public class BaseResourcePidList implements IResourcePidList {
@ -35,10 +32,17 @@ abstract public class BaseResourcePidList implements IResourcePidList {

	@Nullable
	final Date myLastDate;
+	private final RequestPartitionId myRequestPartitionId;

-	BaseResourcePidList(Collection<IResourcePersistentId> theIds, Date theLastDate) {
+	BaseResourcePidList(Collection<IResourcePersistentId> theIds, Date theLastDate, RequestPartitionId theRequestPartitionId) {
		myIds.addAll(theIds);
		myLastDate = theLastDate;
+		myRequestPartitionId = theRequestPartitionId;
	}
+
+	@Override
+	public RequestPartitionId getRequestPartitionId() {
+		return myRequestPartitionId;
+	}

	@Override
@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.api.pid;

import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import org.apache.commons.lang3.builder.ToStringBuilder;
@ -32,6 +33,11 @@ import java.util.List;
 * An empty resource pid list
 */
public class EmptyResourcePidList implements IResourcePidList {
+	@Override
+	public RequestPartitionId getRequestPartitionId() {
+		return null;
+	}
+
	@Override
	public Date getLastDate() {
		return null;
@ -19,6 +19,7 @@
 */
package ca.uhn.fhir.jpa.api.pid;

+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;

import javax.annotation.Nonnull;
@ -32,8 +33,8 @@ public class HomogeneousResourcePidList extends BaseResourcePidList {
	@Nonnull
	final String myResourceType;

-	public HomogeneousResourcePidList(String theResourceType, Collection<IResourcePersistentId> theIds, Date theLastDate) {
-		super(theIds, theLastDate);
+	public HomogeneousResourcePidList(String theResourceType, Collection<IResourcePersistentId> theIds, Date theLastDate, RequestPartitionId theRequestPartitionId) {
+		super(theIds, theLastDate, theRequestPartitionId);
		myResourceType = theResourceType;
	}
@ -19,6 +19,7 @@
 */
package ca.uhn.fhir.jpa.api.pid;

+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;

import javax.annotation.Nonnull;
|
@ -30,6 +31,8 @@ import java.util.List;
|
||||||
*/
|
*/
|
||||||
public interface IResourcePidList {
|
public interface IResourcePidList {
|
||||||
|
|
||||||
|
RequestPartitionId getRequestPartitionId();
|
||||||
|
|
||||||
Date getLastDate();
|
Date getLastDate();
|
||||||
|
|
||||||
int size();
|
int size();
|
||||||
|
|
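Putting the pid-list changes together, a consumer-side sketch; RequestPartitionId.defaultPartition() and the local variables are illustrative assumptions, not taken from this diff:

    List<IResourcePersistentId> pids = new ArrayList<>();
    IResourcePidList pidList = new HomogeneousResourcePidList(
        "Patient", pids, new Date(), RequestPartitionId.defaultPartition()); // assumed factory
    // May be null for lists with no associated partition, e.g. EmptyResourcePidList above
    RequestPartitionId partition = pidList.getRequestPartitionId();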
Some files were not shown because too many files have changed in this diff.