Issue 3237 delete expunge performance (#3239)

* Initial test looking for bad deletes

* Add indexes, add migration, add test

* Fix NPE

* Add provider test

* Fix up comments, tighten test

* Update hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeProcessor.java

Co-authored-by: michaelabuckley <michaelabuckley@gmail.com>

* Add changelog

* Remove unnecessary migration

Co-authored-by: michaelabuckley <michaelabuckley@gmail.com>
Tadgh 2021-12-14 22:53:26 -08:00 committed by GitHub
parent ef83777115
commit 9e20d62380
14 changed files with 261 additions and 20 deletions

View File

@ -0,0 +1,6 @@
---
type: perf
issue: 3153
jira: SMILE-3502
title: "Significantly improved $delete-expunge performance by adding database indexes and by filtering the foreign keys that need to be deleted based on the resource type being expunged."

View File

@ -1376,6 +1376,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
.orElse(null);
}
}
boolean haveSource = isNotBlank(source) && myConfig.getStoreMetaSourceInformation().isStoreSourceUri();
boolean haveRequestId = isNotBlank(requestId) && myConfig.getStoreMetaSourceInformation().isStoreRequestId();
if (haveSource || haveRequestId) {

View File

@ -20,24 +20,33 @@ package ca.uhn.fhir.jpa.dao.expunge;
* #L%
*/
import ca.uhn.fhir.mdm.api.IMdmSettings;
import ca.uhn.fhir.mdm.rules.config.MdmSettings;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
@Service
public class ResourceTableFKProvider {
@Autowired(required = false)
IMdmSettings myMdmSettings;
@Nonnull
public List<ResourceForeignKey> getResourceForeignKeys() {
List<ResourceForeignKey> retval = new ArrayList<>();
// Add some secondary related records that don't have foreign keys
retval.add(new ResourceForeignKey("HFJ_HISTORY_TAG", "RES_ID"));
retval.add(new ResourceForeignKey("TRM_CODESYSTEM_VER", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_RES_VER_PROV", "RES_PID"));
// To find all the FKs that need to be included here, run the following SQL in the INFORMATION_SCHEMA:
// SELECT FKTABLE_NAME, FKCOLUMN_NAME FROM CROSS_REFERENCES WHERE PKTABLE_NAME = 'HFJ_RESOURCE'
// Add some secondary related records that don't have foreign keys
retval.add(new ResourceForeignKey("HFJ_HISTORY_TAG", "RES_ID"));//NOT covered by index.
retval.add(new ResourceForeignKey("HFJ_RES_VER_PROV", "RES_PID"));
//These have the possibility of touching all resource types.
retval.add(new ResourceForeignKey("HFJ_FORCED_ID", "RESOURCE_PID"));
retval.add(new ResourceForeignKey("HFJ_IDX_CMP_STRING_UNIQ", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_IDX_CMB_TOK_NU", "RES_ID"));
@ -46,7 +55,6 @@ public class ResourceTableFKProvider {
retval.add(new ResourceForeignKey("HFJ_RES_PARAM_PRESENT", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_RES_TAG", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_RES_VER", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_RES_VER_PROV", "RES_PID"));
retval.add(new ResourceForeignKey("HFJ_SPIDX_COORDS", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_SPIDX_DATE", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_SPIDX_NUMBER", "RES_ID"));
@ -55,17 +63,72 @@ public class ResourceTableFKProvider {
retval.add(new ResourceForeignKey("HFJ_SPIDX_STRING", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_SPIDX_TOKEN", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_SPIDX_URI", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_SUBSCRIPTION_STATS", "RES_ID"));
retval.add(new ResourceForeignKey("MPI_LINK", "GOLDEN_RESOURCE_PID"));
retval.add(new ResourceForeignKey("MPI_LINK", "TARGET_PID"));
retval.add(new ResourceForeignKey("MPI_LINK", "PERSON_PID"));
//These only touch certain resource types.
retval.add(new ResourceForeignKey("TRM_CODESYSTEM_VER", "RES_ID"));
retval.add(new ResourceForeignKey("TRM_CODESYSTEM", "RES_ID"));
retval.add(new ResourceForeignKey("TRM_VALUESET", "RES_ID"));
retval.add(new ResourceForeignKey("TRM_CONCEPT_MAP", "RES_ID"));
retval.add(new ResourceForeignKey("NPM_PACKAGE_VER", "BINARY_RES_ID"));
retval.add(new ResourceForeignKey("NPM_PACKAGE_VER_RES", "BINARY_RES_ID"));
retval.add(new ResourceForeignKey("TRM_CODESYSTEM", "RES_ID"));
retval.add(new ResourceForeignKey("TRM_CODESYSTEM_VER", "RES_ID"));
retval.add(new ResourceForeignKey("TRM_CONCEPT_MAP", "RES_ID"));
retval.add(new ResourceForeignKey("TRM_VALUESET", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_SUBSCRIPTION_STATS", "RES_ID"));
return retval;
}
@Nonnull
public List<ResourceForeignKey> getResourceForeignKeysByResourceType(String theResourceType) {
List<ResourceForeignKey> retval = new ArrayList<>();
//These have the possibility of touching all resource types.
retval.add(new ResourceForeignKey("HFJ_HISTORY_TAG", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_RES_VER_PROV", "RES_PID"));
retval.add(new ResourceForeignKey("HFJ_FORCED_ID", "RESOURCE_PID"));
retval.add(new ResourceForeignKey("HFJ_IDX_CMP_STRING_UNIQ", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_IDX_CMB_TOK_NU", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_RES_LINK", "SRC_RESOURCE_ID"));
retval.add(new ResourceForeignKey("HFJ_RES_LINK", "TARGET_RESOURCE_ID"));
retval.add(new ResourceForeignKey("HFJ_RES_PARAM_PRESENT", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_RES_TAG", "RES_ID"));//TODO GGG: Res_ID + TAG_ID? is that enough?
retval.add(new ResourceForeignKey("HFJ_RES_VER", "RES_ID"));//TODO GGG: RES_ID + updated? is that enough?
retval.add(new ResourceForeignKey("HFJ_SPIDX_COORDS", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_SPIDX_DATE", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_SPIDX_NUMBER", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_SPIDX_QUANTITY", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_SPIDX_QUANTITY_NRML", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_SPIDX_STRING", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_SPIDX_TOKEN", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_SPIDX_URI", "RES_ID"));
if (myMdmSettings != null && myMdmSettings.isEnabled()) {
retval.add(new ResourceForeignKey("MPI_LINK", "GOLDEN_RESOURCE_PID"));//NOT covered by index.
retval.add(new ResourceForeignKey("MPI_LINK", "TARGET_PID"));//Possibly covered, partial index
retval.add(new ResourceForeignKey("MPI_LINK", "PERSON_PID"));//TODO GGG: I don't even think we need this... this field is deprecated, and the deletion is covered by GOLDEN_RESOURCE_PID
}
switch (theResourceType.toLowerCase()) {
case "binary":
retval.add(new ResourceForeignKey("NPM_PACKAGE_VER", "BINARY_RES_ID"));//Not covered
retval.add(new ResourceForeignKey("NPM_PACKAGE_VER_RES", "BINARY_RES_ID"));//Not covered
break;
case "subscription":
retval.add(new ResourceForeignKey("HFJ_SUBSCRIPTION_STATS", "RES_ID"));//Covered by index.
break;
case "codesystem":
retval.add(new ResourceForeignKey("TRM_CODESYSTEM_VER", "RES_ID"));//Not covered
retval.add(new ResourceForeignKey("TRM_CODESYSTEM", "RES_ID"));//Not covered
break;
case "valueset":
retval.add(new ResourceForeignKey("TRM_VALUESET", "RES_ID"));//Not covered
break;
case "conceptmap":
retval.add(new ResourceForeignKey("TRM_CONCEPT_MAP", "RES_ID"));//Not covered
break;
default:
}
return retval;
}
}
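For orientation only (not part of this commit), a minimal usage sketch of the new per-type lookup. Constructing the provider directly is an assumption for illustration (it leaves the optional MDM settings unset, which the null check above tolerates), and the import locations are inferred from the diff.
import ca.uhn.fhir.jpa.dao.expunge.ResourceForeignKey;
import ca.uhn.fhir.jpa.dao.expunge.ResourceTableFKProvider;
import java.util.List;
public class ResourceTableFKProviderUsageSketch {
	public static void main(String[] args) {
		ResourceTableFKProvider provider = new ResourceTableFKProvider();
		// The full list always includes every FK, including the TRM_*, NPM_* and
		// HFJ_SUBSCRIPTION_STATS tables that only certain resource types can touch.
		List<ResourceForeignKey> all = provider.getResourceForeignKeys();
		// The per-type list skips those tables for, e.g., Patient, so delete-expunge
		// issues fewer DELETE statements per chunk.
		List<ResourceForeignKey> patientOnly = provider.getResourceForeignKeysByResourceType("Patient");
		System.out.println(all.size() + " FKs total, " + patientOnly.size() + " for Patient");
	}
}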

View File

@ -28,6 +28,8 @@ import ca.uhn.fhir.jpa.dao.expunge.ResourceTableFKProvider;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemProcessor;
@ -65,8 +67,13 @@ public class DeleteExpungeProcessor implements ItemProcessor<List<Long>, List<St
List<String> retval = new ArrayList<>();
String pidListString = thePids.toString().replace("[", "(").replace("]", ")");
List<ResourceForeignKey> resourceForeignKeys = myResourceTableFKProvider.getResourceForeignKeys();
String pidListString = "(" + thePids.stream().map(Object::toString).collect(Collectors.joining(",")) + ")";
//Given the first pid in the list, grab the resource type so we can filter down to the FKs we care about.
//TODO GGG should we pass this down the pipe?
IIdType iIdType = myIdHelper.resourceIdFromPidOrThrowException(thePids.get(0));
List<ResourceForeignKey> resourceForeignKeys = myResourceTableFKProvider.getResourceForeignKeysByResourceType(iIdType.getResourceType());
for (ResourceForeignKey resourceForeignKey : resourceForeignKeys) {
retval.add(deleteRecordsByColumnSql(pidListString, resourceForeignKey));
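deleteRecordsByColumnSql is not shown in this hunk; below is a minimal sketch (not the HAPI implementation, class and method names are illustrative) of the kind of statement it presumably builds, one per foreign-key table/column, scoped to the chunk's PID list.
public class DeleteSqlSketch {
	// e.g. buildDeleteSql("HFJ_RES_TAG", "RES_ID", "(101,102,103)")
	//   -> "DELETE FROM HFJ_RES_TAG WHERE RES_ID IN (101,102,103)"
	static String buildDeleteSql(String theTable, String theColumn, String thePidListString) {
		return "DELETE FROM " + theTable + " WHERE " + theColumn + " IN " + thePidListString;
	}
}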

View File

@ -46,7 +46,12 @@ import java.util.Date;
@Entity
@Table(name = "MPI_LINK", uniqueConstraints = {
// TODO GGG DROP this index, and instead use the below one
@UniqueConstraint(name = "IDX_EMPI_PERSON_TGT", columnNames = {"PERSON_PID", "TARGET_PID"}),
// v---- this one
@UniqueConstraint(name = "IDX_EMPI_GR_TGT", columnNames = {"GOLDEN_RESOURCE_PID", "TARGET_PID"}),
//TODO GGG Should i make individual indices for PERSON/TARGET?
})
public class MdmLink implements IMdmLink {
public static final int VERSION_LENGTH = 16;

View File

@ -40,6 +40,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.model.entity.SearchParamPresent;
import ca.uhn.fhir.util.VersionEnum;
import javax.persistence.Index;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
@ -85,6 +86,25 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
}
/**
* See https://github.com/hapifhir/hapi-fhir/issues/3237 for reasoning for these indexes.
* This adds indexes to several tables to improve delete-expunge performance, since delete-expunge deletes rows by PID.
*/
private void addIndexesForDeleteExpunge(Builder theVersion) {
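// H2, Derby, MariaDB, and MySql automatically add indexes to foreign keys, so these only apply on other platforms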
theVersion.onTable("HFJ_HISTORY_TAG")
.addIndex("20211210.2", "IDX_RESHISTTAG_RESID")
.unique(false)
.withColumns("RES_ID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
theVersion.onTable("HFJ_RES_VER_PROV")
.addIndex("20211210.3", "IDX_RESVERPROV_RESID")
.unique(false)
.withColumns("RES_PID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
}
private void init570() {
Builder version = forVersion(VersionEnum.V5_7_0);
@ -116,6 +136,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.withColumns("PARENT_PID")
// H2, Derby, MariaDB, and MySql automatically add indexes to foreign keys
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
addIndexesForDeleteExpunge(version);
}

View File

@ -33,8 +33,8 @@ class ResourceTableFKProviderTest extends BaseJpaR4Test {
// Add the extra FKs that are not available in the CROSS_REFERENCES table
expected.add(new ResourceForeignKey("HFJ_HISTORY_TAG", "RES_ID"));
expected.add(new ResourceForeignKey("TRM_CODESYSTEM_VER", "RES_ID"));
expected.add(new ResourceForeignKey("HFJ_RES_VER_PROV", "RES_PID"));
//expected.add(new ResourceForeignKey("TRM_CODESYSTEM_VER", "RES_ID"));
//expected.add(new ResourceForeignKey("HFJ_RES_VER_PROV", "RES_PID"));
// If this assertion fails, it means hapi-fhir has added a new foreign-key dependency to HFJ_RESOURCE. To fix
// the test, add the missing key to myResourceTableFKProvider.getResourceForeignKeys()
assertThat(myResourceTableFKProvider.getResourceForeignKeys(), containsInAnyOrder(expected.toArray()));

View File

@ -1423,7 +1423,6 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
assertEquals(1, myPatientDao.search(m2).size().intValue());
}
@Test
public void testReferenceOrLinksUseInList_ForcedIds() {
@ -1517,7 +1516,8 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
// Ensure that the search actually worked
assertEquals(5, search.size().intValue());
}
}

View File

@ -0,0 +1,76 @@
package ca.uhn.fhir.jpa.delete.provider;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.provider.DeleteExpungeProvider;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.test.utilities.JettyUtil;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
@ExtendWith(MockitoExtension.class)
class DeleteExpungeProviderTest {
@Mock
private IDeleteExpungeJobSubmitter myJobSubmitter;
private Server myServer;
private FhirContext myCtx;
private int myPort;
private CloseableHttpClient myClient;
@BeforeEach
public void start() throws Exception {
myCtx = FhirContext.forR4Cached();
myServer = new Server(0);
DeleteExpungeProvider provider = new DeleteExpungeProvider(myCtx, myJobSubmitter);
ServletHandler proxyHandler = new ServletHandler();
RestfulServer servlet = new RestfulServer(myCtx);
servlet.registerProvider(provider);
ServletHolder servletHolder = new ServletHolder(servlet);
proxyHandler.addServletWithMapping(servletHolder, "/*");
myServer.setHandler(proxyHandler);
JettyUtil.startServer(myServer);
myPort = JettyUtil.getPortForStartedServer(myServer);
PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS);
HttpClientBuilder builder = HttpClientBuilder.create();
builder.setConnectionManager(connectionManager);
myClient = builder.build();
}
@Test
public void testSupplyingNoUrlsProvidesValidErrorMessage() throws IOException {
HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + ProviderConstants.OPERATION_DELETE_EXPUNGE);
try (CloseableHttpResponse execute = myClient.execute(post)) {
String body = IOUtils.toString(execute.getEntity().getContent(), Charset.defaultCharset());
assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(400)));
assertThat(body, is(containsString("At least one `url` parameter to $delete-expunge must be provided.")));
}
}
}

View File

@ -2,17 +2,23 @@ package ca.uhn.fhir.jpa.stresstest;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.config.TestR4Config;
import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.SqlQuery;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
import ca.uhn.fhir.util.BundleUtil;
import ca.uhn.fhir.util.StopWatch;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
@ -24,16 +30,19 @@ import org.hamcrest.Matchers;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.hapi.rest.server.helper.BatchHelperR4;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Bundle.BundleType;
import org.hl7.fhir.r4.model.Bundle.HTTPVerb;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.DecimalType;
import org.hl7.fhir.r4.model.DiagnosticReport;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.ListResource;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.Resource;
@ -60,7 +69,11 @@ import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.leftPad;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
@ -81,6 +94,8 @@ public class StressTestR4Test extends BaseResourceProviderR4Test {
@Autowired
private DatabaseBackedPagingProvider myPagingProvider;
private int myPreviousMaxPageSize;
@Autowired
private BatchJobHelper myBatchJobHelper;
@Override
@AfterEach
@ -262,7 +277,6 @@ public class StressTestR4Test extends BaseResourceProviderR4Test {
}
assertEquals(count - 1000, ids.size());
assertEquals(count - 1000, Sets.newHashSet(ids).size());
}
@Test
@ -559,6 +573,43 @@ public class StressTestR4Test extends BaseResourceProviderR4Test {
validateNoErrors(tasks);
}
@Test
public void testDeleteExpungeOperationOverLargeDataset() {
myDaoConfig.setAllowMultipleDelete(true);
myDaoConfig.setExpungeEnabled(true);
myDaoConfig.setDeleteExpungeEnabled(true);
// setup
Patient patient = new Patient();
patient.setId("tracer");
patient.setActive(true);
patient.getMeta().addTag().setSystem(UUID.randomUUID().toString()).setCode(UUID.randomUUID().toString());
MethodOutcome result = myClient.update().resource(patient).execute();
patient.setId(result.getId());
patient.getMeta().addTag().setSystem(UUID.randomUUID().toString()).setCode(UUID.randomUUID().toString());
result = myClient.update().resource(patient).execute();
Parameters input = new Parameters();
input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "/Patient?active=true");
int batchSize = 2;
input.addParameter(ProviderConstants.OPERATION_DELETE_BATCH_SIZE, new DecimalType(batchSize));
// execute
Parameters response = myClient
.operation()
.onServer()
.named(ProviderConstants.OPERATION_DELETE_EXPUNGE)
.withParameters(input)
.execute();
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response));
myBatchJobHelper.awaitAllBulkJobCompletions(BatchConstants.DELETE_EXPUNGE_JOB_NAME);
int deleteCount = myCaptureQueriesListener.countDeleteQueries();
myCaptureQueriesListener.logDeleteQueries();
assertThat(deleteCount, is(equalTo(19)));
}
private void validateNoErrors(List<BaseTask> tasks) {
int total = 0;

View File

@ -56,7 +56,9 @@ import javax.persistence.UniqueConstraint;
* - IDX_FORCEDID_TYPE_RESID
* so don't reuse these names
*/
@Index(name = "IDX_FORCEID_FID", columnList = "FORCED_ID")
@Index(name = "IDX_FORCEID_FID", columnList = "FORCED_ID"),
@Index(name = "IDX_FORCEID_RESID", columnList = "RESOURCE_PID"),
//TODO GGG potentially add a type + res_id index here, specifically for deletion?
})
public class ForcedId extends BasePartitionable {

View File

@ -38,7 +38,8 @@ import javax.persistence.Table;
@Table(name = "HFJ_RES_VER_PROV", indexes = {
@Index(name = "IDX_RESVERPROV_SOURCEURI", columnList = "SOURCE_URI"),
@Index(name = "IDX_RESVERPROV_REQUESTID", columnList = "REQUEST_ID")
@Index(name = "IDX_RESVERPROV_REQUESTID", columnList = "REQUEST_ID"),
@Index(name = "IDX_RESVERPROV_RESID", columnList = "RES_PID")
})
@Entity
public class ResourceHistoryProvenanceEntity extends BasePartitionable {

View File

@ -27,6 +27,7 @@ import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.SequenceGenerator;
@ -37,7 +38,9 @@ import java.io.Serializable;
@Embeddable
@Entity
@Table(name = "HFJ_HISTORY_TAG", uniqueConstraints = {
@UniqueConstraint(name = "IDX_RESHISTTAG_TAGID", columnNames = {"RES_VER_PID", "TAG_ID"})
@UniqueConstraint(name = "IDX_RESHISTTAG_TAGID", columnNames = {"RES_VER_PID", "TAG_ID"}),
}, indexes = {
@Index(name = "IDX_RESHISTTAG_RESID", columnList="RES_ID")
})
public class ResourceHistoryTag extends BaseTag implements Serializable {

View File

@ -25,6 +25,7 @@ import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
@ -45,6 +46,9 @@ public class DeleteExpungeProvider {
@OperationParam(name = ProviderConstants.OPERATION_DELETE_BATCH_SIZE, typeName = "decimal", min = 0, max = 1) IPrimitiveType<BigDecimal> theBatchSize,
RequestDetails theRequestDetails
) {
if (theUrlsToDeleteExpunge == null) {
throw new InvalidRequestException("At least one `url` parameter to $delete-expunge must be provided.");
}
List<String> urls = theUrlsToDeleteExpunge.stream().map(IPrimitiveType::getValue).collect(Collectors.toList());
Integer batchSize = myMultiUrlProcessor.getBatchSize(theBatchSize);
return myMultiUrlProcessor.processUrls(urls, batchSize, theRequestDetails);
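For completeness, a hedged client-side sketch (mirroring the stress test above) of the url parameter that the new validation requires; the wrapper class and method name are illustrative only.
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import org.hl7.fhir.r4.model.Parameters;
public class DeleteExpungeClientSketch {
	// Supplying at least one url avoids the new InvalidRequestException.
	static Parameters deleteExpunge(IGenericClient theClient, String theUrl) {
		Parameters input = new Parameters();
		input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, theUrl);
		return theClient
			.operation()
			.onServer()
			.named(ProviderConstants.OPERATION_DELETE_EXPUNGE)
			.withParameters(input)
			.execute();
	}
}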