Added architecture for the delete expunge svc

This commit is contained in:
leif stawnyczy 2023-06-09 11:52:18 -04:00
parent 0a7f97adb7
commit 27b52af892
11 changed files with 63 additions and 7 deletions

View File

@@ -0,0 +1,16 @@
package ca.uhn.fhir.jpa.bulk.mdm;
import ca.uhn.fhir.jpa.api.svc.IDeleteExpungeSvc;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Default implementation of {@link IMdmClearHelperSvc}: exposes the
 * Spring-injected {@link IDeleteExpungeSvc} for use by MDM clear processing.
 */
public class MDMClearHelperSvcImpl implements IMdmClearHelperSvc {

	// Parameterized to match the interface contract; the raw IDeleteExpungeSvc
	// type would otherwise generate an unchecked-conversion warning here.
	@Autowired
	private IDeleteExpungeSvc<? extends IResourcePersistentId<?>> myDeleteExpungeSvc;

	/**
	 * @return the delete/expunge service wired into this helper by Spring
	 */
	@Override
	public IDeleteExpungeSvc<? extends IResourcePersistentId<?>> getDeleteExpungeService() {
		return myDeleteExpungeSvc;
	}
}

View File

@@ -41,6 +41,8 @@ import ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportJobSchedulingHelperImpl;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportHelperService;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl;
import ca.uhn.fhir.jpa.bulk.mdm.IMdmClearHelperSvc;
import ca.uhn.fhir.jpa.bulk.mdm.MDMClearHelperSvcImpl;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl;
import ca.uhn.fhir.jpa.dao.DaoSearchParamProvider;
@@ -831,4 +833,9 @@ public class JpaConfig {
/**
 * Spring bean factory method for the search-URL job maintenance service,
 * constructed around the supplied {@link ResourceSearchUrlSvc}.
 */
public ISearchUrlJobMaintenanceSvc searchUrlJobMaintenanceSvc(ResourceSearchUrlSvc theResourceSearchUrlSvc) {
	ISearchUrlJobMaintenanceSvc retVal = new SearchUrlJobMaintenanceSvcImpl(theResourceSearchUrlSvc);
	return retVal;
}
/**
 * Spring bean exposing the MDM clear helper, which in turn supplies the
 * delete/expunge service to MDM clear processing.
 *
 * NOTE(review): the bean/method name "helperSvc" is kept as-is because
 * renaming it would change the Spring bean name.
 */
@Bean
public IMdmClearHelperSvc helperSvc() {
	IMdmClearHelperSvc retVal = new MDMClearHelperSvcImpl();
	return retVal;
}
}

View File

@@ -49,10 +49,10 @@ public class DeleteExpungeSvcImpl implements IDeleteExpungeSvc<JpaPid> {
DeleteExpungeSqlBuilder.DeleteExpungeSqlResult sqlResult = myDeleteExpungeSqlBuilder.convertPidsToDeleteExpungeSql(theJpaPids, theCascade, theCascadeMaxRounds);
List<String> sqlList = sqlResult.getSqlStatements();
ourLog.debug("Executing {} delete expunge sql commands", sqlList.size());
ourLog.info("Executing {} delete expunge sql commands", sqlList.size());
long totalDeleted = 0;
for (String sql : sqlList) {
ourLog.trace("Executing sql " + sql);
ourLog.info("Executing sql " + sql);
totalDeleted += myEntityManager.createNativeQuery(sql).executeUpdate();
}

View File

@@ -81,7 +81,10 @@ import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@Configuration
@Import({MdmCommonConfig.class, MdmBatch2Config.class})
@Import({
MdmCommonConfig.class,
MdmBatch2Config.class
})
public class MdmConsumerConfig {
private static final Logger ourLog = Logs.getMdmTroubleshootingLog();

View File

@@ -13,5 +13,4 @@ public class JpaEntityTest {
);
}
}

View File

@@ -70,10 +70,11 @@ public class StepExecutor {
return false;
} catch (Exception e) {
if (theStepExecutionDetails.hasAssociatedWorkChunk()) {
ourLog.error("Failure executing job {} step {}, marking chunk {} as ERRORED", jobDefinitionId, targetStepId, chunkId, e);
ourLog.info("Failure executing job {} step {}, marking chunk {} as ERRORED. Retrying", jobDefinitionId, targetStepId, chunkId, e);
WorkChunkErrorEvent parameters = new WorkChunkErrorEvent(chunkId, e.getMessage());
WorkChunkStatusEnum newStatus = myJobPersistence.onWorkChunkError(parameters);
if (newStatus == WorkChunkStatusEnum.FAILED) {
ourLog.error("Failure executing job {} step {}, marking chunk {} as ERRORED. Retries expired", jobDefinitionId, targetStepId, chunkId, e);
return false;
}
} else {

View File

@@ -30,7 +30,9 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DeleteConflictList;
import ca.uhn.fhir.jpa.api.svc.IDeleteExpungeSvc;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.bulk.mdm.IMdmClearHelperSvc;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictUtil;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
@@ -68,6 +70,9 @@ public class MdmClearStep implements IJobStepWorker<MdmClearJobParameters, Resou
@Autowired
IMdmLinkDao myMdmLinkSvc;
@Autowired
IMdmClearHelperSvc myIMdmClearHelperSvc;
@Nonnull
@Override
public RunOutcome run(@Nonnull StepExecutionDetails<MdmClearJobParameters, ResourceIdListWorkChunkJson> theStepExecutionDetails, @Nonnull IJobDataSink<VoidModel> theDataSink) throws JobExecutionFailedException {
@@ -126,10 +131,16 @@ public class MdmClearStep implements IJobStepWorker<MdmClearJobParameters, Resou
StopWatch sw = new StopWatch();
myMdmLinkSvc.deleteLinksWithAnyReferenceToPids(thePersistentIds);
ourLog.trace("Deleted {} mdm links in {}", thePersistentIds.size(), StopWatch.formatMillis(sw.getMillis()));
ourLog.info("Deleted {} mdm links in {}", thePersistentIds.size(), StopWatch.formatMillis(sw.getMillis()));
// We know the list is not empty, and that all resource types are the same, so just use the first one
String resourceName = myData.getResourceType(0);
// IDeleteExpungeSvc deleteExpungeSvc = myIMdmClearHelperSvc.getDeleteExpungeService();
// deleteExpungeSvc.deleteExpunge(
// thePersistentIds,
// false,
// null
// );
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(resourceName);
DeleteConflictList conflicts = new DeleteConflictList();

View File

@@ -321,6 +321,8 @@ public class JpaStorageSettings extends StorageSettings {
*/
public JpaStorageSettings() {
setMarkResourcesForReindexingUponSearchParameterChange(true);
// setReindexThreadCount(1);
// setExpungeThreadCount(1);
setReindexThreadCount(Runtime.getRuntime().availableProcessors());
setExpungeThreadCount(Runtime.getRuntime().availableProcessors());
setBundleTypesAllowedForStorage(DEFAULT_BUNDLE_TYPES_ALLOWED_FOR_STORAGE);

View File

@@ -0,0 +1,9 @@
package ca.uhn.fhir.jpa.bulk.mdm;
import ca.uhn.fhir.jpa.api.svc.IDeleteExpungeSvc;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
/**
 * Helper that supplies the {@link IDeleteExpungeSvc} used by MDM clear
 * processing.
 */
public interface IMdmClearHelperSvc {
/**
 * @return the delete/expunge service to use when clearing MDM-managed resources
 */
IDeleteExpungeSvc<? extends IResourcePersistentId<?>> getDeleteExpungeService();
}

View File

@@ -91,6 +91,9 @@ public class JpaModelScannerAndVerifier {
"FK_SEARCHINC_SEARCH"
);
private static Set<String> ourReservedWords;
public JpaModelScannerAndVerifier() {
super();
}
@@ -264,7 +267,12 @@ public class JpaModelScannerAndVerifier {
scanClassOrSuperclass(theNames, theClazz.getSuperclass(), true, columnNameToLength);
}
private void scan(AnnotatedElement theAnnotatedElement, Set<String> theNames, boolean theIsSuperClass, boolean theIsView) {
private void scan(
AnnotatedElement theAnnotatedElement,
Set<String> theNames,
boolean theIsSuperClass,
boolean theIsView
) {
Table table = theAnnotatedElement.getAnnotation(Table.class);
if (table != null) {