diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_0/4486-mdm-inconsistent-possible-match-scores.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_0/4486-mdm-inconsistent-possible-match-scores.yaml
new file mode 100644
index 00000000000..ef6e8cc5134
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_0/4486-mdm-inconsistent-possible-match-scores.yaml
@@ -0,0 +1,3 @@
+type: fix
+issue: 4486
+title: "Previously, some MDM links of type `POSSIBLE_MATCH` were saved with unnormalized score values. This has been fixed."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_0/4491-allow-jobs-in-finalize-to-be-cancelled.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_0/4491-allow-jobs-in-finalize-to-be-cancelled.yaml
new file mode 100644
index 00000000000..892d374672e
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_0/4491-allow-jobs-in-finalize-to-be-cancelled.yaml
@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 4491
+title: "Batch2 jobs in the FINALIZE state can now be
+  cancelled."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_0/4491-move-reduction-step-to-messaging-framework.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_0/4491-move-reduction-step-to-messaging-framework.yaml
new file mode 100644
index 00000000000..c958c9be329
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_4_0/4491-move-reduction-step-to-messaging-framework.yaml
@@ -0,0 +1,8 @@
+---
+type: fix
+issue: 4491
+title: "Moved the batch2 reduction step logic to the messaging queue.
+  Previously it was executed directly during the maintenance run, which
+  resulted in bugs where multiple reduction steps could be kicked off
+  for long-running jobs.
+  "
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java
index 7d9d4e38fd8..a6e4e909fd6 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java
@@ -150,6 +150,9 @@ public class IdHelperService implements IIdHelperService {
 		assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();
 		assert theRequestPartitionId != null;
+		if (theResourceId.contains("/")) {
+			theResourceId = theResourceId.substring(theResourceId.indexOf("/") + 1);
+		}
 		IdDt id = new IdDt(theResourceType, theResourceId);
 		Map>> matches = translateForcedIdToPids(theRequestPartitionId, Collections.singletonList(id),
diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java
index dc257d64787..b3e9b4c25c8 100644
--- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java
+++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java
@@ -73,11 +73,9 @@ public class MdmLinkDaoSvc

getLinkByGoldenResourcePidAndSourceResourcePid(P theGoldenResourcePid, P theSourceResourcePid) { if (theSourceResourcePid == null || theGoldenResourcePid == null) { return Optional.empty(); diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvc.java index 6c50ac1f9b9..51a4304c44a 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvc.java @@ -94,7 +94,7 @@ public class MdmMatchLinkSvc { private void handleMdmWithMultipleCandidates(IAnyResource theResource, CandidateList theCandidateList, MdmTransactionContext theMdmTransactionContext) { MatchedGoldenResourceCandidate firstMatch = theCandidateList.getFirstMatch(); - IResourcePersistentId sampleGoldenResourcePid = firstMatch.getCandidateGoldenResourcePid(); + IResourcePersistentId sampleGoldenResourcePid = firstMatch.getCandidateGoldenResourcePid(); boolean allSameGoldenResource = theCandidateList.stream() .allMatch(candidate -> candidate.getCandidateGoldenResourcePid().equals(sampleGoldenResourcePid)); @@ -105,17 +105,7 @@ public class MdmMatchLinkSvc { log(theMdmTransactionContext, "MDM received multiple match candidates, that were linked to different Golden Resources. Setting POSSIBLE_DUPLICATES and POSSIBLE_MATCHES."); //Set them all as POSSIBLE_MATCH - List goldenResources = new ArrayList<>(); - for (MatchedGoldenResourceCandidate matchedGoldenResourceCandidate : theCandidateList.getCandidates()) { - IAnyResource goldenResource = myMdmGoldenResourceFindingSvc - .getGoldenResourceFromMatchedGoldenResourceCandidate(matchedGoldenResourceCandidate, theMdmTransactionContext.getResourceType()); - MdmMatchOutcome outcome = new MdmMatchOutcome(matchedGoldenResourceCandidate.getMatchResult().vector, - matchedGoldenResourceCandidate.getMatchResult().getNormalizedScore()); - outcome.setMatchResultEnum(MdmMatchResultEnum.POSSIBLE_MATCH); - outcome.setEidMatch(theCandidateList.isEidMatch()); - myMdmLinkSvc.updateLink(goldenResource, theResource, outcome, MdmLinkSourceEnum.AUTO, theMdmTransactionContext); - goldenResources.add(goldenResource); - } + List goldenResources = createPossibleMatches(theResource, theCandidateList, theMdmTransactionContext); //Set all GoldenResources as POSSIBLE_DUPLICATE of the last GoldenResource. 
IAnyResource firstGoldenResource = goldenResources.get(0); @@ -129,6 +119,26 @@ public class MdmMatchLinkSvc { } } + private List createPossibleMatches(IAnyResource theResource, CandidateList theCandidateList, MdmTransactionContext theMdmTransactionContext) { + List goldenResources = new ArrayList<>(); + + for (MatchedGoldenResourceCandidate matchedGoldenResourceCandidate : theCandidateList.getCandidates()) { + IAnyResource goldenResource = myMdmGoldenResourceFindingSvc + .getGoldenResourceFromMatchedGoldenResourceCandidate(matchedGoldenResourceCandidate, theMdmTransactionContext.getResourceType()); + + MdmMatchOutcome outcome = new MdmMatchOutcome(matchedGoldenResourceCandidate.getMatchResult().getVector(), + matchedGoldenResourceCandidate.getMatchResult().getScore()) + .setMdmRuleCount( matchedGoldenResourceCandidate.getMatchResult().getMdmRuleCount()); + + outcome.setMatchResultEnum(MdmMatchResultEnum.POSSIBLE_MATCH); + outcome.setEidMatch(theCandidateList.isEidMatch()); + myMdmLinkSvc.updateLink(goldenResource, theResource, outcome, MdmLinkSourceEnum.AUTO, theMdmTransactionContext); + goldenResources.add(goldenResource); + } + + return goldenResources; + } + private void handleMdmWithNoCandidates(IAnyResource theResource, MdmTransactionContext theMdmTransactionContext) { log(theMdmTransactionContext, String.format("There were no matched candidates for MDM, creating a new %s Golden Resource.", theResource.getIdElement().getResourceType())); IAnyResource newGoldenResource = myGoldenResourceHelper.createGoldenResourceFromMdmSourceResource(theResource, theMdmTransactionContext); diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java index 49646c4e7ec..b9b20e7ecdc 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java @@ -100,7 +100,6 @@ public class MdmLinkDaoSvcTest extends BaseMdmR4Test { mdmLink.setUpdated(new Date()); mdmLink.setGoldenResourcePersistenceId(JpaPid.fromId(thePatientPid)); mdmLink.setSourcePersistenceId(runInTransaction(()->myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), patient))); - MdmLink saved= myMdmLinkDao.save(mdmLink); - return saved; + return myMdmLinkDao.save(mdmLink); } } diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/provider/MdmProviderQueryLinkR4Test.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/provider/MdmProviderQueryLinkR4Test.java index a8afac6393c..35f1fb7aa28 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/provider/MdmProviderQueryLinkR4Test.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/provider/MdmProviderQueryLinkR4Test.java @@ -10,6 +10,7 @@ import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; +import ca.uhn.fhir.test.utilities.RangeTestHelper; import ca.uhn.fhir.util.ParametersUtil; import ca.uhn.fhir.util.StopWatch; import org.apache.commons.lang3.StringUtils; @@ -377,7 +378,7 @@ public class MdmProviderQueryLinkR4Test extends BaseLinkR4Test { List list = getParametersByName(result, "link"); assertThat(list, hasSize(4)); List part = list.get(3).getPart(); - assertMdmLink(MDM_LINK_PROPERTY_COUNT, part, 
goldenResourceId.getValue(), patientId.getValue(), MdmMatchResultEnum.MATCH, "false", "false", "2"); + assertMdmLink(MDM_LINK_PROPERTY_COUNT, part, goldenResourceId.getValue(), patientId.getValue(), MdmMatchResultEnum.MATCH, "false", "false", ".666"); } @Test @@ -459,7 +460,7 @@ public class MdmProviderQueryLinkR4Test extends BaseLinkR4Test { assertThat(thePart.get(5).getValue().primitiveValue(), is(theNewGoldenResource)); assertThat(thePart.get(6).getName(), is("score")); - assertThat(thePart.get(6).getValue().primitiveValue(), is(theScore)); + RangeTestHelper.checkInRange(theScore, thePart.get(6).getValue().primitiveValue()); } } diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkUpdaterSvcImplTest.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkUpdaterSvcImplTest.java index 5dcbfa4739b..c514f162825 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkUpdaterSvcImplTest.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkUpdaterSvcImplTest.java @@ -16,6 +16,7 @@ import java.util.List; import static ca.uhn.fhir.mdm.api.MdmMatchResultEnum.MATCH; import static ca.uhn.fhir.mdm.api.MdmMatchResultEnum.NO_MATCH; +import static ca.uhn.fhir.mdm.api.MdmMatchResultEnum.POSSIBLE_MATCH; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotEquals; @@ -52,6 +53,23 @@ class MdmLinkUpdaterSvcImplTest extends BaseMdmR4Test { assertLinksMatchedByEid(false, false); } + @Test + public void testUpdateLinkPossibleMatchSavesNormalizedScore() { + final Patient goldenPatient = createGoldenPatient(buildJanePatient()); + final Patient patient1 = createPatient(buildJanePatient()); + buildUpdateLinkMdmTransactionContext(); + + MdmMatchOutcome matchOutcome = new MdmMatchOutcome(61L, 5.0).setMdmRuleCount(6).setMatchResultEnum(POSSIBLE_MATCH); + myMdmLinkDaoSvc.createOrUpdateLinkEntity(goldenPatient, patient1, matchOutcome, MdmLinkSourceEnum.MANUAL, createContextForCreate("Patient")); + + final List targets = myMdmLinkDaoSvc.findMdmLinksByGoldenResource(goldenPatient); + assertFalse(targets.isEmpty()); + assertEquals(1, targets.size()); + final MdmLink mdmLink = targets.get(0); + + assertEquals(matchOutcome.getNormalizedScore(), mdmLink.getScore()); + } + @Test public void testUpdateLinkMatchAfterVersionChange() { myMdmSettings.getMdmRules().setVersion("1"); diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/BaseResourceProviderDstu3Test.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/BaseResourceProviderDstu3Test.java index 518668a848f..48004ff23f4 100644 --- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/BaseResourceProviderDstu3Test.java +++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/BaseResourceProviderDstu3Test.java @@ -95,6 +95,7 @@ public abstract class BaseResourceProviderDstu3Test extends BaseJpaDstu3Test { myPort = myServer.getPort(); myServerBase = myServer.getBaseUrl(); myClient = myServer.getFhirClient(); + myClient.setEncoding(EncodingEnum.JSON); myRestServer = myServer.getRestfulServer(); myClient.getInterceptorService().unregisterInterceptorsIf(t -> t instanceof LoggingInterceptor); diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java 
b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java index 2dbffa071bf..9e1ca135fe4 100644 --- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java +++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java @@ -430,7 +430,7 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test { String respString = myClient.transaction().withBundle(input).prettyPrint().execute(); ourLog.debug(respString); - Bundle bundle = myFhirContext.newXmlParser().parseResource(Bundle.class, respString); + Bundle bundle = myFhirContext.newJsonParser().parseResource(Bundle.class, respString); IdType id = new IdType(bundle.getEntry().get(0).getResponse().getLocation()); Basic basic = myClient.read().resource(Basic.class).withId(id).execute(); @@ -1098,7 +1098,7 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test { //@formatter:on fail(); } catch (PreconditionFailedException e) { - assertEquals("HTTP 412 Precondition Failed: " + Msg.code(962) + "Failed to DELETE resource with match URL \"Patient?identifier=testDeleteConditionalMultiple\" because this search matched 2 resources", + assertEquals("HTTP 412 Precondition Failed: " + Msg.code(962) + "Failed to DELETE resource with match URL \"Patient?identifier=testDeleteConditionalMultiple&_format=json\" because this search matched 2 resources", e.getMessage()); } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java index 372448608d6..f3355933943 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java @@ -34,6 +34,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -45,11 +47,13 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Optional; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import static ca.uhn.fhir.batch2.config.BaseBatch2Config.CHANNEL_NAME; import static ca.uhn.fhir.batch2.coordinator.WorkChunkProcessor.MAX_CHUNK_ERROR_COUNT; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; @@ -216,9 +220,44 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test { myLastStepLatch.awaitExpected(); } - @Test - public void testJobDefinitionWithReductionStepIT() throws InterruptedException { + private void createThreeStepReductionJob( + String theJobId, + IJobStepWorker theFirstStep, + IJobStepWorker theSecondStep, + IReductionStepWorker theReductionsStep + ) { + // create job definition (it's the test method's name) + JobDefinition jd = JobDefinition.newBuilder() + .setJobDefinitionId(theJobId) + 
.setJobDescription("test job") + .setJobDefinitionVersion(TEST_JOB_VERSION) + .setParametersType(TestJobParameters.class) + .gatedExecution() + .addFirstStep( + FIRST_STEP_ID, + "Test first step", + FirstStepOutput.class, + theFirstStep + ) + .addIntermediateStep("SECOND", + "Second step", + SecondStepOutput.class, + theSecondStep) + .addFinalReducerStep( + LAST_STEP_ID, + "Test last step", + ReductionStepOutput.class, + theReductionsStep + ) + .build(); + myJobDefinitionRegistry.addJobDefinition(jd); + } + + @ParameterizedTest + @ValueSource(booleans = { true, false }) + public void testJobDefinitionWithReductionStepIT(boolean theDelayReductionStepBool) throws InterruptedException { // setup + String jobId = new Exception().getStackTrace()[0].getMethodName() + "_" + theDelayReductionStepBool; String testInfo = "test"; AtomicInteger secondStepInt = new AtomicInteger(); @@ -235,6 +274,7 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test { SecondStepOutput output = new SecondStepOutput(); output.setValue(testInfo + secondStepInt.getAndIncrement()); sink.accept(output); + return RunOutcome.SUCCESS; }; @@ -243,63 +283,66 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test { private final ArrayList myOutput = new ArrayList<>(); + private final AtomicBoolean myBoolean = new AtomicBoolean(); + + private final AtomicInteger mySecondGate = new AtomicInteger(); + @Override public ChunkOutcome consume(ChunkExecutionDetails theChunkDetails) { myOutput.add(theChunkDetails.getData()); + // 1 because we know 2 packets are coming. + // we'll fire the second maintenance run on the second packet + // which should cause multiple maintenance runs to run simultaneously + if (theDelayReductionStepBool && mySecondGate.getAndIncrement() == 1) { + ourLog.info("SECOND FORCED MAINTENANCE PASS FORCED"); + myBatch2JobHelper.forceRunMaintenancePass(); + } return ChunkOutcome.SUCCESS(); } @Nonnull @Override - public RunOutcome run(@Nonnull StepExecutionDetails theStepExecutionDetails, - @Nonnull IJobDataSink theDataSink) throws JobExecutionFailedException { - theDataSink.accept(new ReductionStepOutput(myOutput)); - callLatch(myLastStepLatch, theStepExecutionDetails); + public RunOutcome run( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink + ) throws JobExecutionFailedException { + boolean isRunAlready = myBoolean.getAndSet(true); + assertFalse(isRunAlready, "Reduction step should only be called once!"); + + complete(theStepExecutionDetails, theDataSink); return RunOutcome.SUCCESS; } - }; - // create job definition - String jobId = new Exception().getStackTrace()[0].getMethodName(); - JobDefinition jd = JobDefinition.newBuilder() - .setJobDefinitionId(jobId) - .setJobDescription("test job") - .setJobDefinitionVersion(TEST_JOB_VERSION) - .setParametersType(TestJobParameters.class) - .gatedExecution() - .addFirstStep( - FIRST_STEP_ID, - "Test first step", - FirstStepOutput.class, - first - ) - .addIntermediateStep("SECOND", - "Second step", - SecondStepOutput.class, - second) - .addFinalReducerStep( - LAST_STEP_ID, - "Test last step", - ReductionStepOutput.class, - last - ) - .build(); - myJobDefinitionRegistry.addJobDefinition(jd); + private void complete( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink + ) { + assertTrue(myBoolean.get()); + theDataSink.accept(new ReductionStepOutput(myOutput)); + callLatch(myLastStepLatch, theStepExecutionDetails); + } + }; + createThreeStepReductionJob(jobId, first, second, last); // 
run test JobInstanceStartRequest request = buildRequest(jobId); myFirstStepLatch.setExpectedCount(1); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(request); + String instanceId = startResponse.getJobId(); myFirstStepLatch.awaitExpected(); - + assertNotNull(instanceId); myBatch2JobHelper.awaitGatedStepId(FIRST_STEP_ID, instanceId); // wait for last step to finish + ourLog.info("Setting last step latch"); myLastStepLatch.setExpectedCount(1); + + // waiting myBatch2JobHelper.awaitJobCompletion(instanceId); myLastStepLatch.awaitExpected(); + ourLog.info("awaited the last step"); // verify Optional instanceOp = myJobPersistence.fetchInstance(instanceId); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/DiffProviderR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/DiffProviderR4Test.java index a51d95528bf..5fc0faa02de 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/DiffProviderR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/DiffProviderR4Test.java @@ -47,8 +47,8 @@ public class DiffProviderR4Test extends BaseResourceProviderR4Test { Assertions.assertEquals("replace", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 0, "operation", "type")); Assertions.assertEquals("Patient.text.div", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 0, "operation", "path")); - Assertions.assertEquals("

", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 0, "operation", "previousValue")); - Assertions.assertEquals("
SMITH
", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 0, "operation", "value")); + Assertions.assertEquals("
", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 0, "operation", "previousValue")); + Assertions.assertEquals("
SMITH
", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 0, "operation", "value")); Assertions.assertEquals("insert", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 1, "operation", "type")); Assertions.assertEquals("Patient.name", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 1, "operation", "path")); @@ -86,8 +86,8 @@ public class DiffProviderR4Test extends BaseResourceProviderR4Test { Assertions.assertEquals("replace", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 2, "operation", "type")); Assertions.assertEquals("Patient.text.div", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 2, "operation", "path")); - Assertions.assertEquals("
", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 2, "operation", "previousValue")); - Assertions.assertEquals("
SMITH
", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 2, "operation", "value")); + Assertions.assertEquals("
", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 2, "operation", "previousValue")); + Assertions.assertEquals("
SMITH
", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 2, "operation", "value")); Assertions.assertEquals("insert", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 3, "operation", "type")); Assertions.assertEquals("Patient.name", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 3, "operation", "path")); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java index d99ff943d0e..9bc5c5a4fbf 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java @@ -764,6 +764,8 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { public void testUpdateResourceAfterReadOperationAndNoChangesShouldNotChangeVersion(){ // Create Patient Patient patient = new Patient(); + patient.getText().setDivAsString("
hello
"); + patient = (Patient) myClient.create().resource(patient).execute().getResource(); assertEquals(1, patient.getIdElement().getVersionIdPartAsLong()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java index b93e8c42edc..45628664675 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java @@ -477,6 +477,7 @@ public class TerminologySvcImplR4Test extends BaseTermR4Test { myCodeSystemDao.update(codeSystem, mySrd); await().until(() -> { + myBatch2JobHelper.runMaintenancePass(); myTerminologyDeferredStorageSvc.saveAllDeferred(); return myTerminologyDeferredStorageSvc.isStorageQueueEmpty(true); }, equalTo(true)); diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5Test.java index 9cb7aef1cb9..2efa9a0c90c 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5Test.java @@ -227,19 +227,12 @@ public class ResourceProviderR5Test extends BaseResourceProviderR5Test { try (CloseableHttpResponse resp = ourHttpClient.execute(post)) { String respString = IOUtils.toString(resp.getEntity().getContent(), Charsets.UTF_8); ourLog.debug(respString); -// assertEquals(200, resp.getStatusLine().getStatusCode()); + assertEquals(200, resp.getStatusLine().getStatusCode()); + // As of 2023-01-26, the above line was restored. // As of 2021-12-28, the R5 structures return a version string that isn't // actually in the fhirVersion ValueSet. If this stops being the case this // test will fail and the line above should be restored - OperationOutcome oo = myFhirCtx.newJsonParser().parseResource(OperationOutcome.class, respString); - assertEquals(1, oo.getIssue().size()); -// assertThat(oo.getIssue().get(0).getDiagnostics(), containsString("is not in the value set 'FHIRVersion'")); - //As of 2022-10-06, the error is now that RequestGroup is not in the resourcetypes valueset, (though it is). - - //TODO JA: I'm not sure if i have to update this valueset somewhere? the linked valueset _does_ contain the resource type. 
- assertThat(oo.getIssue().get(0).getDiagnostics(), containsString("is not in the value set 'Resource Types'")); - } } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/Batch2JobHelper.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/Batch2JobHelper.java index 0af74a7fa66..c218b07ab23 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/Batch2JobHelper.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/Batch2JobHelper.java @@ -71,7 +71,6 @@ public class Batch2JobHelper { return awaitJobHasStatusWithoutMaintenancePass(theBatchJobId, StatusEnum.COMPLETED); } - public JobInstance awaitJobCancelled(String theBatchJobId) { return awaitJobHasStatus(theBatchJobId, StatusEnum.CANCELLED); } @@ -106,7 +105,6 @@ public class Batch2JobHelper { return myJobCoordinator.getInstance(theBatchJobId); } - public JobInstance awaitJobawaitJobHasStatusWithoutMaintenancePass(String theBatchJobId, int theSecondsToWait, StatusEnum... theExpectedStatus) { assert !TransactionSynchronizationManager.isActualTransactionActive(); @@ -168,7 +166,6 @@ public class Batch2JobHelper { public long getCombinedRecordsProcessed(String theJobId) { JobInstance job = myJobCoordinator.getInstance(theJobId); return job.getCombinedRecordsProcessed(); - } public void awaitAllJobsOfJobDefinitionIdToComplete(String theJobDefinitionId) { @@ -243,6 +240,14 @@ public class Batch2JobHelper { myJobMaintenanceService.runMaintenancePass(); } + /** + * Forces a run of the maintenance pass without waiting for + * the semaphore to release + */ + public void forceRunMaintenancePass() { + myJobMaintenanceService.forceMaintenancePass(); + } + public void cancelAllJobsAndAwaitCancellation() { List instances = myJobPersistence.fetchInstances(1000, 0); for (JobInstance next : instances) { diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmMatchOutcome.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmMatchOutcome.java index 6d5f4268dda..5e0138a6bb6 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmMatchOutcome.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmMatchOutcome.java @@ -36,13 +36,13 @@ public final class MdmMatchOutcome { /** * A bitmap that indicates which rules matched */ - public final Long vector; + private final Long vector; /** * The sum of all scores for all rules evaluated. Similarity rules add the similarity score (between 0.0 and 1.0) whereas * matcher rules add either a 0.0 or 1.0. */ - public final Double score; + private final Double score; /** * Did the MDM match operation result in creating a new golden resource resource? 
@@ -134,6 +134,10 @@ public final class MdmMatchOutcome { return this; } + public Double getScore() { return score; } + + public Long getVector() { return vector; } + /** * Gets normalized score that is in the range from zero to one * diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/svc/MdmResourceMatcherSvc.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/svc/MdmResourceMatcherSvc.java index f504d5a2912..571f747372e 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/svc/MdmResourceMatcherSvc.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/svc/MdmResourceMatcherSvc.java @@ -89,12 +89,12 @@ public class MdmResourceMatcherSvc { MdmMatchOutcome match(IBaseResource theLeftResource, IBaseResource theRightResource) { MdmMatchOutcome matchResult = getMatchOutcome(theLeftResource, theRightResource); - MdmMatchResultEnum matchResultEnum = myMdmRulesJson.getMatchResult(matchResult.vector); + MdmMatchResultEnum matchResultEnum = myMdmRulesJson.getMatchResult(matchResult.getVector()); matchResult.setMatchResultEnum(matchResultEnum); if (ourLog.isDebugEnabled()) { ourLog.debug("{} {}: {}", matchResult.getMatchResultEnum(), theRightResource.getIdElement().toUnqualifiedVersionless(), matchResult); if (ourLog.isTraceEnabled()) { - ourLog.trace("Field matcher results:\n{}", myMdmRulesJson.getDetailedFieldMatchResultWithSuccessInformation(matchResult.vector)); + ourLog.trace("Field matcher results:\n{}", myMdmRulesJson.getDetailedFieldMatchResultWithSuccessInformation(matchResult.getVector())); } } return matchResult; @@ -133,8 +133,8 @@ public class MdmResourceMatcherSvc { ourLog.trace("Matcher {} is valid for resource type: {}. Evaluating match.", fieldComparator.getName(), resourceType); MdmMatchEvaluation matchEvaluation = fieldComparator.match(theLeftResource, theRightResource); if (matchEvaluation.match) { - vector |= (1 << i); - ourLog.trace("Match: Successfully matched matcher {} with score {}.", fieldComparator.getName(), matchEvaluation.score); + vector |= (1L << i); + ourLog.trace("Match: Successfully matched matcher {} with score {}. 
New vector: {}", fieldComparator.getName(), matchEvaluation.score, vector); } else { ourLog.trace("No match: Matcher {} did not match (score: {}).", fieldComparator.getName(), matchEvaluation.score); } diff --git a/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/BaseR4Test.java b/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/BaseR4Test.java index 337f0f1afa9..494cd93992d 100644 --- a/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/BaseR4Test.java +++ b/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/BaseR4Test.java @@ -43,8 +43,8 @@ public abstract class BaseR4Test { } protected void assertMatchResult(MdmMatchResultEnum theExpectedMatchEnum, long theExpectedVector, double theExpectedScore, boolean theExpectedNewGoldenResource, boolean theExpectedEidMatch, MdmMatchOutcome theMatchResult) { - assertEquals(theExpectedScore, theMatchResult.score, 0.001); - assertEquals(theExpectedVector, theMatchResult.vector); + assertEquals(theExpectedScore, theMatchResult.getScore(), 0.001); + assertEquals(theExpectedVector, theMatchResult.getVector()); assertEquals(theExpectedEidMatch, theMatchResult.isEidMatch()); assertEquals(theExpectedNewGoldenResource, theMatchResult.isCreatedNewResource()); assertEquals(theExpectedMatchEnum, theMatchResult.getMatchResultEnum()); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportUtil.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportUtil.java new file mode 100644 index 00000000000..043da7cda0c --- /dev/null +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportUtil.java @@ -0,0 +1,37 @@ +package ca.uhn.fhir.batch2.jobs.export; + +import ca.uhn.fhir.batch2.model.StatusEnum; +import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; +import org.slf4j.Logger; + +import static org.slf4j.LoggerFactory.getLogger; + +public class BulkExportUtil { + private static final Logger ourLog = getLogger(BulkExportUtil.class); + + private BulkExportUtil() { + + } + + /** + * Converts Batch2 StatusEnum -> BulkExportJobStatusEnum + */ + public static BulkExportJobStatusEnum fromBatchStatus(StatusEnum status) { + switch (status) { + case QUEUED: + case FINALIZE: + return BulkExportJobStatusEnum.SUBMITTED; + case COMPLETED : + return BulkExportJobStatusEnum.COMPLETE; + case IN_PROGRESS: + return BulkExportJobStatusEnum.BUILDING; + default: + ourLog.warn("Unrecognized status {}; treating as FAILED/CANCELLED/ERRORED", status.name()); + case FAILED: + case CANCELLED: + case ERRORED: + return BulkExportJobStatusEnum.ERROR; + } + } +} diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/services/Batch2JobRunnerImpl.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/services/Batch2JobRunnerImpl.java index 44561089a25..c881389938c 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/services/Batch2JobRunnerImpl.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/services/Batch2JobRunnerImpl.java @@ -22,10 +22,10 @@ package ca.uhn.fhir.batch2.jobs.services; import ca.uhn.fhir.batch2.api.IJobCoordinator; import ca.uhn.fhir.batch2.api.JobOperationResultJson; +import ca.uhn.fhir.batch2.jobs.export.BulkExportUtil; import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.batch2.model.JobInstanceStartRequest; -import 
ca.uhn.fhir.batch2.model.StatusEnum; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.api.model.Batch2JobInfo; import ca.uhn.fhir.jpa.api.model.Batch2JobOperationResult; @@ -33,7 +33,6 @@ import ca.uhn.fhir.jpa.api.model.BulkExportParameters; import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner; import ca.uhn.fhir.jpa.batch.models.Batch2BaseJobParameters; import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse; -import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.util.Batch2JobDefinitionConstants; import org.slf4j.Logger; @@ -97,7 +96,10 @@ public class Batch2JobRunnerImpl implements IBatch2JobRunner { private Batch2JobInfo fromJobInstanceToBatch2JobInfo(@Nonnull JobInstance theInstance) { Batch2JobInfo info = new Batch2JobInfo(); info.setJobId(theInstance.getInstanceId()); - info.setStatus(fromBatchStatus(theInstance.getStatus())); + // should convert this to a more generic enum for all batch2 (which is what it seems like) + // or use the status enum only (combine with bulk export enum) + // on the Batch2JobInfo + info.setStatus(BulkExportUtil.fromBatchStatus(theInstance.getStatus())); info.setCancelled(theInstance.isCancelled()); info.setStartTime(theInstance.getStartTime()); info.setEndTime(theInstance.getEndTime()); @@ -106,22 +108,6 @@ public class Batch2JobRunnerImpl implements IBatch2JobRunner { return info; } - public static BulkExportJobStatusEnum fromBatchStatus(StatusEnum status) { - switch (status) { - case QUEUED: - return BulkExportJobStatusEnum.SUBMITTED; - case COMPLETED : - return BulkExportJobStatusEnum.COMPLETE; - case IN_PROGRESS: - return BulkExportJobStatusEnum.BUILDING; - case FAILED: - case CANCELLED: - case ERRORED: - default: - return BulkExportJobStatusEnum.ERROR; - } - } - private Batch2JobStartResponse startBatch2BulkExportJob(BulkExportParameters theParameters) { JobInstanceStartRequest request = createStartRequest(theParameters); request.setParameters(BulkExportJobParameters.createFromExportJobParameters(theParameters)); diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobMaintenanceService.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobMaintenanceService.java index 1bdb55853dc..f3c4a9e2428 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobMaintenanceService.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobMaintenanceService.java @@ -20,6 +20,8 @@ package ca.uhn.fhir.batch2.api; * #L% */ +import com.google.common.annotations.VisibleForTesting; + public interface IJobMaintenanceService { /** * Do not wait for the next scheduled time for maintenance. Trigger it immediately. @@ -29,4 +31,10 @@ public interface IJobMaintenanceService { void runMaintenancePass(); + /** + * Forces a second maintenance run. 
+ * Only to be used in tests to simulate a long running maintenance step + */ + @VisibleForTesting + void forceMaintenancePass(); } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutor.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutor.java index 1a3b5c4b040..897ce6b86bd 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutor.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutor.java @@ -28,8 +28,8 @@ import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.batch2.model.JobWorkCursor; import ca.uhn.fhir.batch2.model.WorkChunk; import ca.uhn.fhir.batch2.progress.JobInstanceStatusUpdater; -import ca.uhn.fhir.util.Logs; import ca.uhn.fhir.model.api.IModelJson; +import ca.uhn.fhir.util.Logs; import org.slf4j.Logger; import javax.annotation.Nonnull; @@ -53,7 +53,7 @@ public class JobStepExecutor theCursor, @Nonnull WorkChunkProcessor theExecutor, IJobMaintenanceService theJobMaintenanceService) { myJobPersistence = theJobPersistence; diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutorFactory.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutorFactory.java index c00d048eb9b..4b5ba0e48d1 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutorFactory.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutorFactory.java @@ -46,7 +46,7 @@ public class JobStepExecutorFactory { myJobMaintenanceService = theJobMaintenanceService; } - public JobStepExecutor newJobStepExecutor(@Nonnull JobInstance theInstance, @Nonnull WorkChunk theWorkChunk, @Nonnull JobWorkCursor theCursor) { + public JobStepExecutor newJobStepExecutor(@Nonnull JobInstance theInstance, WorkChunk theWorkChunk, @Nonnull JobWorkCursor theCursor) { return new JobStepExecutor<>(myJobPersistence, myBatchJobSender, theInstance, theWorkChunk, theCursor, myJobStepExecutorSvc, myJobMaintenanceService); } } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutor.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutor.java index 94ce7eb89c6..463109a2e14 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutor.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutor.java @@ -28,8 +28,8 @@ import ca.uhn.fhir.batch2.model.JobDefinitionStep; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.batch2.model.StatusEnum; import ca.uhn.fhir.batch2.model.WorkChunk; -import ca.uhn.fhir.util.Logs; import ca.uhn.fhir.model.api.IModelJson; +import ca.uhn.fhir.util.Logs; import org.slf4j.Logger; import java.util.ArrayList; @@ -55,10 +55,14 @@ public class ReductionStepExecutor { ) { IReductionStepWorker reductionStepWorker = (IReductionStepWorker) theStep.getJobStepWorker(); - // we mark it first so that no other maintenance passes will pick this job up! - // if we shut down mid process, though, it will be stuck in FINALIZE forever :( if (!myJobPersistence.markInstanceAsStatus(theInstance.getInstanceId(), StatusEnum.FINALIZE)) { - ourLog.warn("JobInstance[{}] is already in FINALIZE state, no reducer action performed.", theInstance.getInstanceId()); + ourLog.warn( + "JobInstance[{}] is already in FINALIZE state. 
In memory status is {}. Reduction step will not rerun!"
+					+ " This could be a long-running reduction job resulting in the processed message not being acknowledged,"
+					+ " or the result of a failed process or server restarting.",
+				theInstance.getInstanceId(),
+				theInstance.getStatus().name()
+			);
 			return false;
 		}
 		theInstance.setStatus(StatusEnum.FINALIZE);
@@ -106,6 +110,8 @@ public class ReductionStepExecutor {
 					break;
 				case FAIL:
+					// non-idempotent, but failed chunks will be
+					// ignored on a second run-through of the reduction step
 					myJobPersistence.markWorkChunkAsFailed(chunk.getId(), "Step worker failed to process work chunk " + chunk.getId());
 					retval = false;
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChannelMessageHandler.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChannelMessageHandler.java
index ffaf4028e2b..a8fa8a00d25 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChannelMessageHandler.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChannelMessageHandler.java
@@ -32,8 +32,9 @@ import ca.uhn.fhir.batch2.model.JobWorkNotificationJsonMessage;
 import ca.uhn.fhir.batch2.model.StatusEnum;
 import ca.uhn.fhir.batch2.model.WorkChunk;
 import ca.uhn.fhir.batch2.progress.JobInstanceStatusUpdater;
-import ca.uhn.fhir.util.Logs;
+import ca.uhn.fhir.batch2.util.Batch2Constants;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import ca.uhn.fhir.util.Logs;
 import org.apache.commons.lang3.Validate;
 import org.slf4j.Logger;
 import org.springframework.messaging.Message;
@@ -75,17 +76,28 @@ class WorkChannelMessageHandler implements MessageHandler {
 		String chunkId = workNotification.getChunkId();
 		Validate.notNull(chunkId);
-		Optional chunkOpt = myJobPersistence.fetchWorkChunkSetStartTimeAndMarkInProgress(chunkId);
-		if (chunkOpt.isEmpty()) {
-			ourLog.error("Unable to find chunk with ID {} - Aborting", chunkId);
-			return;
+
+		boolean isReductionWorkNotification = Batch2Constants.REDUCTION_STEP_CHUNK_ID_PLACEHOLDER.equals(chunkId);
+
+		JobWorkCursor cursor = null;
+		WorkChunk workChunk = null;
+		if (!isReductionWorkNotification) {
+			Optional chunkOpt = myJobPersistence.fetchWorkChunkSetStartTimeAndMarkInProgress(chunkId);
+			if (chunkOpt.isEmpty()) {
+				ourLog.error("Unable to find chunk with ID {} - Aborting", chunkId);
+				return;
+			}
+			workChunk = chunkOpt.get();
+			ourLog.debug("Worker picked up chunk. 
[chunkId={}, stepId={}, startTime={}]", chunkId, workChunk.getTargetStepId(), workChunk.getStartTime()); - - JobWorkCursor cursor = buildCursorFromNotification(workNotification); - - Validate.isTrue(workChunk.getTargetStepId().equals(cursor.getCurrentStepId()), "Chunk %s has target step %s but expected %s", chunkId, workChunk.getTargetStepId(), cursor.getCurrentStepId()); Optional instanceOpt = myJobPersistence.fetchInstance(workNotification.getInstanceId()); JobInstance instance = instanceOpt.orElseThrow(() -> new InternalErrorException("Unknown instance: " + workNotification.getInstanceId())); diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChunkProcessor.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChunkProcessor.java index aaed2f8148d..8dba0b14492 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChunkProcessor.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChunkProcessor.java @@ -32,13 +32,12 @@ import ca.uhn.fhir.batch2.model.JobDefinitionStep; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.batch2.model.JobWorkCursor; import ca.uhn.fhir.batch2.model.WorkChunk; -import ca.uhn.fhir.util.Logs; import ca.uhn.fhir.model.api.IModelJson; +import ca.uhn.fhir.util.Logs; import org.apache.commons.lang3.Validate; import org.slf4j.Logger; import javax.annotation.Nullable; - import java.util.Optional; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -102,7 +101,7 @@ public class WorkChunkProcessor { boolean success = myReductionStepExecutor.executeReductionStep(theInstance, step, inputType, parameters); if (success) { - // Now call call the normal step executor + // Now call the normal step executor // the data sink stores the report on the instance (i.e. not chunks). 
// Assume the OT (report) data is smaller than the list of all IT data @@ -113,7 +112,6 @@ public class WorkChunkProcessor { } return new JobStepExecutorOutput<>(success, dataSink); - } else { // all other kinds of steps Validate.notNull(theWorkChunk); diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/GenerateRangeChunksStep.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/GenerateRangeChunksStep.java index 03d6320d54e..85c319ec3ef 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/GenerateRangeChunksStep.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/GenerateRangeChunksStep.java @@ -35,7 +35,7 @@ import org.slf4j.Logger; import javax.annotation.Nonnull; import java.util.Date; -import static ca.uhn.fhir.batch2.config.Batch2Constants.BATCH_START_DATE; +import static ca.uhn.fhir.batch2.util.Batch2Constants.BATCH_START_DATE; public class GenerateRangeChunksStep implements IFirstJobStepWorker { private static final Logger ourLog = Logs.getBatchTroubleshootingLog(); diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobInstanceProcessor.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobInstanceProcessor.java index c253927b0d0..5243f2473cd 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobInstanceProcessor.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobInstanceProcessor.java @@ -22,7 +22,6 @@ package ca.uhn.fhir.batch2.maintenance; import ca.uhn.fhir.batch2.api.IJobPersistence; import ca.uhn.fhir.batch2.channel.BatchJobSender; -import ca.uhn.fhir.batch2.coordinator.JobStepExecutorOutput; import ca.uhn.fhir.batch2.coordinator.WorkChunkProcessor; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.batch2.model.JobWorkCursor; @@ -30,11 +29,11 @@ import ca.uhn.fhir.batch2.model.JobWorkNotification; import ca.uhn.fhir.batch2.model.StatusEnum; import ca.uhn.fhir.batch2.progress.JobInstanceProgressCalculator; import ca.uhn.fhir.batch2.progress.JobInstanceStatusUpdater; +import ca.uhn.fhir.batch2.util.Batch2Constants; import ca.uhn.fhir.util.Logs; import org.apache.commons.lang3.time.DateUtils; import org.slf4j.Logger; -import java.util.Date; import java.util.List; public class JobInstanceProcessor { @@ -125,6 +124,8 @@ public class JobInstanceProcessor { private void triggerGatedExecutions() { if (!myInstance.isRunning()) { + ourLog.debug("JobInstance {} is not in a \"running\" state. 
Status {}", + myInstance.getInstanceId(), myInstance.getStatus().name()); return; } @@ -136,9 +137,12 @@ public class JobInstanceProcessor { // final step if (jobWorkCursor.isFinalStep() && !jobWorkCursor.isReductionStep()) { + ourLog.debug("Job instance {} is in final step and it's not a reducer step", myInstance.getInstanceId()); return; } + // we should not be sending a second reduction step + // to the queue if it's in finalize status if (jobWorkCursor.isReductionStep() && myInstance.getStatus() == StatusEnum.FINALIZE) { ourLog.warn("Job instance {} is still finalizing - a second reduction job will not be started.", myInstance.getInstanceId()); return; @@ -179,16 +183,13 @@ public class JobInstanceProcessor { myJobPersistence.updateInstance(myInstance); } - private void processReductionStep(JobWorkCursor jobWorkCursor) { - // do execution of the final step now - // (ie, we won't send to job workers) - JobStepExecutorOutput result = myJobExecutorSvc.doExecution( - JobWorkCursor.fromJobDefinitionAndRequestedStepId(myInstance.getJobDefinition(), jobWorkCursor.nextStep.getStepId()), + private void processReductionStep(JobWorkCursor theWorkCursor) { + JobWorkNotification workNotification = new JobWorkNotification( myInstance, - null); - if (!result.isSuccessful()) { - myInstance.setEndTime(new Date()); - myJobInstanceStatusUpdater.setFailed(myInstance); - } + theWorkCursor.nextStep.getStepId(), + Batch2Constants.REDUCTION_STEP_CHUNK_ID_PLACEHOLDER // chunk id; we don't need it + ); + ourLog.debug("Submitting a Work Notification for a job reduction step. No associated work chunk ids are available."); + myBatchJobSender.sendWorkChannelMessage(workNotification); } } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobMaintenanceServiceImpl.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobMaintenanceServiceImpl.java index 2c89769feef..26ec4eb8899 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobMaintenanceServiceImpl.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobMaintenanceServiceImpl.java @@ -179,6 +179,16 @@ public class JobMaintenanceServiceImpl implements IJobMaintenanceService, IHasSc return myRunMaintenanceSemaphore.getQueueLength(); } + @VisibleForTesting + public void forceMaintenancePass() { + // to simulate a long running job! 
+ ourLog.info( + "Forcing a maintenance pass run; semaphore at {}", + getQueueLength() + ); + doMaintenancePass(); + } + @Override public void runMaintenancePass() { if (!myRunMaintenanceSemaphore.tryAcquire()) { @@ -204,6 +214,7 @@ public class JobMaintenanceServiceImpl implements IJobMaintenanceService, IHasSc myJobDefinitionRegistry.setJobDefinition(instance); JobInstanceProcessor jobInstanceProcessor = new JobInstanceProcessor(myJobPersistence, myBatchJobSender, instance, progressAccumulator, myJobExecutorSvc); + ourLog.debug("Triggering maintenance process for instance {} in status {}", instance.getInstanceId(), instance.getStatus().name()); jobInstanceProcessor.process(); } } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstance.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstance.java index 903d80052ef..c1752db433a 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstance.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstance.java @@ -24,6 +24,7 @@ import ca.uhn.fhir.batch2.api.IJobInstance; import ca.uhn.fhir.jpa.util.JsonDateDeserializer; import ca.uhn.fhir.jpa.util.JsonDateSerializer; import ca.uhn.fhir.model.api.IModelJson; +import ca.uhn.fhir.util.Logs; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; @@ -32,7 +33,6 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import java.util.Date; -import java.util.Objects; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -359,10 +359,24 @@ public class JobInstance extends JobInstanceStartRequest implements IModelJson, } /** - * Returns true if the job instance is in {@link StatusEnum#IN_PROGRESS} and is not cancelled + * Returns true if the job instance is in: + * {@link StatusEnum#IN_PROGRESS} + * {@link StatusEnum#FINALIZE} + * and is not cancelled */ public boolean isRunning() { - return getStatus() == StatusEnum.IN_PROGRESS && !isCancelled(); + if (isCancelled()) { + return false; + } + + switch (getStatus()) { + case IN_PROGRESS: + case FINALIZE: + return true; + default: + Logs.getBatchTroubleshootingLog().debug("Status {} is considered \"not running\"", getStatus().name()); + } + return false; } public boolean isFinished() { @@ -376,7 +390,7 @@ public class JobInstance extends JobInstanceStartRequest implements IModelJson, } public boolean isPendingCancellationRequest() { - return myCancelled && (myStatus == StatusEnum.QUEUED || myStatus == StatusEnum.IN_PROGRESS); + return myCancelled && myStatus.isCancellable(); } /** diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/StatusEnum.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/StatusEnum.java index 69b37341829..1b6042bd16f 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/StatusEnum.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/StatusEnum.java @@ -34,51 +34,53 @@ public enum StatusEnum { /** * Task is waiting to execute and should begin with no intervention required. 
*/ - QUEUED(true, false), + QUEUED(true, false, true), /** * Task is current executing */ - IN_PROGRESS(true, false), + IN_PROGRESS(true, false, true), /** * For reduction steps */ - FINALIZE(true, false), + FINALIZE(true, false, true), /** * Task completed successfully */ - COMPLETED(false, true), + COMPLETED(false, true, false), /** * Task execution resulted in an error but the error may be transient (or transient status is unknown). * Retrying may result in success. */ - ERRORED(true, true), + ERRORED(true, true, false), /** * Task has failed and is known to be unrecoverable. There is no reason to believe that retrying will * result in a different outcome. */ - FAILED(true, true), + FAILED(true, true, false), /** * Task has been cancelled. */ - CANCELLED(true, true); + CANCELLED(true, true, false); private static final Logger ourLog = Logs.getBatchTroubleshootingLog(); private final boolean myIncomplete; private final boolean myEnded; + private final boolean myIsCancellable; private static StatusEnum[] ourIncompleteStatuses; private static Set ourEndedStatuses; private static Set ourNotEndedStatuses; - StatusEnum(boolean theIncomplete, boolean theEnded) { + StatusEnum(boolean theIncomplete, boolean theEnded, boolean theIsCancellable) { myIncomplete = theIncomplete; myEnded = theEnded; + myIsCancellable = theIsCancellable; } /** @@ -186,4 +188,8 @@ public enum StatusEnum { public boolean isIncomplete() { return myIncomplete; } + + public boolean isCancellable() { + return myIsCancellable; + } } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/Batch2Constants.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/util/Batch2Constants.java similarity index 82% rename from hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/Batch2Constants.java rename to hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/util/Batch2Constants.java index 6fd7e29917b..8855c5dee24 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/Batch2Constants.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/util/Batch2Constants.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.batch2.config; +package ca.uhn.fhir.batch2.util; /*- * #%L @@ -30,4 +30,10 @@ public class Batch2Constants { * date when performing operations that pull resources by time windows. 
*/ public static final Date BATCH_START_DATE = new InstantType("2000-01-01T00:00:00Z").getValue(); + + /** + * This is a placeholder chunkid for the reduction step to allow it to be + * used in the message handling + */ + public static final String REDUCTION_STEP_CHUNK_ID_PLACEHOLDER = "REDUCTION"; } diff --git a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmGenerateRangeChunksStep.java b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmGenerateRangeChunksStep.java index da55e51734f..9f153bd63ba 100644 --- a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmGenerateRangeChunksStep.java +++ b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmGenerateRangeChunksStep.java @@ -26,7 +26,7 @@ import ca.uhn.fhir.batch2.api.JobExecutionFailedException; import ca.uhn.fhir.batch2.api.RunOutcome; import ca.uhn.fhir.batch2.api.StepExecutionDetails; import ca.uhn.fhir.batch2.api.VoidModel; -import ca.uhn.fhir.batch2.config.Batch2Constants; +import ca.uhn.fhir.batch2.util.Batch2Constants; import ca.uhn.fhir.mdm.batch2.clear.MdmClearJobParameters; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java index 4e44a8629da..7dcdddf5c4a 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java @@ -369,9 +369,10 @@ public class BulkDataExportProvider { myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToWriter(oo, response.getWriter()); response.getWriter().close(); break; + default: + ourLog.warn("Unrecognized status encountered: {}. Treating as BUILDING/SUBMITTED", info.getStatus().name()); case BUILDING: case SUBMITTED: - default: if (theRequestDetails.getRequestType() == RequestTypeEnum.DELETE) { handleDeleteRequest(theJobId, response, info.getStatus()); } else { diff --git a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/parser/JsonParserDstu3Test.java b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/parser/JsonParserDstu3Test.java index d2acb997c6e..a4c30f553b7 100644 --- a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/parser/JsonParserDstu3Test.java +++ b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/parser/JsonParserDstu3Test.java @@ -2217,7 +2217,7 @@ public class JsonParserDstu3Test { input = "{\"resourceType\":\"Basic\",\"id\":\"1\",\"text\":{\"status\":\"generated\",\"div\":\"
\"}}"; basic = ourCtx.newJsonParser().parseResource(Basic.class, input); - assertEquals("
", basic.getText().getDivAsString()); + assertEquals("
", basic.getText().getDivAsString()); input = "{\"resourceType\":\"Basic\",\"id\":\"1\",\"text\":{\"status\":\"generated\",\"div\":\"
\"}}"; basic = ourCtx.newJsonParser().parseResource(Basic.class, input); diff --git a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/HapiWorkerContext.java b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/HapiWorkerContext.java index 0860043f5d0..83855ed2fe3 100644 --- a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/HapiWorkerContext.java +++ b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/HapiWorkerContext.java @@ -447,4 +447,9 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext public IWorkerContext setPackageTracker(IWorkerContextManager.IPackageLoadingTracker theIPackageLoadingTracker) { throw new UnsupportedOperationException(Msg.code(220)); } + + @Override + public String getSpecUrl() { + throw new UnsupportedOperationException(Msg.code(2260)); + } } diff --git a/hapi-fhir-structures-r5/src/main/resources/org/hl7/fhir/r5/model/fhirversion.properties b/hapi-fhir-structures-r5/src/main/resources/org/hl7/fhir/r5/model/fhirversion.properties index 54eebc4a1a7..7879ad9a06b 100644 --- a/hapi-fhir-structures-r5/src/main/resources/org/hl7/fhir/r5/model/fhirversion.properties +++ b/hapi-fhir-structures-r5/src/main/resources/org/hl7/fhir/r5/model/fhirversion.properties @@ -122,7 +122,6 @@ resource.Questionnaire=org.hl7.fhir.r5.model.Questionnaire resource.QuestionnaireResponse=org.hl7.fhir.r5.model.QuestionnaireResponse resource.RegulatedAuthorization=org.hl7.fhir.r5.model.RegulatedAuthorization resource.RelatedPerson=org.hl7.fhir.r5.model.RelatedPerson -resource.RequestGroup=org.hl7.fhir.r5.model.RequestGroup resource.RequestOrchestration=org.hl7.fhir.r5.model.RequestOrchestration resource.Requirements=org.hl7.fhir.r5.model.Requirements resource.ResearchStudy=org.hl7.fhir.r5.model.ResearchStudy diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/RangeTestHelper.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/RangeTestHelper.java new file mode 100644 index 00000000000..114a15f52c1 --- /dev/null +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/RangeTestHelper.java @@ -0,0 +1,82 @@ +package ca.uhn.fhir.test.utilities; + +/*- + * #%L + * HAPI FHIR Test Utilities + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L%
+ */
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.both;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+public class RangeTestHelper {
+
+	public static final double THOUSANDTH = .001d;
+
+	public static void checkInRange(double theBase, double theValue) {
+		checkInRange(theBase, THOUSANDTH, theValue);
+	}
+
+	public static void checkInRange(double theBase, double theRange, double theValue) {
+		double lowerBound = theBase - theRange;
+		double upperBound = theBase + theRange;
+		checkWithinBounds(lowerBound, upperBound, theValue);
+	}
+
+	public static void checkInRange(String theBase, String theValue) {
+		// two nulls are considered equal, to ease test writing
+		if (theBase == null && theValue == null) {
+			return;
+		}
+
+		double value = Double.parseDouble(theValue);
+		double base = Double.parseDouble(theBase);
+		checkInRange(base, THOUSANDTH, value);
+	}
+
+	public static void checkInRange(String theBase, double theRange, String theValue) {
+		// two nulls are considered equal, to ease test writing
+		if (theBase == null && theValue == null) {
+			return;
+		}
+
+		double value = Double.parseDouble(theValue);
+		double base = Double.parseDouble(theBase);
+		checkInRange(base, theRange, value);
+	}
+
+	public static void checkWithinBounds(double theLowerBound, double theUpperBound, double theValue) {
+		assertThat(theValue, is(both(greaterThanOrEqualTo(theLowerBound)).and(lessThanOrEqualTo(theUpperBound))));
+	}
+
+	public static void checkWithinBounds(String theLowerBound, String theUpperBound, String theValue) {
+		assertNotNull(theLowerBound, "theLowerBound");
+		assertNotNull(theUpperBound, "theUpperBound");
+		assertNotNull(theValue, "theValue");
+		double lowerBound = Double.parseDouble(theLowerBound);
+		double upperBound = Double.parseDouble(theUpperBound);
+		double value = Double.parseDouble(theValue);
+		checkWithinBounds(lowerBound, upperBound, value);
+	}
+
+}
diff --git a/hapi-fhir-test-utilities/src/test/java/ca/uhn/fhir/test/utilities/RangeTestHelperTest.java b/hapi-fhir-test-utilities/src/test/java/ca/uhn/fhir/test/utilities/RangeTestHelperTest.java
new file mode 100644
index 00000000000..591444f240b
--- /dev/null
+++ b/hapi-fhir-test-utilities/src/test/java/ca/uhn/fhir/test/utilities/RangeTestHelperTest.java
@@ -0,0 +1,163 @@
+package ca.uhn.fhir.test.utilities;
+
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
+class RangeTestHelperTest {
+
+	@Nested
+	public class DefaultRange {
+
+		@Test
+		void checkInRange() {
+			RangeTestHelper.checkInRange(.83d, .829999999d);
+		}
+
+		@Test
+		void checkLower() {
+			AssertionError thrown = assertThrows(
+				AssertionError.class,
+				() -> RangeTestHelper.checkInRange(.91, .83)
+			);
+		}
+
+		@Test
+		void checkHigher() {
+			AssertionError thrown = assertThrows(
+				AssertionError.class,
+				() -> RangeTestHelper.checkInRange(.26, .25)
+			);
+		}
+
+		@Nested
+		public class WithinBounds {
+
+			@Test
+			void checkInRange() {
+				RangeTestHelper.checkWithinBounds(.91001, .91002, .910013);
+			}
+
+			@Test
+			void checkLower() {
+				AssertionError thrown = assertThrows(
+					AssertionError.class,
+					() -> RangeTestHelper.checkWithinBounds(.91001, .91002, .9013)
+				);
+			}
+
+			@Test
+			void checkHigher() {
+				AssertionError thrown = assertThrows(
+					AssertionError.class,
+					() -> RangeTestHelper.checkWithinBounds(.87, .88, .9)
+				);
+			}
+
+			@Nested
+			public class PassingStrings {
+
+				@Test
+				void checkInRange() {
+					RangeTestHelper.checkWithinBounds(".91001", ".91002", ".910013");
+				}
+
+				@Test
+				void checkLower() {
+					AssertionError thrown = assertThrows(
+						AssertionError.class,
+						() -> RangeTestHelper.checkWithinBounds(".91001", ".91002", ".9013")
+					);
+				}
+
+				@Test
+				void checkHigher() {
+					AssertionError thrown = assertThrows(
+						AssertionError.class,
+						() -> RangeTestHelper.checkWithinBounds(".87", ".88", ".9")
+					);
+				}
+			}
+		}
+
+		@Nested
+		public class PassingStrings {
+
+			@Test
+			void checkInRange() {
+				RangeTestHelper.checkInRange("0.83", "0.829999999");
+			}
+
+			@Test
+			void checkLower() {
+				AssertionError thrown = assertThrows(
+					AssertionError.class,
+					() -> RangeTestHelper.checkInRange(".91", ".83")
+				);
+			}
+
+			@Test
+			void checkHigher() {
+				AssertionError thrown = assertThrows(
+					AssertionError.class,
+					() -> RangeTestHelper.checkInRange(".26", "0.25")
+				);
+			}
+		}
+
+	}
+
+	@Nested
+	public class ProvidedRange {
+
+		@Test
+		void checkInRange() {
+			// first a value equal to the base, then a value within the provided range
+			RangeTestHelper.checkInRange(.83, .1, .83);
+			RangeTestHelper.checkInRange(.831, .02, .833);
+		}
+
+		@Test
+		void checkLower() {
+			AssertionError thrown = assertThrows(
+				AssertionError.class,
+				() -> RangeTestHelper.checkInRange(.84, .01, .82)
+			);
+		}
+
+		@Test
+		void checkHigher() {
+			AssertionError thrown = assertThrows(
+				AssertionError.class,
+				() -> RangeTestHelper.checkInRange(.2511, .0001, .2513)
+			);
+		}
+
+		@Nested
+		public class PassingStrings {
+
+			@Test
+			void checkInRange() {
+				RangeTestHelper.checkInRange(".82", .01, ".83");
+				RangeTestHelper.checkInRange(".83d", .829999999d, ".8312d");
+			}
+
+			@Test
+			void checkLower() {
+				AssertionError thrown = assertThrows(
+					AssertionError.class,
+					() -> RangeTestHelper.checkInRange(".91", .02, ".83")
+				);
+			}
+
+			@Test
+			void checkHigher() {
+				AssertionError thrown = assertThrows(
+					AssertionError.class,
+					() -> RangeTestHelper.checkInRange(".26", .03, "0.3")
+				);
+			}
+		}
+	}
+}
diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/VersionSpecificWorkerContextWrapper.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/VersionSpecificWorkerContextWrapper.java
index 435aadb4977..7cc7e8d271a 100644
--- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/VersionSpecificWorkerContextWrapper.java
+++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/VersionSpecificWorkerContextWrapper.java
@@ -171,6 +171,13 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo
 		throw new UnsupportedOperationException(Msg.code(2114));
 	}
+	@Override
+	public String getSpecUrl() {
+		return "";
+	}
+
 	@Override
 	public PackageInformation getPackageForUrl(String s) {
 		throw new UnsupportedOperationException(Msg.code(2109));
diff --git a/pom.xml b/pom.xml
index b178c74e73c..cff3c3cf940 100644
--- a/pom.xml
+++ b/pom.xml
@@ -861,9 +861,11 @@
-
+
+		5.6.881
+		1.0.3
+		-Dfile.encoding=UTF-8 -Xmx2048m
-		5.6.84
 		1.0.3
 		-Dfile.encoding=UTF-8 -Xmx2048m
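
Editor's note: the sketch below is illustrative only and is not part of the change set. It shows how the new StatusEnum.isCancellable() flag introduced above is meant to be consumed when a cancellation request arrives; the CancellationSanityChecker class and its canCancel method are hypothetical names used purely for illustration, and the ca.uhn.fhir.batch2.model package for StatusEnum is assumed.

	// Illustrative sketch only -- not part of this changeset.
	// Package for StatusEnum is assumed; class/method names are hypothetical.
	import ca.uhn.fhir.batch2.model.StatusEnum;

	public class CancellationSanityChecker {

		/**
		 * With the constructor change above, QUEUED, IN_PROGRESS and FINALIZE report
		 * isCancellable() == true, while COMPLETED, ERRORED, FAILED and CANCELLED report
		 * false. This is what allows jobs in the FINALIZE (reduction) state to be cancelled.
		 */
		public boolean canCancel(StatusEnum theStatus) {
			return theStatus.isCancellable();
		}
	}

Similarly, the new RangeTestHelper utility is intended to be called from tests as RangeTestHelper.checkInRange(expectedScore, actualScore), which passes when actualScore lies within ±0.001 (THOUSANDTH) of expectedScore, or with an explicit range via checkInRange(expected, range, actual).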