> when = when(myJpaJobPersistence.fetchInstance(anyString()));
+
+ for (JobInstance jobInstance : theJobInstances) {
+ when = when.thenReturn(Optional.of(jobInstance));
+ }
+
+ if (!theIsEnableBinaryMocks) {
+ return;
+ }
+
+ when(myBulkExportHelperSvc.toId(anyString()))
+ .thenAnswer(theInvocationOnMock -> toId(theInvocationOnMock.getArgument(0)));
+
+ when(myDaoRegistry.getResourceDao(Binary.class.getSimpleName())).thenReturn(myBinaryDao);
+ }
+
+ private IIdType toId(String theResourceId) {
+ final IIdType retVal = myFhirContext.getVersion().newIdType();
+ retVal.setValue(theResourceId);
+ return retVal;
+ }
+}
diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
index b57faeedb92..489052ae189 100644
--- a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
+++ b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-ips/pom.xml b/hapi-fhir-jpaserver-ips/pom.xml
index 705cf1ee742..740f7a310fc 100644
--- a/hapi-fhir-jpaserver-ips/pom.xml
+++ b/hapi-fhir-jpaserver-ips/pom.xml
@@ -3,7 +3,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml
index 252539e79cc..12efcf662ff 100644
--- a/hapi-fhir-jpaserver-mdm/pom.xml
+++ b/hapi-fhir-jpaserver-mdm/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java
index dc257d64787..b3e9b4c25c8 100644
--- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java
+++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java
@@ -73,11 +73,9 @@ public class MdmLinkDaoSvc getLinkByGoldenResourcePidAndSourceResourcePid(P theGoldenResourcePid, P theSourceResourcePid) {
if (theSourceResourcePid == null || theGoldenResourcePid == null) {
return Optional.empty();
diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvc.java
index 6c50ac1f9b9..51a4304c44a 100644
--- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvc.java
+++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvc.java
@@ -94,7 +94,7 @@ public class MdmMatchLinkSvc {
private void handleMdmWithMultipleCandidates(IAnyResource theResource, CandidateList theCandidateList, MdmTransactionContext theMdmTransactionContext) {
MatchedGoldenResourceCandidate firstMatch = theCandidateList.getFirstMatch();
- IResourcePersistentId sampleGoldenResourcePid = firstMatch.getCandidateGoldenResourcePid();
+ IResourcePersistentId<?> sampleGoldenResourcePid = firstMatch.getCandidateGoldenResourcePid();
boolean allSameGoldenResource = theCandidateList.stream()
.allMatch(candidate -> candidate.getCandidateGoldenResourcePid().equals(sampleGoldenResourcePid));
@@ -105,17 +105,7 @@ public class MdmMatchLinkSvc {
log(theMdmTransactionContext, "MDM received multiple match candidates, that were linked to different Golden Resources. Setting POSSIBLE_DUPLICATES and POSSIBLE_MATCHES.");
//Set them all as POSSIBLE_MATCH
- List goldenResources = new ArrayList<>();
- for (MatchedGoldenResourceCandidate matchedGoldenResourceCandidate : theCandidateList.getCandidates()) {
- IAnyResource goldenResource = myMdmGoldenResourceFindingSvc
- .getGoldenResourceFromMatchedGoldenResourceCandidate(matchedGoldenResourceCandidate, theMdmTransactionContext.getResourceType());
- MdmMatchOutcome outcome = new MdmMatchOutcome(matchedGoldenResourceCandidate.getMatchResult().vector,
- matchedGoldenResourceCandidate.getMatchResult().getNormalizedScore());
- outcome.setMatchResultEnum(MdmMatchResultEnum.POSSIBLE_MATCH);
- outcome.setEidMatch(theCandidateList.isEidMatch());
- myMdmLinkSvc.updateLink(goldenResource, theResource, outcome, MdmLinkSourceEnum.AUTO, theMdmTransactionContext);
- goldenResources.add(goldenResource);
- }
+ List<IAnyResource> goldenResources = createPossibleMatches(theResource, theCandidateList, theMdmTransactionContext);
//Set all GoldenResources as POSSIBLE_DUPLICATE of the last GoldenResource.
IAnyResource firstGoldenResource = goldenResources.get(0);
@@ -129,6 +119,26 @@ public class MdmMatchLinkSvc {
}
}
+ private List<IAnyResource> createPossibleMatches(IAnyResource theResource, CandidateList theCandidateList, MdmTransactionContext theMdmTransactionContext) {
+ List<IAnyResource> goldenResources = new ArrayList<>();
+
+ for (MatchedGoldenResourceCandidate matchedGoldenResourceCandidate : theCandidateList.getCandidates()) {
+ IAnyResource goldenResource = myMdmGoldenResourceFindingSvc
+ .getGoldenResourceFromMatchedGoldenResourceCandidate(matchedGoldenResourceCandidate, theMdmTransactionContext.getResourceType());
+
+ MdmMatchOutcome outcome = new MdmMatchOutcome(matchedGoldenResourceCandidate.getMatchResult().getVector(),
+ matchedGoldenResourceCandidate.getMatchResult().getScore())
+ .setMdmRuleCount( matchedGoldenResourceCandidate.getMatchResult().getMdmRuleCount());
+
+ outcome.setMatchResultEnum(MdmMatchResultEnum.POSSIBLE_MATCH);
+ outcome.setEidMatch(theCandidateList.isEidMatch());
+ myMdmLinkSvc.updateLink(goldenResource, theResource, outcome, MdmLinkSourceEnum.AUTO, theMdmTransactionContext);
+ goldenResources.add(goldenResource);
+ }
+
+ return goldenResources;
+ }
+
private void handleMdmWithNoCandidates(IAnyResource theResource, MdmTransactionContext theMdmTransactionContext) {
log(theMdmTransactionContext, String.format("There were no matched candidates for MDM, creating a new %s Golden Resource.", theResource.getIdElement().getResourceType()));
IAnyResource newGoldenResource = myGoldenResourceHelper.createGoldenResourceFromMdmSourceResource(theResource, theMdmTransactionContext);
diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/BaseMdmR4Test.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/BaseMdmR4Test.java
index 0ef1d0b6e1d..0232206d380 100644
--- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/BaseMdmR4Test.java
+++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/BaseMdmR4Test.java
@@ -569,6 +569,10 @@ abstract public class BaseMdmR4Test extends BaseJpaR4Test {
assertFields(MdmLink::getEidMatch, theExpectedValues);
}
+ protected void assertLinksMatchScore(Double... theExpectedValues) {
+ assertFields(MdmLink::getScore, theExpectedValues);
+ }
+
public SearchParameterMap buildGoldenResourceSearchParameterMap() {
SearchParameterMap spMap = new SearchParameterMap();
spMap.setLoadSynchronous(true);
diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java
index 49646c4e7ec..b9b20e7ecdc 100644
--- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java
+++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvcTest.java
@@ -100,7 +100,6 @@ public class MdmLinkDaoSvcTest extends BaseMdmR4Test {
mdmLink.setUpdated(new Date());
mdmLink.setGoldenResourcePersistenceId(JpaPid.fromId(thePatientPid));
mdmLink.setSourcePersistenceId(runInTransaction(()->myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), patient)));
- MdmLink saved= myMdmLinkDao.save(mdmLink);
- return saved;
+ return myMdmLinkDao.save(mdmLink);
}
}
diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/provider/MdmProviderQueryLinkR4Test.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/provider/MdmProviderQueryLinkR4Test.java
index a8afac6393c..dfe9df38af9 100644
--- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/provider/MdmProviderQueryLinkR4Test.java
+++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/provider/MdmProviderQueryLinkR4Test.java
@@ -10,6 +10,7 @@ import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
+import ca.uhn.fhir.test.utilities.RangeTestHelper;
import ca.uhn.fhir.util.ParametersUtil;
import ca.uhn.fhir.util.StopWatch;
import org.apache.commons.lang3.StringUtils;
@@ -76,9 +77,12 @@ public class MdmProviderQueryLinkR4Test extends BaseLinkR4Test {
JpaPid sourcePatient2Pid = runInTransaction(()->myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), sourcePatient2));
MdmLink possibleDuplicateMdmLink = (MdmLink) myMdmLinkDaoSvc.newMdmLink();
- possibleDuplicateMdmLink.setGoldenResourcePersistenceId(sourcePatient1Pid);
- possibleDuplicateMdmLink.setSourcePersistenceId(sourcePatient2Pid);
- possibleDuplicateMdmLink.setMatchResult(MdmMatchResultEnum.POSSIBLE_DUPLICATE).setLinkSource(MdmLinkSourceEnum.AUTO);
+ possibleDuplicateMdmLink.setGoldenResourcePersistenceId(sourcePatient1Pid)
+ .setSourcePersistenceId(sourcePatient2Pid)
+ .setMatchResult(MdmMatchResultEnum.POSSIBLE_DUPLICATE)
+ .setLinkSource(MdmLinkSourceEnum.AUTO)
+ .setScore(1.0)
+ .setRuleCount(1L);
saveLink(possibleDuplicateMdmLink);
}
@@ -89,7 +93,7 @@ public class MdmProviderQueryLinkR4Test extends BaseLinkR4Test {
List list = getParametersByName(result, "link");
assertThat(list, hasSize(1));
List part = list.get(0).getPart();
- assertMdmLink(MDM_LINK_PROPERTY_COUNT, part, mySourcePatientId.getValue(), myPatientId.getValue(), MdmMatchResultEnum.POSSIBLE_MATCH, "false", "true", null);
+ assertMdmLink(MDM_LINK_PROPERTY_COUNT, part, mySourcePatientId.getValue(), myPatientId.getValue(), MdmMatchResultEnum.POSSIBLE_MATCH, "false", "true", "1");
}
@Test
@@ -99,7 +103,7 @@ public class MdmProviderQueryLinkR4Test extends BaseLinkR4Test {
List list = getParametersByName(result, "link");
assertThat("All resources with Patient type found", list, hasSize(3));
List part = list.get(0).getPart();
- assertMdmLink(MDM_LINK_PROPERTY_COUNT, part, mySourcePatientId.getValue(), myPatientId.getValue(), MdmMatchResultEnum.POSSIBLE_MATCH, "false", "true", null);
+ assertMdmLink(MDM_LINK_PROPERTY_COUNT, part, mySourcePatientId.getValue(), myPatientId.getValue(), MdmMatchResultEnum.POSSIBLE_MATCH, "false", "true", "1");
}
@@ -377,7 +381,7 @@ public class MdmProviderQueryLinkR4Test extends BaseLinkR4Test {
List list = getParametersByName(result, "link");
assertThat(list, hasSize(4));
List part = list.get(3).getPart();
- assertMdmLink(MDM_LINK_PROPERTY_COUNT, part, goldenResourceId.getValue(), patientId.getValue(), MdmMatchResultEnum.MATCH, "false", "false", "2");
+ assertMdmLink(MDM_LINK_PROPERTY_COUNT, part, goldenResourceId.getValue(), patientId.getValue(), MdmMatchResultEnum.MATCH, "false", "false", ".666");
}
@Test
@@ -459,7 +463,7 @@ public class MdmProviderQueryLinkR4Test extends BaseLinkR4Test {
assertThat(thePart.get(5).getValue().primitiveValue(), is(theNewGoldenResource));
assertThat(thePart.get(6).getName(), is("score"));
- assertThat(thePart.get(6).getValue().primitiveValue(), is(theScore));
+ RangeTestHelper.checkInRange(theScore, thePart.get(6).getValue().primitiveValue());
}
}
diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkUpdaterSvcImplTest.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkUpdaterSvcImplTest.java
index 5dcbfa4739b..c514f162825 100644
--- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkUpdaterSvcImplTest.java
+++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkUpdaterSvcImplTest.java
@@ -16,6 +16,7 @@ import java.util.List;
import static ca.uhn.fhir.mdm.api.MdmMatchResultEnum.MATCH;
import static ca.uhn.fhir.mdm.api.MdmMatchResultEnum.NO_MATCH;
+import static ca.uhn.fhir.mdm.api.MdmMatchResultEnum.POSSIBLE_MATCH;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
@@ -52,6 +53,23 @@ class MdmLinkUpdaterSvcImplTest extends BaseMdmR4Test {
assertLinksMatchedByEid(false, false);
}
+ @Test
+ public void testUpdateLinkPossibleMatchSavesNormalizedScore() {
+ final Patient goldenPatient = createGoldenPatient(buildJanePatient());
+ final Patient patient1 = createPatient(buildJanePatient());
+ buildUpdateLinkMdmTransactionContext();
+
+ MdmMatchOutcome matchOutcome = new MdmMatchOutcome(61L, 5.0).setMdmRuleCount(6).setMatchResultEnum(POSSIBLE_MATCH);
+ myMdmLinkDaoSvc.createOrUpdateLinkEntity(goldenPatient, patient1, matchOutcome, MdmLinkSourceEnum.MANUAL, createContextForCreate("Patient"));
+
+ final List targets = myMdmLinkDaoSvc.findMdmLinksByGoldenResource(goldenPatient);
+ assertFalse(targets.isEmpty());
+ assertEquals(1, targets.size());
+ final MdmLink mdmLink = targets.get(0);
+
+ assertEquals(matchOutcome.getNormalizedScore(), mdmLink.getScore());
+ }
+
@Test
public void testUpdateLinkMatchAfterVersionChange() {
myMdmSettings.getMdmRules().setVersion("1");
diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvcTest.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvcTest.java
index aaba4bc6798..728cce1b41b 100644
--- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvcTest.java
+++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvcTest.java
@@ -68,6 +68,7 @@ public class MdmMatchLinkSvcTest extends BaseMdmR4Test {
assertLinksMatchResult(MATCH);
assertLinksCreatedNewResource(true);
assertLinksMatchedByEid(false);
+ assertLinksMatchScore(1.0);
}
@Test
@@ -79,6 +80,7 @@ public class MdmMatchLinkSvcTest extends BaseMdmR4Test {
assertLinksMatchResult(MATCH);
assertLinksCreatedNewResource(true);
assertLinksMatchedByEid(false);
+ assertLinksMatchScore(1.0);
}
@Test
@@ -93,6 +95,7 @@ public class MdmMatchLinkSvcTest extends BaseMdmR4Test {
assertLinksMatchResult(MATCH, MATCH);
assertLinksCreatedNewResource(true, true);
assertLinksMatchedByEid(false, false);
+ assertLinksMatchScore(1.0, 1.0);
}
@Test
@@ -107,6 +110,7 @@ public class MdmMatchLinkSvcTest extends BaseMdmR4Test {
assertLinksMatchResult(MATCH, MATCH);
assertLinksCreatedNewResource(true, false);
assertLinksMatchedByEid(false, false);
+ assertLinksMatchScore(1.0, 2.0/3.0);
}
@Test
diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml
index 06b7e2cf3d7..991fef9b60a 100644
--- a/hapi-fhir-jpaserver-model/pom.xml
+++ b/hapi-fhir-jpaserver-model/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml
index c3825b656d3..2d6a1b0ca8d 100755
--- a/hapi-fhir-jpaserver-searchparam/pom.xml
+++ b/hapi-fhir-jpaserver-searchparam/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java
index 1741f11d8ea..69d1a968cf4 100644
--- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java
+++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java
@@ -1818,10 +1818,11 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
nextId = valueRef.getResource().getIdElement();
}
- if (nextId == null ||
- nextId.isEmpty() ||
- nextId.getValue().startsWith("urn:")) {
- // Ignore placeholder references
+ if (
+ nextId == null ||
+ nextId.isEmpty()
+ ) {
+ // Ignore placeholder references that are blank
} else if (!theWantLocalReferences && nextId.getValue().startsWith("#")) {
// Ignore local refs unless we specifically want them
} else {
diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java
index d647ca527cc..425f78fc52f 100644
--- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java
+++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java
@@ -105,8 +105,10 @@ public class SearchParamExtractorService {
}
/**
- * This method is responsible for scanning a resource for all of the search parameter instances. I.e. for all search parameters defined for
- * a given resource type, it extracts the associated indexes and populates {@literal theParams}.
+ * This method is responsible for scanning a resource for all of the search parameter instances.
+ * I.e. for all search parameters defined for
+ * a given resource type, it extracts the associated indexes and populates
+ * {@literal theParams}.
*/
public void extractFromResource(RequestPartitionId theRequestPartitionId, RequestDetails theRequestDetails, ResourceIndexedSearchParams theNewParams, ResourceIndexedSearchParams theExistingParams, ResourceTable theEntity, IBaseResource theResource, TransactionDetails theTransactionDetails, boolean theFailOnInvalidReference) {
diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml
index 39e1b4a3c67..1ec3b1d97fc 100644
--- a/hapi-fhir-jpaserver-subscription/pom.xml
+++ b/hapi-fhir-jpaserver-subscription/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionActivatingSubscriber.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionActivatingSubscriber.java
index 0507d1a5993..0db6757cfce 100644
--- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionActivatingSubscriber.java
+++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionActivatingSubscriber.java
@@ -32,6 +32,7 @@ import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.SubscriptionUtil;
+import org.hl7.fhir.dstu2.model.Subscription;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -144,4 +145,9 @@ public class SubscriptionActivatingSubscriber extends BaseSubscriberForSubscript
}
}
+ public boolean isChannelTypeSupported(IBaseResource theSubscription) {
+ Subscription.SubscriptionChannelType channelType = mySubscriptionCanonicalizer.getChannelType(theSubscription).toCanonical();
+ return myDaoConfig.getSupportedSubscriptionTypes().contains(channelType);
+ }
+
}
diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionLoader.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionLoader.java
index 35e06522741..6f5b179e859 100644
--- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionLoader.java
+++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionLoader.java
@@ -228,14 +228,24 @@ public class SubscriptionLoader implements IResourceChangeListener {
* @return true if activated
*/
private boolean activateSubscriptionIfRequested(IBaseResource theSubscription) {
+ boolean successfullyActivated = false;
+
if (SubscriptionConstants.REQUESTED_STATUS.equals(mySubscriptionCanonicalizer.getSubscriptionStatus(theSubscription))) {
- // internally, subscriptions that cannot activate will be set to error
- if (mySubscriptionActivatingInterceptor.activateSubscriptionIfRequired(theSubscription)) {
- return true;
+ if (mySubscriptionActivatingInterceptor.isChannelTypeSupported(theSubscription)) {
+ // internally, subscriptions that cannot activate will be set to error
+ if (mySubscriptionActivatingInterceptor.activateSubscriptionIfRequired(theSubscription)) {
+ successfullyActivated = true;
+ } else {
+ logSubscriptionNotActivatedPlusErrorIfPossible(theSubscription);
+ }
+ } else {
+ ourLog.debug("Could not activate subscription {} because channel type {} is not supported.",
+ theSubscription.getIdElement(),
+ mySubscriptionCanonicalizer.getChannelType(theSubscription));
}
- logSubscriptionNotActivatedPlusErrorIfPossible(theSubscription);
}
- return false;
+
+ return successfullyActivated;
}
/**
diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionValidatingInterceptor.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionValidatingInterceptor.java
index eb81ac3a00b..12f7d275214 100644
--- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionValidatingInterceptor.java
+++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionValidatingInterceptor.java
@@ -83,6 +83,21 @@ public class SubscriptionValidatingInterceptor {
myFhirContext = theFhirContext;
}
+ // This will be deleted once the next snapshot (6.3.15) is published
+ @Deprecated
+ public void validateSubmittedSubscription(IBaseResource theSubscription) {
+ validateSubmittedSubscription(theSubscription, null, null, Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED);
+ }
+
+ // This will be deleted once the next snapshot (6.3.15) is published
+ @Deprecated(since="6.3.14")
+ public void validateSubmittedSubscription(IBaseResource theSubscription,
+ RequestDetails theRequestDetails,
+ RequestPartitionId theRequestPartitionId) {
+
+ validateSubmittedSubscription(theSubscription, theRequestDetails, theRequestPartitionId, Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED);
+ }
+
@VisibleForTesting
void validateSubmittedSubscription(IBaseResource theSubscription,
RequestDetails theRequestDetails,
diff --git a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionLoaderTest.java b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionLoaderTest.java
index 1a720a867b1..b0fe98e11f3 100644
--- a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionLoaderTest.java
+++ b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionLoaderTest.java
@@ -134,6 +134,9 @@ public class SubscriptionLoaderTest {
when(mySubscriptionActivatingInterceptor.activateSubscriptionIfRequired(any(IBaseResource.class)))
.thenReturn(false);
+ when(mySubscriptionActivatingInterceptor.isChannelTypeSupported(any(IBaseResource.class)))
+ .thenReturn(true);
+
when(mySubscriptionCanonicalizer.getSubscriptionStatus(any())).thenReturn(SubscriptionConstants.REQUESTED_STATUS);
// test
diff --git a/hapi-fhir-jpaserver-test-dstu2/pom.xml b/hapi-fhir-jpaserver-test-dstu2/pom.xml
index 0be6f861084..b25d68bdda4 100644
--- a/hapi-fhir-jpaserver-test-dstu2/pom.xml
+++ b/hapi-fhir-jpaserver-test-dstu2/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-dstu3/pom.xml b/hapi-fhir-jpaserver-test-dstu3/pom.xml
index 0c940988c9f..8889685dbf6 100644
--- a/hapi-fhir-jpaserver-test-dstu3/pom.xml
+++ b/hapi-fhir-jpaserver-test-dstu3/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/BaseResourceProviderDstu3Test.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/BaseResourceProviderDstu3Test.java
index 518668a848f..48004ff23f4 100644
--- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/BaseResourceProviderDstu3Test.java
+++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/BaseResourceProviderDstu3Test.java
@@ -95,6 +95,7 @@ public abstract class BaseResourceProviderDstu3Test extends BaseJpaDstu3Test {
myPort = myServer.getPort();
myServerBase = myServer.getBaseUrl();
myClient = myServer.getFhirClient();
+ myClient.setEncoding(EncodingEnum.JSON);
myRestServer = myServer.getRestfulServer();
myClient.getInterceptorService().unregisterInterceptorsIf(t -> t instanceof LoggingInterceptor);
diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java
index 2dbffa071bf..9e1ca135fe4 100644
--- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java
+++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java
@@ -430,7 +430,7 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
String respString = myClient.transaction().withBundle(input).prettyPrint().execute();
ourLog.debug(respString);
- Bundle bundle = myFhirContext.newXmlParser().parseResource(Bundle.class, respString);
+ Bundle bundle = myFhirContext.newJsonParser().parseResource(Bundle.class, respString);
IdType id = new IdType(bundle.getEntry().get(0).getResponse().getLocation());
Basic basic = myClient.read().resource(Basic.class).withId(id).execute();
@@ -1098,7 +1098,7 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
//@formatter:on
fail();
} catch (PreconditionFailedException e) {
- assertEquals("HTTP 412 Precondition Failed: " + Msg.code(962) + "Failed to DELETE resource with match URL \"Patient?identifier=testDeleteConditionalMultiple\" because this search matched 2 resources",
+ assertEquals("HTTP 412 Precondition Failed: " + Msg.code(962) + "Failed to DELETE resource with match URL \"Patient?identifier=testDeleteConditionalMultiple&_format=json\" because this search matched 2 resources",
e.getMessage());
}
diff --git a/hapi-fhir-jpaserver-test-r4/pom.xml b/hapi-fhir-jpaserver-test-r4/pom.xml
index 4cb44fd2115..c4d2950f61a 100644
--- a/hapi-fhir-jpaserver-test-r4/pom.xml
+++ b/hapi-fhir-jpaserver-test-r4/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java
index 372448608d6..f3355933943 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java
@@ -34,6 +34,8 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -45,11 +47,13 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
+import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import static ca.uhn.fhir.batch2.config.BaseBatch2Config.CHANNEL_NAME;
import static ca.uhn.fhir.batch2.coordinator.WorkChunkProcessor.MAX_CHUNK_ERROR_COUNT;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
@@ -216,9 +220,44 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
myLastStepLatch.awaitExpected();
}
- @Test
- public void testJobDefinitionWithReductionStepIT() throws InterruptedException {
+ private void createThreeStepReductionJob(
+ String theJobId,
+ IJobStepWorker<TestJobParameters, VoidModel, FirstStepOutput> theFirstStep,
+ IJobStepWorker<TestJobParameters, FirstStepOutput, SecondStepOutput> theSecondStep,
+ IReductionStepWorker<TestJobParameters, SecondStepOutput, ReductionStepOutput> theReductionsStep
+ ) {
+ // create job definition (it's the test method's name)
+ JobDefinition<? extends IModelJson> jd = JobDefinition.newBuilder()
+ .setJobDefinitionId(theJobId)
+ .setJobDescription("test job")
+ .setJobDefinitionVersion(TEST_JOB_VERSION)
+ .setParametersType(TestJobParameters.class)
+ .gatedExecution()
+ .addFirstStep(
+ FIRST_STEP_ID,
+ "Test first step",
+ FirstStepOutput.class,
+ theFirstStep
+ )
+ .addIntermediateStep("SECOND",
+ "Second step",
+ SecondStepOutput.class,
+ theSecondStep)
+ .addFinalReducerStep(
+ LAST_STEP_ID,
+ "Test last step",
+ ReductionStepOutput.class,
+ theReductionsStep
+ )
+ .build();
+ myJobDefinitionRegistry.addJobDefinition(jd);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = { true, false })
+ public void testJobDefinitionWithReductionStepIT(boolean theDelayReductionStepBool) throws InterruptedException {
// setup
+ String jobId = new Exception().getStackTrace()[0].getMethodName() + "_" + theDelayReductionStepBool;
String testInfo = "test";
AtomicInteger secondStepInt = new AtomicInteger();
@@ -235,6 +274,7 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
SecondStepOutput output = new SecondStepOutput();
output.setValue(testInfo + secondStepInt.getAndIncrement());
sink.accept(output);
+
return RunOutcome.SUCCESS;
};
@@ -243,63 +283,66 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
private final ArrayList myOutput = new ArrayList<>();
+ private final AtomicBoolean myBoolean = new AtomicBoolean();
+
+ private final AtomicInteger mySecondGate = new AtomicInteger();
+
@Override
public ChunkOutcome consume(ChunkExecutionDetails theChunkDetails) {
myOutput.add(theChunkDetails.getData());
+ // 1 because we know 2 packets are coming.
+ // we'll fire the second maintenance run on the second packet
+ // which should cause multiple maintenance runs to run simultaneously
+ if (theDelayReductionStepBool && mySecondGate.getAndIncrement() == 1) {
+ ourLog.info("SECOND FORCED MAINTENANCE PASS FORCED");
+ myBatch2JobHelper.forceRunMaintenancePass();
+ }
return ChunkOutcome.SUCCESS();
}
@Nonnull
@Override
- public RunOutcome run(@Nonnull StepExecutionDetails<TestJobParameters, SecondStepOutput> theStepExecutionDetails,
- @Nonnull IJobDataSink<ReductionStepOutput> theDataSink) throws JobExecutionFailedException {
- theDataSink.accept(new ReductionStepOutput(myOutput));
- callLatch(myLastStepLatch, theStepExecutionDetails);
+ public RunOutcome run(
+ @Nonnull StepExecutionDetails<TestJobParameters, SecondStepOutput> theStepExecutionDetails,
+ @Nonnull IJobDataSink<ReductionStepOutput> theDataSink
+ ) throws JobExecutionFailedException {
+ boolean isRunAlready = myBoolean.getAndSet(true);
+ assertFalse(isRunAlready, "Reduction step should only be called once!");
+
+ complete(theStepExecutionDetails, theDataSink);
return RunOutcome.SUCCESS;
}
- };
- // create job definition
- String jobId = new Exception().getStackTrace()[0].getMethodName();
- JobDefinition<? extends IModelJson> jd = JobDefinition.newBuilder()
- .setJobDefinitionId(jobId)
- .setJobDescription("test job")
- .setJobDefinitionVersion(TEST_JOB_VERSION)
- .setParametersType(TestJobParameters.class)
- .gatedExecution()
- .addFirstStep(
- FIRST_STEP_ID,
- "Test first step",
- FirstStepOutput.class,
- first
- )
- .addIntermediateStep("SECOND",
- "Second step",
- SecondStepOutput.class,
- second)
- .addFinalReducerStep(
- LAST_STEP_ID,
- "Test last step",
- ReductionStepOutput.class,
- last
- )
- .build();
- myJobDefinitionRegistry.addJobDefinition(jd);
+ private void complete(
+ @Nonnull StepExecutionDetails<TestJobParameters, SecondStepOutput> theStepExecutionDetails,
+ @Nonnull IJobDataSink<ReductionStepOutput> theDataSink
+ ) {
+ assertTrue(myBoolean.get());
+ theDataSink.accept(new ReductionStepOutput(myOutput));
+ callLatch(myLastStepLatch, theStepExecutionDetails);
+ }
+ };
+ createThreeStepReductionJob(jobId, first, second, last);
// run test
JobInstanceStartRequest request = buildRequest(jobId);
myFirstStepLatch.setExpectedCount(1);
Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(request);
+
String instanceId = startResponse.getJobId();
myFirstStepLatch.awaitExpected();
-
+ assertNotNull(instanceId);
myBatch2JobHelper.awaitGatedStepId(FIRST_STEP_ID, instanceId);
// wait for last step to finish
+ ourLog.info("Setting last step latch");
myLastStepLatch.setExpectedCount(1);
+
+ // waiting
myBatch2JobHelper.awaitJobCompletion(instanceId);
myLastStepLatch.awaitExpected();
+ ourLog.info("awaited the last step");
// verify
Optional<JobInstance> instanceOp = myJobPersistence.fetchInstance(instanceId);
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/JpaJobPersistenceImplTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/JpaJobPersistenceImplTest.java
index ed5acc2100b..03a5a477fe7 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/JpaJobPersistenceImplTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/JpaJobPersistenceImplTest.java
@@ -21,8 +21,11 @@ import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.PageRequest;
import javax.annotation.Nonnull;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
@@ -133,6 +136,86 @@ public class JpaJobPersistenceImplTest extends BaseJpaR4Test {
});
}
+ @Test
+ public void testFetchInstanceWithStatusAndCutoff_statues() {
+ myCaptureQueriesListener.clear();
+
+ final String completedId = storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 1);
+ final String failedId = storeJobInstanceAndUpdateWithEndTime(StatusEnum.FAILED, 1);
+ final String erroredId = storeJobInstanceAndUpdateWithEndTime(StatusEnum.ERRORED, 1);
+ final String cancelledId = storeJobInstanceAndUpdateWithEndTime(StatusEnum.CANCELLED, 1);
+ storeJobInstanceAndUpdateWithEndTime(StatusEnum.QUEUED, 1);
+ storeJobInstanceAndUpdateWithEndTime(StatusEnum.IN_PROGRESS, 1);
+ storeJobInstanceAndUpdateWithEndTime(StatusEnum.FINALIZE, 1);
+
+ final LocalDateTime cutoffLocalDateTime = LocalDateTime.now()
+ .minusMinutes(0);
+ final Date cutoffDate = Date.from(cutoffLocalDateTime
+ .atZone(ZoneId.systemDefault())
+ .toInstant());
+
+ final List<JobInstance> jobInstancesByCutoff =
+ mySvc.fetchInstances(JOB_DEFINITION_ID, StatusEnum.getEndedStatuses(), cutoffDate, PageRequest.of(0, 100));
+
+ assertEquals(Set.of(completedId, failedId, erroredId, cancelledId),
+ jobInstancesByCutoff.stream()
+ .map(JobInstance::getInstanceId)
+ .collect(Collectors.toUnmodifiableSet()));
+ }
+
+ @Test
+ public void testFetchInstanceWithStatusAndCutoff_cutoffs() {
+ myCaptureQueriesListener.clear();
+
+ storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 3);
+ storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 4);
+ final String sevenMinutesAgoId = storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 7);
+ final String eightMinutesAgoId = storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 8);
+
+ final LocalDateTime cutoffLocalDateTime = LocalDateTime.now()
+ .minusMinutes(6);
+
+ final Date cutoffDate = Date.from(cutoffLocalDateTime
+ .atZone(ZoneId.systemDefault())
+ .toInstant());
+
+ final List<JobInstance> jobInstancesByCutoff =
+ mySvc.fetchInstances(JOB_DEFINITION_ID, StatusEnum.getEndedStatuses(), cutoffDate, PageRequest.of(0, 100));
+
+ myCaptureQueriesListener.logSelectQueries();
+ myCaptureQueriesListener.getSelectQueries().forEach(query -> ourLog.info("query: {}", query.getSql(true, true)));
+
+ assertEquals(Set.of(sevenMinutesAgoId, eightMinutesAgoId),
+ jobInstancesByCutoff.stream()
+ .map(JobInstance::getInstanceId)
+ .collect(Collectors.toUnmodifiableSet()));
+ }
+
+ @Test
+ public void testFetchInstanceWithStatusAndCutoff_pages() {
+ final String job1 = storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 5);
+ final String job2 = storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 5);
+ storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 5);
+ storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 5);
+ storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 5);
+ storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 5);
+
+ final LocalDateTime cutoffLocalDateTime = LocalDateTime.now()
+ .minusMinutes(0);
+
+ final Date cutoffDate = Date.from(cutoffLocalDateTime
+ .atZone(ZoneId.systemDefault())
+ .toInstant());
+
+ final List<JobInstance> jobInstancesByCutoff =
+ mySvc.fetchInstances(JOB_DEFINITION_ID, StatusEnum.getEndedStatuses(), cutoffDate, PageRequest.of(0, 2));
+
+ assertEquals(Set.of(job1, job2),
+ jobInstancesByCutoff.stream()
+ .map(JobInstance::getInstanceId)
+ .collect(Collectors.toUnmodifiableSet()));
+ }
+
/**
* Returns a set of statuses, and whether they should be successfully picked up and started by a consumer.
* @return
@@ -548,4 +631,29 @@ public class JpaJobPersistenceImplTest extends BaseJpaR4Test {
return instance;
}
+
+ @Nonnull
+ private String storeJobInstanceAndUpdateWithEndTime(StatusEnum theStatus, int minutes) {
+ final JobInstance jobInstance = new JobInstance();
+
+ jobInstance.setJobDefinitionId(JOB_DEFINITION_ID);
+ jobInstance.setStatus(theStatus);
+ jobInstance.setJobDefinitionVersion(JOB_DEF_VER);
+ jobInstance.setParameters(CHUNK_DATA);
+ jobInstance.setReport("TEST");
+
+ final String id = mySvc.storeNewInstance(jobInstance);
+
+ jobInstance.setInstanceId(id);
+ final LocalDateTime localDateTime = LocalDateTime.now()
+ .minusMinutes(minutes);
+ ourLog.info("localDateTime: {}", localDateTime);
+ jobInstance.setEndTime(Date.from(localDateTime
+ .atZone(ZoneId.systemDefault())
+ .toInstant()));
+
+ mySvc.updateInstance(jobInstance);
+
+ return id;
+ }
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportTest.java
index edc5aea249e..9038098aba2 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportTest.java
@@ -49,6 +49,7 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@@ -667,9 +668,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
assertNotNull(startResponse);
// Run a scheduled pass to build the export
- myBatch2JobHelper.awaitJobCompletion(startResponse.getJobId());
+ myBatch2JobHelper.awaitJobCompletion(startResponse.getJobId(), 60);
- await().until(() -> myJobRunner.getJobInfo(startResponse.getJobId()).getReport() != null);
+ await()
+ .atMost(120, TimeUnit.SECONDS)
+ .until(() -> myJobRunner.getJobInfo(startResponse.getJobId()).getReport() != null);
// Iterate over the files
String report = myJobRunner.getJobInfo(startResponse.getJobId()).getReport();
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkExportUseCaseTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkExportUseCaseTest.java
index 0ee9854bc5d..89bd2e9acd0 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkExportUseCaseTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkExportUseCaseTest.java
@@ -6,11 +6,15 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.BulkExportUtils;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.util.JsonUtil;
import ca.uhn.fhir.util.SearchParameterUtil;
@@ -43,7 +47,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
-import java.util.Arrays;
+import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
@@ -51,9 +55,10 @@ import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
-import java.util.stream.Collectors;
import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
@@ -61,12 +66,12 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
@@ -164,7 +169,9 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
HttpGet statusGet = new HttpGet(pollingLocation);
String expectedOriginalUrl = myClient.getServerBase() + "/$export";
try (CloseableHttpResponse status = ourHttpClient.execute(statusGet)) {
+ assertEquals(200, status.getStatusLine().getStatusCode());
String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
+ assertTrue(isNotBlank(responseContent), responseContent);
ourLog.info(responseContent);
@@ -403,6 +410,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
@AfterEach
public void after() {
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
+ myDaoConfig.setBulkExportFileMaximumCapacity(DaoConfig.DEFAULT_BULK_EXPORT_FILE_MAXIMUM_CAPACITY);
}
@Test
@@ -430,6 +438,57 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
assertThat(typeToContents.get("Observation"), containsString("obs-included"));
assertThat(typeToContents.get("Observation"), not(containsString("obs-excluded")));
}
+
+ @Test
+ public void testBulkExportWithLowMaxFileCapacity() {
+ final int numPatients = 250;
+ myDaoConfig.setBulkExportFileMaximumCapacity(1);
+ myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED);
+
+ RequestDetails details = new SystemRequestDetails();
+ List<String> patientIds = new ArrayList<>();
+ for(int i = 0; i < numPatients; i++){
+ String id = "p-"+i;
+ Patient patient = new Patient();
+ patient.setId(id);
+ myPatientDao.update(patient, details);
+ patientIds.add(id);
+ }
+
+ int patientsCreated = myPatientDao.search(SearchParameterMap.newSynchronous(), details).size();
+ assertEquals(numPatients, patientsCreated);
+
+ BulkDataExportOptions options = new BulkDataExportOptions();
+ options.setResourceTypes(Sets.newHashSet("Patient"));
+ options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+ options.setOutputFormat(Constants.CT_FHIR_NDJSON);
+
+ Batch2JobStartResponse job = myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+ myBatch2JobHelper.awaitJobCompletion(job.getJobId(), 60);
+ ourLog.debug("Job status after awaiting - {}", myJobRunner.getJobInfo(job.getJobId()).getStatus());
+ await()
+ .atMost(300, TimeUnit.SECONDS)
+ .until(() -> {
+ BulkExportJobStatusEnum status = myJobRunner.getJobInfo(job.getJobId()).getStatus();
+ if (!BulkExportJobStatusEnum.COMPLETE.equals(status)) {
+ fail("Job status was changed from COMPLETE to " + status);
+ }
+ return myJobRunner.getJobInfo(job.getJobId()).getReport() != null;
+ });
+
+ String report = myJobRunner.getJobInfo(job.getJobId()).getReport();
+ BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
+ List<String> binaryUrls = results.getResourceTypeToBinaryIds().get("Patient");
+
+ IParser jsonParser = myFhirContext.newJsonParser();
+ for(String url : binaryUrls){
+ Binary binary = myClient.read().resource(Binary.class).withUrl(url).execute();
+ assertEquals(Constants.CT_FHIR_NDJSON, binary.getContentType());
+ String resourceContents = new String(binary.getContent(), Constants.CHARSET_UTF8);
+ String resourceId = jsonParser.parseResource(resourceContents).getIdElement().getIdPart();
+ assertTrue(patientIds.contains(resourceId));
+ }
+ }
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexJobTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexJobTest.java
index 50c405bbba8..964367b365e 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexJobTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexJobTest.java
@@ -9,6 +9,7 @@ import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
+import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.test.PatientReindexTestHelper;
@@ -23,15 +24,19 @@ import org.junit.jupiter.params.provider.MethodSource;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.PostConstruct;
+import java.util.Date;
import java.util.List;
import java.util.stream.Stream;
import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class ReindexJobTest extends BaseJpaR4Test {
+ private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ReindexJobTest.class);
+
@Autowired
private IJobCoordinator myJobCoordinator;
@@ -89,6 +94,41 @@ public class ReindexJobTest extends BaseJpaR4Test {
myDaoConfig.setMarkResourcesForReindexingUponSearchParameterChange(reindexPropertyCache);
}
+ @Test
+ public void testReindexDeletedResources_byUrl_willRemoveDeletedResourceEntriesFromIndexTables(){
+ IIdType obsId = myReindexTestHelper.createObservationWithAlleleExtension(Observation.ObservationStatus.FINAL);
+
+ runInTransaction(() -> {
+ int entriesInSpIndexTokenTable = myResourceIndexedSearchParamTokenDao.countForResourceId(obsId.getIdPartAsLong());
+ assertThat(entriesInSpIndexTokenTable, equalTo(1));
+
+ // simulate resource deletion
+ ResourceTable resource = myResourceTableDao.findById(obsId.getIdPartAsLong()).get();
+ Date currentDate = new Date();
+ resource.setDeleted(currentDate);
+ resource.setUpdated(currentDate);
+ resource.setHashSha256(null);
+ resource.setVersion(2L);
+ myResourceTableDao.save(resource);
+ });
+
+ // execute reindexing
+ ReindexJobParameters parameters = new ReindexJobParameters();
+ parameters.addUrl("Observation?status=final");
+
+ JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+ startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);
+ startRequest.setParameters(parameters);
+ Batch2JobStartResponse res = myJobCoordinator.startInstance(startRequest);
+ myBatch2JobHelper.awaitJobCompletion(res);
+
+ // then
+ runInTransaction(() -> {
+ int entriesInSpIndexTokenTablePostReindexing = myResourceIndexedSearchParamTokenDao.countForResourceId(obsId.getIdPartAsLong());
+ assertThat(entriesInSpIndexTokenTablePostReindexing, equalTo(0));
+ });
+ }
+
@Test
public void testReindex_Everything() {
// setup
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/DiffProviderR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/DiffProviderR4Test.java
index a51d95528bf..5fc0faa02de 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/DiffProviderR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/DiffProviderR4Test.java
@@ -47,8 +47,8 @@ public class DiffProviderR4Test extends BaseResourceProviderR4Test {
Assertions.assertEquals("replace", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 0, "operation", "type"));
Assertions.assertEquals("Patient.text.div", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 0, "operation", "path"));
- Assertions.assertEquals("", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 0, "operation", "previousValue"));
- Assertions.assertEquals("", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 0, "operation", "value"));
+ Assertions.assertEquals("", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 0, "operation", "previousValue"));
+ Assertions.assertEquals("", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 0, "operation", "value"));
Assertions.assertEquals("insert", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 1, "operation", "type"));
Assertions.assertEquals("Patient.name", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 1, "operation", "path"));
@@ -86,8 +86,8 @@ public class DiffProviderR4Test extends BaseResourceProviderR4Test {
Assertions.assertEquals("replace", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 2, "operation", "type"));
Assertions.assertEquals("Patient.text.div", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 2, "operation", "path"));
- Assertions.assertEquals("", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 2, "operation", "previousValue"));
- Assertions.assertEquals("", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 2, "operation", "value"));
+ Assertions.assertEquals("", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 2, "operation", "previousValue"));
+ Assertions.assertEquals("", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 2, "operation", "value"));
Assertions.assertEquals("insert", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 3, "operation", "type"));
Assertions.assertEquals("Patient.name", FhirPatchApplyR4Test.extractPartValuePrimitive(diff, 3, "operation", "path"));
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ExpungeR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ExpungeR4Test.java
index 05a4e457d4b..362f87c3572 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ExpungeR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ExpungeR4Test.java
@@ -9,6 +9,9 @@ import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc;
+import ca.uhn.fhir.jpa.entity.TermCodeSystem;
+import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
+import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
@@ -35,6 +38,7 @@ import org.apache.http.client.methods.HttpDelete;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.BooleanType;
+import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.DateType;
import org.hl7.fhir.r4.model.DecimalType;
@@ -46,6 +50,7 @@ import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Quantity;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.SearchParameter;
+import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -58,11 +63,13 @@ import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
+import static ca.uhn.fhir.batch2.jobs.termcodesystem.TermCodeSystemJobConfig.TERM_CODE_SYSTEM_DELETE_JOB_NAME;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -77,6 +84,9 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
private IIdType myOneVersionObservationId;
private IIdType myTwoVersionObservationId;
private IIdType myDeletedObservationId;
+ private IIdType myOneVersionCodeSystemId;
+ private IIdType myTwoVersionCodeSystemIdV1;
+ private IIdType myTwoVersionCodeSystemIdV2;
@Autowired
private ISearchDao mySearchEntityDao;
@Autowired
@@ -200,6 +210,40 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
}
+ public void createStandardCodeSystems() {
+ CodeSystem codeSystem1 = new CodeSystem();
+ codeSystem1.setUrl(URL_MY_CODE_SYSTEM);
+ codeSystem1.setName("CS1-V1");
+ codeSystem1.setVersion("1");
+ codeSystem1.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
+ codeSystem1
+ .addConcept().setCode("C").setDisplay("Code C").addDesignation(
+ new CodeSystem.ConceptDefinitionDesignationComponent().setLanguage("en").setValue("CodeCDesignation")).addProperty(
+ new CodeSystem.ConceptPropertyComponent().setCode("CodeCProperty").setValue(new StringType("CodeCPropertyValue"))
+ )
+ .addConcept(new CodeSystem.ConceptDefinitionComponent().setCode("CA").setDisplay("Code CA")
+ .addConcept(new CodeSystem.ConceptDefinitionComponent().setCode("CAA").setDisplay("Code CAA"))
+ )
+ .addConcept(new CodeSystem.ConceptDefinitionComponent().setCode("CB").setDisplay("Code CB"));
+ codeSystem1
+ .addConcept().setCode("D").setDisplay("Code D");
+ myOneVersionCodeSystemId = myCodeSystemDao.create(codeSystem1).getId();
+
+ CodeSystem cs2v1 = new CodeSystem();
+ cs2v1.setUrl(URL_MY_CODE_SYSTEM_2);
+ cs2v1.setVersion("1");
+ cs2v1.setName("CS2-V1");
+ cs2v1.addConcept().setCode("E").setDisplay("Code E");
+ myTwoVersionCodeSystemIdV1 = myCodeSystemDao.create(cs2v1).getId();
+
+ CodeSystem cs2v2 = new CodeSystem();
+ cs2v2.setUrl(URL_MY_CODE_SYSTEM_2);
+ cs2v2.setVersion("2");
+ cs2v2.setName("CS2-V2");
+ cs2v2.addConcept().setCode("F").setDisplay("Code F");
+ myTwoVersionCodeSystemIdV2 = myCodeSystemDao.create(cs2v2).getId();
+ }
+
private IFhirResourceDao<?> getDao(IIdType theId) {
IFhirResourceDao<?> dao;
switch (theId.getResourceType()) {
@@ -209,6 +253,9 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
case "Observation":
dao = myObservationDao;
break;
+ case "CodeSystem":
+ dao = myCodeSystemDao;
+ break;
default:
fail("Restype: " + theId.getResourceType());
dao = myPatientDao;
@@ -809,6 +856,38 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
assertTrue(actualRemainingPatientHistoryRecords <= maximumRemainingPatientHistoryRecords);
}
+ @Test
+ public void testDeleteCodeSystemByUrlThenExpunge() {
+ createStandardCodeSystems();
+
+ myCodeSystemDao.deleteByUrl("CodeSystem?url=" + URL_MY_CODE_SYSTEM, null);
+ myTerminologyDeferredStorageSvc.saveDeferred();
+ myBatch2JobHelper.awaitAllJobsOfJobDefinitionIdToComplete(TERM_CODE_SYSTEM_DELETE_JOB_NAME);
+ myCodeSystemDao.expunge(new ExpungeOptions()
+ .setExpungeDeletedResources(true)
+ .setExpungeOldVersions(true), null);
+
+ assertExpunged(myOneVersionCodeSystemId);
+ assertStillThere(myTwoVersionCodeSystemIdV1);
+ assertStillThere(myTwoVersionCodeSystemIdV2);
+ runInTransaction(() -> {
+ verifyOneVersionCodeSystemChildrenExpunged();
+ verifyTwoVersionCodeSystemV1AndChildrenStillThere();
+ verifyTwoVersionCodeSystemV2AndChildrenStillThere();
+ });
+
+ myCodeSystemDao.deleteByUrl("CodeSystem?url=" + URL_MY_CODE_SYSTEM_2, null);
+ myTerminologyDeferredStorageSvc.saveDeferred();
+ myBatch2JobHelper.awaitAllJobsOfJobDefinitionIdToComplete(TERM_CODE_SYSTEM_DELETE_JOB_NAME);
+ myCodeSystemDao.expunge(new ExpungeOptions()
+ .setExpungeDeletedResources(true)
+ .setExpungeOldVersions(true), null);
+
+ assertExpunged(myTwoVersionCodeSystemIdV1);
+ assertExpunged(myTwoVersionCodeSystemIdV2);
+ runInTransaction(this::verifyCodeSystemsAndChildrenExpunged);
+ }
+
private List<Patient> createPatientsWithForcedIds(int theNumPatients) {
RequestDetails requestDetails = new SystemRequestDetails();
List<Patient> createdPatients = new ArrayList<>();
@@ -839,4 +918,60 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
myPatientDao.delete(patient.getIdElement(), requestDetails);
}
}
+
+ private void verifyOneVersionCodeSystemChildrenExpunged() {
+ List<TermCodeSystemVersion> myOneVersionCodeSystemVersions = myTermCodeSystemVersionDao.findByCodeSystemResourcePid(myOneVersionCodeSystemId.getIdPartAsLong());
+ assertEquals(0, myOneVersionCodeSystemVersions.size());
+ assertThat(myTermConceptDesignationDao.findAll(), empty());
+ assertThat(myTermConceptPropertyDao.findAll(), empty());
+ assertThat(myTermConceptParentChildLinkDao.findAll(), empty());
+ List<TermConcept> existingCodeSystemConcepts = myTermConceptDao.findAll();
+ for (TermConcept tc : existingCodeSystemConcepts) {
+ if (tc.getCode().charAt(0) == 'C' || tc.getCode().charAt(0) == 'D') {
+ fail();
+ }
+ }
+ }
+
+ private void verifyTwoVersionCodeSystemV1AndChildrenStillThere() {
+ TermCodeSystem myTwoVersionCodeSystem = myTermCodeSystemDao.findByResourcePid(myTwoVersionCodeSystemIdV2.getIdPartAsLong());
+ TermCodeSystemVersion myTwoVersionCodeSystemVersion1 = verifyTermCodeSystemVersionExistsWithDisplayName("CS2-V1");
+ assertNotEquals(myTwoVersionCodeSystem.getCurrentVersion().getPid(), myTwoVersionCodeSystemVersion1.getPid());
+ List<TermConcept> myTwoVersionCodeSystemVersion1Concepts = new ArrayList<>(myTwoVersionCodeSystemVersion1.getConcepts());
+ assertEquals(1, myTwoVersionCodeSystemVersion1Concepts.size());
+ TermConcept conceptE = myTwoVersionCodeSystemVersion1Concepts.get(0);
+ assertEquals("E", conceptE.getCode());
+ }
+
+ private void verifyTwoVersionCodeSystemV2AndChildrenStillThere() {
+ TermCodeSystem myTwoVersionCodeSystem = myTermCodeSystemDao.findByResourcePid(myTwoVersionCodeSystemIdV2.getIdPartAsLong());
+ TermCodeSystemVersion myTwoVersionCodeSystemVersion2 = verifyTermCodeSystemVersionExistsWithDisplayName("CS2-V2");
+ assertEquals(myTwoVersionCodeSystem.getCurrentVersion().getPid(), myTwoVersionCodeSystemVersion2.getPid());
+ List<TermConcept> myTwoVersionCodeSystemVersion2Concepts = new ArrayList<>(myTwoVersionCodeSystemVersion2.getConcepts());
+ assertEquals(1, myTwoVersionCodeSystemVersion2Concepts.size());
+ TermConcept conceptF = myTwoVersionCodeSystemVersion2Concepts.get(0);
+ assertEquals("F", conceptF.getCode());
+ }
+
+ private TermCodeSystemVersion verifyTermCodeSystemVersionExistsWithDisplayName(String theDisplayName) {
+ List<TermCodeSystemVersion> myCodeSystemVersions = myTermCodeSystemVersionDao.findAll();
+ for (TermCodeSystemVersion csv : myCodeSystemVersions) {
+ if (csv.getCodeSystemDisplayName().equals(theDisplayName)) {
+ return csv;
+ }
+ }
+ fail();
+ return null;
+ }
+
+ private void verifyCodeSystemsAndChildrenExpunged() {
+ assertThat(myTermCodeSystemVersionDao.findAll(), empty());
+ assertThat(myTermConceptDesignationDao.findAll(), empty());
+ assertThat(myTermConceptPropertyDao.findAll(), empty());
+ assertThat(myTermConceptParentChildLinkDao.findAll(), empty());
+ assertThat(myTermConceptDao.findAll(), empty());
+ assertThat(myResourceTableDao.findAll(), empty());
+ assertThat(myResourceHistoryTableDao.findAll(), empty());
+ assertThat(myForcedIdDao.findAll(), empty());
+ }
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
index d99ff943d0e..d9e37b30660 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.i18n.HapiLocalizer;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
@@ -12,6 +13,7 @@ import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.term.ZipCollectionBuilder;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
import ca.uhn.fhir.jpa.util.QueryParameterUtils;
@@ -27,6 +29,7 @@ import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.PreferReturnEnum;
import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
import ca.uhn.fhir.rest.api.SummaryEnum;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.client.apache.ResourceEntity;
import ca.uhn.fhir.rest.client.api.IClientInterceptor;
import ca.uhn.fhir.rest.client.api.IGenericClient;
@@ -309,12 +312,10 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertNotNull(id);
assertEquals("resource-security", id.getIdPart());
-
}
@Test
public void createSearchParameter_with2Expressions_succeeds() {
-
SearchParameter searchParameter = new SearchParameter();
searchParameter.setStatus(Enumerations.PublicationStatus.ACTIVE);
@@ -326,7 +327,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
MethodOutcome result = myClient.create().resource(searchParameter).execute();
assertEquals(true, result.getCreated());
-
}
@Test
@@ -454,7 +454,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertThat(output, containsString(MSG_PREFIX_INVALID_FORMAT + "">""));
assertEquals(400, resp.getStatusLine().getStatusCode());
}
-
}
@@ -764,6 +763,8 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
public void testUpdateResourceAfterReadOperationAndNoChangesShouldNotChangeVersion(){
// Create Patient
Patient patient = new Patient();
+ patient.getText().setDivAsString("<div>hello</div>");
+
patient = (Patient) myClient.create().resource(patient).execute().getResource();
assertEquals(1, patient.getIdElement().getVersionIdPartAsLong());
@@ -909,7 +910,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
@Test
@Disabled
- public void test() throws IOException {
+ public void testMakingQuery() throws IOException {
HttpGet get = new HttpGet(myServerBase + "/QuestionnaireResponse?_count=50&status=completed&questionnaire=ARIncenterAbsRecord&_lastUpdated=%3E" + UrlUtil.escapeUrlParam("=2018-01-01") + "&context.organization=O3435");
ourLog.info("*** MAKING QUERY");
ourHttpClient.execute(get);
@@ -7554,6 +7555,113 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertTrue(resultIds.contains("Patient/" + patientId + "/_history/2"));
}
+
+ private static class CreateResourceInput {
+ boolean IsEnforceRefOnWrite;
+ boolean IsEnforceRefOnType;
+ boolean IsAutoCreatePlaceholderReferences;
+
+ public CreateResourceInput(
+ boolean theEnforceRefOnWrite,
+ boolean theEnforceRefOnType,
+ boolean theAutoCreatePlaceholders
+ ) {
+ IsEnforceRefOnWrite = theEnforceRefOnWrite;
+ IsEnforceRefOnType = theEnforceRefOnType;
+ IsAutoCreatePlaceholderReferences = theAutoCreatePlaceholders;
+ }
+
+ @Override
+ public String toString() {
+ return "IsEnforceReferentialIntegrityOnWrite : "
+ + IsEnforceRefOnWrite + "\n"
+ + "IsEnforceReferenceTargetTypes : "
+ + IsEnforceRefOnType + "\n"
+ + "IsAutoCreatePlaceholderReferenceTargets : "
+ + IsAutoCreatePlaceholderReferences + "\n";
+ }
+ }
+
+ private static List createResourceParameters() {
+ boolean[] bools = new boolean[] { true, false };
+ List input = new ArrayList<>();
+ for (boolean bool : bools) {
+ for (boolean bool2 : bools) {
+ for (boolean bool3 : bools) {
+ input.add(new CreateResourceInput(bool, bool2, bool3));
+ }
+ }
+ }
+ return input;
+ }
+
+ @ParameterizedTest
+ @MethodSource("createResourceParameters")
+ public void createResource_refIntegrityOnWriteAndRefTargetTypes_throws(CreateResourceInput theInput) {
+ ourLog.info(
+ String.format("Test case : \n%s", theInput.toString())
+ );
+
+ String patientStr = """
+ {
+ "resourceType": "Patient",
+ "managingOrganization": {
+ "reference": "urn:uuid:d8080e87-1842-46b4-aea0-b65803bc2897"
+ }
+ }
+ """;
+ IParser parser = myFhirContext.newJsonParser();
+ Patient patient = parser.parseResource(Patient.class, patientStr);
+
+ {
+ List orgs = myOrganizationDao
+ .search(new SearchParameterMap(), new SystemRequestDetails())
+ .getAllResources();
+
+ assertTrue(orgs == null || orgs.isEmpty());
+ }
+
+ boolean isEnforceRefOnWrite = myDaoConfig.isEnforceReferentialIntegrityOnWrite();
+ boolean isEnforceRefTargetTypes = myDaoConfig.isEnforceReferenceTargetTypes();
+ boolean isAutoCreatePlaceholderReferences = myDaoConfig.isAutoCreatePlaceholderReferenceTargets();
+
+ try {
+ // allows resources to be created even if they have local resources that do not exist
+ myDaoConfig.setEnforceReferentialIntegrityOnWrite(theInput.IsEnforceRefOnWrite);
+ // ensures target references are using the correct resource type
+ myDaoConfig.setEnforceReferenceTargetTypes(theInput.IsEnforceRefOnType);
+ // will create the resource if it does not already exist
+ myDaoConfig.setAutoCreatePlaceholderReferenceTargets(theInput.IsAutoCreatePlaceholderReferences);
+
+ // should fail
+ DaoMethodOutcome result = myPatientDao.create(patient, new SystemRequestDetails());
+
+ // a bad reference can never create a new resource
+ {
+ List orgs = myOrganizationDao
+ .search(new SearchParameterMap(), new SystemRequestDetails())
+ .getAllResources();
+
+ assertTrue(orgs == null || orgs.isEmpty());
+ }
+
+ // only if all 3 are true do we expect this to fail
+ assertFalse(
+ theInput.IsAutoCreatePlaceholderReferences
+ && theInput.IsEnforceRefOnType
+ && theInput.IsEnforceRefOnWrite
+ );
+ } catch (InvalidRequestException ex) {
+ assertTrue(ex.getMessage().contains(
+ "Invalid resource reference"
+ ), ex.getMessage());
+ } finally {
+ myDaoConfig.setEnforceReferentialIntegrityOnWrite(isEnforceRefOnWrite);
+ myDaoConfig.setEnforceReferenceTargetTypes(isEnforceRefTargetTypes);
+ myDaoConfig.setAutoCreatePlaceholderReferenceTargets(isAutoCreatePlaceholderReferences);
+ }
+ }
+
@Test
public void searchResource_bySourceWithPreserveRequestIdDisabled_isSuccess() {
String sourceUri = "http://acme.org";
@@ -8083,5 +8191,4 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
);
}
}
-
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java
index b93e8c42edc..45628664675 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java
@@ -477,6 +477,7 @@ public class TerminologySvcImplR4Test extends BaseTermR4Test {
myCodeSystemDao.update(codeSystem, mySrd);
await().until(() -> {
+ myBatch2JobHelper.runMaintenancePass();
myTerminologyDeferredStorageSvc.saveAllDeferred();
return myTerminologyDeferredStorageSvc.isStorageQueueEmpty(true);
}, equalTo(true));
diff --git a/hapi-fhir-jpaserver-test-r4b/pom.xml b/hapi-fhir-jpaserver-test-r4b/pom.xml
index 309fc0bfe0d..8c3d6add245 100644
--- a/hapi-fhir-jpaserver-test-r4b/pom.xml
+++ b/hapi-fhir-jpaserver-test-r4b/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml
index b9cb45f6d29..4c6573c7514 100644
--- a/hapi-fhir-jpaserver-test-r5/pom.xml
+++ b/hapi-fhir-jpaserver-test-r5/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5Test.java
index 9cb7aef1cb9..2efa9a0c90c 100644
--- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5Test.java
+++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5Test.java
@@ -227,19 +227,12 @@ public class ResourceProviderR5Test extends BaseResourceProviderR5Test {
try (CloseableHttpResponse resp = ourHttpClient.execute(post)) {
String respString = IOUtils.toString(resp.getEntity().getContent(), Charsets.UTF_8);
ourLog.debug(respString);
-// assertEquals(200, resp.getStatusLine().getStatusCode());
+ assertEquals(200, resp.getStatusLine().getStatusCode());
+ // As of 2023-01-26, the above line was restored.
// As of 2021-12-28, the R5 structures return a version string that isn't
// actually in the fhirVersion ValueSet. If this stops being the case this
// test will fail and the line above should be restored
- OperationOutcome oo = myFhirCtx.newJsonParser().parseResource(OperationOutcome.class, respString);
- assertEquals(1, oo.getIssue().size());
-// assertThat(oo.getIssue().get(0).getDiagnostics(), containsString("is not in the value set 'FHIRVersion'"));
- //As of 2022-10-06, the error is now that RequestGroup is not in the resourcetypes valueset, (though it is).
-
- //TODO JA: I'm not sure if i have to update this valueset somewhere? the linked valueset _does_ contain the resource type.
- assertThat(oo.getIssue().get(0).getDiagnostics(), containsString("is not in the value set 'Resource Types'"));
-
}
}
diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml
index d1314d7195f..fcdeab920a9 100644
--- a/hapi-fhir-jpaserver-test-utilities/pom.xml
+++ b/hapi-fhir-jpaserver-test-utilities/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java
index ef01e04ed50..8176af776e4 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java
@@ -60,6 +60,7 @@ import ca.uhn.fhir.jpa.dao.data.ISearchIncludeDao;
import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.dao.data.ITagDefinitionDao;
+import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDesignationDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptMapDao;
@@ -216,6 +217,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
public static final String MY_VALUE_SET = "my-value-set";
public static final String URL_MY_VALUE_SET = "http://example.com/my_value_set";
public static final String URL_MY_CODE_SYSTEM = "http://example.com/my_code_system";
+ public static final String URL_MY_CODE_SYSTEM_2 = "http://example.com/my_code_system_2";
@Autowired
protected IPackageInstallerSvc myPackageInstallerSvc;
@@ -293,6 +295,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
@Qualifier("myCodeSystemDaoR4")
protected IFhirResourceDaoCodeSystem myCodeSystemDao;
@Autowired
+ protected ITermCodeSystemDao myTermCodeSystemDao;
+ @Autowired
protected ITermConceptParentChildLinkDao myTermConceptParentChildLinkDao;
@Autowired
@Qualifier("myCompartmentDefinitionDaoR4")
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/Batch2JobHelper.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/Batch2JobHelper.java
index 0af74a7fa66..c218b07ab23 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/Batch2JobHelper.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/Batch2JobHelper.java
@@ -71,7 +71,6 @@ public class Batch2JobHelper {
return awaitJobHasStatusWithoutMaintenancePass(theBatchJobId, StatusEnum.COMPLETED);
}
-
public JobInstance awaitJobCancelled(String theBatchJobId) {
return awaitJobHasStatus(theBatchJobId, StatusEnum.CANCELLED);
}
@@ -106,7 +105,6 @@ public class Batch2JobHelper {
return myJobCoordinator.getInstance(theBatchJobId);
}
-
public JobInstance awaitJobawaitJobHasStatusWithoutMaintenancePass(String theBatchJobId, int theSecondsToWait, StatusEnum... theExpectedStatus) {
assert !TransactionSynchronizationManager.isActualTransactionActive();
@@ -168,7 +166,6 @@ public class Batch2JobHelper {
public long getCombinedRecordsProcessed(String theJobId) {
JobInstance job = myJobCoordinator.getInstance(theJobId);
return job.getCombinedRecordsProcessed();
-
}
public void awaitAllJobsOfJobDefinitionIdToComplete(String theJobDefinitionId) {
@@ -243,6 +240,14 @@ public class Batch2JobHelper {
myJobMaintenanceService.runMaintenancePass();
}
+ /**
+ * Forces a run of the maintenance pass without waiting for
+ * the semaphore to release
+ */
+ public void forceRunMaintenancePass() {
+ myJobMaintenanceService.forceMaintenancePass();
+ }
+
public void cancelAllJobsAndAwaitCancellation() {
List instances = myJobPersistence.fetchInstances(1000, 0);
for (JobInstance next : instances) {
diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
index 49e2221ba3a..15479158e12 100644
--- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
+++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml
index 34e52715915..1d3328549a1 100644
--- a/hapi-fhir-server-mdm/pom.xml
+++ b/hapi-fhir-server-mdm/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmMatchOutcome.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmMatchOutcome.java
index 6d5f4268dda..d443e8b2926 100644
--- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmMatchOutcome.java
+++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmMatchOutcome.java
@@ -29,20 +29,20 @@ public final class MdmMatchOutcome {
public static final MdmMatchOutcome POSSIBLE_DUPLICATE = new MdmMatchOutcome(null, null).setMatchResultEnum(MdmMatchResultEnum.POSSIBLE_DUPLICATE);
public static final MdmMatchOutcome NO_MATCH = new MdmMatchOutcome(null, null).setMatchResultEnum(MdmMatchResultEnum.NO_MATCH);
- public static final MdmMatchOutcome NEW_GOLDEN_RESOURCE_MATCH = new MdmMatchOutcome(null, null).setMatchResultEnum(MdmMatchResultEnum.MATCH).setCreatedNewResource(true);
+ public static final MdmMatchOutcome NEW_GOLDEN_RESOURCE_MATCH = new MdmMatchOutcome(null, 1.0).setMatchResultEnum(MdmMatchResultEnum.MATCH).setCreatedNewResource(true);
public static final MdmMatchOutcome EID_MATCH = new MdmMatchOutcome(null, null).setMatchResultEnum(MdmMatchResultEnum.MATCH).setEidMatch(true);
public static final MdmMatchOutcome POSSIBLE_MATCH = new MdmMatchOutcome(null, null).setMatchResultEnum(MdmMatchResultEnum.POSSIBLE_MATCH);
/**
* A bitmap that indicates which rules matched
*/
- public final Long vector;
+ private final Long vector;
/**
* The sum of all scores for all rules evaluated. Similarity rules add the similarity score (between 0.0 and 1.0) whereas
* matcher rules add either a 0.0 or 1.0.
*/
- public final Double score;
+ private final Double score;
/**
* Did the MDM match operation result in creating a new golden resource resource?
@@ -134,6 +134,10 @@ public final class MdmMatchOutcome {
return this;
}
+ public Double getScore() { return score; }
+
+ public Long getVector() { return vector; }
+
/**
* Gets normalized score that is in the range from zero to one
*
@@ -141,7 +145,10 @@ public final class MdmMatchOutcome {
* Returns the normalized score
*/
public Double getNormalizedScore() {
- if (myMdmRuleCount == 0) {
+ if (myCreatedNewResource) {
+ // If we created a new golden resource from this match, the match score must be 1.00
+ return 1.0;
+ } else if (myMdmRuleCount == 0) {
return 0.0;
}
return score / myMdmRuleCount;
diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/svc/MdmResourceMatcherSvc.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/svc/MdmResourceMatcherSvc.java
index f504d5a2912..571f747372e 100644
--- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/svc/MdmResourceMatcherSvc.java
+++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/svc/MdmResourceMatcherSvc.java
@@ -89,12 +89,12 @@ public class MdmResourceMatcherSvc {
MdmMatchOutcome match(IBaseResource theLeftResource, IBaseResource theRightResource) {
MdmMatchOutcome matchResult = getMatchOutcome(theLeftResource, theRightResource);
- MdmMatchResultEnum matchResultEnum = myMdmRulesJson.getMatchResult(matchResult.vector);
+ MdmMatchResultEnum matchResultEnum = myMdmRulesJson.getMatchResult(matchResult.getVector());
matchResult.setMatchResultEnum(matchResultEnum);
if (ourLog.isDebugEnabled()) {
ourLog.debug("{} {}: {}", matchResult.getMatchResultEnum(), theRightResource.getIdElement().toUnqualifiedVersionless(), matchResult);
if (ourLog.isTraceEnabled()) {
- ourLog.trace("Field matcher results:\n{}", myMdmRulesJson.getDetailedFieldMatchResultWithSuccessInformation(matchResult.vector));
+ ourLog.trace("Field matcher results:\n{}", myMdmRulesJson.getDetailedFieldMatchResultWithSuccessInformation(matchResult.getVector()));
}
}
return matchResult;
@@ -133,8 +133,8 @@ public class MdmResourceMatcherSvc {
ourLog.trace("Matcher {} is valid for resource type: {}. Evaluating match.", fieldComparator.getName(), resourceType);
MdmMatchEvaluation matchEvaluation = fieldComparator.match(theLeftResource, theRightResource);
if (matchEvaluation.match) {
- vector |= (1 << i);
- ourLog.trace("Match: Successfully matched matcher {} with score {}.", fieldComparator.getName(), matchEvaluation.score);
+ vector |= (1L << i);
+ ourLog.trace("Match: Successfully matched matcher {} with score {}. New vector: {}", fieldComparator.getName(), matchEvaluation.score, vector);
} else {
ourLog.trace("No match: Matcher {} did not match (score: {}).", fieldComparator.getName(), matchEvaluation.score);
}
diff --git a/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/BaseR4Test.java b/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/BaseR4Test.java
index 337f0f1afa9..494cd93992d 100644
--- a/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/BaseR4Test.java
+++ b/hapi-fhir-server-mdm/src/test/java/ca/uhn/fhir/mdm/BaseR4Test.java
@@ -43,8 +43,8 @@ public abstract class BaseR4Test {
}
protected void assertMatchResult(MdmMatchResultEnum theExpectedMatchEnum, long theExpectedVector, double theExpectedScore, boolean theExpectedNewGoldenResource, boolean theExpectedEidMatch, MdmMatchOutcome theMatchResult) {
- assertEquals(theExpectedScore, theMatchResult.score, 0.001);
- assertEquals(theExpectedVector, theMatchResult.vector);
+ assertEquals(theExpectedScore, theMatchResult.getScore(), 0.001);
+ assertEquals(theExpectedVector, theMatchResult.getVector());
assertEquals(theExpectedEidMatch, theMatchResult.isEidMatch());
assertEquals(theExpectedNewGoldenResource, theMatchResult.isCreatedNewResource());
assertEquals(theExpectedMatchEnum, theMatchResult.getMatchResultEnum());
diff --git a/hapi-fhir-server-openapi/pom.xml b/hapi-fhir-server-openapi/pom.xml
index afc55be6399..5f42e456352 100644
--- a/hapi-fhir-server-openapi/pom.xml
+++ b/hapi-fhir-server-openapi/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml
index d4f12e754ee..ecd919eecd3 100644
--- a/hapi-fhir-server/pom.xml
+++ b/hapi-fhir-server/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/BaseJsonMessage.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/BaseJsonMessage.java
index b5c465671cc..4195b3a1fe6 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/BaseJsonMessage.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/BaseJsonMessage.java
@@ -28,6 +28,8 @@ import org.springframework.messaging.MessageHeaders;
import javax.annotation.Nullable;
+import static java.util.Objects.isNull;
+
public abstract class BaseJsonMessage implements Message, IModelJson {
private static final long serialVersionUID = 1L;
@@ -53,6 +55,9 @@ public abstract class BaseJsonMessage implements Message, IModelJson {
}
public HapiMessageHeaders getHapiHeaders() {
+ if (isNull(myHeaders)) {
+ setDefaultRetryHeaders();
+ }
return myHeaders;
}
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/HapiMessageHeaders.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/HapiMessageHeaders.java
index 656b4c3cd24..0478f4cbc89 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/HapiMessageHeaders.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/HapiMessageHeaders.java
@@ -27,6 +27,8 @@ import org.springframework.messaging.MessageHeaders;
import java.util.HashMap;
import java.util.Map;
+import static java.util.Objects.isNull;
+
/**
* This class is for holding headers for BaseJsonMessages. Any serializable data can be thrown into
* the header map. There are also three special headers, defined by the constants in this class, which are for use
@@ -57,6 +59,9 @@ public class HapiMessageHeaders implements IModelJson {
}
public Integer getRetryCount() {
+ if (isNull(this.myRetryCount)) {
+ return 0;
+ }
return this.myRetryCount;
}
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml
index b182513178a..0a13bfa98c1 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir-serviceloaders
ca.uhn.hapi.fhir
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml
index 84d52892290..09bb2ee35e9 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir-serviceloaders
ca.uhn.hapi.fhir
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../pom.xml
@@ -20,7 +20,7 @@
ca.uhn.hapi.fhir
hapi-fhir-caching-api
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
com.github.ben-manes.caffeine
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml
index 3e2e09f2cd4..2e9be45702c 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir-serviceloaders
ca.uhn.hapi.fhir
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml
index 589d0b19818..160568144b6 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir
ca.uhn.hapi.fhir
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../../pom.xml
diff --git a/hapi-fhir-serviceloaders/pom.xml b/hapi-fhir-serviceloaders/pom.xml
index 5581ed3b03d..edb3b3af712 100644
--- a/hapi-fhir-serviceloaders/pom.xml
+++ b/hapi-fhir-serviceloaders/pom.xml
@@ -5,7 +5,7 @@
hapi-fhir
ca.uhn.hapi.fhir
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
index 332e9c99314..b22acd2b0f6 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
index 3b4a537586a..d5c554c3224 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot-samples
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
hapi-fhir-spring-boot-sample-client-apache
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
index 3fa44b39612..ff86a9a217b 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot-samples
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
hapi-fhir-spring-boot-sample-client-okhttp
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
index 8ea45a9e9e5..f5f741469cf 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot-samples
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
hapi-fhir-spring-boot-sample-server-jersey
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
index bfd0ff929f5..0f5ad45e0f5 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
hapi-fhir-spring-boot-samples
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
index d17f78539b4..3f544924474 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml
index 8b7da530410..429f4197a9f 100644
--- a/hapi-fhir-spring-boot/pom.xml
+++ b/hapi-fhir-spring-boot/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-sql-migrate/pom.xml b/hapi-fhir-sql-migrate/pom.xml
index 2294911d5c6..488e5953e3d 100644
--- a/hapi-fhir-sql-migrate/pom.xml
+++ b/hapi-fhir-sql-migrate/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationLock.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationLock.java
index aa879a29654..a4db4201aec 100644
--- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationLock.java
+++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationLock.java
@@ -34,7 +34,7 @@ import static org.apache.commons.lang3.StringUtils.isBlank;
* The approach used in this class is borrowed from org.flywaydb.community.database.ignite.thin.IgniteThinDatabase
*/
public class HapiMigrationLock implements AutoCloseable {
- static final Integer LOCK_PID = -100;
+ public static final Integer LOCK_PID = -100;
private static final Logger ourLog = LoggerFactory.getLogger(HapiMigrationLock.class);
public static final int SLEEP_MILLIS_BETWEEN_LOCK_RETRIES = 1000;
public static final int DEFAULT_MAX_RETRY_ATTEMPTS = 50;
@@ -111,7 +111,11 @@ public class HapiMigrationLock implements AutoCloseable {
private boolean insertLockingRow() {
try {
- return myMigrationStorageSvc.insertLockRecord(myLockDescription);
+ boolean storedSuccessfully = myMigrationStorageSvc.insertLockRecord(myLockDescription);
+ if (storedSuccessfully) {
+ ourLog.info("Migration Lock Row added. [uuid={}]", myLockDescription);
+ }
+ return storedSuccessfully;
} catch (Exception e) {
ourLog.debug("Failed to insert lock record: {}", e.getMessage());
return false;
diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationStorageSvc.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationStorageSvc.java
index a1baa875bac..dad8d0a4945 100644
--- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationStorageSvc.java
+++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationStorageSvc.java
@@ -31,7 +31,7 @@ import java.util.Set;
public class HapiMigrationStorageSvc {
public static final String UNKNOWN_VERSION = "unknown";
- private static final String LOCK_TYPE = "hapi-fhir-lock";
+ public static final String LOCK_TYPE = "hapi-fhir-lock";
private final HapiMigrationDao myHapiMigrationDao;
diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrator.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrator.java
index 5403bbac2ff..92a71772051 100644
--- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrator.java
+++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrator.java
@@ -100,6 +100,20 @@ public class HapiMigrator {
return statementBuilder;
}
+ /**
+ * Helper method to clear a lock with a given UUID.
+ * @param theUUID the UUID of the migration lock record to remove
+ */
+ public void clearMigrationLockWithUUID(String theUUID) {
+ ourLog.info("Attempting to remove lock entry. [uuid={}]", theUUID);
+ boolean success = myHapiMigrationStorageSvc.deleteLockRecord(theUUID);
+ if (success) {
+ ourLog.info("Successfully removed lock entry. [uuid={}]", theUUID);
+ } else {
+ ourLog.error("Did not successfully remove lock entry. [uuid={}]", theUUID);
+ }
+ }
+
public MigrationResult migrate() {
ourLog.info("Loaded {} migration tasks", myTaskList.size());
MigrationResult retval = new MigrationResult();
diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/entity/HapiMigrationEntity.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/entity/HapiMigrationEntity.java
index b3714ea91d0..04c4e5a8180 100644
--- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/entity/HapiMigrationEntity.java
+++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/entity/HapiMigrationEntity.java
@@ -31,7 +31,6 @@ import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
-import org.hibernate.annotations.GenericGenerator;
import java.util.Date;
// Note even though we are using javax.persistence annotations here, we are managing these records outside of jpa
diff --git a/hapi-fhir-storage-batch2-jobs/pom.xml b/hapi-fhir-storage-batch2-jobs/pom.xml
index 7d2da2c06d0..75fa906cf8d 100644
--- a/hapi-fhir-storage-batch2-jobs/pom.xml
+++ b/hapi-fhir-storage-batch2-jobs/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
4.0.0
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java
index 81bfe63e93e..bc359249108 100644
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java
@@ -36,6 +36,8 @@ import org.springframework.context.annotation.Scope;
@Configuration
public class BulkExportAppCtx {
+ public static final String WRITE_TO_BINARIES = "write-to-binaries";
+
@Bean
public JobDefinition bulkExportJobDefinition() {
JobDefinition.Builder builder = JobDefinition.newBuilder();
@@ -63,7 +65,7 @@ public class BulkExportAppCtx {
)
// write binaries and save to db
.addIntermediateStep(
- "write-to-binaries",
+ WRITE_TO_BINARIES,
"Writes the expanded resources to the binaries and saves",
BulkExportBinaryFileId.class,
writeBinaryStep()
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportUtil.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportUtil.java
new file mode 100644
index 00000000000..c0102f94abf
--- /dev/null
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportUtil.java
@@ -0,0 +1,57 @@
+package ca.uhn.fhir.batch2.jobs.export;
+
+/*-
+ * #%L
+ * hapi-fhir-storage-batch2-jobs
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.batch2.model.StatusEnum;
+import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
+import org.slf4j.Logger;
+
+import static org.slf4j.LoggerFactory.getLogger;
+
+public class BulkExportUtil {
+ private static final Logger ourLog = getLogger(BulkExportUtil.class);
+
+ private BulkExportUtil() {
+
+ }
+
+ /**
+ * Converts Batch2 StatusEnum -> BulkExportJobStatusEnum
+ */
+ public static BulkExportJobStatusEnum fromBatchStatus(StatusEnum status) {
+ switch (status) {
+ case QUEUED:
+ case FINALIZE:
+ return BulkExportJobStatusEnum.SUBMITTED;
+ case COMPLETED :
+ return BulkExportJobStatusEnum.COMPLETE;
+ case IN_PROGRESS:
+ return BulkExportJobStatusEnum.BUILDING;
+ default:
+ ourLog.warn("Unrecognized status {}; treating as FAILED/CANCELLED/ERRORED", status.name());
+ case FAILED:
+ case CANCELLED:
+ case ERRORED:
+ return BulkExportJobStatusEnum.ERROR;
+ }
+ }
+}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/services/Batch2JobRunnerImpl.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/services/Batch2JobRunnerImpl.java
index 3b4b356a3c6..17548aeef8f 100644
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/services/Batch2JobRunnerImpl.java
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/services/Batch2JobRunnerImpl.java
@@ -22,10 +22,10 @@ package ca.uhn.fhir.batch2.jobs.services;
import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.api.JobOperationResultJson;
+import ca.uhn.fhir.batch2.jobs.export.BulkExportUtil;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
-import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
import ca.uhn.fhir.jpa.api.model.Batch2JobOperationResult;
@@ -33,7 +33,6 @@ import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2BaseJobParameters;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
-import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import org.slf4j.Logger;
@@ -97,7 +96,10 @@ public class Batch2JobRunnerImpl implements IBatch2JobRunner {
private Batch2JobInfo fromJobInstanceToBatch2JobInfo(@Nonnull JobInstance theInstance) {
Batch2JobInfo info = new Batch2JobInfo();
info.setJobId(theInstance.getInstanceId());
- info.setStatus(fromBatchStatus(theInstance.getStatus()));
+ // should convert this to a more generic enum for all batch2 (which is what it seems like)
+ // or use the status enum only (combine with bulk export enum)
+ // on the Batch2JobInfo
+ info.setStatus(BulkExportUtil.fromBatchStatus(theInstance.getStatus()));
info.setCancelled(theInstance.isCancelled());
info.setStartTime(theInstance.getStartTime());
info.setEndTime(theInstance.getEndTime());
@@ -106,22 +108,6 @@ public class Batch2JobRunnerImpl implements IBatch2JobRunner {
return info;
}
- public static BulkExportJobStatusEnum fromBatchStatus(StatusEnum status) {
- switch (status) {
- case QUEUED:
- return BulkExportJobStatusEnum.SUBMITTED;
- case COMPLETED :
- return BulkExportJobStatusEnum.COMPLETE;
- case IN_PROGRESS:
- return BulkExportJobStatusEnum.BUILDING;
- case FAILED:
- case CANCELLED:
- case ERRORED:
- default:
- return BulkExportJobStatusEnum.ERROR;
- }
- }
-
private Batch2JobStartResponse startBatch2BulkExportJob(BulkExportParameters theParameters) {
JobInstanceStartRequest request = createStartRequest(theParameters);
request.setParameters(BulkExportJobParameters.createFromExportJobParameters(theParameters));
diff --git a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/imprt/BulkDataImportProviderTest.java b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/imprt/BulkDataImportProviderTest.java
index 75cc6e5462b..850308279b1 100644
--- a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/imprt/BulkDataImportProviderTest.java
+++ b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/imprt/BulkDataImportProviderTest.java
@@ -21,6 +21,7 @@ import org.hl7.fhir.r4.model.InstantType;
import org.hl7.fhir.r4.model.OperationOutcome;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;
+import org.hl7.fhir.r4.model.UriType;
import org.hl7.fhir.r4.model.UrlType;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.MethodOrderer;
@@ -28,6 +29,8 @@ import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.RegisterExtension;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
@@ -72,11 +75,12 @@ public class BulkDataImportProviderTest {
myProvider.setJobCoordinator(myJobCoordinator);
}
- @Test
- public void testStart_Success() throws IOException {
+ @ParameterizedTest
+ @ValueSource(classes = {UrlType.class, UriType.class})
+ public void testStart_Success(Class> type) throws IOException {
// Setup
- Parameters input = createRequest();
+ Parameters input = createRequest(type);
ourLog.debug("Input: {}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input));
String jobId = UUID.randomUUID().toString();
@@ -168,11 +172,15 @@ public class BulkDataImportProviderTest {
}
+ @Nonnull Parameters createRequest() {
+ return createRequest(UriType.class);
+ }
+
@Nonnull
- private Parameters createRequest() {
+ private Parameters createRequest(Class> type) {
Parameters input = new Parameters();
input.addParameter(BulkDataImportProvider.PARAM_INPUT_FORMAT, new CodeType(Constants.CT_FHIR_NDJSON));
- input.addParameter(BulkDataImportProvider.PARAM_INPUT_SOURCE, new UrlType("http://foo"));
+ input.addParameter(BulkDataImportProvider.PARAM_INPUT_SOURCE, type == UrlType.class ? new UrlType("http://foo") : new UriType("http://foo"));
input.addParameter()
.setName(BulkDataImportProvider.PARAM_STORAGE_DETAIL)
.addPart(new Parameters.ParametersParameterComponent().setName(BulkDataImportProvider.PARAM_STORAGE_DETAIL_TYPE).setValue(new CodeType(BulkDataImportProvider.PARAM_STORAGE_DETAIL_TYPE_VAL_HTTPS)))
@@ -181,11 +189,11 @@ public class BulkDataImportProviderTest {
input.addParameter()
.setName(BulkDataImportProvider.PARAM_INPUT)
.addPart(new Parameters.ParametersParameterComponent().setName(BulkDataImportProvider.PARAM_INPUT_TYPE).setValue(new CodeType("Observation")))
- .addPart(new Parameters.ParametersParameterComponent().setName(BulkDataImportProvider.PARAM_INPUT_URL).setValue(new UrlType("http://example.com/Observation")));
+ .addPart(new Parameters.ParametersParameterComponent().setName(BulkDataImportProvider.PARAM_INPUT_URL).setValue(type == UrlType.class ? new UrlType("http://example.com/Observation") : new UriType("http://example.com/Observation")));
input.addParameter()
.setName(BulkDataImportProvider.PARAM_INPUT)
.addPart(new Parameters.ParametersParameterComponent().setName(BulkDataImportProvider.PARAM_INPUT_TYPE).setValue(new CodeType("Patient")))
- .addPart(new Parameters.ParametersParameterComponent().setName(BulkDataImportProvider.PARAM_INPUT_URL).setValue(new UrlType("http://example.com/Patient")));
+ .addPart(new Parameters.ParametersParameterComponent().setName(BulkDataImportProvider.PARAM_INPUT_URL).setValue(type == UrlType.class ? new UrlType("http://example.com/Patient") : new UriType("http://example.com/Patient")));
return input;
}
diff --git a/hapi-fhir-storage-batch2/pom.xml b/hapi-fhir-storage-batch2/pom.xml
index 47e6c81d6e0..29885285c8e 100644
--- a/hapi-fhir-storage-batch2/pom.xml
+++ b/hapi-fhir-storage-batch2/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobMaintenanceService.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobMaintenanceService.java
index 1bdb55853dc..f3c4a9e2428 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobMaintenanceService.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobMaintenanceService.java
@@ -20,6 +20,8 @@ package ca.uhn.fhir.batch2.api;
* #L%
*/
+import com.google.common.annotations.VisibleForTesting;
+
public interface IJobMaintenanceService {
/**
* Do not wait for the next scheduled time for maintenance. Trigger it immediately.
@@ -29,4 +31,10 @@ public interface IJobMaintenanceService {
void runMaintenancePass();
+ /**
+ * Forces a second maintenance run.
+ * Only to be used in tests to simulate a long-running maintenance step
+ */
+ @VisibleForTesting
+ void forceMaintenancePass();
}
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobPersistence.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobPersistence.java
index 1956d3b1199..b4a4d838ad5 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobPersistence.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobPersistence.java
@@ -27,14 +27,18 @@ import ca.uhn.fhir.batch2.model.MarkWorkChunkAsErrorRequest;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.batch2.model.WorkChunk;
import ca.uhn.fhir.batch2.models.JobInstanceFetchRequest;
+import ca.uhn.fhir.i18n.Msg;
import org.springframework.data.domain.Page;
+import org.springframework.data.domain.Pageable;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
+import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
+import java.util.stream.Stream;
public interface IJobPersistence {
@@ -73,6 +77,10 @@ public interface IJobPersistence {
*/
Optional fetchInstance(String theInstanceId);
+ default List fetchInstances(String theJobDefinitionId, Set theStatuses, Date theCutoff, Pageable thePageable) {
+ throw new UnsupportedOperationException(Msg.code(2271) + "Unsupported operation in this implementation");
+ }
+
/**
* Fetches any existing jobs matching provided request parameters
* @return
@@ -189,14 +197,24 @@ public interface IJobPersistence {
Iterator fetchAllWorkChunksIterator(String theInstanceId, boolean theWithData);
/**
+ * Deprecated, use {@link ca.uhn.fhir.batch2.api.IJobPersistence#fetchAllWorkChunksForStepStream(String, String)}
* Fetch all chunks with data for a given instance for a given step id
* @param theInstanceId
* @param theStepId
* @return - an iterator for fetching work chunks
*/
+ @Deprecated
Iterator fetchAllWorkChunksForStepIterator(String theInstanceId, String theStepId);
+ /**
+ * Fetch all chunks with data for a given instance for a given step id
+ * @param theInstanceId
+ * @param theStepId
+ * @return - a stream for fetching work chunks
+ */
+ Stream fetchAllWorkChunksForStepStream(String theInstanceId, String theStepId);
+
/**
* Update the stored instance. If the status is changing, use {@link ca.uhn.fhir.batch2.progress.JobInstanceStatusUpdater}
* instead to ensure state-change callbacks are invoked properly.
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/BaseBatch2Config.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/BaseBatch2Config.java
index 41bfa35d00f..a781bb75f6b 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/BaseBatch2Config.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/BaseBatch2Config.java
@@ -39,6 +39,7 @@ import ca.uhn.fhir.jpa.subscription.channel.api.IChannelReceiver;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
+import org.springframework.transaction.PlatformTransactionManager;
@Configuration
public abstract class BaseBatch2Config {
@@ -56,8 +57,8 @@ public abstract class BaseBatch2Config {
}
@Bean
- public WorkChunkProcessor jobStepExecutorService(BatchJobSender theBatchJobSender) {
- return new WorkChunkProcessor(myPersistence, theBatchJobSender);
+ public WorkChunkProcessor jobStepExecutorService(BatchJobSender theBatchJobSender, PlatformTransactionManager theTransactionManager) {
+ return new WorkChunkProcessor(myPersistence, theBatchJobSender, theTransactionManager);
}
@Bean
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutor.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutor.java
index 1a3b5c4b040..897ce6b86bd 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutor.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutor.java
@@ -28,8 +28,8 @@ import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.JobWorkCursor;
import ca.uhn.fhir.batch2.model.WorkChunk;
import ca.uhn.fhir.batch2.progress.JobInstanceStatusUpdater;
-import ca.uhn.fhir.util.Logs;
import ca.uhn.fhir.model.api.IModelJson;
+import ca.uhn.fhir.util.Logs;
import org.slf4j.Logger;
import javax.annotation.Nonnull;
@@ -53,7 +53,7 @@ public class JobStepExecutor theCursor,
@Nonnull WorkChunkProcessor theExecutor, IJobMaintenanceService theJobMaintenanceService) {
myJobPersistence = theJobPersistence;
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutorFactory.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutorFactory.java
index c00d048eb9b..4b5ba0e48d1 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutorFactory.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutorFactory.java
@@ -46,7 +46,7 @@ public class JobStepExecutorFactory {
myJobMaintenanceService = theJobMaintenanceService;
}
- public JobStepExecutor newJobStepExecutor(@Nonnull JobInstance theInstance, @Nonnull WorkChunk theWorkChunk, @Nonnull JobWorkCursor theCursor) {
+ public JobStepExecutor newJobStepExecutor(@Nonnull JobInstance theInstance, WorkChunk theWorkChunk, @Nonnull JobWorkCursor theCursor) {
return new JobStepExecutor<>(myJobPersistence, myBatchJobSender, theInstance, theWorkChunk, theCursor, myJobStepExecutorSvc, myJobMaintenanceService);
}
}
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepChunkProcessingResponse.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepChunkProcessingResponse.java
new file mode 100644
index 00000000000..3ea08b57bd3
--- /dev/null
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepChunkProcessingResponse.java
@@ -0,0 +1,72 @@
+package ca.uhn.fhir.batch2.coordinator;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server - Batch2 Task Processor
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.batch2.model.WorkChunk;
+import org.apache.commons.collections4.CollectionUtils;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class ReductionStepChunkProcessingResponse {
+
+ private List mySuccessfulChunkIds;
+ private List myFailedChunksIds;
+ private boolean myIsSuccessful;
+
+ public ReductionStepChunkProcessingResponse(boolean theDefaultSuccessValue){
+ mySuccessfulChunkIds = new ArrayList<>();
+ myFailedChunksIds = new ArrayList<>();
+ myIsSuccessful = theDefaultSuccessValue;
+ }
+
+ public List getSuccessfulChunkIds() {
+ return mySuccessfulChunkIds;
+ }
+
+ public boolean hasSuccessfulChunksIds(){
+ return !CollectionUtils.isEmpty(mySuccessfulChunkIds);
+ }
+
+ public void addSuccessfulChunkId(WorkChunk theWorkChunk){
+ mySuccessfulChunkIds.add(theWorkChunk.getId());
+ }
+
+ public List getFailedChunksIds() {
+ return myFailedChunksIds;
+ }
+
+ public boolean hasFailedChunkIds(){
+ return !CollectionUtils.isEmpty(myFailedChunksIds);
+ }
+
+ public void addFailedChunkId(WorkChunk theWorkChunk){
+ myFailedChunksIds.add(theWorkChunk.getId());
+ }
+
+ public boolean isSuccessful(){
+ return myIsSuccessful;
+ }
+
+ public void setSuccessful(boolean theSuccessValue){
+ myIsSuccessful = theSuccessValue;
+ }
+}
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutor.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutor.java
index 94ce7eb89c6..4b4a88dbc74 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutor.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutor.java
@@ -28,20 +28,26 @@ import ca.uhn.fhir.batch2.model.JobDefinitionStep;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.batch2.model.WorkChunk;
-import ca.uhn.fhir.util.Logs;
import ca.uhn.fhir.model.api.IModelJson;
+import ca.uhn.fhir.util.Logs;
import org.slf4j.Logger;
+import org.springframework.transaction.PlatformTransactionManager;
+import org.springframework.transaction.TransactionDefinition;
+import org.springframework.transaction.support.TransactionTemplate;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
+import java.util.stream.Stream;
public class ReductionStepExecutor {
private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
private final IJobPersistence myJobPersistence;
+ private final PlatformTransactionManager myTxManager;
+ private final TransactionTemplate myTxTemplate;
- public ReductionStepExecutor(IJobPersistence theJobPersistence) {
+ public ReductionStepExecutor(IJobPersistence theJobPersistence, PlatformTransactionManager theTransactionManager) {
myJobPersistence = theJobPersistence;
+ myTxManager = theTransactionManager;
+ myTxTemplate = new TransactionTemplate(theTransactionManager);
+ myTxTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
}
/**
@@ -55,92 +61,44 @@ public class ReductionStepExecutor {
) {
IReductionStepWorker reductionStepWorker = (IReductionStepWorker) theStep.getJobStepWorker();
- // we mark it first so that no other maintenance passes will pick this job up!
- // if we shut down mid process, though, it will be stuck in FINALIZE forever :(
if (!myJobPersistence.markInstanceAsStatus(theInstance.getInstanceId(), StatusEnum.FINALIZE)) {
- ourLog.warn("JobInstance[{}] is already in FINALIZE state, no reducer action performed.", theInstance.getInstanceId());
+ ourLog.warn(
+ "JobInstance[{}] is already in FINALIZE state. In memory status is {}. Reduction step will not rerun!"
+ " This could be a long-running reduction job resulting in the processed message not being acknowledged,"
+ + " or the result of a failed process or server restarting.",
+ theInstance.getInstanceId(),
+ theInstance.getStatus().name()
+ );
return false;
}
theInstance.setStatus(StatusEnum.FINALIZE);
- // We fetch all chunks first...
- Iterator chunkIterator = myJobPersistence.fetchAllWorkChunksForStepIterator(theInstance.getInstanceId(), theStep.getStepId());
-
- List failedChunks = new ArrayList<>();
- List successfulChunkIds = new ArrayList<>();
-
- boolean retval = true;
+ boolean defaultSuccessValue = true;
+ ReductionStepChunkProcessingResponse response = new ReductionStepChunkProcessingResponse(defaultSuccessValue);
try {
- while (chunkIterator.hasNext()) {
- WorkChunk chunk = chunkIterator.next();
- if (!chunk.getStatus().isIncomplete()) {
- // This should never happen since jobs with reduction are required to be gated
- ourLog.error("Unexpected chunk {} with status {} found while reducing {}. No chunks feeding into a reduction step should be complete.", chunk.getId(), chunk.getStatus(), theInstance);
- continue;
+ myTxTemplate.executeWithoutResult((status) -> {
+ try(Stream chunkIterator2 = myJobPersistence.fetchAllWorkChunksForStepStream(theInstance.getInstanceId(), theStep.getStepId())) {
+ chunkIterator2.forEach((chunk) -> {
+ processChunk(chunk, theInstance, theInputType, theParameters, reductionStepWorker, response);
+ });
}
-
- if (!failedChunks.isEmpty()) {
- // we are going to fail all future chunks now
- failedChunks.add(chunk.getId());
- } else {
- try {
- // feed them into our reduction worker
- // this is the most likely area to throw,
- // as this is where db actions and processing is likely to happen
- ChunkExecutionDetails chunkDetails = new ChunkExecutionDetails<>(chunk.getData(theInputType), theParameters, theInstance.getInstanceId(), chunk.getId());
-
- ChunkOutcome outcome = reductionStepWorker.consume(chunkDetails);
-
- switch (outcome.getStatuss()) {
- case SUCCESS:
- successfulChunkIds.add(chunk.getId());
- break;
-
- case ABORT:
- ourLog.error("Processing of work chunk {} resulted in aborting job.", chunk.getId());
-
- // fail entire job - including all future workchunks
- failedChunks.add(chunk.getId());
- retval = false;
- break;
-
- case FAIL:
- myJobPersistence.markWorkChunkAsFailed(chunk.getId(),
- "Step worker failed to process work chunk " + chunk.getId());
- retval = false;
- break;
- }
- } catch (Exception e) {
- String msg = String.format(
- "Reduction step failed to execute chunk reduction for chunk %s with exception: %s.",
- chunk.getId(),
- e.getMessage()
- );
- // we got a failure in a reduction
- ourLog.error(msg, e);
- retval = false;
-
- myJobPersistence.markWorkChunkAsFailed(chunk.getId(), msg);
- }
- }
- }
-
+ });
} finally {
- if (!successfulChunkIds.isEmpty()) {
+ if (response.hasSuccessfulChunksIds()) {
// complete the steps without making a new work chunk
myJobPersistence.markWorkChunksWithStatusAndWipeData(theInstance.getInstanceId(),
- successfulChunkIds,
+ response.getSuccessfulChunkIds(),
StatusEnum.COMPLETED,
null // error message - none
);
}
- if (!failedChunks.isEmpty()) {
+ if (response.hasFailedChunkIds()) {
// mark any failed chunks as failed for aborting
myJobPersistence.markWorkChunksWithStatusAndWipeData(theInstance.getInstanceId(),
- failedChunks,
+ response.getFailedChunksIds(),
StatusEnum.FAILED,
"JOB ABORTED");
}
@@ -148,10 +106,72 @@ public class ReductionStepExecutor {
}
// if no successful chunks, return false
- if (successfulChunkIds.isEmpty()) {
- retval = false;
+ if (!response.hasSuccessfulChunksIds()) {
+ response.setSuccessful(false);
}
- return retval;
+ return response.isSuccessful();
+ }
+
+ private
+ void processChunk(WorkChunk theChunk,
+ JobInstance theInstance,
+ Class theInputType,
+ PT theParameters,
+ IReductionStepWorker theReductionStepWorker,
+ ReductionStepChunkProcessingResponse theResponseObject){
+
+ if (!theChunk.getStatus().isIncomplete()) {
+ // This should never happen since jobs with reduction are required to be gated
+ ourLog.error("Unexpected chunk {} with status {} found while reducing {}. No chunks feeding into a reduction step should be complete.", theChunk.getId(), theChunk.getStatus(), theInstance);
+ return;
+ }
+
+ if (theResponseObject.hasFailedChunkIds()) {
+ // we are going to fail all future chunks now
+ theResponseObject.addFailedChunkId(theChunk);
+ } else {
+ try {
+ // feed them into our reduction worker
+ // this is the most likely area to throw,
+ // as this is where db actions and processing is likely to happen
+ ChunkExecutionDetails chunkDetails = new ChunkExecutionDetails<>(theChunk.getData(theInputType), theParameters, theInstance.getInstanceId(), theChunk.getId());
+
+ ChunkOutcome outcome = theReductionStepWorker.consume(chunkDetails);
+
+ switch (outcome.getStatus()) {
+ case SUCCESS:
+ theResponseObject.addSuccessfulChunkId(theChunk);
+ break;
+
+ case ABORT:
+ ourLog.error("Processing of work chunk {} resulted in aborting job.", theChunk.getId());
+
+ // fail entire job - including all future workchunks
+ theResponseObject.addFailedChunkId(theChunk);
+ theResponseObject.setSuccessful(false);
+ break;
+
+ case FAIL:
+ // non-idempotent; but failed chunks will be
+ // ignored on a second runthrough of reduction step
+ myJobPersistence.markWorkChunkAsFailed(theChunk.getId(),
+ "Step worker failed to process work chunk " + theChunk.getId());
+ theResponseObject.setSuccessful(false);
+ break;
+ }
+ } catch (Exception e) {
+ String msg = String.format(
+ "Reduction step failed to execute chunk reduction for chunk %s with exception: %s.",
+ theChunk.getId(),
+ e.getMessage()
+ );
+ // we got a failure in a reduction
+ ourLog.error(msg, e);
+ theResponseObject.setSuccessful(false);
+
+ myJobPersistence.markWorkChunkAsFailed(theChunk.getId(), msg);
+ }
+ }
}
}
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/SynchronizedJobPersistenceWrapper.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/SynchronizedJobPersistenceWrapper.java
index ef5ac4e10f4..7bceba6629e 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/SynchronizedJobPersistenceWrapper.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/SynchronizedJobPersistenceWrapper.java
@@ -29,11 +29,14 @@ import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.batch2.model.WorkChunk;
import ca.uhn.fhir.batch2.models.JobInstanceFetchRequest;
import org.springframework.data.domain.Page;
+import org.springframework.data.domain.Pageable;
+import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
+import java.util.stream.Stream;
public class SynchronizedJobPersistenceWrapper implements IJobPersistence {
@@ -66,6 +69,11 @@ public class SynchronizedJobPersistenceWrapper implements IJobPersistence {
return myWrap.fetchInstance(theInstanceId);
}
+ @Override
+ public List fetchInstances(String theJobDefinitionId, Set theStatuses, Date theCutoff, Pageable thePageable) {
+ return myWrap.fetchInstances(theJobDefinitionId, theStatuses, theCutoff, thePageable);
+ }
+
@Override
public synchronized List fetchInstances(FetchJobInstancesRequest theRequest, int theStart, int theBatchSize) {
return myWrap.fetchInstances(theRequest, theStart, theBatchSize);
@@ -146,6 +154,11 @@ public class SynchronizedJobPersistenceWrapper implements IJobPersistence {
return myWrap.fetchAllWorkChunksForStepIterator(theInstanceId, theStepId);
}
+ @Override
+ public Stream fetchAllWorkChunksForStepStream(String theInstanceId, String theStepId) {
+ return myWrap.fetchAllWorkChunksForStepStream(theInstanceId, theStepId);
+ }
+
@Override
public synchronized boolean updateInstance(JobInstance theInstance) {
return myWrap.updateInstance(theInstance);
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChannelMessageHandler.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChannelMessageHandler.java
index ffaf4028e2b..4434023f5cb 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChannelMessageHandler.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChannelMessageHandler.java
@@ -32,9 +32,11 @@ import ca.uhn.fhir.batch2.model.JobWorkNotificationJsonMessage;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.batch2.model.WorkChunk;
import ca.uhn.fhir.batch2.progress.JobInstanceStatusUpdater;
-import ca.uhn.fhir.util.Logs;
+import ca.uhn.fhir.batch2.util.Batch2Constants;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import ca.uhn.fhir.util.Logs;
import org.apache.commons.lang3.Validate;
+import javax.validation.constraints.NotNull;
import org.slf4j.Logger;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageHandler;
@@ -42,6 +44,9 @@ import org.springframework.messaging.MessagingException;
import javax.annotation.Nonnull;
import java.util.Optional;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ThreadFactory;
/**
* This handler receives batch work request messages and performs the batch work requested by the message
@@ -75,17 +80,28 @@ class WorkChannelMessageHandler implements MessageHandler {
String chunkId = workNotification.getChunkId();
Validate.notNull(chunkId);
- Optional chunkOpt = myJobPersistence.fetchWorkChunkSetStartTimeAndMarkInProgress(chunkId);
- if (chunkOpt.isEmpty()) {
- ourLog.error("Unable to find chunk with ID {} - Aborting", chunkId);
- return;
+
+ boolean isReductionWorkNotification = Batch2Constants.REDUCTION_STEP_CHUNK_ID_PLACEHOLDER.equals(chunkId);
+
+ JobWorkCursor, ?, ?> cursor = null;
+ WorkChunk workChunk = null;
+ if (!isReductionWorkNotification) {
+ Optional chunkOpt = myJobPersistence.fetchWorkChunkSetStartTimeAndMarkInProgress(chunkId);
+ if (chunkOpt.isEmpty()) {
+ ourLog.error("Unable to find chunk with ID {} - Aborting", chunkId);
+ return;
+ }
+ workChunk = chunkOpt.get();
+ ourLog.debug("Worker picked up chunk. [chunkId={}, stepId={}, startTime={}]", chunkId, workChunk.getTargetStepId(), workChunk.getStartTime());
+
+ cursor = buildCursorFromNotification(workNotification);
+
+ Validate.isTrue(workChunk.getTargetStepId().equals(cursor.getCurrentStepId()), "Chunk %s has target step %s but expected %s", chunkId, workChunk.getTargetStepId(), cursor.getCurrentStepId());
+ } else {
+ ourLog.debug("Processing reduction step work notification. No associated workchunks.");
+
+ cursor = buildCursorFromNotification(workNotification);
}
- WorkChunk workChunk = chunkOpt.get();
- ourLog.debug("Worker picked up chunk. [chunkId={}, stepId={}, startTime={}]", chunkId, workChunk.getTargetStepId(), workChunk.getStartTime());
-
- JobWorkCursor, ?, ?> cursor = buildCursorFromNotification(workNotification);
-
- Validate.isTrue(workChunk.getTargetStepId().equals(cursor.getCurrentStepId()), "Chunk %s has target step %s but expected %s", chunkId, workChunk.getTargetStepId(), cursor.getCurrentStepId());
Optional instanceOpt = myJobPersistence.fetchInstance(workNotification.getInstanceId());
JobInstance instance = instanceOpt.orElseThrow(() -> new InternalErrorException("Unknown instance: " + workNotification.getInstanceId()));
@@ -100,7 +116,27 @@ class WorkChannelMessageHandler implements MessageHandler {
}
JobStepExecutor,?,?> stepExecutor = myJobStepExecutorFactory.newJobStepExecutor(instance, workChunk, cursor);
- stepExecutor.executeStep();
+ // TODO - ls
+ /*
+ * We should change this to actually have
+ * the reduction step take in smaller sets of
+ * lists of chunks from the previous steps (one
+ * at a time still) and compose the
+ * report gradually and in an idempotent way
+ */
+ if (isReductionWorkNotification) {
+ // reduction steps can be long-running, so execute asynchronously:
+ // fire off a separate thread and let this handler return immediately
+ ScheduledExecutorService exService = Executors.newSingleThreadScheduledExecutor(new ThreadFactory() {
+ @Override
+ public Thread newThread(@NotNull Runnable r) {
+ return new Thread(r, "Reduction-step-thread");
+ }
+ });
+ exService.execute(stepExecutor::executeStep);
+ } else {
+ stepExecutor.executeStep();
+ }
}
private void markInProgressIfQueued(JobInstance theInstance) {
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChunkProcessor.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChunkProcessor.java
index aaed2f8148d..4ba661a58fe 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChunkProcessor.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChunkProcessor.java
@@ -32,13 +32,13 @@ import ca.uhn.fhir.batch2.model.JobDefinitionStep;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.JobWorkCursor;
import ca.uhn.fhir.batch2.model.WorkChunk;
-import ca.uhn.fhir.util.Logs;
import ca.uhn.fhir.model.api.IModelJson;
+import ca.uhn.fhir.util.Logs;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
+import org.springframework.transaction.PlatformTransactionManager;
import javax.annotation.Nullable;
-
import java.util.Optional;
import static org.apache.commons.lang3.StringUtils.isBlank;
@@ -64,11 +64,12 @@ public class WorkChunkProcessor {
private final ReductionStepExecutor myReductionStepExecutor;
public WorkChunkProcessor(IJobPersistence theJobPersistence,
- BatchJobSender theSender) {
+ BatchJobSender theSender,
+ PlatformTransactionManager theTransactionManager) {
myJobPersistence = theJobPersistence;
myBatchJobSender = theSender;
myStepExecutor = new StepExecutor(theJobPersistence);
- myReductionStepExecutor = new ReductionStepExecutor(theJobPersistence);
+ myReductionStepExecutor = new ReductionStepExecutor(theJobPersistence, theTransactionManager);
}
/**
@@ -102,7 +103,7 @@ public class WorkChunkProcessor {
boolean success = myReductionStepExecutor.executeReductionStep(theInstance, step, inputType, parameters);
if (success) {
- // Now call call the normal step executor
+ // Now call the normal step executor
// the data sink stores the report on the instance (i.e. not chunks).
// Assume the OT (report) data is smaller than the list of all IT data
@@ -113,7 +114,6 @@ public class WorkChunkProcessor {
}
return new JobStepExecutorOutput<>(success, dataSink);
-
} else {
// all other kinds of steps
Validate.notNull(theWorkChunk);
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/GenerateRangeChunksStep.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/GenerateRangeChunksStep.java
index 03d6320d54e..85c319ec3ef 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/GenerateRangeChunksStep.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/GenerateRangeChunksStep.java
@@ -35,7 +35,7 @@ import org.slf4j.Logger;
import javax.annotation.Nonnull;
import java.util.Date;
-import static ca.uhn.fhir.batch2.config.Batch2Constants.BATCH_START_DATE;
+import static ca.uhn.fhir.batch2.util.Batch2Constants.BATCH_START_DATE;
public class GenerateRangeChunksStep implements IFirstJobStepWorker {
private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobInstanceProcessor.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobInstanceProcessor.java
index c253927b0d0..5243f2473cd 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobInstanceProcessor.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobInstanceProcessor.java
@@ -22,7 +22,6 @@ package ca.uhn.fhir.batch2.maintenance;
import ca.uhn.fhir.batch2.api.IJobPersistence;
import ca.uhn.fhir.batch2.channel.BatchJobSender;
-import ca.uhn.fhir.batch2.coordinator.JobStepExecutorOutput;
import ca.uhn.fhir.batch2.coordinator.WorkChunkProcessor;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.JobWorkCursor;
@@ -30,11 +29,11 @@ import ca.uhn.fhir.batch2.model.JobWorkNotification;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.batch2.progress.JobInstanceProgressCalculator;
import ca.uhn.fhir.batch2.progress.JobInstanceStatusUpdater;
+import ca.uhn.fhir.batch2.util.Batch2Constants;
import ca.uhn.fhir.util.Logs;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
-import java.util.Date;
import java.util.List;
public class JobInstanceProcessor {
@@ -125,6 +124,8 @@ public class JobInstanceProcessor {
private void triggerGatedExecutions() {
if (!myInstance.isRunning()) {
+ ourLog.debug("JobInstance {} is not in a \"running\" state. Status {}",
+ myInstance.getInstanceId(), myInstance.getStatus().name());
return;
}
@@ -136,9 +137,12 @@ public class JobInstanceProcessor {
// final step
if (jobWorkCursor.isFinalStep() && !jobWorkCursor.isReductionStep()) {
+ ourLog.debug("Job instance {} is in final step and it's not a reducer step", myInstance.getInstanceId());
return;
}
+ // we should not be sending a second reduction step
+ // to the queue if it's in finalize status
if (jobWorkCursor.isReductionStep() && myInstance.getStatus() == StatusEnum.FINALIZE) {
ourLog.warn("Job instance {} is still finalizing - a second reduction job will not be started.", myInstance.getInstanceId());
return;
@@ -179,16 +183,13 @@ public class JobInstanceProcessor {
myJobPersistence.updateInstance(myInstance);
}
- private void processReductionStep(JobWorkCursor, ?, ?> jobWorkCursor) {
- // do execution of the final step now
- // (ie, we won't send to job workers)
- JobStepExecutorOutput, ?, ?> result = myJobExecutorSvc.doExecution(
- JobWorkCursor.fromJobDefinitionAndRequestedStepId(myInstance.getJobDefinition(), jobWorkCursor.nextStep.getStepId()),
+ private void processReductionStep(JobWorkCursor, ?, ?> theWorkCursor) {
+ JobWorkNotification workNotification = new JobWorkNotification(
myInstance,
- null);
- if (!result.isSuccessful()) {
- myInstance.setEndTime(new Date());
- myJobInstanceStatusUpdater.setFailed(myInstance);
- }
+ theWorkCursor.nextStep.getStepId(),
+ Batch2Constants.REDUCTION_STEP_CHUNK_ID_PLACEHOLDER // chunk id; we don't need it
+ );
+ ourLog.debug("Submitting a Work Notification for a job reduction step. No associated work chunk ids are available.");
+ myBatchJobSender.sendWorkChannelMessage(workNotification);
}
}
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobMaintenanceServiceImpl.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobMaintenanceServiceImpl.java
index 2c89769feef..26ec4eb8899 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobMaintenanceServiceImpl.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobMaintenanceServiceImpl.java
@@ -179,6 +179,16 @@ public class JobMaintenanceServiceImpl implements IJobMaintenanceService, IHasSc
return myRunMaintenanceSemaphore.getQueueLength();
}
+ @VisibleForTesting
+ public void forceMaintenancePass() {
+ // for testing only: forces a synchronous maintenance pass (e.g. while simulating a long-running job)
+ ourLog.info(
+ "Forcing a maintenance pass run; semaphore at {}",
+ getQueueLength()
+ );
+ doMaintenancePass();
+ }
+
@Override
public void runMaintenancePass() {
if (!myRunMaintenanceSemaphore.tryAcquire()) {
@@ -204,6 +214,7 @@ public class JobMaintenanceServiceImpl implements IJobMaintenanceService, IHasSc
myJobDefinitionRegistry.setJobDefinition(instance);
JobInstanceProcessor jobInstanceProcessor = new JobInstanceProcessor(myJobPersistence,
myBatchJobSender, instance, progressAccumulator, myJobExecutorSvc);
+ ourLog.debug("Triggering maintenance process for instance {} in status {}", instance.getInstanceId(), instance.getStatus().name());
jobInstanceProcessor.process();
}
}
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/ChunkOutcome.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/ChunkOutcome.java
index 963c4c43073..20fd16c21f8 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/ChunkOutcome.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/ChunkOutcome.java
@@ -33,7 +33,7 @@ public class ChunkOutcome {
myStatus = theStatus;
}
- public Status getStatuss() {
+ public Status getStatus() {
return myStatus;
}
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstance.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstance.java
index 903d80052ef..c1752db433a 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstance.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstance.java
@@ -24,6 +24,7 @@ import ca.uhn.fhir.batch2.api.IJobInstance;
import ca.uhn.fhir.jpa.util.JsonDateDeserializer;
import ca.uhn.fhir.jpa.util.JsonDateSerializer;
import ca.uhn.fhir.model.api.IModelJson;
+import ca.uhn.fhir.util.Logs;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
@@ -32,7 +33,6 @@ import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import java.util.Date;
-import java.util.Objects;
import static org.apache.commons.lang3.StringUtils.isBlank;
@@ -359,10 +359,24 @@ public class JobInstance extends JobInstanceStartRequest implements IModelJson,
}
/**
- * Returns true if the job instance is in {@link StatusEnum#IN_PROGRESS} and is not cancelled
+ * Returns true if the job instance is in:
+ * {@link StatusEnum#IN_PROGRESS}
+ * {@link StatusEnum#FINALIZE}
+ * and is not cancelled
*/
public boolean isRunning() {
- return getStatus() == StatusEnum.IN_PROGRESS && !isCancelled();
+ if (isCancelled()) {
+ return false;
+ }
+
+ switch (getStatus()) {
+ case IN_PROGRESS:
+ case FINALIZE:
+ return true;
+ default:
+ Logs.getBatchTroubleshootingLog().debug("Status {} is considered \"not running\"", getStatus().name());
+ }
+ return false;
}
public boolean isFinished() {
@@ -376,7 +390,7 @@ public class JobInstance extends JobInstanceStartRequest implements IModelJson,
}
public boolean isPendingCancellationRequest() {
- return myCancelled && (myStatus == StatusEnum.QUEUED || myStatus == StatusEnum.IN_PROGRESS);
+ return myCancelled && myStatus.isCancellable();
}
/**
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/StatusEnum.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/StatusEnum.java
index 69b37341829..1b6042bd16f 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/StatusEnum.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/StatusEnum.java
@@ -34,51 +34,53 @@ public enum StatusEnum {
/**
* Task is waiting to execute and should begin with no intervention required.
*/
- QUEUED(true, false),
+ QUEUED(true, false, true),
/**
* Task is current executing
*/
- IN_PROGRESS(true, false),
+ IN_PROGRESS(true, false, true),
/**
* For reduction steps
*/
- FINALIZE(true, false),
+ FINALIZE(true, false, true),
/**
* Task completed successfully
*/
- COMPLETED(false, true),
+ COMPLETED(false, true, false),
/**
* Task execution resulted in an error but the error may be transient (or transient status is unknown).
* Retrying may result in success.
*/
- ERRORED(true, true),
+ ERRORED(true, true, false),
/**
* Task has failed and is known to be unrecoverable. There is no reason to believe that retrying will
* result in a different outcome.
*/
- FAILED(true, true),
+ FAILED(true, true, false),
/**
* Task has been cancelled.
*/
- CANCELLED(true, true);
+ CANCELLED(true, true, false);
private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
private final boolean myIncomplete;
private final boolean myEnded;
+ private final boolean myIsCancellable;
private static StatusEnum[] ourIncompleteStatuses;
private static Set ourEndedStatuses;
private static Set ourNotEndedStatuses;
- StatusEnum(boolean theIncomplete, boolean theEnded) {
+ StatusEnum(boolean theIncomplete, boolean theEnded, boolean theIsCancellable) {
myIncomplete = theIncomplete;
myEnded = theEnded;
+ myIsCancellable = theIsCancellable;
}
/**
@@ -186,4 +188,8 @@ public enum StatusEnum {
public boolean isIncomplete() {
return myIncomplete;
}
+
+ public boolean isCancellable() {
+ return myIsCancellable;
+ }
}
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/Batch2Constants.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/util/Batch2Constants.java
similarity index 82%
rename from hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/Batch2Constants.java
rename to hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/util/Batch2Constants.java
index 6fd7e29917b..8855c5dee24 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/Batch2Constants.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/util/Batch2Constants.java
@@ -1,4 +1,4 @@
-package ca.uhn.fhir.batch2.config;
+package ca.uhn.fhir.batch2.util;
/*-
* #%L
@@ -30,4 +30,10 @@ public class Batch2Constants {
* date when performing operations that pull resources by time windows.
*/
public static final Date BATCH_START_DATE = new InstantType("2000-01-01T00:00:00Z").getValue();
+
+ /**
+ * This is a placeholder chunk ID for the reduction step, allowing it to be
+ * used in the message handling
+ */
+ public static final String REDUCTION_STEP_CHUNK_ID_PLACEHOLDER = "REDUCTION";
}
diff --git a/hapi-fhir-storage-batch2/src/test/java/ca/uhn/fhir/batch2/coordinator/JobCoordinatorImplTest.java b/hapi-fhir-storage-batch2/src/test/java/ca/uhn/fhir/batch2/coordinator/JobCoordinatorImplTest.java
index dc6aa312e4b..d5a344ddf36 100644
--- a/hapi-fhir-storage-batch2/src/test/java/ca/uhn/fhir/batch2/coordinator/JobCoordinatorImplTest.java
+++ b/hapi-fhir-storage-batch2/src/test/java/ca/uhn/fhir/batch2/coordinator/JobCoordinatorImplTest.java
@@ -36,6 +36,7 @@ import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.stubbing.Answer;
import org.springframework.messaging.MessageDeliveryException;
+import org.springframework.transaction.PlatformTransactionManager;
import javax.annotation.Nonnull;
import java.util.Arrays;
@@ -43,7 +44,6 @@ import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
@@ -69,6 +69,8 @@ public class JobCoordinatorImplTest extends BaseBatch2Test {
private JobDefinitionRegistry myJobDefinitionRegistry;
@Mock
private IJobMaintenanceService myJobMaintenanceService;
+ @Mock
+ private PlatformTransactionManager myPlatformTransactionManager;
@Captor
private ArgumentCaptor> myStep1ExecutionDetailsCaptor;
@@ -87,7 +89,7 @@ public class JobCoordinatorImplTest extends BaseBatch2Test {
public void beforeEach() {
// The code refactored to keep the same functionality,
// but in this service (so it's a real service here!)
- WorkChunkProcessor jobStepExecutorSvc = new WorkChunkProcessor(myJobInstancePersister, myBatchJobSender);
+ WorkChunkProcessor jobStepExecutorSvc = new WorkChunkProcessor(myJobInstancePersister, myBatchJobSender, myPlatformTransactionManager);
mySvc = new JobCoordinatorImpl(myBatchJobSender, myWorkChannelReceiver, myJobInstancePersister, myJobDefinitionRegistry, jobStepExecutorSvc, myJobMaintenanceService);
}
diff --git a/hapi-fhir-storage-batch2/src/test/java/ca/uhn/fhir/batch2/coordinator/WorkChunkProcessorTest.java b/hapi-fhir-storage-batch2/src/test/java/ca/uhn/fhir/batch2/coordinator/WorkChunkProcessorTest.java
index 2c6a59e4d64..78c66f88666 100644
--- a/hapi-fhir-storage-batch2/src/test/java/ca/uhn/fhir/batch2/coordinator/WorkChunkProcessorTest.java
+++ b/hapi-fhir-storage-batch2/src/test/java/ca/uhn/fhir/batch2/coordinator/WorkChunkProcessorTest.java
@@ -31,6 +31,7 @@ import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
+import org.springframework.transaction.PlatformTransactionManager;
import java.util.ArrayList;
import java.util.Arrays;
@@ -104,8 +105,8 @@ public class WorkChunkProcessorTest {
// our test class
private class TestWorkChunkProcessor extends WorkChunkProcessor {
- public TestWorkChunkProcessor(IJobPersistence thePersistence, BatchJobSender theSender) {
- super(thePersistence, theSender);
+ public TestWorkChunkProcessor(IJobPersistence thePersistence, BatchJobSender theSender, PlatformTransactionManager theTransactionManager) {
+ super(thePersistence, theSender, theTransactionManager);
}
@Override
@@ -138,11 +139,14 @@ public class WorkChunkProcessorTest {
@Mock
private BatchJobSender myJobSender;
+ @Mock
+ private PlatformTransactionManager myMockTransactionManager;
+
private TestWorkChunkProcessor myExecutorSvc;
@BeforeEach
public void init() {
- myExecutorSvc = new TestWorkChunkProcessor(myJobPersistence, myJobSender);
+ myExecutorSvc = new TestWorkChunkProcessor(myJobPersistence, myJobSender, myMockTransactionManager);
}
private JobDefinitionStep mockOutWorkCursor(
@@ -197,8 +201,8 @@ public class WorkChunkProcessorTest {
// when
when(workCursor.isReductionStep())
.thenReturn(true);
- when(myJobPersistence.fetchAllWorkChunksForStepIterator(eq(INSTANCE_ID), eq(REDUCTION_STEP_ID)))
- .thenReturn(chunks.iterator());
+ when(myJobPersistence.fetchAllWorkChunksForStepStream(eq(INSTANCE_ID), eq(REDUCTION_STEP_ID)))
+ .thenReturn(chunks.stream());
when(myJobPersistence.markInstanceAsStatus(eq(INSTANCE_ID), eq(StatusEnum.FINALIZE))).thenReturn(true);
when(myReductionStep.consume(any(ChunkExecutionDetails.class)))
.thenReturn(ChunkOutcome.SUCCESS());
@@ -259,8 +263,8 @@ public class WorkChunkProcessorTest {
// when
when(workCursor.isReductionStep())
.thenReturn(true);
- when(myJobPersistence.fetchAllWorkChunksForStepIterator(eq(INSTANCE_ID), eq(REDUCTION_STEP_ID)))
- .thenReturn(chunks.iterator());
+ when(myJobPersistence.fetchAllWorkChunksForStepStream(eq(INSTANCE_ID), eq(REDUCTION_STEP_ID)))
+ .thenReturn(chunks.stream());
when(myJobPersistence.markInstanceAsStatus(eq(INSTANCE_ID), eq(StatusEnum.FINALIZE))).thenReturn(true);
doThrow(new RuntimeException(errorMsg))
.when(myReductionStep).consume(any(ChunkExecutionDetails.class));
@@ -308,8 +312,8 @@ public class WorkChunkProcessorTest {
// when
when(workCursor.isReductionStep())
.thenReturn(true);
- when(myJobPersistence.fetchAllWorkChunksForStepIterator(eq(INSTANCE_ID), eq(REDUCTION_STEP_ID)))
- .thenReturn(chunks.iterator());
+ when(myJobPersistence.fetchAllWorkChunksForStepStream(eq(INSTANCE_ID), eq(REDUCTION_STEP_ID)))
+ .thenReturn(chunks.stream());
when(myJobPersistence.markInstanceAsStatus(eq(INSTANCE_ID), eq(StatusEnum.FINALIZE))).thenReturn(true);
when(myReductionStep.consume(any(ChunkExecutionDetails.class)))
.thenReturn(ChunkOutcome.SUCCESS())
@@ -355,8 +359,8 @@ public class WorkChunkProcessorTest {
when(workCursor.isReductionStep())
.thenReturn(true);
when(myJobPersistence.markInstanceAsStatus(eq(INSTANCE_ID), eq(StatusEnum.FINALIZE))).thenReturn(true);
- when(myJobPersistence.fetchAllWorkChunksForStepIterator(eq(INSTANCE_ID), eq(REDUCTION_STEP_ID)))
- .thenReturn(chunks.iterator());
+ when(myJobPersistence.fetchAllWorkChunksForStepStream(eq(INSTANCE_ID), eq(REDUCTION_STEP_ID)))
+ .thenReturn(chunks.stream());
when(myReductionStep.consume(any(ChunkExecutionDetails.class)))
.thenReturn(ChunkOutcome.SUCCESS())
.thenReturn(new ChunkOutcome(ChunkOutcome.Status.ABORT));
@@ -609,7 +613,7 @@ public class WorkChunkProcessorTest {
verify(myJobPersistence, never())
.markWorkChunksWithStatusAndWipeData(anyString(), anyList(), any(), any());
verify(myJobPersistence, never())
- .fetchAllWorkChunksForStepIterator(anyString(), anyString());
+ .fetchAllWorkChunksForStepStream(anyString(), anyString());
}
private JobInstance getTestJobInstance() {
diff --git a/hapi-fhir-storage-cr/pom.xml b/hapi-fhir-storage-cr/pom.xml
index a7c82542be7..b9411fac099 100644
--- a/hapi-fhir-storage-cr/pom.xml
+++ b/hapi-fhir-storage-cr/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage-mdm/pom.xml b/hapi-fhir-storage-mdm/pom.xml
index acf055b627e..48572e9c1cc 100644
--- a/hapi-fhir-storage-mdm/pom.xml
+++ b/hapi-fhir-storage-mdm/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
4.0.0
diff --git a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmGenerateRangeChunksStep.java b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmGenerateRangeChunksStep.java
index da55e51734f..9f153bd63ba 100644
--- a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmGenerateRangeChunksStep.java
+++ b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmGenerateRangeChunksStep.java
@@ -26,7 +26,7 @@ import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.api.VoidModel;
-import ca.uhn.fhir.batch2.config.Batch2Constants;
+import ca.uhn.fhir.batch2.util.Batch2Constants;
import ca.uhn.fhir.mdm.batch2.clear.MdmClearJobParameters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
diff --git a/hapi-fhir-storage-test-utilities/pom.xml b/hapi-fhir-storage-test-utilities/pom.xml
index 59972c1685f..c741f54e038 100644
--- a/hapi-fhir-storage-test-utilities/pom.xml
+++ b/hapi-fhir-storage-test-utilities/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
4.0.0
diff --git a/hapi-fhir-storage-test-utilities/src/test/java/ca/uhn/fhir/rest/server/messaging/json/BaseJsonMessageTest.java b/hapi-fhir-storage-test-utilities/src/test/java/ca/uhn/fhir/rest/server/messaging/json/BaseJsonMessageTest.java
index 6912522eaef..af70696d6e9 100644
--- a/hapi-fhir-storage-test-utilities/src/test/java/ca/uhn/fhir/rest/server/messaging/json/BaseJsonMessageTest.java
+++ b/hapi-fhir-storage-test-utilities/src/test/java/ca/uhn/fhir/rest/server/messaging/json/BaseJsonMessageTest.java
@@ -67,4 +67,15 @@ class BaseJsonMessageTest {
message.setPayload(payload);
assertEquals(RESOURCE_ID, message.getMessageKeyOrNull());
}
+
+ @Test
+ void test_resourceModifiedJsonMessage_getRetryCountOnNullHeaders_willReturnZero() {
+ // Given
+ ResourceModifiedJsonMessage message = new ResourceModifiedJsonMessage();
+ message.setHeaders(null);
+ // When
+ HapiMessageHeaders headers = message.getHapiHeaders();
+ // Then
+ assertEquals(0, headers.getRetryCount());
+ }
}
diff --git a/hapi-fhir-storage/pom.xml b/hapi-fhir-storage/pom.xml
index b72e1f0a85a..2a096606adf 100644
--- a/hapi-fhir-storage/pom.xml
+++ b/hapi-fhir-storage/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/DaoConfig.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/DaoConfig.java
index 3dacf4d2d19..7a534c007e9 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/DaoConfig.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/DaoConfig.java
@@ -97,6 +97,8 @@ public class DaoConfig {
public static final int DEFAULT_BUNDLE_BATCH_POOL_SIZE = 20; // 1 for single thread
public static final int DEFAULT_BUNDLE_BATCH_MAX_POOL_SIZE = 100; // 1 for single thread
public static final int DEFAULT_BUNDLE_BATCH_QUEUE_CAPACITY = 200;
+
+ public static final int DEFAULT_BULK_EXPORT_FILE_MAXIMUM_CAPACITY = 1_000;
/**
* Default value for {@link #setMaximumSearchResultCountInTransaction(Integer)}
*
@@ -332,7 +334,7 @@ public class DaoConfig {
/**
* Since 6.2.0
*/
- private int myBulkExportFileMaximumCapacity = 1_000;
+ private int myBulkExportFileMaximumCapacity = DEFAULT_BULK_EXPORT_FILE_MAXIMUM_CAPACITY;
/**
* Since 6.4.0
*/
@@ -1630,7 +1632,7 @@ public class DaoConfig {
*
* For example, if a patient contains a reference to managing organization Organization/FOO
* but FOO is not a valid ID for an organization on the server, the operation will be blocked unless
- * this propery has been set to false
+ * this property has been set to false
*
*
* This property can cause confusing results for clients of the server since searches, includes,
@@ -1648,7 +1650,7 @@ public class DaoConfig {
*
* For example, if a patient contains a reference to managing organization Organization/FOO
* but FOO is not a valid ID for an organization on the server, the operation will be blocked unless
- * this propery has been set to false
+ * this property has been set to false
*
*
* This property can cause confusing results for clients of the server since searches, includes,
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/api/IBulkDataExportJobSchedulingHelper.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/api/IBulkDataExportJobSchedulingHelper.java
index 6f02bd285d1..dc0535f27ee 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/api/IBulkDataExportJobSchedulingHelper.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/api/IBulkDataExportJobSchedulingHelper.java
@@ -35,5 +35,6 @@ public interface IBulkDataExportJobSchedulingHelper {
* Stops all invoked jobs, and then purges them.
*/
@Transactional(propagation = Propagation.NEVER)
+ @Deprecated
void cancelAndPurgeAllJobs();
}
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java
index 4e44a8629da..7dcdddf5c4a 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java
@@ -369,9 +369,10 @@ public class BulkDataExportProvider {
myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToWriter(oo, response.getWriter());
response.getWriter().close();
break;
+ default:
+ ourLog.warn("Unrecognized status encountered: {}. Treating as BUILDING/SUBMITTED", info.getStatus().name());
case BUILDING:
case SUBMITTED:
- default:
if (theRequestDetails.getRequestType() == RequestTypeEnum.DELETE) {
handleDeleteRequest(theJobId, response, info.getStatus());
} else {
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/ResourceDeliveryMessage.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/ResourceDeliveryMessage.java
index e29755f3730..97d068e982a 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/ResourceDeliveryMessage.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/ResourceDeliveryMessage.java
@@ -128,7 +128,8 @@ public class ResourceDeliveryMessage extends BaseResourceMessage implements IRes
public String toString() {
return new ToStringBuilder(this)
.append("mySubscription", mySubscription)
- .append("myPayloadString", myPayloadString)
+ // it isn't safe to log payloads
+ .append("myPayloadString", "[Not Logged]")
.append("myPayload", myPayloadDecoded)
.append("myPayloadId", myPayloadId)
.append("myPartitionId", myPartitionId)
diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml
index 272dbdcfd74..e97e6d2a274 100644
--- a/hapi-fhir-structures-dstu2.1/pom.xml
+++ b/hapi-fhir-structures-dstu2.1/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml
index f8439d45995..2d2d2105102 100644
--- a/hapi-fhir-structures-dstu2/pom.xml
+++ b/hapi-fhir-structures-dstu2/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml
index a66018c6136..ca7693f26fb 100644
--- a/hapi-fhir-structures-dstu3/pom.xml
+++ b/hapi-fhir-structures-dstu3/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/parser/JsonParserDstu3Test.java b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/parser/JsonParserDstu3Test.java
index d2acb997c6e..a4c30f553b7 100644
--- a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/parser/JsonParserDstu3Test.java
+++ b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/parser/JsonParserDstu3Test.java
@@ -2217,7 +2217,7 @@ public class JsonParserDstu3Test {
input = "{\"resourceType\":\"Basic\",\"id\":\"1\",\"text\":{\"status\":\"generated\",\"div\":\"
\"}}";
basic = ourCtx.newJsonParser().parseResource(Basic.class, input);
- assertEquals("", basic.getText().getDivAsString());
+ assertEquals("
", basic.getText().getDivAsString());
input = "{\"resourceType\":\"Basic\",\"id\":\"1\",\"text\":{\"status\":\"generated\",\"div\":\"
\"}}";
basic = ourCtx.newJsonParser().parseResource(Basic.class, input);
diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml
index bc25b48bd32..7f617a13f4f 100644
--- a/hapi-fhir-structures-hl7org-dstu2/pom.xml
+++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml
index 94509717a42..42be98440b7 100644
--- a/hapi-fhir-structures-r4/pom.xml
+++ b/hapi-fhir-structures-r4/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-r4b/pom.xml b/hapi-fhir-structures-r4b/pom.xml
index 74be257e156..e40909134f0 100644
--- a/hapi-fhir-structures-r4b/pom.xml
+++ b/hapi-fhir-structures-r4b/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml
index 65fe915fde1..0a3d52c191d 100644
--- a/hapi-fhir-structures-r5/pom.xml
+++ b/hapi-fhir-structures-r5/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/HapiWorkerContext.java b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/HapiWorkerContext.java
index 6da640e9afa..9ff55998532 100644
--- a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/HapiWorkerContext.java
+++ b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/HapiWorkerContext.java
@@ -447,4 +447,9 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext
public IWorkerContext setPackageTracker(IWorkerContextManager.IPackageLoadingTracker theIPackageLoadingTracker) {
throw new UnsupportedOperationException(Msg.code(220));
}
+
+ @Override
+ public String getSpecUrl() {
+ throw new UnsupportedOperationException(Msg.code(2260));
+ }
}
diff --git a/hapi-fhir-structures-r5/src/main/resources/org/hl7/fhir/r5/model/fhirversion.properties b/hapi-fhir-structures-r5/src/main/resources/org/hl7/fhir/r5/model/fhirversion.properties
index 54eebc4a1a7..7879ad9a06b 100644
--- a/hapi-fhir-structures-r5/src/main/resources/org/hl7/fhir/r5/model/fhirversion.properties
+++ b/hapi-fhir-structures-r5/src/main/resources/org/hl7/fhir/r5/model/fhirversion.properties
@@ -122,7 +122,6 @@ resource.Questionnaire=org.hl7.fhir.r5.model.Questionnaire
resource.QuestionnaireResponse=org.hl7.fhir.r5.model.QuestionnaireResponse
resource.RegulatedAuthorization=org.hl7.fhir.r5.model.RegulatedAuthorization
resource.RelatedPerson=org.hl7.fhir.r5.model.RelatedPerson
-resource.RequestGroup=org.hl7.fhir.r5.model.RequestGroup
resource.RequestOrchestration=org.hl7.fhir.r5.model.RequestOrchestration
resource.Requirements=org.hl7.fhir.r5.model.Requirements
resource.ResearchStudy=org.hl7.fhir.r5.model.ResearchStudy
diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml
index 739892aa343..7eafd84b929 100644
--- a/hapi-fhir-test-utilities/pom.xml
+++ b/hapi-fhir-test-utilities/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/RangeTestHelper.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/RangeTestHelper.java
new file mode 100644
index 00000000000..114a15f52c1
--- /dev/null
+++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/RangeTestHelper.java
@@ -0,0 +1,82 @@
+package ca.uhn.fhir.test.utilities;
+
+/*-
+ * #%L
+ * HAPI FHIR Test Utilities
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.both;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
public class RangeTestHelper {

	/** Default tolerance (+/-) applied when the caller does not supply an explicit range. */
	public static final double THOUSANDTH = .001d;

	/**
	 * Asserts that {@code theValue} lies within {@code theBase} +/- {@link #THOUSANDTH}.
	 *
	 * @throws AssertionError if the value falls outside the range
	 */
	public static void checkInRange(double theBase, double theValue) {
		checkInRange(theBase, THOUSANDTH, theValue);
	}

	/**
	 * Asserts that {@code theValue} lies within {@code theBase} +/- {@code theRange},
	 * bounds inclusive.
	 *
	 * @throws AssertionError if the value falls outside the range
	 */
	public static void checkInRange(double theBase, double theRange, double theValue) {
		checkWithinBounds(theBase - theRange, theBase + theRange, theValue);
	}

	/**
	 * String variant of {@link #checkInRange(double, double)} using the default range.
	 * Both arguments being {@code null} is treated as a match (eases tests).
	 *
	 * @throws AssertionError        if the value is out of range, or exactly one argument is null
	 * @throws NumberFormatException if a non-null argument is not a parseable double
	 */
	public static void checkInRange(String theBase, String theValue) {
		checkInRange(theBase, THOUSANDTH, theValue);
	}

	/**
	 * String variant of {@link #checkInRange(double, double, double)}.
	 * Both arguments being {@code null} is treated as a match (eases tests).
	 *
	 * @throws AssertionError        if the value is out of range, or exactly one argument is null
	 * @throws NumberFormatException if a non-null argument is not a parseable double
	 */
	public static void checkInRange(String theBase, double theRange, String theValue) {
		// ease tests: two nulls compare as equal
		if (theBase == null && theValue == null) {
			return;
		}
		// fail with a clear assertion (instead of a raw NPE from parseDouble) when only one side is null
		if (theBase == null || theValue == null) {
			throw new AssertionError("Expected both or neither argument to be null but was: " + theBase + " / " + theValue);
		}
		checkInRange(Double.parseDouble(theBase), theRange, Double.parseDouble(theValue));
	}

	/**
	 * Asserts {@code theLowerBound <= theValue <= theUpperBound}.
	 *
	 * @throws AssertionError if the value falls outside the bounds
	 */
	public static void checkWithinBounds(double theLowerBound, double theUpperBound, double theValue) {
		if (theValue < theLowerBound || theValue > theUpperBound) {
			throw new AssertionError(
				"Expected a value between " + theLowerBound + " and " + theUpperBound + " but was: " + theValue);
		}
	}

	/**
	 * String variant of {@link #checkWithinBounds(double, double, double)}. All three
	 * arguments must be non-null, parseable doubles.
	 *
	 * @throws AssertionError        if any argument is null or the value is out of bounds
	 * @throws NumberFormatException if an argument is not a parseable double
	 */
	public static void checkWithinBounds(String theLowerBound, String theUpperBound, String theValue) {
		requireNonNullArg(theLowerBound, "theLowerBound");
		requireNonNullArg(theUpperBound, "theUpperBound");
		requireNonNullArg(theValue, "theValue");
		checkWithinBounds(Double.parseDouble(theLowerBound), Double.parseDouble(theUpperBound), Double.parseDouble(theValue));
	}

	// Fails with the argument name so the offending parameter is obvious in test output
	private static void requireNonNullArg(Object theArg, String theName) {
		if (theArg == null) {
			throw new AssertionError(theName + " must not be null");
		}
	}

}
diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/jpa/JpaModelScannerAndVerifier.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/jpa/JpaModelScannerAndVerifier.java
index e2b49861a89..a64d97e1ba4 100644
--- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/jpa/JpaModelScannerAndVerifier.java
+++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/jpa/JpaModelScannerAndVerifier.java
@@ -367,6 +367,9 @@ public class JpaModelScannerAndVerifier {
if (ourReservedWords.contains(theColumnName)) {
throw new IllegalArgumentException(Msg.code(1631) + "Column name is a reserved word: " + theColumnName + " found on " + theElement);
}
+ if (theColumnName.startsWith("_")) {
+ throw new IllegalArgumentException(Msg.code(2272) + "Column name "+ theColumnName +" starts with an '_' (underscore). This is not permitted for oracle field names. Found on " + theElement);
+ }
}
private static int calculateIndexLength(String[] theColumnNames, Map
theColumnNameToLength, String theIndexName) {
diff --git a/hapi-fhir-test-utilities/src/main/resources/ca/uhn/fhir/narrative/narrative-with-fragment.properties b/hapi-fhir-test-utilities/src/main/resources/ca/uhn/fhir/narrative/narrative-with-fragment.properties
index 136bd283980..b8b917c2251 100644
--- a/hapi-fhir-test-utilities/src/main/resources/ca/uhn/fhir/narrative/narrative-with-fragment.properties
+++ b/hapi-fhir-test-utilities/src/main/resources/ca/uhn/fhir/narrative/narrative-with-fragment.properties
@@ -4,6 +4,7 @@ bundle.resourceType = Bundle
bundle.style = thymeleaf
bundle.narrative = classpath:ca/uhn/fhir/narrative/narrative-with-fragment-parent.html
+# Fragment template
fragment1.fragmentName = MyFragment
fragment1.style = thymeleaf
fragment1.narrative = classpath:ca/uhn/fhir/narrative/narrative-with-fragment-child.html
diff --git a/hapi-fhir-test-utilities/src/test/java/ca/uhn/fhir/test/utilities/RangeTestHelperTest.java b/hapi-fhir-test-utilities/src/test/java/ca/uhn/fhir/test/utilities/RangeTestHelperTest.java
new file mode 100644
index 00000000000..591444f240b
--- /dev/null
+++ b/hapi-fhir-test-utilities/src/test/java/ca/uhn/fhir/test/utilities/RangeTestHelperTest.java
@@ -0,0 +1,163 @@
+package ca.uhn.fhir.test.utilities;
+
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
+class RangeTestHelperTest {
+
+
+ @Nested
+ public class DefaultRange {
+
+ @Test
+ void checkInRange() {
+ RangeTestHelper.checkInRange(.83d, .829999999d);
+ }
+
+ @Test
+ void checkLower() {
+ AssertionError thrown = assertThrows(
+ AssertionError.class,
+ () -> RangeTestHelper.checkInRange(.91, .83)
+ );
+ }
+
+ @Test
+ void checkHigher() {
+ AssertionError thrown = assertThrows(
+ AssertionError.class,
+ () -> RangeTestHelper.checkInRange(.26, .25)
+ );
+ }
+
+ @Nested
+ public class WithinBounds {
+
+ @Test
+ void checkInRange() {
+ RangeTestHelper.checkWithinBounds(.91001, .91002, .910013);
+ }
+
+ @Test
+ void checkLower() {
+ AssertionError thrown = assertThrows(
+ AssertionError.class,
+ () -> RangeTestHelper.checkWithinBounds(.91001, .91002, .9013)
+ );
+ }
+
+ @Test
+ void checkHigher() {
+ AssertionError thrown = assertThrows(
+ AssertionError.class,
+ () -> RangeTestHelper.checkWithinBounds(.87, .88, .9)
+ );
+ }
+
+ @Nested
+ public class PassingStrings {
+ @Test
+ void checkInRange() {
+ RangeTestHelper.checkWithinBounds(".91001", ".91002", ".910013");
+ }
+
+ @Test
+ void checkLower() {
+ AssertionError thrown = assertThrows(
+ AssertionError.class,
+ () -> RangeTestHelper.checkWithinBounds(".91001", ".91002", ".9013")
+ );
+ }
+
+ @Test
+ void checkHigher() {
+ AssertionError thrown = assertThrows(
+ AssertionError.class,
+ () -> RangeTestHelper.checkWithinBounds(".87", ".88", ".9")
+ );
+ }
+ }
+ }
+
+ @Nested
+ public class PassingStrings {
+
+ @Test
+ void checkInRange() {
+ RangeTestHelper.checkInRange("0.83", "0.829999999");
+ }
+
+ @Test
+ void checkLower() {
+ AssertionError thrown = assertThrows(
+ AssertionError.class,
+ () -> RangeTestHelper.checkInRange(".91", ".83")
+ );
+ }
+
+ @Test
+ void checkHigher() {
+ AssertionError thrown = assertThrows(
+ AssertionError.class,
+ () -> RangeTestHelper.checkInRange(".26", "0.25")
+ );
+ }
+ }
+
+ }
+
+ @Nested
+ public class ProvidedRange {
+
+ @Test
+ void checkInRange() {
+ // equals to higher bound
+ RangeTestHelper.checkInRange(.83, .1, .83);
+ RangeTestHelper.checkInRange(.831, .02, .833);
+ }
+
+ @Test
+ void checkLower() {
+ AssertionError thrown = assertThrows(
+ AssertionError.class,
+ () -> RangeTestHelper.checkInRange(.84, .01, .82)
+ );
+ }
+
+ @Test
+ void checkHigher() {
+ AssertionError thrown = assertThrows(
+ AssertionError.class,
+ () -> RangeTestHelper.checkInRange(.2511,.0001, .2513)
+ );
+ }
+
+ @Nested
+ public class PassingStrings {
+
+ @Test
+ void checkInRange() {
+ RangeTestHelper.checkInRange(".82", .01, ".83");
+ RangeTestHelper.checkInRange(".83d", .829999999d, ".8312d");
+ }
+
+ @Test
+ void checkLower() {
+ AssertionError thrown = assertThrows(
+ AssertionError.class,
+ () -> RangeTestHelper.checkInRange(".91", .02, ".83")
+ );
+ }
+
+ @Test
+ void checkHigher() {
+ AssertionError thrown = assertThrows(
+ AssertionError.class,
+ () -> RangeTestHelper.checkInRange(".26", .03, "0.3")
+ );
+ }
+ }
+ }
+}
diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml
index 92e68cf0795..7894c345226 100644
--- a/hapi-fhir-testpage-overlay/pom.xml
+++ b/hapi-fhir-testpage-overlay/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml
index 4b971f6c5ad..e3b8da53ffe 100644
--- a/hapi-fhir-validation-resources-dstu2.1/pom.xml
+++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml
index 5fac1e3d10a..7a95f37b9ca 100644
--- a/hapi-fhir-validation-resources-dstu2/pom.xml
+++ b/hapi-fhir-validation-resources-dstu2/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml
index 7aaa6e4aa3f..18c0bc8892a 100644
--- a/hapi-fhir-validation-resources-dstu3/pom.xml
+++ b/hapi-fhir-validation-resources-dstu3/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml
index 7000646f160..902d40d49b0 100644
--- a/hapi-fhir-validation-resources-r4/pom.xml
+++ b/hapi-fhir-validation-resources-r4/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml
index 96cdc9f8afa..2283b5ba37b 100644
--- a/hapi-fhir-validation-resources-r5/pom.xml
+++ b/hapi-fhir-validation-resources-r5/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml
index a8079c72ef9..9465d75881b 100644
--- a/hapi-fhir-validation/pom.xml
+++ b/hapi-fhir-validation/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/VersionSpecificWorkerContextWrapper.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/VersionSpecificWorkerContextWrapper.java
index 6baac0f80c2..8c6c86a5304 100644
--- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/VersionSpecificWorkerContextWrapper.java
+++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/VersionSpecificWorkerContextWrapper.java
@@ -171,6 +171,13 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo
throw new UnsupportedOperationException(Msg.code(2266));
}
+ @Override
+ public String getSpecUrl() {
+
+ return "";
+
+ }
+
@Override
public PackageInformation getPackageForUrl(String s) {
throw new UnsupportedOperationException(Msg.code(2236));
diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml
index 4f824bb81c8..f57efeddec6 100644
--- a/hapi-tinder-plugin/pom.xml
+++ b/hapi-tinder-plugin/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../pom.xml
diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml
index d0decbb3444..62328d32bd7 100644
--- a/hapi-tinder-test/pom.xml
+++ b/hapi-tinder-test/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../pom.xml
diff --git a/pom.xml b/pom.xml
index 6a77df38979..46eb4230597 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2,11 +2,12 @@
+
4.0.0
ca.uhn.hapi.fhir
hapi-fhir
pom
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
HAPI-FHIR
An open-source implementation of the FHIR specification in Java.
https://hapifhir.io
@@ -865,10 +866,12 @@
- 5.6.84
+ 5.6.881
1.0.3
-Dfile.encoding=UTF-8 -Xmx2048m
+ 1.0.3
+
yyyy-MM-dd'T'HH:mm:ss'Z'
UTF-8
@@ -903,7 +906,7 @@
3.0.0
4.2.0
3.0.3
- 10.0.12
+ 10.0.13
3.0.2
5.9.1
0.50.40
@@ -1333,7 +1336,7 @@
org.springdoc
springdoc-openapi-ui
- 1.5.13
+ 1.6.14
net.sourceforge.htmlunit
@@ -1869,7 +1872,7 @@
org.postgresql
postgresql
- 42.5.0
+ 42.5.1
org.quartz-scheduler
diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml
index aabb0d585f0..37ee9cf3992 100644
--- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml
+++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../../pom.xml
diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml
index 0ccbc522c0a..8a888a812eb 100644
--- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml
+++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../../pom.xml
diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml
index 4daf5970442..dca68e8f350 100644
--- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml
+++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.5.0-SNAPSHOT
+ 6.5.1-SNAPSHOT
../../pom.xml