val = myMatchResourceUrlService.processMatchUrl(matchUrl, resType, theRequestDetails);
- if (val.size() > 1) {
- throw new InvalidRequestException(
- "Unable to process " + theActionName + " - Request would cause multiple resources to match URL: \"" + matchUrl + "\". Does transaction request contain duplicates?");
- }
- }
- }
- }
-
- for (IdDt next : theAllIds) {
- IdDt replacement = theIdSubstitutions.get(next);
- if (replacement == null) {
- continue;
- }
- if (replacement.equals(next)) {
- continue;
- }
- ourLog.debug("Placeholder resource ID \"{}\" was replaced with permanent ID \"{}\"", next, replacement);
- }
- }
-
- private String extractTransactionUrlOrThrowException(Entry nextEntry, HTTPVerbEnum verb) {
- String url = nextEntry.getRequest().getUrl();
- if (isBlank(url)) {
- throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionMissingUrl", verb.name()));
- }
- return url;
- }
-
- /**
- * This method is called for nested bundles (e.g. if we received a transaction with an entry that
- * was a GET search, this method is called on the bundle for the search result, that will be placed in the
- * outer bundle). This method applies the _summary and _content parameters to the output of
- * that bundle.
- *
- * TODO: This isn't the most efficient way of doing this.. hopefully we can come up with something better in the future.
- */
- private IBaseResource filterNestedBundle(RequestDetails theRequestDetails, IBaseResource theResource) {
- IParser p = getContext().newJsonParser();
- RestfulServerUtils.configureResponseParser(theRequestDetails, p);
- return p.parseResource(theResource.getClass(), p.encodeResourceToString(theResource));
- }
@Override
public MetaDt metaGetOperation(RequestDetails theRequestDetails) {
@@ -589,31 +49,6 @@ public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao {
return retVal;
}
- private IFhirResourceDao<? extends IBaseResource> toDao(UrlParts theParts, String theVerb, String theUrl) {
- RuntimeResourceDefinition resType;
- try {
- resType = getContext().getResourceDefinition(theParts.getResourceType());
- } catch (DataFormatException e) {
- String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionInvalidUrl", theVerb, theUrl);
- throw new InvalidRequestException(msg);
- }
- IFhirResourceDao<? extends IBaseResource> dao = null;
- if (resType != null) {
- dao = this.myDaoRegistry.getResourceDaoOrNull(resType.getImplementingClass());
- }
- if (dao == null) {
- String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionInvalidUrl", theVerb, theUrl);
- throw new InvalidRequestException(msg);
- }
-
- // if (theParts.getResourceId() == null && theParts.getParams() == null) {
- // String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionInvalidUrl", theVerb, theUrl);
- // throw new InvalidRequestException(msg);
- // }
-
- return dao;
- }
-
protected MetaDt toMetaDt(Collection<TagDefinition> tagDefinitions) {
MetaDt retVal = new MetaDt();
for (TagDefinition next : tagDefinitions) {
@@ -632,105 +67,9 @@ public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao {
return retVal;
}
- @Transactional(propagation = Propagation.NEVER)
- @Override
- public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) {
- if (theRequestDetails != null) {
- ActionRequestDetails requestDetails = new ActionRequestDetails(theRequestDetails, theRequest, "Bundle", null);
- notifyInterceptors(RestOperationTypeEnum.TRANSACTION, requestDetails);
- }
-
- String actionName = "Transaction";
- return transaction((ServletRequestDetails) theRequestDetails, theRequest, actionName);
- }
-
- private Bundle transaction(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName) {
- markRequestAsProcessingSubRequest(theRequestDetails);
- try {
- return doTransaction(theRequestDetails, theRequest, theActionName);
- } finally {
- clearRequestAsProcessingSubRequest(theRequestDetails);
- }
- }
-
- private static void handleTransactionCreateOrUpdateOutcome(Map<IdDt, IdDt> idSubstitutions, Map<IdDt, DaoMethodOutcome> idToPersistedOutcome, IdDt nextResourceId, DaoMethodOutcome outcome,
- Entry newEntry, String theResourceType, IResource theRes) {
- IdDt newId = (IdDt) outcome.getId().toUnqualifiedVersionless();
- IdDt resourceId = isPlaceholder(nextResourceId) ? nextResourceId : nextResourceId.toUnqualifiedVersionless();
- if (newId.equals(resourceId) == false) {
- idSubstitutions.put(resourceId, newId);
- if (isPlaceholder(resourceId)) {
- /*
- * The correct way for substitution IDs to be is to be with no resource type, but we'll accept the qualified kind too just to be lenient.
- */
- idSubstitutions.put(new IdDt(theResourceType + '/' + resourceId.getValue()), newId);
- }
- }
- idToPersistedOutcome.put(newId, outcome);
- if (outcome.getCreated().booleanValue()) {
- newEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_201_CREATED));
- } else {
- newEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_200_OK));
- }
- newEntry.getResponse().setLastModified(ResourceMetadataKeyEnum.UPDATED.get(theRes));
- }
-
- private static boolean isPlaceholder(IdDt theId) {
- if (theId.getValue() != null) {
- return theId.getValue().startsWith("urn:oid:") || theId.getValue().startsWith("urn:uuid:");
- }
- return false;
- }
-
- private static String toStatusString(int theStatusCode) {
- return theStatusCode + " " + defaultString(Constants.HTTP_STATUS_NAMES.get(theStatusCode));
- }
-
@Override
public IBaseBundle processMessage(RequestDetails theRequestDetails, IBaseBundle theMessage) {
return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented();
}
-
- /**
- * Transaction Order, per the spec:
- *
- * Process any DELETE interactions
- * Process any POST interactions
- * Process any PUT interactions
- * Process any GET interactions
- */
- public class TransactionSorter implements Comparator<Entry> {
-
- @Override
- public int compare(Entry theO1, Entry theO2) {
- int o1 = toOrder(theO1);
- int o2 = toOrder(theO2);
-
- return o1 - o2;
- }
-
- private int toOrder(Entry theO1) {
- int o1 = 0;
- if (theO1.getRequest().getMethodElement().getValueAsEnum() != null) {
- switch (theO1.getRequest().getMethodElement().getValueAsEnum()) {
- case DELETE:
- o1 = 1;
- break;
- case POST:
- o1 = 2;
- break;
- case PUT:
- o1 = 3;
- break;
- case GET:
- o1 = 4;
- break;
- }
- }
- return o1;
- }
-
- }
-
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessorVersionAdapterDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessorVersionAdapterDstu2.java
new file mode 100644
index 00000000000..b1b87a079e1
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessorVersionAdapterDstu2.java
@@ -0,0 +1,171 @@
+package ca.uhn.fhir.jpa.dao;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.model.api.IResource;
+import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
+import ca.uhn.fhir.model.dstu2.resource.Bundle;
+import ca.uhn.fhir.model.dstu2.resource.OperationOutcome;
+import ca.uhn.fhir.model.dstu2.valueset.BundleTypeEnum;
+import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum;
+import ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum;
+import ca.uhn.fhir.model.dstu2.valueset.IssueTypeEnum;
+import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import org.hl7.fhir.exceptions.FHIRException;
+import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
+import java.util.Date;
+import java.util.List;
+
+public class TransactionProcessorVersionAdapterDstu2 implements TransactionProcessor.ITransactionProcessorVersionAdapter<Bundle, Bundle.Entry> {
+ @Override
+ public void setResponseStatus(Bundle.Entry theBundleEntry, String theStatus) {
+ theBundleEntry.getResponse().setStatus(theStatus);
+ }
+
+ @Override
+ public void setResponseLastModified(Bundle.Entry theBundleEntry, Date theLastModified) {
+ theBundleEntry.getResponse().setLastModified(theLastModified, TemporalPrecisionEnum.MILLI);
+ }
+
+ @Override
+ public void setResource(Bundle.Entry theBundleEntry, IBaseResource theResource) {
+ theBundleEntry.setResource((IResource) theResource);
+ }
+
+ @Override
+ public IBaseResource getResource(Bundle.Entry theBundleEntry) {
+ return theBundleEntry.getResource();
+ }
+
+ @Override
+ public String getBundleType(Bundle theRequest) {
+ if (theRequest.getType() == null) {
+ return null;
+ }
+ return theRequest.getTypeElement().getValue();
+ }
+
+ @Override
+ public void populateEntryWithOperationOutcome(BaseServerResponseException theCaughtEx, Bundle.Entry theEntry) {
+ OperationOutcome oo = new OperationOutcome();
+ oo.addIssue()
+ .setSeverity(IssueSeverityEnum.ERROR)
+ .setDiagnostics(theCaughtEx.getMessage())
+ .setCode(IssueTypeEnum.EXCEPTION);
+ theEntry.setResource(oo);
+ }
+
+ @Override
+ public Bundle createBundle(String theBundleType) {
+ Bundle resp = new Bundle();
+ try {
+ resp.setType(BundleTypeEnum.forCode(theBundleType));
+ } catch (FHIRException theE) {
+ throw new InternalErrorException("Unknown bundle type: " + theBundleType);
+ }
+ return resp;
+ }
+
+ @Override
+ public List<Bundle.Entry> getEntries(Bundle theRequest) {
+ return theRequest.getEntry();
+ }
+
+ @Override
+ public void addEntry(Bundle theBundle, Bundle.Entry theEntry) {
+ theBundle.addEntry(theEntry);
+ }
+
+ @Override
+ public Bundle.Entry addEntry(Bundle theBundle) {
+ return theBundle.addEntry();
+ }
+
+ @Override
+ public String getEntryRequestVerb(FhirContext theContext, Bundle.Entry theEntry) {
+ String retVal = null;
+ HTTPVerbEnum value = theEntry.getRequest().getMethodElement().getValueAsEnum();
+ if (value != null) {
+ retVal = value.getCode();
+ }
+ return retVal;
+ }
+
+ @Override
+ public String getFullUrl(Bundle.Entry theEntry) {
+ return theEntry.getFullUrl();
+ }
+
+ @Override
+ public String getEntryIfNoneExist(Bundle.Entry theEntry) {
+ return theEntry.getRequest().getIfNoneExist();
+ }
+
+ @Override
+ public String getEntryRequestUrl(Bundle.Entry theEntry) {
+ return theEntry.getRequest().getUrl();
+ }
+
+ @Override
+ public void setResponseLocation(Bundle.Entry theEntry, String theResponseLocation) {
+ theEntry.getResponse().setLocation(theResponseLocation);
+ }
+
+ @Override
+ public void setResponseETag(Bundle.Entry theEntry, String theEtag) {
+ theEntry.getResponse().setEtag(theEtag);
+ }
+
+ @Override
+ public String getEntryRequestIfMatch(Bundle.Entry theEntry) {
+ return theEntry.getRequest().getIfMatch();
+ }
+
+ @Override
+ public String getEntryRequestIfNoneExist(Bundle.Entry theEntry) {
+ return theEntry.getRequest().getIfNoneExist();
+ }
+
+ @Override
+ public String getEntryRequestIfNoneMatch(Bundle.Entry theEntry) {
+ return theEntry.getRequest().getIfNoneMatch();
+ }
+
+ @Override
+ public void setResponseOutcome(Bundle.Entry theEntry, IBaseOperationOutcome theOperationOutcome) {
+ theEntry.setResource((IResource) theOperationOutcome);
+ }
+
+ @Override
+ public void setRequestVerb(Bundle.Entry theEntry, String theVerb) {
+ theEntry.getRequest().setMethod(HTTPVerbEnum.forCode(theVerb));
+ }
+
+ @Override
+ public void setRequestUrl(Bundle.Entry theEntry, String theUrl) {
+ theEntry.getRequest().setUrl(theUrl);
+ }
+
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java
index 708425d5fdb..c5bf0ddf606 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java
@@ -1,6 +1,6 @@
package ca.uhn.fhir.jpa.dao.data;
-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
@@ -38,13 +38,13 @@ public interface IBulkExportJobDao extends JpaRepository<BulkExportJobEntity, Long> {
 	Optional<BulkExportJobEntity> findByJobId(@Param("jobid") String theUuid);
 	@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myStatus = :status")
-	Slice<BulkExportJobEntity> findByStatus(Pageable thePage, @Param("status") BulkJobStatusEnum theSubmitted);
+	Slice<BulkExportJobEntity> findByStatus(Pageable thePage, @Param("status") BulkExportJobStatusEnum theSubmitted);
 	@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myExpiry < :cutoff")
 	Slice<BulkExportJobEntity> findByExpiry(Pageable thePage, @Param("cutoff") Date theCutoff);
 	@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myRequest = :request AND j.myCreated > :createdAfter AND j.myStatus <> :status ORDER BY j.myCreated DESC")
-	Slice<BulkExportJobEntity> findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkJobStatusEnum theNotStatus);
+	Slice<BulkExportJobEntity> findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkExportJobStatusEnum theNotStatus);
@Modifying
@Query("DELETE FROM BulkExportJobEntity t")
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobDao.java
new file mode 100644
index 00000000000..dccaa953eb8
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobDao.java
@@ -0,0 +1,40 @@
+package ca.uhn.fhir.jpa.dao.data;
+
+import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
+import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Slice;
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
+
+import java.util.Optional;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+public interface IBulkImportJobDao extends JpaRepository<BulkImportJobEntity, Long> {
+
+ @Query("SELECT j FROM BulkImportJobEntity j WHERE j.myJobId = :jobid")
+ Optional<BulkImportJobEntity> findByJobId(@Param("jobid") String theUuid);
+
+ @Query("SELECT j FROM BulkImportJobEntity j WHERE j.myStatus = :status")
+ Slice<BulkImportJobEntity> findByStatus(Pageable thePage, @Param("status") BulkImportJobStatusEnum theStatus);
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobFileDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobFileDao.java
new file mode 100644
index 00000000000..c53e49f95a4
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobFileDao.java
@@ -0,0 +1,43 @@
+package ca.uhn.fhir.jpa.dao.data;
+
+import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
+import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity;
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
+
+import java.util.List;
+import java.util.Optional;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+public interface IBulkImportJobFileDao extends JpaRepository<BulkImportJobFileEntity, Long> {
+
+ @Query("SELECT f FROM BulkImportJobFileEntity f WHERE f.myJob.myJobId = :jobId ORDER BY f.myFileSequence ASC")
+ List<BulkImportJobFileEntity> findAllForJob(@Param("jobId") String theJobId);
+
+ @Query("SELECT f FROM BulkImportJobFileEntity f WHERE f.myJob = :job AND f.myFileSequence = :fileIndex")
+ Optional<BulkImportJobFileEntity> findForJob(@Param("job") BulkImportJobEntity theJob, @Param("fileIndex") int theFileIndex);
+
+ @Query("SELECT f.myId FROM BulkImportJobFileEntity f WHERE f.myJob.myJobId = :jobId ORDER BY f.myFileSequence ASC")
+ List<Long> findAllIdsForJob(@Param("jobId") String theJobId);
+
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirSystemDaoDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirSystemDaoDstu3.java
index 0af11e3a082..96019ae6a21 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirSystemDaoDstu3.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirSystemDaoDstu3.java
@@ -22,18 +22,13 @@ package ca.uhn.fhir.jpa.dao.dstu3;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao;
import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2;
-import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import org.hl7.fhir.dstu3.model.Bundle;
-import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.dstu3.model.Meta;
import org.hl7.fhir.instance.model.api.IBaseBundle;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.transaction.annotation.Propagation;
-import org.springframework.transaction.annotation.Transactional;
import javax.annotation.PostConstruct;
import javax.persistence.TypedQuery;
@@ -42,14 +37,10 @@ import java.util.List;
public class FhirSystemDaoDstu3 extends BaseHapiFhirSystemDao<Bundle, Meta> {
- @Autowired
- private TransactionProcessor myTransactionProcessor;
-
@Override
@PostConstruct
public void start() {
super.start();
- myTransactionProcessor.setDao(this);
}
@Override
@@ -88,12 +79,5 @@ public class FhirSystemDaoDstu3 extends BaseHapiFhirSystemDao {
return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented();
}
- @Transactional(propagation = Propagation.NEVER)
- @Override
- public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) {
- return myTransactionProcessor.transaction(theRequestDetails, theRequest);
- }
-
-
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java
index 5da2372c9b6..e012ee235ad 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java
@@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.dao.expunge;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
+import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchInclude;
@@ -123,6 +125,8 @@ public class ExpungeEverythingService {
counter.addAndGet(expungeEverythingByType(NpmPackageVersionEntity.class));
counter.addAndGet(expungeEverythingByType(NpmPackageEntity.class));
counter.addAndGet(expungeEverythingByType(SearchParamPresent.class));
+ counter.addAndGet(expungeEverythingByType(BulkImportJobFileEntity.class));
+ counter.addAndGet(expungeEverythingByType(BulkImportJobEntity.class));
counter.addAndGet(expungeEverythingByType(ForcedId.class));
counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamDate.class));
counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamNumber.class));
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4.java
index 04baaca4922..a369f3d7e5f 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4.java
@@ -22,42 +22,20 @@ package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao;
import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2;
-import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
-import com.google.common.annotations.VisibleForTesting;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Meta;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.transaction.annotation.Propagation;
-import org.springframework.transaction.annotation.Transactional;
-import javax.annotation.PostConstruct;
import javax.persistence.TypedQuery;
import java.util.Collection;
import java.util.List;
public class FhirSystemDaoR4 extends BaseHapiFhirSystemDao<Bundle, Meta> {
- @Autowired
- private TransactionProcessor myTransactionProcessor;
-
- @VisibleForTesting
- public void setTransactionProcessorForUnitTest(TransactionProcessor theTransactionProcessor) {
- myTransactionProcessor = theTransactionProcessor;
- }
-
- @Override
- @PostConstruct
- public void start() {
- super.start();
- myTransactionProcessor.setDao(this);
- }
-
-
@Override
public Meta metaGetOperation(RequestDetails theRequestDetails) {
// Notify interceptors
@@ -95,10 +73,4 @@ public class FhirSystemDaoR4 extends BaseHapiFhirSystemDao {
return retVal;
}
- @Transactional(propagation = Propagation.NEVER)
- @Override
- public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) {
- return myTransactionProcessor.transaction(theRequestDetails, theRequest);
- }
-
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoR5.java
index 9d13bae6d1e..919d831e4a6 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoR5.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoR5.java
@@ -22,20 +22,14 @@ package ca.uhn.fhir.jpa.dao.r5;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao;
import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2;
-import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r5.model.Bundle;
-import org.hl7.fhir.r5.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.r5.model.Meta;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.transaction.annotation.Propagation;
-import org.springframework.transaction.annotation.Transactional;
-import javax.annotation.PostConstruct;
import javax.persistence.TypedQuery;
import java.util.Collection;
import java.util.List;
@@ -44,17 +38,6 @@ public class FhirSystemDaoR5 extends BaseHapiFhirSystemDao<Bundle, Meta> {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirSystemDaoR5.class);
- @Autowired
- private TransactionProcessor myTransactionProcessor;
-
- @Override
- @PostConstruct
- public void start() {
- super.start();
- myTransactionProcessor.setDao(this);
- }
-
-
@Override
public Meta metaGetOperation(RequestDetails theRequestDetails) {
// Notify interceptors
@@ -92,10 +75,5 @@ public class FhirSystemDaoR5 extends BaseHapiFhirSystemDao {
return retVal;
}
- @Transactional(propagation = Propagation.NEVER)
- @Override
- public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) {
- return myTransactionProcessor.transaction(theRequestDetails, theRequest);
- }
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java
index 05f68783fa7..f2f8a092715 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java
@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.entity;
* #L%
*/
-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hl7.fhir.r5.model.InstantType;
@@ -51,9 +51,9 @@ import static org.apache.commons.lang3.StringUtils.left;
@Entity
@Table(name = "HFJ_BLK_EXPORT_JOB", uniqueConstraints = {
- @UniqueConstraint(name = "IDX_BLKEX_JOB_ID", columnNames = "JOB_ID")
+ @UniqueConstraint(name = "IDX_BLKEX_JOB_ID", columnNames = "JOB_ID")
}, indexes = {
- @Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME")
+ @Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME")
})
public class BulkExportJobEntity implements Serializable {
@@ -70,7 +70,7 @@ public class BulkExportJobEntity implements Serializable {
@Enumerated(EnumType.STRING)
@Column(name = "JOB_STATUS", length = 10, nullable = false)
- private BulkJobStatusEnum myStatus;
+ private BulkExportJobStatusEnum myStatus;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "CREATED_TIME", nullable = false)
private Date myCreated;
@@ -156,11 +156,11 @@ public class BulkExportJobEntity implements Serializable {
return b.toString();
}
- public BulkJobStatusEnum getStatus() {
+ public BulkExportJobStatusEnum getStatus() {
return myStatus;
}
- public void setStatus(BulkJobStatusEnum theStatus) {
+ public void setStatus(BulkExportJobStatusEnum theStatus) {
if (myStatus != theStatus) {
myStatusTime = new Date();
myStatus = theStatus;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java
new file mode 100644
index 00000000000..b7de7e9cc7b
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java
@@ -0,0 +1,157 @@
+package ca.uhn.fhir.jpa.entity;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
+import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.EnumType;
+import javax.persistence.Enumerated;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+import javax.persistence.Temporal;
+import javax.persistence.TemporalType;
+import javax.persistence.UniqueConstraint;
+import javax.persistence.Version;
+import java.io.Serializable;
+import java.util.Date;
+
+import static org.apache.commons.lang3.StringUtils.left;
+
+@Entity
+@Table(name = "HFJ_BLK_IMPORT_JOB", uniqueConstraints = {
+ @UniqueConstraint(name = "IDX_BLKIM_JOB_ID", columnNames = "JOB_ID")
+})
+public class BulkImportJobEntity implements Serializable {
+
+ @Id
+ @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKIMJOB_PID")
+ @SequenceGenerator(name = "SEQ_BLKIMJOB_PID", sequenceName = "SEQ_BLKIMJOB_PID")
+ @Column(name = "PID")
+ private Long myId;
+
+ @Column(name = "JOB_ID", length = Search.UUID_COLUMN_LENGTH, nullable = false, updatable = false)
+ private String myJobId;
+ @Column(name = "JOB_DESC", nullable = true, length = BulkExportJobEntity.STATUS_MESSAGE_LEN)
+ private String myJobDescription;
+ @Enumerated(EnumType.STRING)
+ @Column(name = "JOB_STATUS", length = 10, nullable = false)
+ private BulkImportJobStatusEnum myStatus;
+ @Version
+ @Column(name = "OPTLOCK", nullable = false)
+ private int myVersion;
+ @Column(name = "FILE_COUNT", nullable = false)
+ private int myFileCount;
+ @Temporal(TemporalType.TIMESTAMP)
+ @Column(name = "STATUS_TIME", nullable = false)
+ private Date myStatusTime;
+ @Column(name = "STATUS_MESSAGE", nullable = true, length = BulkExportJobEntity.STATUS_MESSAGE_LEN)
+ private String myStatusMessage;
+ @Column(name = "ROW_PROCESSING_MODE", length = 20, nullable = false, updatable = false)
+ @Enumerated(EnumType.STRING)
+ private JobFileRowProcessingModeEnum myRowProcessingMode;
+ @Column(name = "BATCH_SIZE", nullable = false, updatable = false)
+ private int myBatchSize;
+
+ public String getJobDescription() {
+ return myJobDescription;
+ }
+
+ public void setJobDescription(String theJobDescription) {
+ myJobDescription = left(theJobDescription, BulkExportJobEntity.STATUS_MESSAGE_LEN);
+ }
+
+ public JobFileRowProcessingModeEnum getRowProcessingMode() {
+ return myRowProcessingMode;
+ }
+
+ public void setRowProcessingMode(JobFileRowProcessingModeEnum theRowProcessingMode) {
+ myRowProcessingMode = theRowProcessingMode;
+ }
+
+ public Date getStatusTime() {
+ return myStatusTime;
+ }
+
+ public void setStatusTime(Date theStatusTime) {
+ myStatusTime = theStatusTime;
+ }
+
+ public int getFileCount() {
+ return myFileCount;
+ }
+
+ public void setFileCount(int theFileCount) {
+ myFileCount = theFileCount;
+ }
+
+ public String getJobId() {
+ return myJobId;
+ }
+
+ public void setJobId(String theJobId) {
+ myJobId = theJobId;
+ }
+
+ public BulkImportJobStatusEnum getStatus() {
+ return myStatus;
+ }
+
+ /**
+ * Sets the status, updates the status time, and clears the status message
+ */
+ public void setStatus(BulkImportJobStatusEnum theStatus) {
+ if (myStatus != theStatus) {
+ myStatus = theStatus;
+ setStatusTime(new Date());
+ setStatusMessage(null);
+ }
+ }
+
+ public String getStatusMessage() {
+ return myStatusMessage;
+ }
+
+ public void setStatusMessage(String theStatusMessage) {
+ myStatusMessage = left(theStatusMessage, BulkExportJobEntity.STATUS_MESSAGE_LEN);
+ }
+
+ public BulkImportJobJson toJson() {
+ return new BulkImportJobJson()
+ .setProcessingMode(getRowProcessingMode())
+ .setFileCount(getFileCount())
+ .setJobDescription(getJobDescription());
+ }
+
+ public int getBatchSize() {
+ return myBatchSize;
+ }
+
+ public void setBatchSize(int theBatchSize) {
+ myBatchSize = theBatchSize;
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java
new file mode 100644
index 00000000000..b1dd778a2c8
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java
@@ -0,0 +1,104 @@
+package ca.uhn.fhir.jpa.entity;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.ForeignKey;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.Index;
+import javax.persistence.JoinColumn;
+import javax.persistence.Lob;
+import javax.persistence.ManyToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+import java.io.Serializable;
+import java.nio.charset.StandardCharsets;
+
+@Entity
+@Table(name = "HFJ_BLK_IMPORT_JOBFILE", indexes = {
+ @Index(name = "IDX_BLKIM_JOBFILE_JOBID", columnList = "JOB_PID")
+})
+public class BulkImportJobFileEntity implements Serializable {
+
+ @Id
+ @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKIMJOBFILE_PID")
+ @SequenceGenerator(name = "SEQ_BLKIMJOBFILE_PID", sequenceName = "SEQ_BLKIMJOBFILE_PID")
+ @Column(name = "PID")
+ private Long myId;
+
+ @ManyToOne
+ @JoinColumn(name = "JOB_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_BLKIMJOBFILE_JOB"))
+ private BulkImportJobEntity myJob;
+
+ @Column(name = "FILE_SEQ", nullable = false)
+ private int myFileSequence;
+
+ @Lob
+ @Column(name = "JOB_CONTENTS", nullable = false)
+ private byte[] myContents;
+
+ @Column(name = "TENANT_NAME", nullable = true, length = PartitionEntity.MAX_NAME_LENGTH)
+ private String myTenantName;
+
+ public BulkImportJobEntity getJob() {
+ return myJob;
+ }
+
+ public void setJob(BulkImportJobEntity theJob) {
+ myJob = theJob;
+ }
+
+ public int getFileSequence() {
+ return myFileSequence;
+ }
+
+ public void setFileSequence(int theFileSequence) {
+ myFileSequence = theFileSequence;
+ }
+
+ public String getContents() {
+ return new String(myContents, StandardCharsets.UTF_8);
+ }
+
+ public void setContents(String theContents) {
+ myContents = theContents.getBytes(StandardCharsets.UTF_8);
+ }
+
+
+ public BulkImportJobFileJson toJson() {
+ return new BulkImportJobFileJson()
+ .setContents(getContents())
+ .setTenantName(getTenantName());
+ }
+
+ public void setTenantName(String theTenantName) {
+ myTenantName = theTenantName;
+ }
+
+ public String getTenantName() {
+ return myTenantName;
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BaseBatchJobR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BaseBatchJobR4Test.java
new file mode 100644
index 00000000000..f3e6a6a130d
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BaseBatchJobR4Test.java
@@ -0,0 +1,58 @@
+package ca.uhn.fhir.jpa.bulk;
+
+import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.batch.core.BatchStatus;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobInstance;
+import org.springframework.batch.core.explore.JobExplorer;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+
+import static org.awaitility.Awaitility.await;
+import static org.junit.jupiter.api.Assertions.fail;
+
+public class BaseBatchJobR4Test extends BaseJpaR4Test {
+
+ private static final Logger ourLog = LoggerFactory.getLogger(BaseBatchJobR4Test.class);
+ @Autowired
+ private JobExplorer myJobExplorer;
+
+ protected List<JobExecution> awaitAllBulkJobCompletions(String... theJobNames) {
+ assert theJobNames.length > 0;
+
+ List<JobInstance> bulkExport = new ArrayList<>();
+ for (String nextName : theJobNames) {
+ bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(nextName, 0, 100));
+ }
+ if (bulkExport.isEmpty()) {
+ List<String> wantNames = Arrays.asList(theJobNames);
+ List<String> haveNames = myJobExplorer.getJobNames();
+ fail("There are no jobs running - Want names " + wantNames + " and have names " + haveNames);
+ }
+ List<JobExecution> bulkExportExecutions = bulkExport.stream().flatMap(jobInstance -> myJobExplorer.getJobExecutions(jobInstance).stream()).collect(Collectors.toList());
+ awaitJobCompletions(bulkExportExecutions);
+
+ return bulkExportExecutions;
+ }
+
+ protected void awaitJobCompletions(Collection<JobExecution> theJobs) {
+ theJobs.forEach(jobExecution -> awaitJobCompletion(jobExecution));
+ }
+
+ protected void awaitJobCompletion(JobExecution theJobExecution) {
+ await().atMost(120, TimeUnit.SECONDS).until(() -> {
+ JobExecution jobExecution = myJobExplorer.getJobExecution(theJobExecution.getId());
+ ourLog.info("JobExecution {} currently has status: {}- Failures if any: {}", theJobExecution.getId(), jobExecution.getStatus(), jobExecution.getFailureExceptions());
+ return jobExecution.getStatus() == BatchStatus.COMPLETED || jobExecution.getStatus() == BatchStatus.FAILED;
+ });
+ }
+
+}
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java
index ede41a213e8..2c216b9074d 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java
@@ -2,11 +2,11 @@ package ca.uhn.fhir.jpa.bulk;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
-import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
-import ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson;
-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
-import ca.uhn.fhir.jpa.bulk.provider.BulkDataExportProvider;
+import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.client.apache.ResourceEntity;
@@ -188,7 +188,7 @@ public class BulkDataExportProviderTest {
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
.setJobId(A_JOB_ID)
- .setStatus(BulkJobStatusEnum.BUILDING)
+ .setStatus(BulkExportJobStatusEnum.BUILDING)
.setStatusTime(InstantType.now().getValue());
when(myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo);
@@ -212,7 +212,7 @@ public class BulkDataExportProviderTest {
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
.setJobId(A_JOB_ID)
- .setStatus(BulkJobStatusEnum.ERROR)
+ .setStatus(BulkExportJobStatusEnum.ERROR)
.setStatusTime(InstantType.now().getValue())
.setStatusMessage("Some Error Message");
when(myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo);
@@ -239,7 +239,7 @@ public class BulkDataExportProviderTest {
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
.setJobId(A_JOB_ID)
- .setStatus(BulkJobStatusEnum.COMPLETE)
+ .setStatus(BulkExportJobStatusEnum.COMPLETE)
.setStatusTime(InstantType.now().getValue());
jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/111"));
jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/222"));
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java
index 58fd8a39f5b..fdc92d090e9 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java
@@ -6,15 +6,14 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
-import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
-import ca.uhn.fhir.jpa.bulk.job.BulkExportJobParametersBuilder;
-import ca.uhn.fhir.jpa.bulk.job.GroupBulkExportJobParametersBuilder;
-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobParametersBuilder;
+import ca.uhn.fhir.jpa.bulk.export.job.GroupBulkExportJobParametersBuilder;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
-import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
@@ -46,28 +45,22 @@ import org.hl7.fhir.r4.model.Reference;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.JobParametersInvalidException;
-import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import java.util.Arrays;
-import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
-import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import static org.awaitility.Awaitility.await;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
@@ -78,7 +71,7 @@ import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
-public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
+public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test {
public static final String TEST_FILTER = "Patient?gender=female";
private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportSvcImplR4Test.class);
@@ -92,8 +85,6 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
private IBulkDataExportSvc myBulkDataExportSvc;
@Autowired
private IBatchJobSubmitter myBatchJobSubmitter;
- @Autowired
- private JobExplorer myJobExplorer;
@Autowired
@Qualifier(BatchJobsConfig.BULK_EXPORT_JOB_NAME)
@@ -128,7 +119,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
String binaryId = myBinaryDao.create(b).getId().toUnqualifiedVersionless().getValue();
BulkExportJobEntity job = new BulkExportJobEntity();
- job.setStatus(BulkJobStatusEnum.COMPLETE);
+ job.setStatus(BulkExportJobStatusEnum.COMPLETE);
job.setExpiry(DateUtils.addHours(new Date(), -1));
job.setJobId(UUID.randomUUID().toString());
job.setCreated(new Date());
@@ -241,6 +232,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
return options;
}
+
@Test
public void testSubmit_ReusesExisting() {
@@ -278,7 +270,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Check the status
IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus());
+ assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus());
// Run a scheduled pass to build the export
myBulkDataExportSvc.buildExportFiles();
@@ -287,7 +279,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Fetch the job again
status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertEquals(BulkJobStatusEnum.ERROR, status.getStatus());
+ assertEquals(BulkExportJobStatusEnum.ERROR, status.getStatus());
assertThat(status.getStatusMessage(), containsString("help i'm a bug"));
} finally {
@@ -295,6 +287,14 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
}
}
+ private void awaitAllBulkJobCompletions() {
+ awaitAllBulkJobCompletions(
+ BatchJobsConfig.BULK_EXPORT_JOB_NAME,
+ BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME,
+ BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME
+ );
+ }
+
@Test
public void testGenerateBulkExport_SpecificResources() {
@@ -313,7 +313,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Check the status
IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus());
+ assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus());
assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient&_typeFilter=" + UrlUtil.escapeUrlParam(TEST_FILTER), status.getRequest());
// Run a scheduled pass to build the export
@@ -323,7 +323,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Fetch the job again
status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus());
+ assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus());
// Iterate over the files
for (IBulkDataExportSvc.FileEntry next : status.getFiles()) {
@@ -368,7 +368,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Check the status
IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus());
+ assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus());
assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson", status.getRequest());
// Run a scheduled pass to build the export
@@ -378,7 +378,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Fetch the job again
status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus());
+ assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus());
assertEquals(5, status.getFiles().size());
// Iterate over the files
@@ -393,7 +393,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
} else if ("Observation".equals(next.getResourceType())) {
assertThat(nextContents, containsString("\"subject\":{\"reference\":\"Patient/PAT0\"}}\n"));
assertEquals(26, nextContents.split("\n").length);
- }else if ("Immunization".equals(next.getResourceType())) {
+ } else if ("Immunization".equals(next.getResourceType())) {
assertThat(nextContents, containsString("\"patient\":{\"reference\":\"Patient/PAT0\"}}\n"));
assertEquals(26, nextContents.split("\n").length);
} else if ("CareTeam".equals(next.getResourceType())) {
@@ -428,7 +428,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE));
+ assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(5));
}
@@ -451,7 +451,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Check the status
IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus());
+ assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus());
assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Patient&_typeFilter=Patient%3F_has%3AObservation%3Apatient%3Aidentifier%3DSYS%7CVAL3", status.getRequest());
// Run a scheduled pass to build the export
@@ -461,7 +461,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Fetch the job again
status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus());
+ assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus());
assertEquals(1, status.getFiles().size());
// Iterate over the files
@@ -481,7 +481,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
}
@Test
- public void testGenerateBulkExport_WithSince() throws InterruptedException {
+ public void testGenerateBulkExport_WithSince() {
// Create some resources to load
createResources();
@@ -508,7 +508,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Check the status
IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus());
+ assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus());
assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient&_since=" + cutoff.setTimeZoneZulu(true).getValueAsString(), status.getRequest());
// Run a scheduled pass to build the export
@@ -518,7 +518,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Fetch the job again
status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus());
+ assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus());
assertEquals(1, status.getFiles().size());
// Iterate over the files
@@ -560,24 +560,10 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
String jobUUID = (String) jobExecution.getExecutionContext().get("jobUUID");
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobUUID);
- assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE));
+ assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(2));
}
- public void awaitAllBulkJobCompletions() {
- List<JobInstance> bulkExport = myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.BULK_EXPORT_JOB_NAME, 0, 100);
- bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME, 0, 100));
- bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME, 0, 100));
- if (bulkExport.isEmpty()) {
- fail("There are no bulk export jobs running!");
- }
- List<JobExecution> bulkExportExecutions = bulkExport.stream().flatMap(jobInstance -> myJobExplorer.getJobExecutions(jobInstance).stream()).collect(Collectors.toList());
- awaitJobCompletions(bulkExportExecutions);
- }
-
- public void awaitJobCompletions(Collection<JobExecution> theJobs) {
- theJobs.forEach(jobExecution -> awaitJobCompletion(jobExecution));
- }
@Test
public void testBatchJobSubmitsAndRuns() throws Exception {
@@ -599,13 +585,13 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
awaitJobCompletion(jobExecution);
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE));
+ assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(2));
}
@Test
- public void testGroupBatchJobWorks() throws Exception {
+ public void testGroupBatchJobWorks() {
createResources();
// Create a bulk job
@@ -625,7 +611,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE));
+ assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(1));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization")));
@@ -639,8 +625,9 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
assertThat(nextContents, is(containsString("IMM6")));
assertThat(nextContents, is(containsString("IMM8")));
}
+
@Test
- public void testGroupBatchJobMdmExpansionIdentifiesGoldenResources() throws Exception {
+ public void testGroupBatchJobMdmExpansionIdentifiesGoldenResources() {
createResources();
// Create a bulk job
@@ -659,7 +646,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE));
+ assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(2));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization")));
@@ -716,7 +703,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
awaitJobCompletion(jobExecution);
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE));
+ assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(2));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization")));
@@ -747,7 +734,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// CareTeam has two patient references: participant and patient. This test checks if we find the patient if participant is null but patient is not null
@Test
- public void testGroupBatchJobCareTeam() throws Exception {
+ public void testGroupBatchJobCareTeam() {
createResources();
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
@@ -766,7 +753,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE));
+ assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(1));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("CareTeam")));
@@ -810,7 +797,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE));
+ assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(1));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization")));
@@ -847,7 +834,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE));
+ assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(1));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Observation")));
String nextContents = getBinaryContents(jobInfo, 0);
@@ -888,7 +875,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
awaitAllBulkJobCompletions();
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE));
+ assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(1));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Patient")));
@@ -900,7 +887,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
}
@Test
- public void testMdmExpansionWorksForGroupExportOnMatchedPatients() throws JobParametersInvalidException {
+ public void testMdmExpansionWorksForGroupExportOnMatchedPatients() {
createResources();
// Create a bulk job
@@ -918,9 +905,9 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
awaitAllBulkJobCompletions();
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertEquals("/Group/G0/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Immunization&_groupId=" + myPatientGroupId +"&_mdm=true", jobInfo.getRequest());
+ assertEquals("/Group/G0/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Immunization&_groupId=" + myPatientGroupId + "&_mdm=true", jobInfo.getRequest());
- assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE));
+ assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(2));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization")));
@@ -963,7 +950,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
}
@Test
- public void testGroupBulkExportSupportsTypeFilters() throws JobParametersInvalidException {
+ public void testGroupBulkExportSupportsTypeFilters() {
createResources();
//Only get COVID-19 vaccinations
@@ -985,7 +972,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE));
+ assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(1));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization")));
@@ -1021,7 +1008,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
myBulkDataExportSvc.buildExportFiles();
awaitAllBulkJobCompletions();
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE)));
+ assertThat(jobInfo.getStatus(), is(equalTo(BulkExportJobStatusEnum.COMPLETE)));
//Group-style
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
@@ -1030,7 +1017,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
myBulkDataExportSvc.buildExportFiles();
awaitAllBulkJobCompletions();
jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE)));
+ assertThat(jobInfo.getStatus(), is(equalTo(BulkExportJobStatusEnum.COMPLETE)));
//System-style
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
@@ -1038,7 +1025,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
myBulkDataExportSvc.buildExportFiles();
awaitAllBulkJobCompletions();
jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
- assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE)));
+ assertThat(jobInfo.getStatus(), is(equalTo(BulkExportJobStatusEnum.COMPLETE)));
}
@Test
@@ -1077,14 +1064,6 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
}
- private void awaitJobCompletion(JobExecution theJobExecution) {
- await().atMost(120, TimeUnit.SECONDS).until(() -> {
- JobExecution jobExecution = myJobExplorer.getJobExecution(theJobExecution.getId());
- ourLog.info("JobExecution {} currently has status: {}", theJobExecution.getId(), jobExecution.getStatus());
- return jobExecution.getStatus() == BatchStatus.COMPLETED || jobExecution.getStatus() == BatchStatus.FAILED;
- });
- }
-
private void createResources() {
Group group = new Group();
group.setId("G0");
@@ -1109,7 +1088,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
linkToGoldenResource(goldenPid, sourcePid);
//Only add half the patients to the group.
- if (i % 2 == 0 ) {
+ if (i % 2 == 0) {
group.addMember().setEntity(new Reference(patId));
}
@@ -1119,7 +1098,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
createCareTeamWithIndex(i, patId);
}
- myPatientGroupId = myGroupDao.update(group).getId();
+ myPatientGroupId = myGroupDao.update(group).getId();
//Manually create another golden record
Patient goldenPatient2 = new Patient();
@@ -1153,8 +1132,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
patient.setGender(i % 2 == 0 ? Enumerations.AdministrativeGender.MALE : Enumerations.AdministrativeGender.FEMALE);
patient.addName().setFamily("FAM" + i);
patient.addIdentifier().setSystem("http://mrns").setValue("PAT" + i);
- DaoMethodOutcome patientOutcome = myPatientDao.update(patient);
- return patientOutcome;
+ return myPatientDao.update(patient);
}
private void createCareTeamWithIndex(int i, IIdType patId) {
@@ -1167,7 +1145,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
private void createImmunizationWithIndex(int i, IIdType patId) {
Immunization immunization = new Immunization();
immunization.setId("IMM" + i);
- if (patId != null ) {
+ if (patId != null) {
immunization.setPatient(new Reference(patId));
}
if (i % 2 == 0) {
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportR4Test.java
new file mode 100644
index 00000000000..dcee246154c
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportR4Test.java
@@ -0,0 +1,155 @@
+package ca.uhn.fhir.jpa.bulk.imprt.svc;
+
+import ca.uhn.fhir.interceptor.api.HookParams;
+import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
+import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
+import ca.uhn.fhir.jpa.bulk.BaseBatchJobR4Test;
+import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
+import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
+import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
+import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
+import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
+import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao;
+import ca.uhn.fhir.jpa.dao.data.IBulkImportJobFileDao;
+import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
+import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.test.utilities.ITestDataBuilder;
+import ca.uhn.fhir.util.BundleBuilder;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.ArgumentCaptor;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import javax.annotation.Nonnull;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+public class BulkDataImportR4Test extends BaseBatchJobR4Test implements ITestDataBuilder {
+
+ @Autowired
+ private IBulkDataImportSvc mySvc;
+ @Autowired
+ private IBulkImportJobDao myBulkImportJobDao;
+ @Autowired
+ private IBulkImportJobFileDao myBulkImportJobFileDao;
+
+ @AfterEach
+ public void after() {
+ myInterceptorRegistry.unregisterInterceptorsIf(t -> t instanceof IAnonymousInterceptor);
+ }
+
+ @Test
+ public void testFlow_TransactionRows() {
+ int transactionsPerFile = 10;
+ int fileCount = 10;
+ List files = createInputFiles(transactionsPerFile, fileCount);
+
+ BulkImportJobJson job = new BulkImportJobJson();
+ job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION);
+ job.setJobDescription("This is the description");
+ job.setBatchSize(3);
+ String jobId = mySvc.createNewJob(job, files);
+ mySvc.markJobAsReadyForActivation(jobId);
+
+ boolean activateJobOutcome = mySvc.activateNextReadyJob();
+ assertTrue(activateJobOutcome);
+
+ List executions = awaitAllBulkJobCompletions();
+ assertEquals(1, executions.size());
+ assertEquals("This is the description", executions.get(0).getJobParameters().getString(BulkExportJobConfig.JOB_DESCRIPTION));
+
+ runInTransaction(() -> {
+ List jobs = myBulkImportJobDao.findAll();
+ assertEquals(0, jobs.size());
+
+ List jobFiles = myBulkImportJobFileDao.findAll();
+ assertEquals(0, jobFiles.size());
+
+ });
+
+ IBundleProvider searchResults = myPatientDao.search(SearchParameterMap.newSynchronous());
+ assertEquals(transactionsPerFile * fileCount, searchResults.sizeOrThrowNpe());
+
+ }
+
+ @Test
+ public void testFlow_WithTenantNamesInInput() {
+ int transactionsPerFile = 5;
+ int fileCount = 10;
+ List files = createInputFiles(transactionsPerFile, fileCount);
+ for (int i = 0; i < fileCount; i++) {
+ files.get(i).setTenantName("TENANT" + i);
+ }
+
+ IAnonymousInterceptor interceptor = mock(IAnonymousInterceptor.class);
+ myInterceptorRegistry.registerAnonymousInterceptor(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, interceptor);
+
+ BulkImportJobJson job = new BulkImportJobJson();
+ job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION);
+ job.setBatchSize(5);
+ String jobId = mySvc.createNewJob(job, files);
+ mySvc.markJobAsReadyForActivation(jobId);
+
+ boolean activateJobOutcome = mySvc.activateNextReadyJob();
+ assertTrue(activateJobOutcome);
+
+ awaitAllBulkJobCompletions();
+
+ ArgumentCaptor paramsCaptor = ArgumentCaptor.forClass(HookParams.class);
+ verify(interceptor, times(50)).invoke(any(), paramsCaptor.capture());
+ List tenantNames = paramsCaptor
+ .getAllValues()
+ .stream()
+ .map(t -> t.get(RequestDetails.class).getTenantId())
+ .distinct()
+ .sorted()
+ .collect(Collectors.toList());
+ assertThat(tenantNames, containsInAnyOrder(
+ "TENANT0", "TENANT1", "TENANT2", "TENANT3", "TENANT4", "TENANT5", "TENANT6", "TENANT7", "TENANT8", "TENANT9"
+ ));
+ }
+
+
+ @Nonnull
+ private List createInputFiles(int transactionsPerFile, int fileCount) {
+ List files = new ArrayList<>();
+ for (int fileIndex = 0; fileIndex < fileCount; fileIndex++) {
+ StringBuilder fileContents = new StringBuilder();
+
+ for (int transactionIdx = 0; transactionIdx < transactionsPerFile; transactionIdx++) {
+ BundleBuilder bundleBuilder = new BundleBuilder(myFhirCtx);
+ IBaseResource patient = buildPatient(withFamily("FAM " + fileIndex + " " + transactionIdx));
+ bundleBuilder.addTransactionCreateEntry(patient);
+ fileContents.append(myFhirCtx.newJsonParser().setPrettyPrint(false).encodeResourceToString(bundleBuilder.getBundle()));
+ fileContents.append("\n");
+ }
+
+ BulkImportJobFileJson nextFile = new BulkImportJobFileJson();
+ nextFile.setContents(fileContents.toString());
+ files.add(nextFile);
+ }
+ return files;
+ }
+
+ protected List awaitAllBulkJobCompletions() {
+ return awaitAllBulkJobCompletions(BatchJobsConfig.BULK_IMPORT_JOB_NAME);
+ }
+
+}
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImplTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImplTest.java
new file mode 100644
index 00000000000..5bc80f28024
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImplTest.java
@@ -0,0 +1,145 @@
+package ca.uhn.fhir.jpa.bulk.imprt.svc;
+
+import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
+import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
+import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
+import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
+import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao;
+import ca.uhn.fhir.jpa.dao.data.IBulkImportJobFileDao;
+import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
+import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
+import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
+import com.google.common.collect.Lists;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.List;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.blankString;
+import static org.hamcrest.Matchers.not;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class BulkDataImportSvcImplTest extends BaseJpaR4Test {
+
+ @Autowired
+ private IBulkDataImportSvc mySvc;
+ @Autowired
+ private IBulkImportJobDao myBulkImportJobDao;
+ @Autowired
+ private IBulkImportJobFileDao myBulkImportJobFileDao;
+
+ @Test
+ public void testCreateNewJob() {
+
+ // Create job
+ BulkImportJobJson job = new BulkImportJobJson();
+ job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION);
+ job.setBatchSize(3);
+ BulkImportJobFileJson file1 = new BulkImportJobFileJson();
+ file1.setContents("contents 1");
+ BulkImportJobFileJson file2 = new BulkImportJobFileJson();
+ file2.setContents("contents 2");
+ String jobId = mySvc.createNewJob(job, Lists.newArrayList(file1, file2));
+ assertThat(jobId, not(blankString()));
+
+ // Add file
+ BulkImportJobFileJson file3 = new BulkImportJobFileJson();
+ file3.setContents("contents 3");
+ mySvc.addFilesToJob(jobId, Lists.newArrayList(file3));
+
+ runInTransaction(() -> {
+ List jobs = myBulkImportJobDao.findAll();
+ assertEquals(1, jobs.size());
+ assertEquals(jobId, jobs.get(0).getJobId());
+ assertEquals(3, jobs.get(0).getFileCount());
+ assertEquals(BulkImportJobStatusEnum.STAGING, jobs.get(0).getStatus());
+
+ List files = myBulkImportJobFileDao.findAllForJob(jobId);
+ assertEquals(3, files.size());
+
+ });
+ }
+
+ @Test
+ public void testCreateNewJob_InvalidJob_NoContents() {
+ BulkImportJobJson job = new BulkImportJobJson();
+ job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION);
+ job.setBatchSize(3);
+ BulkImportJobFileJson file1 = new BulkImportJobFileJson();
+ try {
+ mySvc.createNewJob(job, Lists.newArrayList(file1));
+ } catch (UnprocessableEntityException e) {
+ assertEquals("Job File Contents mode must not be null", e.getMessage());
+ }
+ }
+
+ @Test
+ public void testCreateNewJob_InvalidJob_NoProcessingMode() {
+ BulkImportJobJson job = new BulkImportJobJson();
+ job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION);
+ job.setBatchSize(3);
+ BulkImportJobFileJson file1 = new BulkImportJobFileJson();
+ file1.setContents("contents 1");
+ try {
+ mySvc.createNewJob(job, Lists.newArrayList(file1));
+ } catch (UnprocessableEntityException e) {
+ assertEquals("Job File Processing mode must not be null", e.getMessage());
+ }
+ }
+
+ @Test
+ public void testAddFilesToJob_InvalidId() {
+ BulkImportJobFileJson file3 = new BulkImportJobFileJson();
+ file3.setContents("contents 3");
+ try {
+ mySvc.addFilesToJob("ABCDEFG", Lists.newArrayList(file3));
+ } catch (InvalidRequestException e) {
+ assertEquals("Unknown job ID: ABCDEFG", e.getMessage());
+ }
+ }
+
+ @Test
+ public void testAddFilesToJob_WrongStatus() {
+ runInTransaction(() -> {
+ BulkImportJobEntity entity = new BulkImportJobEntity();
+ entity.setFileCount(1);
+ entity.setJobId("ABCDEFG");
+ entity.setStatus(BulkImportJobStatusEnum.RUNNING);
+ entity.setRowProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION);
+ myBulkImportJobDao.save(entity);
+ });
+
+ BulkImportJobFileJson file3 = new BulkImportJobFileJson();
+ file3.setContents("contents 3");
+ try {
+ mySvc.addFilesToJob("ABCDEFG", Lists.newArrayList(file3));
+ } catch (InvalidRequestException e) {
+ assertEquals("Job ABCDEFG has status RUNNING and can not be added to", e.getMessage());
+ }
+ }
+
+ @Test
+ public void testActivateJob() {
+ runInTransaction(() -> {
+ BulkImportJobEntity entity = new BulkImportJobEntity();
+ entity.setFileCount(1);
+ entity.setJobId("ABCDEFG");
+ entity.setStatus(BulkImportJobStatusEnum.STAGING);
+ entity.setRowProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION);
+ myBulkImportJobDao.save(entity);
+ });
+
+ mySvc.markJobAsReadyForActivation("ABCDEFG");
+
+ runInTransaction(() -> {
+ List jobs = myBulkImportJobDao.findAll();
+ assertEquals(1, jobs.size());
+ assertEquals(BulkImportJobStatusEnum.READY, jobs.get(0).getStatus());
+ });
+ }
+
+}
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java
index 8f22d6dda97..83c339af0bf 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java
@@ -9,7 +9,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.config.BaseConfig;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.entity.TermConcept;
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/TransactionProcessorTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/TransactionProcessorTest.java
index 63f61b740b1..301f5476e7a 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/TransactionProcessorTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/TransactionProcessorTest.java
@@ -85,7 +85,7 @@ public class TransactionProcessorTest {
.setUrl("/MedicationKnowledge");
try {
- myTransactionProcessor.transaction(null, input);
+ myTransactionProcessor.transaction(null, input, false);
fail();
} catch (InvalidRequestException e) {
assertEquals("Resource MedicationKnowledge is not supported on this server. Supported resource types: []", e.getMessage());
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java
index 483e0874f02..5992f9df117 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java
@@ -8,7 +8,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoSubscription;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.config.TestDstu2Config;
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirSystemDaoDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirSystemDaoDstu2Test.java
index 9cd366f3b1e..e4803f648d5 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirSystemDaoDstu2Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirSystemDaoDstu2Test.java
@@ -785,7 +785,7 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest {
// try {
Bundle resp = mySystemDao.transaction(mySrd, request);
assertEquals(1, resp.getEntry().size());
- assertEquals("404 Not Found", resp.getEntry().get(0).getResponse().getStatus());
+ assertEquals("204 No Content", resp.getEntry().get(0).getResponse().getStatus());
// fail();
// } catch (ResourceNotFoundException e) {
@@ -1159,11 +1159,7 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest {
}
assertEquals("201 Created", resp.getEntry().get(2).getResponse().getStatus());
assertThat(resp.getEntry().get(2).getResponse().getLocation(), startsWith("Patient/"));
- if (pass == 0) {
- assertEquals("404 Not Found", resp.getEntry().get(3).getResponse().getStatus());
- } else {
- assertEquals("204 No Content", resp.getEntry().get(3).getResponse().getStatus());
- }
+ assertEquals("204 No Content", resp.getEntry().get(3).getResponse().getStatus());
Bundle respGetBundle = (Bundle) resp.getEntry().get(0).getResource();
assertEquals(1, respGetBundle.getEntry().size());
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java
index 109a59cdad0..0953336ff66 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java
@@ -13,7 +13,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.config.TestDstu3Config;
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java
index bf257e1f5ed..9cb19833adf 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java
@@ -17,7 +17,7 @@ import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;
import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.config.TestR4Config;
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
@@ -51,24 +51,19 @@ import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDesignationDao;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao;
-import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
-import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation;
import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
-import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.packages.IPackageInstallerSvc;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
-import ca.uhn.fhir.jpa.provider.r4.BaseJpaResourceProviderObservationR4;
import ca.uhn.fhir.jpa.provider.r4.JpaSystemProviderR4;
-import ca.uhn.fhir.jpa.rp.r4.ObservationResourceProvider;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
@@ -77,7 +72,6 @@ import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl;
import ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl;
import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl;
import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
-import ca.uhn.fhir.jpa.term.ValueSetExpansionR4Test;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
@@ -95,11 +89,9 @@ import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory;
import ca.uhn.fhir.test.utilities.ITestDataBuilder;
import ca.uhn.fhir.util.ClasspathUtil;
-import ca.uhn.fhir.util.ResourceUtil;
import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.ValidationResult;
-import org.apache.commons.io.IOUtils;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.session.SearchSession;
import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport;
@@ -168,7 +160,6 @@ import org.hl7.fhir.r4.model.ValueSet;
import org.hl7.fhir.r5.utils.IResourceValidator;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
@@ -182,7 +173,6 @@ import org.springframework.transaction.PlatformTransactionManager;
import javax.persistence.EntityManager;
import java.io.IOException;
-import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java
index 7e71ceab8ad..79904f2d15f 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java
@@ -39,7 +39,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticSearch;
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java
index 4f0eb913e03..d01756a7265 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java
@@ -7,7 +7,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.config.TestR4WithLuceneDisabledConfig;
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest;
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyElasticsearchIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyElasticsearchIT.java
index c5ba2df8890..c4b556bc6ed 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyElasticsearchIT.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyElasticsearchIT.java
@@ -6,7 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticSearch;
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java
index 24951ce3060..a7d4f7a5278 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java
@@ -18,6 +18,7 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
@@ -60,10 +61,10 @@ import org.hl7.fhir.r4.model.Quantity;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.Resource;
import org.hl7.fhir.r4.model.ValueSet;
-import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
@@ -109,8 +110,8 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
public void after() {
myDaoConfig.setAllowInlineMatchUrlReferences(false);
myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete());
- myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
- }
+ myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
+ }
@BeforeEach
public void beforeDisableResultReuse() {
@@ -549,7 +550,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
myValueSetDao.create(vs, mySrd);
sleepUntilTimeChanges();
-
+
ResourceTable entity = new TransactionTemplate(myTxManager).execute(t -> myEntityManager.find(ResourceTable.class, id.getIdPartAsLong()));
assertEquals(Long.valueOf(1), entity.getIndexStatus());
@@ -568,9 +569,9 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
* so it indexes the newest resource one more time. It wouldn't be a big deal
* if this ever got fixed so that it ends up with 2 instead of 3.
*/
- runInTransaction(()->{
+ runInTransaction(() -> {
Optional reindexCount = myResourceReindexJobDao.getReindexCount(jobId);
- assertEquals(3, reindexCount.orElseThrow(()->new NullPointerException("No job " + jobId)).intValue());
+ assertEquals(3, reindexCount.orElseThrow(() -> new NullPointerException("No job " + jobId)).intValue());
});
// Try making the resource unparseable
@@ -626,7 +627,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
searchParamMap.add(Patient.SP_FAMILY, new StringParam("family2"));
assertEquals(1, myPatientDao.search(searchParamMap).size().intValue());
- runInTransaction(()->{
+ runInTransaction(() -> {
ResourceHistoryTable historyEntry = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 3);
assertNotNull(historyEntry);
myResourceHistoryTableDao.delete(historyEntry);
@@ -656,7 +657,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
searchParamMap.add(Patient.SP_FAMILY, new StringParam("family1"));
assertEquals(1, myPatientDao.search(searchParamMap).size().intValue());
- runInTransaction(()->{
+ runInTransaction(() -> {
myEntityManager
.createQuery("UPDATE ResourceIndexedSearchParamString s SET s.myHashNormalizedPrefix = 0")
.executeUpdate();
@@ -671,7 +672,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
myResourceReindexingSvc.markAllResourcesForReindexing();
myResourceReindexingSvc.forceReindexingPass();
- runInTransaction(()->{
+ runInTransaction(() -> {
ResourceIndexedSearchParamString param = myResourceIndexedSearchParamStringDao.findAll()
.stream()
.filter(t -> t.getParamName().equals("family"))
@@ -694,7 +695,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
searchParamMap.add(Patient.SP_FAMILY, new StringParam("family1"));
assertEquals(1, myPatientDao.search(searchParamMap).size().intValue());
- runInTransaction(()->{
+ runInTransaction(() -> {
Long i = myEntityManager
.createQuery("SELECT count(s) FROM ResourceIndexedSearchParamString s WHERE s.myHashIdentity = 0", Long.class)
.getSingleResult();
@@ -714,7 +715,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
myResourceReindexingSvc.markAllResourcesForReindexing();
myResourceReindexingSvc.forceReindexingPass();
- runInTransaction(()->{
+ runInTransaction(() -> {
Long i = myEntityManager
.createQuery("SELECT count(s) FROM ResourceIndexedSearchParamString s WHERE s.myHashIdentity = 0", Long.class)
.getSingleResult();
@@ -808,6 +809,30 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
assertEquals("201 Created", resp.getEntry().get(0).getResponse().getStatus());
}
+
+ @Test
+ public void testNestedTransaction_ReadsBlocked() {
+ String methodName = "testTransactionBatchWithFailingRead";
+ Bundle request = new Bundle();
+ request.setType(BundleType.TRANSACTION);
+
+ Patient p = new Patient();
+ p.addName().setFamily(methodName);
+ request.addEntry().setResource(p).getRequest().setMethod(HTTPVerb.POST);
+
+ request.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl("Patient?identifier=foo");
+
+ try {
+ runInTransaction(()->{
+ mySystemDao.transactionNested(mySrd, request);
+ });
+ fail();
+ } catch (InvalidRequestException e) {
+ assertEquals("Can not invoke read operation on nested transaction", e.getMessage());
+ }
+ }
+
+
@Test
public void testTransactionBatchWithFailingRead() {
String methodName = "testTransactionBatchWithFailingRead";
@@ -923,8 +948,8 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
Bundle outcome = mySystemDao.transaction(mySrd, request);
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertEquals("400 Bad Request", outcome.getEntry().get(0).getResponse().getStatus());
- assertEquals(IssueSeverity.ERROR, ((OperationOutcome)outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getSeverity());
- assertEquals("Missing required resource in Bundle.entry[0].resource for operation POST", ((OperationOutcome)outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics());
+ assertEquals(IssueSeverity.ERROR, ((OperationOutcome) outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getSeverity());
+ assertEquals("Missing required resource in Bundle.entry[0].resource for operation POST", ((OperationOutcome) outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics());
validate(outcome);
}
@@ -942,8 +967,8 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
Bundle outcome = mySystemDao.transaction(mySrd, request);
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertEquals("400 Bad Request", outcome.getEntry().get(0).getResponse().getStatus());
- assertEquals(IssueSeverity.ERROR, ((OperationOutcome)outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getSeverity());
- assertEquals("Missing required resource in Bundle.entry[0].resource for operation PUT", ((OperationOutcome)outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics());
+ assertEquals(IssueSeverity.ERROR, ((OperationOutcome) outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getSeverity());
+ assertEquals("Missing required resource in Bundle.entry[0].resource for operation PUT", ((OperationOutcome) outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics());
validate(outcome);
}
@@ -2272,7 +2297,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
patient2.addIdentifier().setSystem("urn:system").setValue("testPersistWithSimpleLinkP02");
request.addEntry().setResource(patient2).getRequest().setMethod(HTTPVerb.POST);
- assertThrows(InvalidRequestException.class, ()->{
+ assertThrows(InvalidRequestException.class, () -> {
mySystemDao.transaction(mySrd, request);
});
}
@@ -3198,9 +3223,9 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
assertEquals("1", id2.getVersionIdPart());
assertEquals(id.getValue(), id2.getValue());
-
+
}
-
+
@Test
public void testTransactionWithIfMatch() {
Patient p = new Patient();
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java
index 8d40288bc2b..b9192bf7b0f 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java
@@ -16,7 +16,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;
import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.config.TestR5Config;
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java
index 7680e88baec..66ac0d415cb 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java
@@ -6,7 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticSearch;
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
diff --git a/hapi-fhir-jpaserver-batch/pom.xml b/hapi-fhir-jpaserver-batch/pom.xml
index b4436c319ec..a08e196fe26 100644
--- a/hapi-fhir-jpaserver-batch/pom.xml
+++ b/hapi-fhir-jpaserver-batch/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/BatchConstants.java b/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/BatchConstants.java
new file mode 100644
index 00000000000..4224e215332
--- /dev/null
+++ b/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/BatchConstants.java
@@ -0,0 +1,32 @@
+package ca.uhn.fhir.jpa.batch;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server - Batch Task Processor
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+public class BatchConstants {
+
+ /**
+ * Non instantiable
+ */
+ private BatchConstants() {}
+
+ public static final String JOB_LAUNCHING_TASK_EXECUTOR = "jobLaunchingTaskExecutor";
+
+}
diff --git a/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/config/NonPersistedBatchConfigurer.java b/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/config/NonPersistedBatchConfigurer.java
index 936eb9d12ab..27eb2518893 100644
--- a/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/config/NonPersistedBatchConfigurer.java
+++ b/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/config/NonPersistedBatchConfigurer.java
@@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.batch.config;
* #L%
*/
+import ca.uhn.fhir.jpa.batch.BatchConstants;
import org.springframework.batch.core.configuration.annotation.DefaultBatchConfigurer;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.batch.core.explore.support.MapJobExplorerFactoryBean;
@@ -39,7 +40,7 @@ public class NonPersistedBatchConfigurer extends DefaultBatchConfigurer {
private PlatformTransactionManager myHapiPlatformTransactionManager;
@Autowired
- @Qualifier("jobLaunchingTaskExecutor")
+ @Qualifier(BatchConstants.JOB_LAUNCHING_TASK_EXECUTOR)
private TaskExecutor myTaskExecutor;
private MapJobRepositoryFactoryBean myJobRepositoryFactory;
diff --git a/hapi-fhir-jpaserver-cql/pom.xml b/hapi-fhir-jpaserver-cql/pom.xml
index 440cd4e30e4..20069c8fa7e 100644
--- a/hapi-fhir-jpaserver-cql/pom.xml
+++ b/hapi-fhir-jpaserver-cql/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
@@ -144,13 +144,13 @@
ca.uhn.hapi.fhir
hapi-fhir-test-utilities
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
test
ca.uhn.hapi.fhir
hapi-fhir-jpaserver-test-utilities
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
test
diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml
index 236fc33d3ab..3295a49c94a 100644
--- a/hapi-fhir-jpaserver-mdm/pom.xml
+++ b/hapi-fhir-jpaserver-mdm/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
@@ -55,13 +55,13 @@
ca.uhn.hapi.fhir
hapi-fhir-test-utilities
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
test
ca.uhn.hapi.fhir
hapi-fhir-jpaserver-test-utilities
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
test
diff --git a/hapi-fhir-jpaserver-migrate/pom.xml b/hapi-fhir-jpaserver-migrate/pom.xml
index ab2347ac99c..68316dcf0db 100644
--- a/hapi-fhir-jpaserver-migrate/pom.xml
+++ b/hapi-fhir-jpaserver-migrate/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
index 4e14b8594d7..e3975350931 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.migrate.tasks;
*/
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.ArbitrarySqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.CalculateHashesTask;
@@ -91,6 +92,32 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
version.onTable("TRM_VALUESET_CONCEPT").addColumn("20210406.1", "INDEX_STATUS").nullable().type(ColumnTypeEnum.LONG);
version.onTable("TRM_VALUESET_CONCEPT").addColumn("20210406.2", "SOURCE_DIRECT_PARENT_PIDS").nullable().type(ColumnTypeEnum.CLOB);
version.onTable("TRM_VALUESET_CONCEPT").addColumn("20210406.3", "SOURCE_PID").nullable().type(ColumnTypeEnum.LONG);
+
+ // Bulk Import Job
+ Builder.BuilderAddTableByColumns blkImportJobTable = version.addTableByColumns("20210410.1", "HFJ_BLK_IMPORT_JOB", "PID");
+ blkImportJobTable.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG);
+ blkImportJobTable.addColumn("JOB_ID").nonNullable().type(ColumnTypeEnum.STRING, Search.UUID_COLUMN_LENGTH);
+ blkImportJobTable.addColumn("JOB_STATUS").nonNullable().type(ColumnTypeEnum.STRING, 10);
+ blkImportJobTable.addColumn("STATUS_TIME").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP);
+ blkImportJobTable.addColumn("STATUS_MESSAGE").nullable().type(ColumnTypeEnum.STRING, 500);
+ blkImportJobTable.addColumn("OPTLOCK").nonNullable().type(ColumnTypeEnum.INT);
+ blkImportJobTable.addColumn("FILE_COUNT").nonNullable().type(ColumnTypeEnum.INT);
+ blkImportJobTable.addColumn("ROW_PROCESSING_MODE").nonNullable().type(ColumnTypeEnum.STRING, 20);
+ blkImportJobTable.addColumn("BATCH_SIZE").nonNullable().type(ColumnTypeEnum.INT);
+ blkImportJobTable.addIndex("20210410.2", "IDX_BLKIM_JOB_ID").unique(true).withColumns("JOB_ID");
+ version.addIdGenerator("20210410.3", "SEQ_BLKIMJOB_PID");
+
+ // Bulk Import Job File
+ Builder.BuilderAddTableByColumns blkImportJobFileTable = version.addTableByColumns("20210410.4", "HFJ_BLK_IMPORT_JOBFILE", "PID");
+ blkImportJobFileTable.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG);
+ blkImportJobFileTable.addColumn("JOB_PID").nonNullable().type(ColumnTypeEnum.LONG);
+ blkImportJobFileTable.addColumn("JOB_CONTENTS").nonNullable().type(ColumnTypeEnum.BLOB);
+ blkImportJobFileTable.addColumn("FILE_SEQ").nonNullable().type(ColumnTypeEnum.INT);
+ blkImportJobFileTable.addColumn("TENANT_NAME").nullable().type(ColumnTypeEnum.STRING, 200);
+ blkImportJobFileTable.addIndex("20210410.5", "IDX_BLKIM_JOBFILE_JOBID").unique(false).withColumns("JOB_PID");
+ blkImportJobFileTable.addForeignKey("20210410.6", "FK_BLKIMJOBFILE_JOB").toColumn("JOB_PID").references("HFJ_BLK_IMPORT_JOB", "PID");
+ version.addIdGenerator("20210410.7", "SEQ_BLKIMJOBFILE_PID");
+
}
private void init530() {
diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml
index 8c915ff27c6..b46518bed86 100644
--- a/hapi-fhir-jpaserver-model/pom.xml
+++ b/hapi-fhir-jpaserver-model/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml
index 0cccf93a45b..5dc3b7d17f4 100755
--- a/hapi-fhir-jpaserver-searchparam/pom.xml
+++ b/hapi-fhir-jpaserver-searchparam/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml
index 633843691b3..fc862aea074 100644
--- a/hapi-fhir-jpaserver-subscription/pom.xml
+++ b/hapi-fhir-jpaserver-subscription/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml
index e00e23537da..b66f8e9615e 100644
--- a/hapi-fhir-jpaserver-test-utilities/pom.xml
+++ b/hapi-fhir-jpaserver-test-utilities/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
index ffbe7a1164c..183e9a6b602 100644
--- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
+++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../pom.xml
@@ -164,7 +164,7 @@
ca.uhn.hapi.fhir
hapi-fhir-converter
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java
index e2c52f3b321..a7e2c06d36a 100644
--- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java
+++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java
@@ -6,7 +6,7 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
-import ca.uhn.fhir.jpa.bulk.provider.BulkDataExportProvider;
+import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.provider.DiffProvider;
import ca.uhn.fhir.jpa.provider.GraphQLProvider;
diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml
index 6f7664a8f48..adc4be9c0ab 100644
--- a/hapi-fhir-server-mdm/pom.xml
+++ b/hapi-fhir-server-mdm/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml
index 2524d2960e8..1e71fc77421 100644
--- a/hapi-fhir-server/pom.xml
+++ b/hapi-fhir-server/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
index 58483804fd3..d0905db3a66 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
index bb0460f5f2c..919e895664c 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot-samples
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
hapi-fhir-spring-boot-sample-client-apache
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
index ec97e7f6b0f..622de27723a 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot-samples
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
hapi-fhir-spring-boot-sample-client-okhttp
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
index b20cdc12fac..ad0f6ed0cfb 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot-samples
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
hapi-fhir-spring-boot-sample-server-jersey
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
index 1643433e6ef..02e9d448d79 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
hapi-fhir-spring-boot-samples
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
index 04d4fe39150..c210826be2c 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml
index b3490ba702b..38c1789454d 100644
--- a/hapi-fhir-spring-boot/pom.xml
+++ b/hapi-fhir-spring-boot/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml
index d80fff6935b..c8054db0e2b 100644
--- a/hapi-fhir-structures-dstu2.1/pom.xml
+++ b/hapi-fhir-structures-dstu2.1/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml
index 412d69d642c..6659bcd46c2 100644
--- a/hapi-fhir-structures-dstu2/pom.xml
+++ b/hapi-fhir-structures-dstu2/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGeneratorDstu2Test.java b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGeneratorDstu2Test.java
index d1acfeb7369..0bb1e0e3f31 100644
--- a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGeneratorDstu2Test.java
+++ b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGeneratorDstu2Test.java
@@ -1,7 +1,5 @@
package ca.uhn.fhir.narrative;
-import ca.uhn.fhir.util.TestUtil;
-import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -9,60 +7,54 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
public class BaseThymeleafNarrativeGeneratorDstu2Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseThymeleafNarrativeGeneratorDstu2Test.class);
- @AfterAll
- public static void afterClassClearContext() {
- TestUtil.clearAllStaticFieldsForUnitTest();
- }
-
-
@Test
public void testTrimWhitespace() {
//@formatter:off
- String input = "\n" +
- " \n" +
- "
\n" +
- " \n" +
- " \n" +
- " Identifier | \n" +
- " 123456 | \n" +
- "
\n" +
- " \n" +
- " Address | \n" +
- " \n" +
- " \n" +
- " 123 Fake Street \n" +
- " \n" +
- " \n" +
- " Unit 1 \n" +
- " \n" +
- " Toronto\n" +
- " ON\n" +
- " Canada\n" +
- " | \n" +
- "
\n" +
- " \n" +
- " Date of birth | \n" +
- " \n" +
- " 31 March 2014\n" +
- " | \n" +
- "
\n" +
- " \n" +
- "
\n" +
- "
";
+ String input = "\n" +
+ " \n" +
+ "
\n" +
+ " \n" +
+ " \n" +
+ " Identifier | \n" +
+ " 123456 | \n" +
+ "
\n" +
+ " \n" +
+ " Address | \n" +
+ " \n" +
+ " \n" +
+ " 123 Fake Street \n" +
+ " \n" +
+ " \n" +
+ " Unit 1 \n" +
+ " \n" +
+ " Toronto\n" +
+ " ON\n" +
+ " Canada\n" +
+ " | \n" +
+ "
\n" +
+ " \n" +
+ " Date of birth | \n" +
+ " \n" +
+ " 31 March 2014\n" +
+ " | \n" +
+ "
\n" +
+ " \n" +
+ "
\n" +
+ "
";
//@formatter:on
String actual = BaseThymeleafNarrativeGenerator.cleanWhitespace(input);
String expected = "Identifier | 123456 |
Address | 123 Fake Street Unit 1 TorontoONCanada |
Date of birth | 31 March 2014 |
";
-
+
ourLog.info(actual);
-
+
assertEquals(expected, actual);
}
diff --git a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorDstu2Test.java b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorDstu2Test.java
index c6709a2128b..a3742c8ea60 100644
--- a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorDstu2Test.java
+++ b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorDstu2Test.java
@@ -1,9 +1,9 @@
package ca.uhn.fhir.narrative;
import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.model.dstu2.resource.Practitioner;
-import ca.uhn.fhir.util.TestUtil;
-import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import static org.hamcrest.MatcherAssert.assertThat;
@@ -13,20 +13,19 @@ public class CustomThymeleafNarrativeGeneratorDstu2Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(CustomThymeleafNarrativeGeneratorDstu2Test.class);
- private static FhirContext ourCtx = FhirContext.forDstu2();
+ private FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.DSTU2);
- @AfterAll
- public static void afterClassClearContext() {
- TestUtil.clearAllStaticFieldsForUnitTest();
+ @AfterEach
+ public void after() {
+ myCtx.setNarrativeGenerator(null);
}
-
@Test
public void testGenerator() {
// CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("file:src/test/resources/narrative/customnarrative.properties");
CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("classpath:narrative/customnarrative_dstu2.properties");
- ourCtx.setNarrativeGenerator(gen);
+ myCtx.setNarrativeGenerator(gen);
Practitioner p = new Practitioner();
p.addIdentifier().setSystem("sys").setValue("val1");
@@ -34,7 +33,7 @@ public class CustomThymeleafNarrativeGeneratorDstu2Test {
p.addAddress().addLine("line1").addLine("line2");
p.getName().addFamily("fam1").addGiven("given");
- gen.populateResourceNarrative(ourCtx, p);
+ gen.populateResourceNarrative(myCtx, p);
String actual = p.getText().getDiv().getValueAsString();
ourLog.info(actual);
diff --git a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu2Test.java b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu2Test.java
index 4b5015d9f5b..58beb7f4a0b 100644
--- a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu2Test.java
+++ b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu2Test.java
@@ -1,6 +1,7 @@
package ca.uhn.fhir.narrative;
import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.model.dstu2.composite.CodeableConceptDt;
import ca.uhn.fhir.model.dstu2.composite.QuantityDt;
@@ -22,6 +23,7 @@ import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.util.TestUtil;
import org.hamcrest.core.StringContains;
import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
@@ -35,7 +37,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
public class DefaultThymeleafNarrativeGeneratorDstu2Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(DefaultThymeleafNarrativeGeneratorDstu2Test.class);
- private static FhirContext ourCtx = FhirContext.forDstu2();
+ private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.DSTU2);
private DefaultThymeleafNarrativeGenerator myGen;
@BeforeEach
@@ -43,9 +45,15 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test {
myGen = new DefaultThymeleafNarrativeGenerator();
myGen.setUseHapiServerConformanceNarrative(true);
- ourCtx.setNarrativeGenerator(myGen);
+ myCtx.setNarrativeGenerator(myGen);
}
+ @AfterEach
+ public void after() {
+ myCtx.setNarrativeGenerator(null);
+ }
+
+
@Test
public void testGeneratePatient() throws DataFormatException {
Patient value = new Patient();
@@ -57,7 +65,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test {
value.setBirthDate(new Date(), TemporalPrecisionEnum.DAY);
- myGen.populateResourceNarrative(ourCtx, value);
+ myGen.populateResourceNarrative(myCtx, value);
String output = value.getText().getDiv().getValueAsString();
ourLog.info(output);
assertThat(output, StringContains.containsString(""));
@@ -69,7 +77,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test {
Parameters value = new Parameters();
value.setId("123");
- myGen.populateResourceNarrative(ourCtx, value);
+ myGen.populateResourceNarrative(myCtx, value);
String output = value.getText().getDiv().getValueAsString();
ourLog.info(output);
assertThat(output, not(containsString("narrative")));
@@ -89,9 +97,9 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test {
" \n" +
"";
- OperationOutcome oo = ourCtx.newXmlParser().parseResource(OperationOutcome.class, parse);
+ OperationOutcome oo = myCtx.newXmlParser().parseResource(OperationOutcome.class, parse);
- myGen.populateResourceNarrative(ourCtx, oo);
+ myGen.populateResourceNarrative(myCtx, oo);
String output = oo.getText().getDiv().getValueAsString();
ourLog.info(output);
@@ -129,7 +137,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test {
value.addResult().setResource(obs);
}
- myGen.populateResourceNarrative(ourCtx, value);
+ myGen.populateResourceNarrative(myCtx, value);
String output = value.getText().getDiv().getValueAsString();
ourLog.info(output);
@@ -137,7 +145,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test {
// Now try it with the parser
- output = ourCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(value);
+ output = myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(value);
ourLog.info(output);
assertThat(output, StringContains.containsString(""));
}
@@ -154,7 +162,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test {
mp.setStatus(MedicationOrderStatusEnum.ACTIVE);
mp.setDateWritten(new DateTimeDt("2014-09-01"));
- myGen.populateResourceNarrative(ourCtx, mp);
+ myGen.populateResourceNarrative(myCtx, mp);
String output = mp.getText().getDiv().getValueAsString();
assertTrue(output.contains("ciprofloaxin"), "Expected medication name of ciprofloaxin within narrative: " + output);
@@ -167,7 +175,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test {
Medication med = new Medication();
med.getCode().setText("ciproflaxin");
- myGen.populateResourceNarrative(ourCtx, med);
+ myGen.populateResourceNarrative(myCtx, med);
String output = med.getText().getDiv().getValueAsString();
assertThat(output, containsString("ciproflaxin"));
diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml
index 24f6bfd53d9..04fc5a8cbc3 100644
--- a/hapi-fhir-structures-dstu3/pom.xml
+++ b/hapi-fhir-structures-dstu3/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu3Test.java b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu3Test.java
index 9d99461d525..c5dceef79ac 100644
--- a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu3Test.java
+++ b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu3Test.java
@@ -1,6 +1,7 @@
package ca.uhn.fhir.narrative;
import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.util.TestUtil;
import org.apache.commons.collections.Transformer;
@@ -11,6 +12,7 @@ import org.hl7.fhir.dstu3.model.DiagnosticReport.DiagnosticReportStatus;
import org.hl7.fhir.dstu3.model.MedicationRequest.MedicationRequestStatus;
import org.hl7.fhir.dstu3.model.Observation.ObservationStatus;
import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
@@ -28,7 +30,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
public class DefaultThymeleafNarrativeGeneratorDstu3Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(DefaultThymeleafNarrativeGeneratorDstu3Test.class);
- private static FhirContext ourCtx = FhirContext.forDstu3();
+ private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.DSTU3);
private DefaultThymeleafNarrativeGenerator myGen;
@BeforeEach
@@ -36,9 +38,15 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test {
myGen = new DefaultThymeleafNarrativeGenerator();
myGen.setUseHapiServerConformanceNarrative(true);
- ourCtx.setNarrativeGenerator(myGen);
+ myCtx.setNarrativeGenerator(myGen);
}
+ @AfterEach
+ public void after() {
+ myCtx.setNarrativeGenerator(null);
+ }
+
+
@Test
public void testGeneratePatient() throws DataFormatException {
Patient value = new Patient();
@@ -51,7 +59,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test {
value.setBirthDate(new Date());
- myGen.populateResourceNarrative(ourCtx, value);
+ myGen.populateResourceNarrative(myCtx, value);
String output = value.getText().getDiv().getValueAsString();
ourLog.info(output);
assertThat(output, StringContains.containsString(""));
@@ -95,7 +103,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test {
}
});
- customGen.populateResourceNarrative(ourCtx, value);
+ customGen.populateResourceNarrative(myCtx, value);
String output = value.getText().getDiv().getValueAsString();
ourLog.info(output);
assertThat(output, StringContains.containsString("Some beautiful proze"));
@@ -111,7 +119,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test {
value.addResult().setReference("Observation/2");
value.addResult().setReference("Observation/3");
- myGen.populateResourceNarrative(ourCtx, value);
+ myGen.populateResourceNarrative(myCtx, value);
String output = value.getText().getDiv().getValueAsString();
ourLog.info(output);
@@ -133,13 +141,13 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test {
"";
//@formatter:on
- OperationOutcome oo = ourCtx.newXmlParser().parseResource(OperationOutcome.class, parse);
+ OperationOutcome oo = myCtx.newXmlParser().parseResource(OperationOutcome.class, parse);
// String output = gen.generateTitle(oo);
// ourLog.info(output);
// assertEquals("Operation Outcome (2 issues)", output);
- myGen.populateResourceNarrative(ourCtx, oo);
+ myGen.populateResourceNarrative(myCtx, oo);
String output = oo.getText().getDiv().getValueAsString();
ourLog.info(output);
@@ -177,7 +185,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test {
value.addResult().setResource(obs);
}
- myGen.populateResourceNarrative(ourCtx, value);
+ myGen.populateResourceNarrative(myCtx, value);
String output = value.getText().getDiv().getValueAsString();
ourLog.info(output);
@@ -240,8 +248,8 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test {
" }";
- DiagnosticReport value = ourCtx.newJsonParser().parseResource(DiagnosticReport.class, input);
- myGen.populateResourceNarrative(ourCtx, value);
+ DiagnosticReport value = myCtx.newJsonParser().parseResource(DiagnosticReport.class, input);
+ myGen.populateResourceNarrative(myCtx, value);
String output = value.getText().getDiv().getValueAsString();
ourLog.info(output);
@@ -261,7 +269,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test {
mp.setStatus(MedicationRequestStatus.ACTIVE);
mp.setAuthoredOnElement(new DateTimeType("2014-09-01"));
- myGen.populateResourceNarrative(ourCtx, mp);
+ myGen.populateResourceNarrative(myCtx, mp);
String output = mp.getText().getDiv().getValueAsString();
assertTrue(output.contains("ciprofloaxin"), "Expected medication name of ciprofloaxin within narrative: " + output);
@@ -274,7 +282,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test {
Medication med = new Medication();
med.getCode().setText("ciproflaxin");
- myGen.populateResourceNarrative(ourCtx, med);
+ myGen.populateResourceNarrative(myCtx, med);
String output = med.getText().getDiv().getValueAsString();
assertThat(output, containsString("ciproflaxin"));
diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml
index 035dabec631..8383bdda70a 100644
--- a/hapi-fhir-structures-hl7org-dstu2/pom.xml
+++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml
index 67dd6dfeb59..e0793555c76 100644
--- a/hapi-fhir-structures-r4/pom.xml
+++ b/hapi-fhir-structures-r4/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorR4Test.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorR4Test.java
index 6b355c7332e..957551d03c5 100644
--- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorR4Test.java
+++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorR4Test.java
@@ -1,11 +1,13 @@
package ca.uhn.fhir.narrative;
import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.util.TestUtil;
import org.hl7.fhir.r4.model.Practitioner;
import org.hl7.fhir.r4.model.Quantity;
import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import static org.hamcrest.MatcherAssert.assertThat;
@@ -14,86 +16,89 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
public class CustomThymeleafNarrativeGeneratorR4Test {
- private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(CustomThymeleafNarrativeGeneratorR4Test.class);
+ private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(CustomThymeleafNarrativeGeneratorR4Test.class);
- /** Don't use cached here since we modify the context */
- private FhirContext myCtx = FhirContext.forR4();
+ /**
+ * Don't use cached here since we modify the context
+ */
+ private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4);
- /**
- * Implement narrative for standard type
- */
- @Test
- public void testStandardType() {
+ @AfterEach
+ public void after() {
+ myCtx.setNarrativeGenerator(null);
+ }
- CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("classpath:narrative/standardtypes_r4.properties");
- myCtx.setNarrativeGenerator(gen);
+ /**
+ * Implement narrative for standard type
+ */
+ @Test
+ public void testStandardType() {
- Practitioner p = new Practitioner();
- p.addIdentifier().setSystem("sys").setValue("val1");
- p.addIdentifier().setSystem("sys").setValue("val2");
- p.addAddress().addLine("line1").addLine("line2");
- p.addName().setFamily("fam1").addGiven("given");
+ CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("classpath:narrative/standardtypes_r4.properties");
+ myCtx.setNarrativeGenerator(gen);
- gen.populateResourceNarrative(myCtx, p);
+ Practitioner p = new Practitioner();
+ p.addIdentifier().setSystem("sys").setValue("val1");
+ p.addIdentifier().setSystem("sys").setValue("val2");
+ p.addAddress().addLine("line1").addLine("line2");
+ p.addName().setFamily("fam1").addGiven("given");
- String actual = p.getText().getDiv().getValueAsString();
- ourLog.info(actual);
+ gen.populateResourceNarrative(myCtx, p);
- assertThat(actual, containsString("Name
given FAM1
Address
line1
line2
"));
+ String actual = p.getText().getDiv().getValueAsString();
+ ourLog.info(actual);
- }
+ assertThat(actual, containsString("Name
given FAM1
Address
line1
line2
"));
- @Test
- public void testCustomType() {
+ }
- CustomPatient patient = new CustomPatient();
- patient.setActive(true);
- FavouritePizzaExtension parentExtension = new FavouritePizzaExtension();
- parentExtension.setToppings(new StringType("Mushrooms, Onions"));
- parentExtension.setSize(new Quantity(null, 14, "http://unitsofmeasure", "[in_i]", "Inches"));
- patient.setFavouritePizza(parentExtension);
+ @Test
+ public void testCustomType() {
- String output = myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient);
- ourLog.info("Encoded: {}", output);
+ CustomPatient patient = new CustomPatient();
+ patient.setActive(true);
+ FavouritePizzaExtension parentExtension = new FavouritePizzaExtension();
+ parentExtension.setToppings(new StringType("Mushrooms, Onions"));
+ parentExtension.setSize(new Quantity(null, 14, "http://unitsofmeasure", "[in_i]", "Inches"));
+ patient.setFavouritePizza(parentExtension);
- String expectedEncoding = "{\n" +
- " \"resourceType\": \"Patient\",\n" +
- " \"meta\": {\n" +
- " \"profile\": [ \"http://custom_patient\" ]\n" +
- " },\n" +
- " \"extension\": [ {\n" +
- " \"url\": \"http://example.com/favourite_pizza\",\n" +
- " \"extension\": [ {\n" +
- " \"url\": \"toppings\",\n" +
- " \"valueString\": \"Mushrooms, Onions\"\n" +
- " }, {\n" +
- " \"url\": \"size\",\n" +
- " \"valueQuantity\": {\n" +
- " \"value\": 14,\n" +
- " \"unit\": \"Inches\",\n" +
- " \"system\": \"http://unitsofmeasure\",\n" +
- " \"code\": \"[in_i]\"\n" +
- " }\n" +
- " } ]\n" +
- " } ],\n" +
- " \"active\": true\n" +
- "}";
- assertEquals(expectedEncoding, output);
+ String output = myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient);
+ ourLog.info("Encoded: {}", output);
- CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("classpath:narrative/customtypes_r4.properties");
- myCtx.setNarrativeGenerator(gen);
- gen.populateResourceNarrative(myCtx, patient);
+ String expectedEncoding = "{\n" +
+ " \"resourceType\": \"Patient\",\n" +
+ " \"meta\": {\n" +
+ " \"profile\": [ \"http://custom_patient\" ]\n" +
+ " },\n" +
+ " \"extension\": [ {\n" +
+ " \"url\": \"http://example.com/favourite_pizza\",\n" +
+ " \"extension\": [ {\n" +
+ " \"url\": \"toppings\",\n" +
+ " \"valueString\": \"Mushrooms, Onions\"\n" +
+ " }, {\n" +
+ " \"url\": \"size\",\n" +
+ " \"valueQuantity\": {\n" +
+ " \"value\": 14,\n" +
+ " \"unit\": \"Inches\",\n" +
+ " \"system\": \"http://unitsofmeasure\",\n" +
+ " \"code\": \"[in_i]\"\n" +
+ " }\n" +
+ " } ]\n" +
+ " } ],\n" +
+ " \"active\": true\n" +
+ "}";
+ assertEquals(expectedEncoding, output);
- String actual = patient.getText().getDiv().getValueAsString();
- ourLog.info(actual);
+ CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("classpath:narrative/customtypes_r4.properties");
+ myCtx.setNarrativeGenerator(gen);
+ gen.populateResourceNarrative(myCtx, patient);
- String expected = "CustomPatient
Favourite Pizza
Toppings: Mushrooms, Onions Size: 14 ";
- assertEquals(expected, actual);
+ String actual = patient.getText().getDiv().getValueAsString();
+ ourLog.info(actual);
- }
+ String expected = "CustomPatient
Favourite Pizza
Toppings: Mushrooms, Onions Size: 14 ";
+ assertEquals(expected, actual);
+
+ }
- @AfterAll
- public static void afterClassClearContext() {
- TestUtil.clearAllStaticFieldsForUnitTest();
- }
}
diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorR4Test.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorR4Test.java
index 433b448a448..4847d922123 100644
--- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorR4Test.java
+++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorR4Test.java
@@ -10,6 +10,7 @@ import org.hl7.fhir.r4.model.DiagnosticReport.DiagnosticReportStatus;
import org.hl7.fhir.r4.model.MedicationRequest.MedicationRequestStatus;
import org.hl7.fhir.r4.model.Observation.ObservationStatus;
import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
@@ -22,7 +23,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
public class DefaultThymeleafNarrativeGeneratorR4Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(DefaultThymeleafNarrativeGeneratorR4Test.class);
- private FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4);
+ private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4);
private DefaultThymeleafNarrativeGenerator myGen;
@BeforeEach
@@ -33,6 +34,11 @@ public class DefaultThymeleafNarrativeGeneratorR4Test {
myCtx.setNarrativeGenerator(myGen);
}
+ @AfterEach
+ public void after() {
+ myCtx.setNarrativeGenerator(null);
+ }
+
@Test
public void testGeneratePatient() throws DataFormatException {
Patient value = new Patient();
diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml
index 8760ce684e3..f564ed044b8 100644
--- a/hapi-fhir-structures-r5/pom.xml
+++ b/hapi-fhir-structures-r5/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml
index 654c33c28fe..ac4521d10e5 100644
--- a/hapi-fhir-test-utilities/pom.xml
+++ b/hapi-fhir-test-utilities/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java
index ee503727c3e..61563e7ccb4 100644
--- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java
+++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java
@@ -154,7 +154,7 @@ public interface ITestDataBuilder {
}
}
- default IBaseResource buildResource(String theResourceType, Consumer[] theModifiers) {
+ default IBaseResource buildResource(String theResourceType, Consumer... theModifiers) {
IBaseResource resource = getFhirContext().getResourceDefinition(theResourceType).newInstance();
for (Consumer next : theModifiers) {
next.accept(resource);
diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml
index 1dfb066f20b..0a140bc05ab 100644
--- a/hapi-fhir-testpage-overlay/pom.xml
+++ b/hapi-fhir-testpage-overlay/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml
index 6aae3187f80..4db0f72591a 100644
--- a/hapi-fhir-validation-resources-dstu2.1/pom.xml
+++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml
index ecdbbd4a635..a3b7462547d 100644
--- a/hapi-fhir-validation-resources-dstu2/pom.xml
+++ b/hapi-fhir-validation-resources-dstu2/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml
index 560106316d0..33d67b4e94a 100644
--- a/hapi-fhir-validation-resources-dstu3/pom.xml
+++ b/hapi-fhir-validation-resources-dstu3/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml
index 68ae7002bdd..5f830ffce69 100644
--- a/hapi-fhir-validation-resources-r4/pom.xml
+++ b/hapi-fhir-validation-resources-r4/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml
index 9ac87f4b79d..dbc5e9359b8 100644
--- a/hapi-fhir-validation-resources-r5/pom.xml
+++ b/hapi-fhir-validation-resources-r5/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml
index b180c3de905..aafc58b15ba 100644
--- a/hapi-fhir-validation/pom.xml
+++ b/hapi-fhir-validation/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml
index 5e946ea683b..6daf487888f 100644
--- a/hapi-tinder-plugin/pom.xml
+++ b/hapi-tinder-plugin/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../pom.xml
@@ -58,37 +58,37 @@
ca.uhn.hapi.fhir
hapi-fhir-structures-dstu3
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
ca.uhn.hapi.fhir
hapi-fhir-structures-hl7org-dstu2
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
ca.uhn.hapi.fhir
hapi-fhir-structures-r4
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
ca.uhn.hapi.fhir
hapi-fhir-structures-r5
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
ca.uhn.hapi.fhir
hapi-fhir-validation-resources-dstu2
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
ca.uhn.hapi.fhir
hapi-fhir-validation-resources-dstu3
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
ca.uhn.hapi.fhir
hapi-fhir-validation-resources-r4
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
org.apache.velocity
diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml
index b4a180ec8ca..6b9aa6546e9 100644
--- a/hapi-tinder-test/pom.xml
+++ b/hapi-tinder-test/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../pom.xml
diff --git a/pom.xml b/pom.xml
index d53c158c6e0..723188cb297 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-fhir
pom
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
HAPI-FHIR
An open-source implementation of the FHIR specification in Java.
https://hapifhir.io
@@ -761,20 +761,21 @@
1.2.0
4.2.5
1.2
- 2.7.0
- 1.14
+ 3.0.1
+ 1.15
1.20
- 1.7
- 2.6
- 3.9
+ 1.9
+ 2.8.0
+ 3.12.0
1.2
1.5.0
10.14.2.0
2.5.1
+ 3.9.0
0.7.9
- 30.1-jre
- 2.8.5
+ 30.1.1-jre
+ 2.8.6
2.2.11_1
2.3.1
2.3.0.1
@@ -786,17 +787,17 @@
3.0.2
5.7.0
6.5.4
- 5.4.26.Final
- 6.0.0.Final
+ 5.4.30.Final
+ 6.0.2.Final
8.7.0
2.2
6.1.5.Final
4.4.13
4.5.13
- 2.12.1
- 2.11.3
- 3.1.0
+ 2.12.3
+ ${jackson_version}
+ 3.3.0
1.8
3.8.1
4.0.0.Beta3
@@ -807,15 +808,15 @@
1.2_5
1.7.30
2.11.1
- 5.3.3
+ 5.3.6
- 2.4.2
- 4.2.3.RELEASE
+ 2.4.7
+ 4.3.2
2.4.1
1.2.2.RELEASE
3.1.4
- 3.0.11.RELEASE
+ 3.0.12.RELEASE
4.4.1
@@ -999,7 +1000,7 @@
org.jetbrains
annotations
- 19.0.0
+ 20.1.0
commons-io
@@ -1150,7 +1151,7 @@
org.apache.commons
commons-dbcp2
- 2.7.0
+ 2.8.0
org.apache.commons
@@ -1312,7 +1313,7 @@
com.fasterxml.woodstox
woodstox-core
- 6.2.3
+ 6.2.5
org.ebaysf.web
@@ -1398,7 +1399,7 @@
org.fusesource.jansi
jansi
- 2.1.1
+ 2.3.2
org.glassfish
@@ -1553,12 +1554,12 @@
org.mockito
mockito-core
- 3.6.28
+ ${mockito_version}
org.mockito
mockito-junit-jupiter
- 3.3.3
+ ${mockito_version}
org.postgresql
@@ -1817,18 +1818,10 @@
true
-
- com.gemnasium
- gemnasium-maven-plugin
- 0.2.0
-
- github.com/hapifhir/hapi-fhir
-
-
org.basepom.maven
duplicate-finder-maven-plugin
- 1.4.0
+ 1.5.0
de.jpdigital
@@ -1889,12 +1882,12 @@
org.apache.maven.plugins
maven-javadoc-plugin
- 3.1.1
+ 3.2.0
org.apache.maven.plugins
maven-jar-plugin
- 3.1.2
+ 3.2.0
org.apache.maven.plugins
@@ -1909,7 +1902,7 @@
org.apache.maven.plugins
maven-plugin-plugin
- 3.5
+ 3.6.0
org.apache.maven.plugins
@@ -1919,14 +1912,7 @@
org.apache.maven.plugins
maven-source-plugin
- 3.1.0
-
-
- org.codehaus.plexus
- plexus-utils
- 3.1.0
-
-
+ 3.2.1
org.apache.maven.plugins
@@ -1948,7 +1934,7 @@
org.codehaus.mojo
build-helper-maven-plugin
- 3.0.0
+ 3.2.0
org.codehaus.mojo
@@ -1981,7 +1967,7 @@
org.codehaus.mojo
versions-maven-plugin
- 2.7
+ 2.8.1
false
@@ -2110,7 +2096,7 @@
org.apache.maven.plugins
maven-checkstyle-plugin
- 3.1.0
+ 3.1.2
com.puppycrawl.tools
@@ -2143,7 +2129,7 @@
- 3.3.9
+ 3.5.4
11
diff --git a/restful-server-example/pom.xml b/restful-server-example/pom.xml
index 37c4a874033..7640191ae97 100644
--- a/restful-server-example/pom.xml
+++ b/restful-server-example/pom.xml
@@ -8,7 +8,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../pom.xml
diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml
index 4e51e91c572..2ef7b674d4b 100644
--- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml
+++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../../pom.xml
diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml
index 20a96ef3c9e..85242f389d3 100644
--- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml
+++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../../pom.xml
diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml
index 897bcbb0b07..9716634f826 100644
--- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml
+++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 5.4.0-PRE5-SNAPSHOT
+ 5.4.0-PRE6-SNAPSHOT
../../pom.xml