diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java
index d9242050278..49e83fcc500 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java
@@ -58,7 +58,7 @@ public class BulkItemReader implements ItemReader<List<ResourcePersistentId>> {
 	private void loadResourcePids() {
 		Optional<BulkExportJobEntity> jobOpt = myBulkExportJobDao.findByJobId(myJobUUID);
 		if (!jobOpt.isPresent()) {
-			ourLog.info("Job appears to be deleted");
+			ourLog.warn("Job appears to be deleted");
 			return;
 		}
 		myJobEntity = jobOpt.get();
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceToFileWriter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceToFileWriter.java
index a34738ba227..e3d56176c52 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceToFileWriter.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceToFileWriter.java
@@ -27,7 +27,7 @@ public class ResourceToFileWriter implements ItemWriter<List<IBaseResource>> {
 	private static final Logger ourLog = Logs.getBatchTroubleshootingLog();

 	@Autowired
-	private FhirContext myContext;
+	private FhirContext myFhirContext;

 	@Autowired
 	private DaoRegistry myDaoRegistry;
@@ -52,7 +52,7 @@ public class ResourceToFileWriter implements ItemWriter<List<IBaseResource>> {

 	@PostConstruct
 	public void start() {
-		myParser = myContext.newJsonParser().setPrettyPrint(false);
+		myParser = myFhirContext.newJsonParser().setPrettyPrint(false);
 		myBinaryDao = getBinaryDao();
 	}
@@ -73,7 +73,7 @@ public class ResourceToFileWriter implements ItemWriter<List<IBaseResource>> {
 	}

 	private IIdType createBinaryFromOutputStream() {
-		IBaseBinary binary = BinaryUtil.newBinary(myContext);
+		IBaseBinary binary = BinaryUtil.newBinary(myFhirContext);
 		binary.setContentType(Constants.CT_FHIR_NDJSON);
 		binary.setContent(myOutputStream.toByteArray());
@@ -97,6 +97,8 @@ public class ResourceToFileWriter implements ItemWriter<List<IBaseResource>> {
 		}

 		Optional<IIdType> createdId = flushToFiles();
-		createdId.ifPresent(theIIdType -> ourLog.warn("Created resources for bulk export file containing {} resources of type ", theIIdType.toUnqualifiedVersionless().getValue()));
+		if (createdId.isPresent()) {
+			ourLog.info("Created resources for bulk export file containing {} resources of type ", createdId.get().toUnqualifiedVersionless().getValue());
+		}
 	}
 }
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceTypePartitioner.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceTypePartitioner.java
index 2b3c916e9a0..a002d8c090f 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceTypePartitioner.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceTypePartitioner.java
@@ -28,11 +28,7 @@ public class ResourceTypePartitioner implements Partitioner {
 		Map<String, ExecutionContext> partitionContextMap = new HashMap<>();
 		Map<Long, String> idToResourceType = myBulkExportDaoSvc.getBulkJobCollectionIdToResourceTypeMap(myJobUUID);
-		//observation -> obs1.json, obs2.json, obs3.json BulkJobCollectionEntity
-		//bulk Collection Entity ID -> patient
-		// 123123-> Patient
-		// 91876389126-> Observation
 		idToResourceType.entrySet().stream()
 			.forEach(entry -> {
 				String resourceType = entry.getValue();
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkExportDaoSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkExportDaoSvc.java
index f420aa7dec3..a941e79a0ab 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkExportDaoSvc.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkExportDaoSvc.java
@@ -58,7 +58,7 @@ public class BulkExportDaoSvc {
 	private BulkExportJobEntity loadJob(String theJobUUID) {
 		Optional<BulkExportJobEntity> jobOpt = myBulkExportJobDao.findByJobId(theJobUUID);
 		if (!jobOpt.isPresent()) {
-			ourLog.info("Job appears to be deleted");
+			ourLog.warn("Job with UUID {} appears to be deleted", theJobUUID);
 			return null;
 		}
 		return jobOpt.get();
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportCollectionDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportCollectionDao.java
index 48d362dffb1..47ad9749a21 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportCollectionDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportCollectionDao.java
@@ -35,7 +35,4 @@ public interface IBulkExportCollectionDao extends JpaRepository
 			myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(theJobId).getStatus() == BulkJobStatusEnum.COMPLETE);
 	}

 	@Test
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestJPAConfig.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestJPAConfig.java
index be66c4b2eed..1914951a3aa 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestJPAConfig.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestJPAConfig.java
@@ -8,14 +8,12 @@
 import ca.uhn.fhir.jpa.subscription.channel.config.SubscriptionChannelConfig;
 import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig;
 import ca.uhn.fhir.jpa.subscription.match.deliver.resthook.SubscriptionDeliveringRestHookSubscriber;
 import ca.uhn.fhir.jpa.subscription.submit.config.SubscriptionSubmitterConfig;
-import ca.uhn.fhir.model.dstu2.resource.Provenance;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.Lazy;
 import org.springframework.context.annotation.Primary;
 import org.springframework.orm.jpa.JpaTransactionManager;
-import org.springframework.transaction.PlatformTransactionManager;

 import javax.persistence.EntityManagerFactory;
@@ -43,7 +41,8 @@ public class TestJPAConfig {
 	}

 	/*
-	I had to rename this bean as it was clashing with Spring Batch `transactionManager` in SimpleBatchConfiguration
+	Please do not rename this bean to "transactionManager()" as this will conflict with the transactionManager
+	provided by Spring Batch.
 	*/
 	@Bean
 	@Primary