Server fixes from Connectathon 32 (#4838)

* NPM test cleanup

* fix imports

* Ongoing cleanup

* Rework memory queue

* Reindex cleanup

* Cleanup

* Clean up

* Test fixes

* Test fixes

* Test fixes

* Test fix

* Test fix
James Agnew 2023-05-15 08:28:55 -04:00 committed by GitHub
parent 483ddca3be
commit 05defb58c5
41 changed files with 877 additions and 395 deletions

View File

@@ -70,8 +70,8 @@ stages:
      # Put to top, but kept in order here
      # - name: hapi_fhir_jpaserver_test_utilities
      #   module: hapi-fhir-jpaserver-test-utilities
-     # - name: hapi_fhir_jpaserver_uhnfhirtest
-     #   module: hapi-fhir-jpaserver-uhnfhirtest
+     - name: hapi_fhir_jpaserver_uhnfhirtest
+       module: hapi-fhir-jpaserver-uhnfhirtest
      - name: hapi_fhir_server
        module: hapi-fhir-server
      - name: hapi_fhir_server_mdm
@@ -129,8 +129,8 @@ stages:
        module: hapi-tinder-plugin
      - name: hapi_tinder_test
        module: hapi-tinder-test
-     # - name: tests_hapi_fhir_base_test_jaxrsserver_kotlin
-     #   module: tests/hapi-fhir-base-test-jaxrsserver-kotlin
+     - name: tests_hapi_fhir_base_test_jaxrsserver_kotlin
+       module: tests/hapi-fhir-base-test-jaxrsserver-kotlin
      - name: tests_hapi_fhir_base_test_mindeps_client
        module: tests/hapi-fhir-base-test-mindeps-client
      - name: tests_hapi_fhir_base_test_mindeps_server

View File

@@ -30,6 +30,8 @@ import ca.uhn.fhir.util.DateUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@@ -355,17 +357,21 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
      if (myLowerBound == null || myLowerBound.getValue() == null) {
         return null;
      }
-     Date retVal = myLowerBound.getValue();
-     if (myLowerBound.getPrecision().ordinal() <= TemporalPrecisionEnum.DAY.ordinal()) {
+     return getLowerBoundAsInstant(myLowerBound);
+  }
+
+  @Nonnull
+  private static Date getLowerBoundAsInstant(@Nonnull DateParam theLowerBound) {
+     Date retVal = theLowerBound.getValue();
+     if (theLowerBound.getPrecision().ordinal() <= TemporalPrecisionEnum.DAY.ordinal()) {
         retVal = DateUtils.getLowestInstantFromDate(retVal);
      }
-     if (myLowerBound.getPrefix() != null) {
-        switch (myLowerBound.getPrefix()) {
+     if (theLowerBound.getPrefix() != null) {
+        switch (theLowerBound.getPrefix()) {
           case GREATERTHAN:
           case STARTS_AFTER:
-             retVal = myLowerBound.getPrecision().add(retVal, 1);
+             retVal = theLowerBound.getPrecision().add(retVal, 1);
              break;
           case EQUAL:
           case NOT_EQUAL:
@@ -375,7 +381,7 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
           case APPROXIMATE:
           case LESSTHAN_OR_EQUALS:
           case ENDS_BEFORE:
-             throw new IllegalStateException(Msg.code(1928) + "Invalid lower bound comparator: " + myLowerBound.getPrefix());
+             throw new IllegalStateException(Msg.code(1928) + "Invalid lower bound comparator: " + theLowerBound.getPrefix());
        }
      }
      return retVal;
@@ -417,14 +423,19 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
         return null;
      }
-     Date retVal = myUpperBound.getValue();
-     if (myUpperBound.getPrecision().ordinal() <= TemporalPrecisionEnum.DAY.ordinal()) {
+     return getUpperBoundAsInstant(myUpperBound);
+  }
+
+  @Nonnull
+  private static Date getUpperBoundAsInstant(@Nonnull DateParam theUpperBound) {
+     Date retVal = theUpperBound.getValue();
+     if (theUpperBound.getPrecision().ordinal() <= TemporalPrecisionEnum.DAY.ordinal()) {
         retVal = DateUtils.getHighestInstantFromDate(retVal);
      }
-     if (myUpperBound.getPrefix() != null) {
-        switch (myUpperBound.getPrefix()) {
+     if (theUpperBound.getPrefix() != null) {
+        switch (theUpperBound.getPrefix()) {
           case LESSTHAN:
           case ENDS_BEFORE:
              retVal = new Date(retVal.getTime() - 1L);
@@ -432,14 +443,14 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
           case EQUAL:
           case NOT_EQUAL:
           case LESSTHAN_OR_EQUALS:
-             retVal = myUpperBound.getPrecision().add(retVal, 1);
+             retVal = theUpperBound.getPrecision().add(retVal, 1);
              retVal = new Date(retVal.getTime() - 1L);
              break;
           case GREATERTHAN_OR_EQUALS:
           case GREATERTHAN:
           case APPROXIMATE:
           case STARTS_AFTER:
-             throw new IllegalStateException(Msg.code(1929) + "Invalid upper bound comparator: " + myUpperBound.getPrefix());
+             throw new IllegalStateException(Msg.code(1929) + "Invalid upper bound comparator: " + theUpperBound.getPrefix());
        }
      }
      return retVal;
@@ -626,12 +637,14 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
   * are the same value. As such, even though the prefixes for the lower and
   * upper bounds default to <code>ge</code> and <code>le</code> respectively,
   * the resulting prefix is effectively <code>eq</code> where only a single
-  * date is provided - as required by the FHIR specificiation (i.e. "If no
+  * date is provided - as required by the FHIR specification (i.e. "If no
   * prefix is present, the prefix <code>eq</code> is assumed").
   */
  private void validateAndSet(DateParam lowerBound, DateParam upperBound) {
     if (hasBound(lowerBound) && hasBound(upperBound)) {
-       if (lowerBound.getValue().getTime() > upperBound.getValue().getTime()) {
+       Date lowerBoundAsInstant = getLowerBoundAsInstant(lowerBound);
+       Date upperBoundAsInstant = getUpperBoundAsInstant(upperBound);
+       if (lowerBoundAsInstant.after(upperBoundAsInstant)) {
          throw new DataFormatException(Msg.code(1932) + format(
             "Lower bound of %s is after upper bound of %s",
             lowerBound.getValueAsString(), upperBound.getValueAsString()));

View File

@@ -29,9 +29,10 @@ public class DateRangeUtil {
  /**
   * Narrow the DateRange to be within theStartInclusive, and theEndExclusive, if provided.
+  *
   * @param theDateRangeParam the initial range, null for unconstrained
   * @param theStartInclusive a lower bound to apply, or null for unchanged.
   * @param theEndExclusive   an upper bound to apply, or null for unchanged.
   * @return a DateRange within the original range, and between theStartInclusive and theEnd
   */
  @Nonnull
@@ -39,16 +40,23 @@
     if (theStartInclusive == null && theEndExclusive == null) {
        return theDateRangeParam;
     }
-    DateRangeParam result = theDateRangeParam==null?new DateRangeParam():new DateRangeParam(theDateRangeParam);
-    if (theStartInclusive != null) {
+    DateRangeParam result = theDateRangeParam == null ? new DateRangeParam() : new DateRangeParam(theDateRangeParam);
+
+    Date startInclusive = theStartInclusive;
+    if (startInclusive != null) {
        Date inputStart = result.getLowerBoundAsInstant();
-       if (theDateRangeParam == null || inputStart == null || inputStart.before(theStartInclusive)) {
-          result.setLowerBoundInclusive(theStartInclusive);
+
+       Date upperBound = result.getUpperBoundAsInstant();
+       if (upperBound != null && upperBound.before(startInclusive)) {
+          startInclusive = upperBound;
+       }
+       if (theDateRangeParam == null || inputStart == null || inputStart.before(startInclusive)) {
+          result.setLowerBoundInclusive(startInclusive);
        }
     }
     if (theEndExclusive != null) {
-       Date inputEnd = result.getUpperBound() == null? null : result.getUpperBound().getValue();
+       Date inputEnd = result.getUpperBound() == null ? null : result.getUpperBound().getValue();
        if (theDateRangeParam == null || inputEnd == null || inputEnd.after(theEndExclusive)) {
           result.setUpperBoundExclusive(theEndExclusive);
        }

View File

@@ -5,6 +5,7 @@ import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
+import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
@@ -59,6 +60,11 @@ class DateRangeUtilTest {
           new DateParam(theResultStartPrefix, theResultStart), new DateParam(theResultEndPrefix, theResultEnd));
     }

+    static NarrowCase from(String theMessage, DateRangeParam theRange, Date theNarrowStart, Date theNarrowEnd,
+                           DateParam theResultStart, DateParam theResultEnd) {
+       return new NarrowCase(theMessage, theRange, theNarrowStart, theNarrowEnd, theResultStart, theResultEnd);
+    }
+
     @Override
     public String toString() {
        return new ToStringBuilder(this, ToStringStyle.SIMPLE_STYLE)
@@ -89,8 +95,23 @@ class DateRangeUtilTest {
        // half-open cases
        NarrowCase.from("end inside open end", new DateRangeParam(dateTwo, null), null, dateFour, dateTwo, dateFour),
        NarrowCase.from("start inside open start", new DateRangeParam(null, dateFour), dateTwo, null, GREATERTHAN_OR_EQUALS, dateTwo, LESSTHAN_OR_EQUALS, dateFour),
-       NarrowCase.from("gt case preserved", new DateRangeParam(new DateParam(GREATERTHAN, dateTwo), null), null, dateFour, GREATERTHAN, dateTwo, LESSTHAN, dateFour)
+       NarrowCase.from("gt case preserved", new DateRangeParam(new DateParam(GREATERTHAN, dateTwo), null), null, dateFour, GREATERTHAN, dateTwo, LESSTHAN, dateFour),
+       NarrowCase.from("lt date level precision date, narrow from is inside date",
+          new DateRangeParam(new DateParam(LESSTHAN, "2023-05-06")),
+          Date.from(Instant.parse("2023-05-06T10:00:20.512+00:00")),
+          Date.from(Instant.parse("2023-05-10T00:00:00.000+00:00")),
+          new DateParam(GREATERTHAN_OR_EQUALS, Date.from(Instant.parse("2023-05-06T10:00:20.512+00:00"))),
+          new DateParam(LESSTHAN, "2023-05-06")
+       ),
+       NarrowCase.from("gt date level precision date, narrow to is inside date",
+          new DateRangeParam(new DateParam(GREATERTHAN_OR_EQUALS, "2023-05-06")),
+          Date.from(Instant.parse("2023-05-01T00:00:00.000+00:00")),
+          Date.from(Instant.parse("2023-05-06T10:00:20.512+00:00")),
+          new DateParam(GREATERTHAN_OR_EQUALS, "2023-05-06"),
+          new DateParam(LESSTHAN, Date.from(Instant.parse("2023-05-06T10:00:20.512+00:00")))
+       )
     );
  }

View File

@@ -0,0 +1,8 @@
---
type: fix
issue: 4838
title: "Two failures in the $delete-expunge operation were fixed:
<ul>
<li>Jobs could fail if Hibernate Search was loaded but not enabled.</li>
<li>Jobs could fail if the criteria included a <code>_lastUpdated=lt[date]</code> clause</li>
</ul>"
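To make the second failure concrete, here is a hedged sketch of how a _lastUpdated=lt[date] clause is represented and then narrowed during the job, using the DateRangeParam and DateRangeUtil changes shown earlier in this commit. The ca.uhn.fhir.util location of DateRangeUtil and the static narrowDateRange signature are assumptions; the chunk-start value simply mirrors the new test case above:

import java.time.Instant;
import java.util.Date;

import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.util.DateRangeUtil; // assumed package location

public class DeleteExpungeLastUpdatedSketch {
   public static void main(String[] args) {
      // _lastUpdated=lt2023-05-06 parses to a day-precision upper bound with the
      // LESSTHAN prefix and no lower bound.
      DateRangeParam lastUpdated = new DateRangeParam(new DateParam(ParamPrefixEnum.LESSTHAN, "2023-05-06"));

      // The batch job narrows the range while chunking. A chunk start that falls inside
      // 2023-05-06 used to trip the "Lower bound ... is after upper bound" validation,
      // because the day-precision upper bound was compared as midnight; it is now
      // resolved to an end-of-day instant before the comparison.
      Date chunkStart = Date.from(Instant.parse("2023-05-06T10:00:20.512Z"));
      DateRangeParam narrowed = DateRangeUtil.narrowDateRange(lastUpdated, chunkStart, null);

      System.out.println(narrowed.getLowerBoundAsInstant() + " .. " + narrowed.getUpperBoundAsInstant());
   }
}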

View File

@@ -0,0 +1,6 @@
---
type: fix
issue: 4838
title: "The BALP AsyncMemoryQueueBackedFhirClientBalpSink incorrectly used a non-blocking method
to add events to the blocking queue, resulting in race conditions on a heavily loaded
server."
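For context, this is the difference between the two approaches on a java.util.concurrent.BlockingQueue. It is a minimal illustration only, not the actual BALP sink code; the queue type and capacity here are arbitrary:

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

public class QueueSinkSketch {
   private final BlockingQueue<String> myQueue = new ArrayBlockingQueue<>(100);

   // Non-blocking: when the queue is full this returns false immediately, so under heavy
   // load events can be silently dropped or handled inconsistently by the caller.
   public boolean offerEvent(String theEvent) {
      return myQueue.offer(theEvent);
   }

   // Blocking: waits until capacity is available, so no event is lost when producers
   // temporarily outpace the consumer thread draining the queue.
   public void putEvent(String theEvent) throws InterruptedException {
      myQueue.put(theEvent);
   }
}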

View File

@@ -0,0 +1,7 @@
---
type: add
issue: 4838
title: "When performing a resource reindex on a deleted resource, any search index rows will now
be deleted. Deleting a resource should generally not leave any such rows behind, but they can
be left if the resource is manually deleted using SQL directly against the database and in this
case the reindex job will now clean up these unwanted rows."

View File

@@ -0,0 +1,5 @@
---
type: fix
issue: 4838
title: "When reindexing resources, deleted resources could incorrectly fail validation rules and
cause the reindex job to not complete correctly. This has been corrected."

View File

@@ -0,0 +1,6 @@
---
type: add
issue: 4838
title: "When invoking the instance level `$reindex` and `$reindex-dryrun` operations, the resulting
status message and any warnings are now included in the response Parameters object as well as in
the generated response HTML narrative."
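The new messages can also be read back programmatically. A hedged sketch using the HAPI generic client follows; the server base URL and resource ID are placeholders, and it assumes the server exposes the instance-level $reindex-dryrun operation described above:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;

public class ReindexDryRunClientSketch {
   public static void main(String[] args) {
      FhirContext ctx = FhirContext.forR4();
      IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir"); // placeholder URL

      // Invoke the instance-level dry-run operation; no stored data is changed.
      Parameters outcome = client
         .operation()
         .onInstance(new IdType("Patient/123")) // placeholder resource ID
         .named("$reindex-dryrun")
         .withNoParameters(Parameters.class)
         .execute();

      // The status message and any warnings come back as repeating "Message" parameters.
      for (Parameters.ParametersParameterComponent next : outcome.getParameter()) {
         if ("Message".equals(next.getName()) && next.getValue() instanceof StringType) {
            System.out.println(((StringType) next.getValue()).getValue());
         }
      }
   }
}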

View File

@@ -961,7 +961,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
      * This should be the very first thing..
      */
     if (theResource != null) {
-       if (thePerformIndexing) {
+       if (thePerformIndexing && theDeletedTimestampOrNull == null) {
          if (!ourValidationDisabledForUnitTest) {
             validateResourceForStorage((T) theResource, entity);
          }
@@ -1057,7 +1057,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
          verifyMatchUrlForConditionalCreate(theResource, entity.getCreatedByMatchUrl(), newParams, theRequest);
       }

-      entity.setUpdated(theTransactionDetails.getTransactionDate());
+      if (CURRENTLY_REINDEXING.get(theResource) != Boolean.TRUE) {
+         entity.setUpdated(theTransactionDetails.getTransactionDate());
+      }
       newParams.populateResourceTableSearchParamsPresentFlags(entity);
       entity.setIndexStatus(INDEX_STATUS_INDEXED);
    }
@@ -1151,6 +1153,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
    if (thePerformIndexing) {
       if (newParams == null) {
          myExpungeService.deleteAllSearchParams(JpaPid.fromId(entity.getId()));
+         entity.clearAllParamsPopulated();
       } else {
          // Synchronize search param indexes

View File

@@ -1387,7 +1387,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
    }

    if (theReindexParameters.getReindexSearchParameters() == ReindexParameters.ReindexSearchParametersEnum.ALL) {
-      reindexSearchParameters(entity, retVal);
+      reindexSearchParameters(entity, retVal, theTransactionDetails);
    }
    if (theReindexParameters.getOptimizeStorage() != ReindexParameters.OptimizeStorageModeEnum.NONE) {
       reindexOptimizeStorage(entity, theReindexParameters.getOptimizeStorage());
@@ -1397,16 +1397,42 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 }

 @SuppressWarnings("unchecked")
-private void reindexSearchParameters(ResourceTable entity, ReindexOutcome theReindexOutcome) {
+private void reindexSearchParameters(ResourceTable entity, ReindexOutcome theReindexOutcome, TransactionDetails theTransactionDetails) {
    try {
       T resource = (T) myJpaStorageResourceParser.toResource(entity, false);
-      reindex(resource, entity);
+      reindexSearchParameters(resource, entity, theTransactionDetails);
    } catch (Exception e) {
       theReindexOutcome.addWarning("Failed to reindex resource " + entity.getIdDt() + ": " + e);
       myResourceTableDao.updateIndexStatus(entity.getId(), INDEX_STATUS_INDEXING_FAILED);
    }
 }

+/**
+ * @deprecated Use {@link #reindex(IResourcePersistentId, ReindexParameters, RequestDetails, TransactionDetails)}
+ */
+@Deprecated
+@Override
+public void reindex(T theResource, IBasePersistedResource theEntity) {
+   assert TransactionSynchronizationManager.isActualTransactionActive();
+   ResourceTable entity = (ResourceTable) theEntity;
+   TransactionDetails transactionDetails = new TransactionDetails(entity.getUpdatedDate());
+   reindexSearchParameters(theResource, theEntity, transactionDetails);
+}
+
+private void reindexSearchParameters(T theResource, IBasePersistedResource theEntity, TransactionDetails transactionDetails) {
+   ourLog.debug("Indexing resource {} - PID {}", theEntity.getIdDt().getValue(), theEntity.getPersistentId());
+   if (theResource != null) {
+      CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE);
+   }
+   ResourceTable resourceTable = updateEntity(null, theResource, theEntity, theEntity.getDeleted(), true, false, transactionDetails, true, false);
+   if (theResource != null) {
+      CURRENTLY_REINDEXING.put(theResource, null);
+   }
+}
+
 private void reindexOptimizeStorage(ResourceTable entity, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageMode) {
    ResourceHistoryTable historyEntity = entity.getCurrentVersionEntity();
    if (historyEntity != null) {
@@ -1546,23 +1572,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
    return entity;
 }

-@Override
-public void reindex(T theResource, IBasePersistedResource theEntity) {
-   assert TransactionSynchronizationManager.isActualTransactionActive();
-
-   ourLog.debug("Indexing resource {} - PID {}", theEntity.getIdDt().getValue(), theEntity.getPersistentId());
-   if (theResource != null) {
-      CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE);
-   }
-
-   ResourceTable entity = (ResourceTable) theEntity;
-   TransactionDetails transactionDetails = new TransactionDetails(entity.getUpdatedDate());
-   ResourceTable resourceTable = updateEntity(null, theResource, theEntity, theEntity.getDeleted(), true, false, transactionDetails, true, false);
-   if (theResource != null) {
-      CURRENTLY_REINDEXING.put(theResource, null);
-   }
-}
-
 @Transactional
 @Override
 public void removeTag(IIdType theId, TagTypeEnum theTagType, String theScheme, String theTerm) {

View File

@@ -85,6 +85,10 @@ public interface IResourceTableDao extends JpaRepository<ResourceTable, Long>, I
 @Query("UPDATE ResourceTable t SET t.myIndexStatus = :status WHERE t.myId = :id")
 void updateIndexStatus(@Param("id") Long theId, @Param("status") Long theIndexStatus);

+@Modifying
+@Query("UPDATE ResourceTable t SET t.myUpdated = :updated WHERE t.myId = :id")
+void updateLastUpdated(@Param("id") Long theId, @Param("updated") Date theUpdated);
+
 @Modifying
 @Query("DELETE FROM ResourceTable t WHERE t.myId = :pid")
 void deleteByPid(@Param("pid") Long theId);

View File

@@ -39,9 +39,6 @@ import java.util.stream.Collectors;
public class DeleteExpungeSqlBuilder {
   private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeSqlBuilder.class);
-  public static final String PROCESS_NAME = "Delete Expunging";
-  public static final String THREAD_PREFIX = "delete-expunge";

   private final ResourceTableFKProvider myResourceTableFKProvider;
   private final JpaStorageSettings myStorageSettings;
   private final IIdHelperService myIdHelper;

View File

@@ -69,7 +69,7 @@ public class DeleteExpungeSvcImpl implements IDeleteExpungeSvc<JpaPid> {
  * This method clears the Hibernate Search index for the given resources.
  */
 private void clearHibernateSearchIndex(List<JpaPid> thePersistentIds) {
-   if (myFullTextSearchSvc != null) {
+   if (myFullTextSearchSvc != null && !myFullTextSearchSvc.isDisabled()) {
       List<Object> objectIds = thePersistentIds.stream().map(JpaPid::getId).collect(Collectors.toList());
       myFullTextSearchSvc.deleteIndexedDocumentsByTypeAndId(ResourceTable.class, objectIds);
       ourLog.info("Cleared Hibernate Search indexes.");

View File

@@ -32,6 +32,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionDao;
+import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionEntity;
import ca.uhn.fhir.jpa.packages.loader.PackageResourceParsingSvc;
@@ -58,8 +59,6 @@ import org.hl7.fhir.utilities.npm.NpmPackage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.transaction.PlatformTransactionManager;
-import org.springframework.transaction.support.TransactionTemplate;

import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
@@ -90,7 +89,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 @Autowired
 private IHapiPackageCacheManager myPackageCacheManager;
 @Autowired
-private PlatformTransactionManager myTxManager;
+private IHapiTransactionService myTxService;
 @Autowired
 private INpmPackageVersionDao myPackageVersionDao;
 @Autowired
@@ -128,9 +127,10 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
    }
 }

-public PackageDeleteOutcomeJson uninstall(PackageInstallationSpec theInstallationSpec) {
-   return myPackageCacheManager.uninstallPackage(theInstallationSpec.getName(), theInstallationSpec.getVersion());
-}
+@Override
+public PackageDeleteOutcomeJson uninstall(PackageInstallationSpec theInstallationSpec) {
+   return myPackageCacheManager.uninstallPackage(theInstallationSpec.getName(), theInstallationSpec.getVersion());
+}

 /**
  * Loads and installs an IG from a file on disk or the Simplifier repo using
@@ -152,12 +152,12 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
    if (enabled) {
       try {
-         boolean exists = new TransactionTemplate(myTxManager).execute(tx -> {
+         boolean exists = myTxService.withSystemRequest().withRequestPartitionId(RequestPartitionId.defaultPartition()).execute(() -> {
            Optional<NpmPackageVersionEntity> existing = myPackageVersionDao.findByPackageIdAndVersion(theInstallationSpec.getName(), theInstallationSpec.getVersion());
            return existing.isPresent();
         });
         if (exists) {
            ourLog.info("Package {}#{} is already installed", theInstallationSpec.getName(), theInstallationSpec.getVersion());
         }

         NpmPackage npmPackage = myPackageCacheManager.installPackage(theInstallationSpec);
@@ -267,8 +267,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
         }
      } catch (IOException e) {
-        throw new ImplementationGuideInstallationException(Msg.code(1287) + String.format(
-           "Cannot resolve dependency %s#%s", id, ver), e);
+        throw new ImplementationGuideInstallationException(Msg.code(1287) + String.format("Cannot resolve dependency %s#%s", id, ver), e);
      }
   }
 }
@@ -278,8 +277,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
  * Asserts if package FHIR version is compatible with current FHIR version
  * by using semantic versioning rules.
  */
-protected void assertFhirVersionsAreCompatible(String fhirVersion, String currentFhirVersion)
-   throws ImplementationGuideInstallationException {
+protected void assertFhirVersionsAreCompatible(String fhirVersion, String currentFhirVersion) throws ImplementationGuideInstallationException {

   FhirVersionEnum fhirVersionEnum = FhirVersionEnum.forVersionString(fhirVersion);
   FhirVersionEnum currentFhirVersionEnum = FhirVersionEnum.forVersionString(currentFhirVersion);
@@ -290,9 +288,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
      compatible = true;
   }
   if (!compatible) {
-     throw new ImplementationGuideInstallationException(Msg.code(1288) + String.format(
-        "Cannot install implementation guide: FHIR versions mismatch (expected <=%s, package uses %s)",
-        currentFhirVersion, fhirVersion));
+     throw new ImplementationGuideInstallationException(Msg.code(1288) + String.format("Cannot install implementation guide: FHIR versions mismatch (expected <=%s, package uses %s)", currentFhirVersion, fhirVersion));
   }
 }
@@ -336,26 +332,18 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
            ourLog.info("Skipping update of existing resource matching {}", map.toNormalizedQueryString(myFhirContext));
         }
      }
-  }
-  else{
+  } else {
      ourLog.warn("Failed to upload resource of type {} with ID {} - Error: Resource failed validation", theResource.fhirType(), theResource.getIdElement().getValue());
   }
 }

 private IBundleProvider searchResource(IFhirResourceDao theDao, SearchParameterMap theMap) {
-   if (myPartitionSettings.isPartitioningEnabled()) {
-      SystemRequestDetails requestDetails = newSystemRequestDetails();
-      return theDao.search(theMap, requestDetails);
-   } else {
-      return theDao.search(theMap);
-   }
+   return theDao.search(theMap, newSystemRequestDetails());
 }

 @Nonnull
 private SystemRequestDetails newSystemRequestDetails() {
-   return
-      new SystemRequestDetails()
-         .setRequestPartitionId(RequestPartitionId.defaultPartition());
+   return new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.defaultPartition());
 }

 private void createResource(IFhirResourceDao theDao, IBaseResource theResource) {
@@ -400,8 +388,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
   }

   if (!isValidResourceStatusForPackageUpload(theResource)) {
-     ourLog.warn("Failed to validate resource of type {} with ID {} - Error: Resource status not accepted value.",
-        theResource.fhirType(), theResource.getIdElement().getValue());
+     ourLog.warn("Failed to validate resource of type {} with ID {} - Error: Resource status not accepted value.", theResource.fhirType(), theResource.getIdElement().getValue());
      return false;
   }
@@ -458,8 +445,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
   try {
      return validationSupport.generateSnapshot(new ValidationSupportContext(validationSupport), sd, null, null, null);
   } catch (Exception e) {
-     throw new ImplementationGuideInstallationException(Msg.code(1290) + String.format(
-        "Failure when generating snapshot of StructureDefinition: %s", sd.getIdElement()), e);
+     throw new ImplementationGuideInstallationException(Msg.code(1290) + String.format("Failure when generating snapshot of StructureDefinition: %s", sd.getIdElement()), e);
   }
 }

View File

@@ -26,10 +26,13 @@ import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.api.dao.ReindexOutcome;
+import ca.uhn.fhir.jpa.api.dao.ReindexParameters;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.*;
import ca.uhn.fhir.jpa.partition.BaseRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor;
@@ -40,6 +43,7 @@ import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.rest.server.util.ResourceSearchParams;
+import ca.uhn.fhir.util.StopWatch;
import ca.uhn.hapi.converters.canonical.VersionCanonicalizer;
import com.google.common.annotations.VisibleForTesting;
import org.hl7.fhir.instance.model.api.IBaseParameters;
@@ -59,6 +63,7 @@ import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -132,6 +137,7 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
 @SuppressWarnings({"unchecked", "rawtypes"})
 @Nonnull
 private Parameters reindexInTransaction(RequestDetails theRequestDetails, IIdType theResourceId) {
+   StopWatch sw = new StopWatch();
    IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResourceId.getResourceType());
    ResourceTable entity = (ResourceTable) dao.readEntity(theResourceId, theRequestDetails);
    IBaseResource resource = myJpaStorageResourceParser.toResource(entity, false);
@@ -144,16 +150,26 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
    ResourceIndexedSearchParams existingParamsToPopulate = new ResourceIndexedSearchParams(entity);
    existingParamsToPopulate.mySearchParamPresentEntities.addAll(entity.getSearchParamPresents());

-   dao.reindex(resource, entity);
+   List<String> messages = new ArrayList<>();
+
+   JpaPid pid = JpaPid.fromId(entity.getId());
+   ReindexOutcome outcome = dao.reindex(pid, new ReindexParameters(), theRequestDetails, new TransactionDetails());
+   messages.add("Reindex completed in " + sw);
+   for (String next : outcome.getWarnings()) {
+      messages.add("WARNING: " + next);
+   }

    ResourceIndexedSearchParams newParamsToPopulate = new ResourceIndexedSearchParams(entity);
    newParamsToPopulate.mySearchParamPresentEntities.addAll(entity.getSearchParamPresents());

-   return buildIndexResponse(existingParamsToPopulate, newParamsToPopulate, true);
+   return buildIndexResponse(existingParamsToPopulate, newParamsToPopulate, true, messages);
 }

 @Nonnull
 private Parameters reindexDryRunInTransaction(RequestDetails theRequestDetails, IIdType theResourceId, RequestPartitionId theRequestPartitionId, TransactionDetails theTransactionDetails, Set<String> theParameters) {
+   StopWatch sw = new StopWatch();
    IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(theResourceId.getResourceType());
    ResourceTable entity = (ResourceTable) dao.readEntity(theResourceId, theRequestDetails);
    IBaseResource resource = myJpaStorageResourceParser.toResource(entity, false);
@@ -186,7 +202,8 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
       showAction = false;
    }

-   return buildIndexResponse(existingParamsToPopulate, newParamsToPopulate, showAction);
+   String message = "Reindex dry-run completed in " + sw + ". No changes were committed to any stored data.";
+   return buildIndexResponse(existingParamsToPopulate, newParamsToPopulate, showAction, List.of(message));
 }

 @Nonnull
@@ -197,12 +214,16 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
 @Nonnull
 @VisibleForTesting
-Parameters buildIndexResponse(ResourceIndexedSearchParams theExistingParams, ResourceIndexedSearchParams theNewParams, boolean theShowAction) {
+Parameters buildIndexResponse(ResourceIndexedSearchParams theExistingParams, ResourceIndexedSearchParams theNewParams, boolean theShowAction, List<String> theMessages) {
    Parameters parameters = new Parameters();

    Parameters.ParametersParameterComponent narrativeParameter = parameters.addParameter();
    narrativeParameter.setName("Narrative");

+   for (String next : theMessages) {
+      parameters.addParameter("Message", new StringType(next));
+   }
+
    // Normal indexes
    addParamsNonMissing(parameters, "CoordinateIndexes", "Coords", theExistingParams.myCoordsParams, theNewParams.myCoordsParams, new CoordsParamPopulator(), theShowAction);
    addParamsNonMissing(parameters, "DateIndexes", "Date", theExistingParams.myDateParams, theNewParams.myDateParams, new DateParamPopulator(), theShowAction);

View File

@@ -1,5 +1,15 @@
<div xmlns:th="http://www.thymeleaf.org">

+  <!--/* Messages */-->
+  <div th:if="${resource.hasParameter('Message')}" id="Messages">
+     <h1>Outcome</h1>
+     <ul>
+        <li th:each="part : ${resource.getParameters('Message')}">
+           [[${part.getValue().getValue()}]]
+        </li>
+     </ul>
+  </div>
+
   <!--/* Number Indexes */-->
   <div th:if="${resource.hasParameter('NumberIndexes')}" id="NumberIndexes">
      <h1>Number Indexes</h1>

View File

@@ -1,12 +1,263 @@
package ca.uhn.fhir.jpa.packages;

-import org.elasticsearch.common.inject.Inject;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionDao;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.dao.tx.NonTransactionalHapiTransactionService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.packages.loader.PackageResourceParsingSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistryController;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.SimpleBundleProvider;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.Communication;
import org.hl7.fhir.r4.model.DocumentReference;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.Subscription;
import org.hl7.fhir.utilities.npm.NpmPackage;
import org.hl7.fhir.utilities.npm.PackageGenerator;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension;
import javax.annotation.Nonnull;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Optional;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
public class PackageInstallerSvcImplTest {
public static final String PACKAGE_VERSION = "1.0";
public static final String PACKAGE_ID_1 = "package1";
@Mock
private INpmPackageVersionDao myPackageVersionDao;
@Mock
private IHapiPackageCacheManager myPackageCacheManager;
@Mock
private ISearchParamRegistryController mySearchParamRegistryController;
@Mock
private DaoRegistry myDaoRegistry;
@Mock
private IFhirResourceDao<CodeSystem> myCodeSystemDao;
@Spy
private FhirContext myCtx = FhirContext.forR4Cached();
@Spy
private IHapiTransactionService myTxService = new NonTransactionalHapiTransactionService();
@Spy
private PackageResourceParsingSvc myPackageResourceParsingSvc = new PackageResourceParsingSvc(myCtx);
@Spy
private PartitionSettings myPartitionSettings = new PartitionSettings();
@InjectMocks
private PackageInstallerSvcImpl mySvc;
	@Test
	public void testPackageCompatibility() {
-		new PackageInstallerSvcImpl().assertFhirVersionsAreCompatible("R4", "R4B");
		mySvc.assertFhirVersionsAreCompatible("R4", "R4B");
	}
@Test
public void testValidForUpload_SearchParameterWithMetaParam() {
SearchParameter sp = new SearchParameter();
sp.setCode("_id");
assertFalse(mySvc.validForUpload(sp));
}
@Test
public void testValidForUpload_SearchParameterWithNoBase() {
SearchParameter sp = new SearchParameter();
sp.setCode("name");
sp.setExpression("Patient.name");
sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
assertFalse(mySvc.validForUpload(sp));
}
@Test
public void testValidForUpload_SearchParameterWithNoExpression() {
SearchParameter sp = new SearchParameter();
sp.setCode("name");
sp.addBase("Patient");
sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
assertFalse(mySvc.validForUpload(sp));
}
@Test
public void testValidForUpload_GoodSearchParameter() {
SearchParameter sp = new SearchParameter();
sp.setCode("name");
sp.addBase("Patient");
sp.setExpression("Patient.name");
sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
assertTrue(mySvc.validForUpload(sp));
}
@Test
public void testValidForUpload_RequestedSubscription() {
Subscription.SubscriptionChannelComponent subscriptionChannelComponent =
new Subscription.SubscriptionChannelComponent()
.setType(Subscription.SubscriptionChannelType.RESTHOOK)
.setEndpoint("https://tinyurl.com/2p95e27r");
Subscription subscription = new Subscription();
subscription.setCriteria("Patient?name=smith");
subscription.setChannel(subscriptionChannelComponent);
subscription.setStatus(Subscription.SubscriptionStatus.REQUESTED);
assertTrue(mySvc.validForUpload(subscription));
}
@Test
public void testValidForUpload_ErrorSubscription() {
Subscription.SubscriptionChannelComponent subscriptionChannelComponent =
new Subscription.SubscriptionChannelComponent()
.setType(Subscription.SubscriptionChannelType.RESTHOOK)
.setEndpoint("https://tinyurl.com/2p95e27r");
Subscription subscription = new Subscription();
subscription.setCriteria("Patient?name=smith");
subscription.setChannel(subscriptionChannelComponent);
subscription.setStatus(Subscription.SubscriptionStatus.ERROR);
assertFalse(mySvc.validForUpload(subscription));
}
@Test
public void testValidForUpload_ActiveSubscription() {
Subscription.SubscriptionChannelComponent subscriptionChannelComponent =
new Subscription.SubscriptionChannelComponent()
.setType(Subscription.SubscriptionChannelType.RESTHOOK)
.setEndpoint("https://tinyurl.com/2p95e27r");
Subscription subscription = new Subscription();
subscription.setCriteria("Patient?name=smith");
subscription.setChannel(subscriptionChannelComponent);
subscription.setStatus(Subscription.SubscriptionStatus.ACTIVE);
assertFalse(mySvc.validForUpload(subscription));
}
@Test
public void testValidForUpload_DocumentRefStatusValuePresent() {
DocumentReference documentReference = new DocumentReference();
documentReference.setStatus(Enumerations.DocumentReferenceStatus.ENTEREDINERROR);
assertTrue(mySvc.validForUpload(documentReference));
}
@Test
public void testValidForUpload_DocumentRefStatusValueNull() {
DocumentReference documentReference = new DocumentReference();
documentReference.setStatus(Enumerations.DocumentReferenceStatus.NULL);
assertFalse(mySvc.validForUpload(documentReference));
documentReference.setStatus(null);
assertFalse(mySvc.validForUpload(documentReference));
}
@Test
public void testValidForUpload_CommunicationStatusValuePresent() {
Communication communication = new Communication();
communication.setStatus(Communication.CommunicationStatus.NOTDONE);
assertTrue(mySvc.validForUpload(communication));
}
@Test
public void testValidForUpload_CommunicationStatusValueNull() {
Communication communication = new Communication();
communication.setStatus(Communication.CommunicationStatus.NULL);
assertFalse(mySvc.validForUpload(communication));
communication.setStatus(null);
assertFalse(mySvc.validForUpload(communication));
}
@Test
public void testDontTryToInstallDuplicateCodeSystem_CodeSystemAlreadyExistsWithDifferentId() throws IOException {
// Setup
// The CodeSystem that is already saved in the repository
CodeSystem existingCs = new CodeSystem();
existingCs.setId("CodeSystem/existingcs");
existingCs.setUrl("http://my-code-system");
existingCs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
// A new code system in a package we're installing that has the
// same URL as the previously saved one, but a different ID.
CodeSystem cs = new CodeSystem();
cs.setId("CodeSystem/mycs");
cs.setUrl("http://my-code-system");
cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
NpmPackage pkg = createPackage(cs, PACKAGE_ID_1);
when(myPackageVersionDao.findByPackageIdAndVersion(any(), any())).thenReturn(Optional.empty());
when(myPackageCacheManager.installPackage(any())).thenReturn(pkg);
when(myDaoRegistry.getResourceDao(CodeSystem.class)).thenReturn(myCodeSystemDao);
when(myCodeSystemDao.search(any(), any())).thenReturn(new SimpleBundleProvider(existingCs));
when(myCodeSystemDao.update(any(),any(RequestDetails.class))).thenReturn(new DaoMethodOutcome());
PackageInstallationSpec spec = new PackageInstallationSpec();
spec.setName(PACKAGE_ID_1);
spec.setVersion(PACKAGE_VERSION);
spec.setInstallMode(PackageInstallationSpec.InstallModeEnum.STORE_AND_INSTALL);
spec.setPackageContents(packageToBytes(pkg));
// Test
mySvc.install(spec);
// Verify
verify(myCodeSystemDao, times(1)).search(mySearchParameterMapCaptor.capture(), any());
SearchParameterMap map = mySearchParameterMapCaptor.getValue();
assertEquals("?url=http%3A%2F%2Fmy-code-system", map.toNormalizedQueryString(myCtx));
verify(myCodeSystemDao, times(1)).update(myCodeSystemCaptor.capture(), any(RequestDetails.class));
CodeSystem codeSystem = myCodeSystemCaptor.getValue();
assertEquals("existingcs", codeSystem.getIdPart());
}
@Nonnull
private static byte[] packageToBytes(NpmPackage pkg) throws IOException {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
pkg.save(stream);
byte[] bytes = stream.toByteArray();
return bytes;
}
@Captor
private ArgumentCaptor<SearchParameterMap> mySearchParameterMapCaptor;
@Captor
private ArgumentCaptor<CodeSystem> myCodeSystemCaptor;
@Nonnull
private NpmPackage createPackage(CodeSystem cs, String packageId) throws IOException {
PackageGenerator manifestGenerator = new PackageGenerator();
manifestGenerator.name(packageId);
manifestGenerator.version(PACKAGE_VERSION);
manifestGenerator.description("a package");
manifestGenerator.fhirVersions(List.of(FhirVersionEnum.R4.getFhirVersionString()));
NpmPackage pkg = NpmPackage.empty(manifestGenerator);
String csString = myCtx.newJsonParser().encodeResourceToString(cs);
pkg.addFile("package", "cs.json", csString.getBytes(StandardCharsets.UTF_8), "CodeSystem");
return pkg;
}
}

View File

@@ -620,6 +620,23 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
		myHasLinks = theHasLinks;
	}
/**
* Clears all the index population flags, e.g. {@link #isParamsStringPopulated()}
*
* @since 6.8.0
*/
public void clearAllParamsPopulated() {
myParamsTokenPopulated = false;
myParamsCoordsPopulated = false;
myParamsDatePopulated = false;
myParamsNumberPopulated = false;
myParamsStringPopulated = false;
myParamsQuantityPopulated = false;
myParamsQuantityNormalizedPopulated = false;
myParamsUriPopulated = false;
myHasLinks = false;
}
	public boolean isParamsComboStringUniquePresent() {
		if (myParamsComboStringUniquePresent == null) {
			return false;

View File

@@ -85,6 +85,7 @@ import org.springframework.data.domain.Slice;
import org.springframework.util.comparator.ComparableComparator;

import javax.annotation.Nonnull;
import javax.persistence.Id;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
@@ -100,6 +101,7 @@ import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.eq;
@@ -3342,6 +3344,34 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
	}
/**
* See the class javadoc before changing the counts in this test!
*/
@Test
public void testDeleteResource_WithOutgoingReference() {
// Setup
createOrganization(withId("A"));
IIdType patientId = createPatient(withOrganization(new IdType("Organization/A")), withActiveTrue());
// Test
myCaptureQueriesListener.clear();
myPatientDao.delete(patientId, mySrd);
// Verify
assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(3, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
runInTransaction(()->{
ResourceTable version = myResourceTableDao.findById(patientId.getIdPartAsLong()).orElseThrow();
assertFalse(version.isParamsTokenPopulated());
assertFalse(version.isHasLinks());
assertEquals(0, myResourceIndexedSearchParamTokenDao.count());
assertEquals(0, myResourceLinkDao.count());
});
}
	/**
	 * See the class javadoc before changing the counts in this test!
	 */
@@ -3353,6 +3383,11 @@
		Observation observation = new Observation().setStatus(Observation.ObservationStatus.FINAL).addCategory(new CodeableConcept().addCoding(new Coding("http://category-type", "12345", null))).setCode(new CodeableConcept().addCoding(new Coding("http://coverage-type", "12345", null)));
		IIdType idDt = myObservationDao.create(observation, mySrd).getEntity().getIdDt();
runInTransaction(()->{
assertEquals(4, myResourceIndexedSearchParamTokenDao.count());
ResourceTable version = myResourceTableDao.findById(idDt.getIdPartAsLong()).orElseThrow();
assertTrue(version.isParamsTokenPopulated());
});
		// when
		myCaptureQueriesListener.clear();
@@ -3360,6 +3395,11 @@
		// then
		assertQueryCount(3, 1, 1, 2);
runInTransaction(()->{
assertEquals(0, myResourceIndexedSearchParamTokenDao.count());
ResourceTable version = myResourceTableDao.findById(idDt.getIdPartAsLong()).orElseThrow();
assertFalse(version.isParamsTokenPopulated());
});
	}

	private void assertQueryCount(int theExpectedSelectCount, int theExpectedUpdateCount, int theExpectedInsertCount, int theExpectedDeleteCount) {

View File

@@ -1,132 +0,0 @@
package ca.uhn.fhir.jpa.packages;
import ca.uhn.fhir.context.FhirContext;
import org.hl7.fhir.r4.model.Communication;
import org.hl7.fhir.r4.model.DocumentReference;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.Subscription;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
class PackageInstallerSvcImplTest {
private final FhirContext myCtx = FhirContext.forR4Cached();
private PackageInstallerSvcImpl mySvc;
@BeforeEach
public void before() {
mySvc = new PackageInstallerSvcImpl();
mySvc.setFhirContextForUnitTest(myCtx);
}
@Test
public void testValidForUpload_SearchParameterWithMetaParam() {
SearchParameter sp = new SearchParameter();
sp.setCode("_id");
assertFalse(mySvc.validForUpload(sp));
}
@Test
public void testValidForUpload_SearchParameterWithNoBase() {
SearchParameter sp = new SearchParameter();
sp.setCode("name");
sp.setExpression("Patient.name");
sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
assertFalse(mySvc.validForUpload(sp));
}
@Test
public void testValidForUpload_SearchParameterWithNoExpression() {
SearchParameter sp = new SearchParameter();
sp.setCode("name");
sp.addBase("Patient");
sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
assertFalse(mySvc.validForUpload(sp));
}
@Test
public void testValidForUpload_GoodSearchParameter() {
SearchParameter sp = new SearchParameter();
sp.setCode("name");
sp.addBase("Patient");
sp.setExpression("Patient.name");
sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
assertTrue(mySvc.validForUpload(sp));
}
@Test
public void testValidForUpload_RequestedSubscription() {
Subscription.SubscriptionChannelComponent subscriptionChannelComponent =
new Subscription.SubscriptionChannelComponent()
.setType(Subscription.SubscriptionChannelType.RESTHOOK)
.setEndpoint("https://tinyurl.com/2p95e27r");
Subscription subscription = new Subscription();
subscription.setCriteria("Patient?name=smith");
subscription.setChannel(subscriptionChannelComponent);
subscription.setStatus(Subscription.SubscriptionStatus.REQUESTED);
assertTrue(mySvc.validForUpload(subscription));
}
@Test
public void testValidForUpload_ErrorSubscription() {
Subscription.SubscriptionChannelComponent subscriptionChannelComponent =
new Subscription.SubscriptionChannelComponent()
.setType(Subscription.SubscriptionChannelType.RESTHOOK)
.setEndpoint("https://tinyurl.com/2p95e27r");
Subscription subscription = new Subscription();
subscription.setCriteria("Patient?name=smith");
subscription.setChannel(subscriptionChannelComponent);
subscription.setStatus(Subscription.SubscriptionStatus.ERROR);
assertFalse(mySvc.validForUpload(subscription));
}
@Test
public void testValidForUpload_ActiveSubscription() {
Subscription.SubscriptionChannelComponent subscriptionChannelComponent =
new Subscription.SubscriptionChannelComponent()
.setType(Subscription.SubscriptionChannelType.RESTHOOK)
.setEndpoint("https://tinyurl.com/2p95e27r");
Subscription subscription = new Subscription();
subscription.setCriteria("Patient?name=smith");
subscription.setChannel(subscriptionChannelComponent);
subscription.setStatus(Subscription.SubscriptionStatus.ACTIVE);
assertFalse(mySvc.validForUpload(subscription));
}
@Test
public void testValidForUpload_DocumentRefStatusValuePresent() {
DocumentReference documentReference = new DocumentReference();
documentReference.setStatus(Enumerations.DocumentReferenceStatus.ENTEREDINERROR);
assertTrue(mySvc.validForUpload(documentReference));
}
@Test
public void testValidForUpload_DocumentRefStatusValueNull() {
DocumentReference documentReference = new DocumentReference();
documentReference.setStatus(Enumerations.DocumentReferenceStatus.NULL);
assertFalse(mySvc.validForUpload(documentReference));
documentReference.setStatus(null);
assertFalse(mySvc.validForUpload(documentReference));
}
@Test
public void testValidForUpload_CommunicationStatusValuePresent() {
Communication communication = new Communication();
communication.setStatus(Communication.CommunicationStatus.NOTDONE);
assertTrue(mySvc.validForUpload(communication));
}
@Test
public void testValidForUpload_CommunicationStatusValueNull() {
Communication communication = new Communication();
communication.setStatus(Communication.CommunicationStatus.NULL);
assertFalse(mySvc.validForUpload(communication));
communication.setStatus(null);
assertFalse(mySvc.validForUpload(communication));
}
}

View File

@ -66,6 +66,7 @@ public class ReindexStepTest extends BaseJpaR4Test {
assertEquals(2, outcome.getRecordsProcessed()); assertEquals(2, outcome.getRecordsProcessed());
assertEquals(6, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(6, myCaptureQueriesListener.logSelectQueries().size());
assertEquals(0, myCaptureQueriesListener.countInsertQueries()); assertEquals(0, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries();
assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertEquals(1, myCaptureQueriesListener.getCommitCount()); assertEquals(1, myCaptureQueriesListener.getCommitCount());

View File

@ -3,12 +3,16 @@ package ca.uhn.fhir.jpa.reindex;
import ca.uhn.fhir.jpa.api.pid.IResourcePidList; import ca.uhn.fhir.jpa.api.pid.IResourcePidList;
import ca.uhn.fhir.jpa.api.pid.TypedResourcePid; import ca.uhn.fhir.jpa.api.pid.TypedResourcePid;
import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc; import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test; import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import org.hl7.fhir.r4.model.DateType;
import org.hl7.fhir.r4.model.InstantType;
import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder; import org.junit.jupiter.api.TestMethodOrder;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
@ -151,5 +155,4 @@ public class ResourceReindexSvcImplTest extends BaseJpaR4Test {
} }
} }

View File

@ -49,6 +49,7 @@ import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.provider.JpaSystemProvider; import ca.uhn.fhir.jpa.provider.JpaSystemProvider;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc; import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc;
import ca.uhn.fhir.jpa.search.reindex.IInstanceReindexService;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc; import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.search.warm.ICacheWarmingSvc; import ca.uhn.fhir.jpa.search.warm.ICacheWarmingSvc;
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl; import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl;
@ -144,6 +145,8 @@ public abstract class BaseJpaR5Test extends BaseJpaTest implements ITestDataBuil
@Autowired @Autowired
protected PartitionSettings myPartitionSettings; protected PartitionSettings myPartitionSettings;
@Autowired @Autowired
protected IInstanceReindexService myInstanceReindexService;
@Autowired
protected ITermCodeSystemStorageSvc myTermCodeSystemStorageSvc; protected ITermCodeSystemStorageSvc myTermCodeSystemStorageSvc;
@Autowired @Autowired
protected IFhirResourceDao<ClinicalUseDefinition> myClinicalUseDefinitionDao; protected IFhirResourceDao<ClinicalUseDefinition> myClinicalUseDefinitionDao;

View File

@ -0,0 +1,58 @@
package ca.uhn.fhir.jpa.dao.r5;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r5.model.Enumerations;
import org.hl7.fhir.r5.model.Parameters;
import org.hl7.fhir.r5.model.SearchParameter;
import org.junit.jupiter.api.Test;
import org.springframework.test.context.ContextConfiguration;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.*;
@ContextConfiguration(classes = TestHSearchAddInConfig.NoFT.class)
@SuppressWarnings({"Duplicates"})
public class FhirResourceDaoR5ReindexTest extends BaseJpaR5Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR5ReindexTest.class);
@Test
public void testReindexDeletedSearchParameter() {
SearchParameter sp = new SearchParameter();
sp.setCode("foo");
sp.addBase(Enumerations.VersionIndependentResourceTypesAll.PATIENT);
sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
sp.setExpression("Patient.name");
sp.setType(Enumerations.SearchParamType.STRING);
IIdType id = mySearchParameterDao.create(sp, mySrd).getId().toUnqualifiedVersionless();
mySearchParameterDao.delete(id, mySrd);
runInTransaction(() -> {
assertEquals(1, myEntityManager.createNativeQuery("UPDATE HFJ_RESOURCE set sp_uri_present = true").executeUpdate());
ResourceTable table = myResourceTableDao.findById(id.getIdPartAsLong()).orElseThrow();
assertTrue(table.isParamsUriPopulated());
ResourceIndexedSearchParamUri uri = new ResourceIndexedSearchParamUri(new PartitionSettings(), "SearchParameter", "url", "http://foo");
uri.setResource(table);
uri.calculateHashes();
myResourceIndexedSearchParamUriDao.save(uri);
});
Parameters outcome = (Parameters) myInstanceReindexService.reindex(mySrd, id);
ourLog.info("Outcome: {}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertThat(outcome.getParameter("Narrative").getValueStringType().getValue(), containsString("Reindex completed in"));
assertEquals("REMOVE", outcome.getParameter("UriIndexes").getPartFirstRep().getPartFirstRep().getValueCodeType().getValue());
runInTransaction(() -> {
ResourceTable table = myResourceTableDao.findById(id.getIdPartAsLong()).orElseThrow();
assertFalse(table.isParamsUriPopulated());
});
}
}

View File

@ -28,6 +28,7 @@ import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull; import javax.annotation.Nonnull;
import java.io.IOException; import java.io.IOException;
import java.math.BigDecimal; import java.math.BigDecimal;
import java.util.Collections;
import java.util.Date; import java.util.Date;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
@ -53,7 +54,7 @@ public class InstanceReindexServiceImplNarrativeR5Test {
newParams.myComboTokenNonUnique.add(new ResourceIndexedComboTokenNonUnique(myPartitionSettings, myEntity, "Patient?identifier=123")); newParams.myComboTokenNonUnique.add(new ResourceIndexedComboTokenNonUnique(myPartitionSettings, myEntity, "Patient?identifier=123"));
// Test // Test
Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true); Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true, Collections.emptyList());
ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
// Verify // Verify
@ -71,7 +72,7 @@ public class InstanceReindexServiceImplNarrativeR5Test {
newParams.myComboStringUniques.add(new ResourceIndexedComboStringUnique(myEntity, "Patient?identifier=123", new IdType("Parameter/foo"))); newParams.myComboStringUniques.add(new ResourceIndexedComboStringUnique(myEntity, "Patient?identifier=123", new IdType("Parameter/foo")));
// Test // Test
Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true); Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true, Collections.emptyList());
ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
// Verify // Verify
@ -94,7 +95,7 @@ public class InstanceReindexServiceImplNarrativeR5Test {
newParams.mySearchParamPresentEntities.add(subject); newParams.mySearchParamPresentEntities.add(subject);
// Test // Test
Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true); Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true, Collections.emptyList());
ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
// Verify // Verify
@ -117,7 +118,7 @@ public class InstanceReindexServiceImplNarrativeR5Test {
newParams.myNumberParams.add(new ResourceIndexedSearchParamNumber(myPartitionSettings, "Immunization", "dose", BigDecimal.ONE)); newParams.myNumberParams.add(new ResourceIndexedSearchParamNumber(myPartitionSettings, "Immunization", "dose", BigDecimal.ONE));
// Test // Test
Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true); Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true, Collections.emptyList());
ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
// Verify // Verify
@ -136,7 +137,7 @@ public class InstanceReindexServiceImplNarrativeR5Test {
newParams.myLinks.add(ResourceLink.forLocalReference("Observation.subject", myEntity, "Patient", 123L, "123", new Date(), 555L)); newParams.myLinks.add(ResourceLink.forLocalReference("Observation.subject", myEntity, "Patient", 123L, "123", new Date(), 555L));
// Test // Test
Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true); Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true, Collections.emptyList());
ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
// Verify // Verify
@ -156,7 +157,7 @@ public class InstanceReindexServiceImplNarrativeR5Test {
newParams.myLinks.add(ResourceLink.forLogicalReference("Observation.subject", myEntity, "http://foo/base/Patient/456", new Date())); newParams.myLinks.add(ResourceLink.forLogicalReference("Observation.subject", myEntity, "http://foo/base/Patient/456", new Date()));
// Test // Test
Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true); Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true, Collections.emptyList());
ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
// Verify // Verify
@ -177,7 +178,7 @@ public class InstanceReindexServiceImplNarrativeR5Test {
newParams.myLinks.add(ResourceLink.forAbsoluteReference("Observation.subject", myEntity, new IdType("http://foo/base/Patient/123"), new Date())); newParams.myLinks.add(ResourceLink.forAbsoluteReference("Observation.subject", myEntity, new IdType("http://foo/base/Patient/123"), new Date()));
// Test // Test
Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true); Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true, Collections.emptyList());
ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
// Verify // Verify
@ -198,7 +199,7 @@ public class InstanceReindexServiceImplNarrativeR5Test {
newParams.myQuantityParams.add(new ResourceIndexedSearchParamQuantity(myPartitionSettings, "Observation", "value-quantity", BigDecimal.valueOf(123), "http://unitsofmeasure.org", "kg")); newParams.myQuantityParams.add(new ResourceIndexedSearchParamQuantity(myPartitionSettings, "Observation", "value-quantity", BigDecimal.valueOf(123), "http://unitsofmeasure.org", "kg"));
// Test // Test
Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true); Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true, Collections.emptyList());
ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
// Verify // Verify
@ -219,7 +220,7 @@ public class InstanceReindexServiceImplNarrativeR5Test {
newParams.myQuantityNormalizedParams.add(new ResourceIndexedSearchParamQuantityNormalized(myPartitionSettings, "Observation", "value-quantity", 123.0, "http://unitsofmeasure.org", "kg")); newParams.myQuantityNormalizedParams.add(new ResourceIndexedSearchParamQuantityNormalized(myPartitionSettings, "Observation", "value-quantity", 123.0, "http://unitsofmeasure.org", "kg"));
// Test // Test
Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true); Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true, Collections.emptyList());
ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
// Verify // Verify
@ -240,7 +241,7 @@ public class InstanceReindexServiceImplNarrativeR5Test {
newParams.myStringParams.add(new ResourceIndexedSearchParamString(myPartitionSettings, myStorageSettings, "Patient", "family", "Simpson", "SIMPSON")); newParams.myStringParams.add(new ResourceIndexedSearchParamString(myPartitionSettings, myStorageSettings, "Patient", "family", "Simpson", "SIMPSON"));
// Test // Test
Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true); Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true, Collections.emptyList());
ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
// Verify // Verify
@ -260,7 +261,7 @@ public class InstanceReindexServiceImplNarrativeR5Test {
newParams.myTokenParams.add(new ResourceIndexedSearchParamToken(myPartitionSettings, "Observation", "identifier", "http://id-system", "id-value")); newParams.myTokenParams.add(new ResourceIndexedSearchParamToken(myPartitionSettings, "Observation", "identifier", "http://id-system", "id-value"));
// Test // Test
Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true); Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true, Collections.emptyList());
ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
// Verify // Verify
@ -280,7 +281,7 @@ public class InstanceReindexServiceImplNarrativeR5Test {
newParams.myUriParams.add(new ResourceIndexedSearchParamUri(myPartitionSettings, "CodeSystem", "uri", "http://some-codesystem")); newParams.myUriParams.add(new ResourceIndexedSearchParamUri(myPartitionSettings, "CodeSystem", "uri", "http://some-codesystem"));
// Test // Test
Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true); Parameters outcome = mySvc.buildIndexResponse(newParams(), newParams, true, Collections.emptyList());
ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.info("Output:\n{}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
// Verify // Verify

View File

@ -11,7 +11,7 @@
</parent> </parent>
<artifactId>hapi-fhir-jpaserver-uhnfhirtest</artifactId> <artifactId>hapi-fhir-jpaserver-uhnfhirtest</artifactId>
<packaging>war</packaging> <packaging>jar</packaging>
<name>HAPI FHIR - fhirtest.uhn.ca Deployable WAR</name> <name>HAPI FHIR - fhirtest.uhn.ca Deployable WAR</name>
@ -230,26 +230,6 @@
</plugins> </plugins>
</pluginManagement> </pluginManagement>
<plugins> <plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-war-plugin</artifactId>
<configuration>
<archive>
<manifestEntries>
<Build-Time>${maven.build.timestamp}</Build-Time>
</manifestEntries>
</archive>
<overlays>
<overlay>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-testpage-overlay</artifactId>
<excludes>
<exclude>WEB-INF/classes/**/*.class</exclude>
</excludes>
</overlay>
</overlays>
</configuration>
</plugin>
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-deploy-plugin</artifactId> <artifactId>maven-deploy-plugin</artifactId>
@ -261,4 +241,41 @@
<finalName>hapi-fhir-jpaserver</finalName> <finalName>hapi-fhir-jpaserver</finalName>
</build> </build>
<profiles>
<profile>
<id>DIST</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-war-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>war</goal>
</goals>
</execution>
</executions>
<configuration>
<archive>
<manifestEntries>
<Build-Time>${maven.build.timestamp}</Build-Time>
</manifestEntries>
</archive>
<overlays>
<overlay>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-testpage-overlay</artifactId>
<excludes>
<exclude>WEB-INF/classes/**/*.class</exclude>
</excludes>
</overlay>
</overlays>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project> </project>

View File

@ -1,20 +0,0 @@
package ca.uhn.fhirtest;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
/**
* Created by mochaholic on 18/02/2015.
*/
public class MySqlServer implements InitializingBean, DisposableBean {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(MySqlServer.class);
@Override
public void destroy() throws Exception {
}
@Override
public void afterPropertiesSet() throws Exception {
}
}

View File

@ -333,7 +333,7 @@ public class TestRestfulServer extends RestfulServer {
} }
/** /**
* The public server is deployed to http://fhirtest.uhn.ca and the JEE webserver * The public server is deployed to <a href="https://hapi.fhir.org">hapi.fhir.org</a> and the JEE webserver
* where this FHIR server is deployed is actually fronted by an Apache HTTPd instance, * where this FHIR server is deployed is actually fronted by an Apache HTTPd instance,
* so we use an address strategy to let the server know how it should address itself. * so we use an address strategy to let the server know how it should address itself.
*/ */
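Because the JEE container sits behind an Apache HTTPd front end, the links the server writes into Bundles (self and next paging links, for example) should use the public address rather than the container's own host and port. A minimal sketch of the kind of address strategy the comment refers to is below; the base URL is a placeholder for illustration, and HAPI also ships an ApacheProxyAddressStrategy that derives the address from X-Forwarded-* headers if that is preferred.

import ca.uhn.fhir.rest.server.HardcodedServerAddressStrategy;
import ca.uhn.fhir.rest.server.RestfulServer;

public class AddressStrategyExample {
	// Pin the advertised base URL so generated links point at the public proxy.
	public static void configure(RestfulServer theServer) {
		// Placeholder URL for this sketch, not necessarily the production value
		theServer.setServerAddressStrategy(new HardcodedServerAddressStrategy("https://hapi.fhir.org/baseR4"));
	}
}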

View File

@ -5,5 +5,7 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
public class CommonJpaStorageSettingsConfigurer { public class CommonJpaStorageSettingsConfigurer {
public CommonJpaStorageSettingsConfigurer(JpaStorageSettings theStorageSettings) { public CommonJpaStorageSettingsConfigurer(JpaStorageSettings theStorageSettings) {
theStorageSettings.setIndexOnUpliftedRefchains(true); theStorageSettings.setIndexOnUpliftedRefchains(true);
theStorageSettings.setMarkResourcesForReindexingUponSearchParameterChange(false);
} }
} }
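Turning off markResourcesForReindexingUponSearchParameterChange should mean that creating or updating a SearchParameter no longer flags existing resources for automatic reindexing, so on these test servers a reindex has to be triggered explicitly. A small sketch of how the shared configurer might be applied when building the storage settings bean; the configuration class and bean name are illustrative, and the example is assumed to live in the same package as the configurer.

import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class ExampleStorageSettingsConfig {
	@Bean
	public JpaStorageSettings storageSettings() {
		JpaStorageSettings settings = new JpaStorageSettings();
		// Apply the shared defaults, including the reindex-on-SearchParameter-change opt-out
		new CommonJpaStorageSettingsConfigurer(settings);
		return settings;
	}
}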

View File

@ -0,0 +1,65 @@
package ca.uhn.fhirtest.config;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.r4.model.DateType;
import org.quartz.JobExecutionContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.event.ContextRefreshedEvent;
import org.springframework.context.event.EventListener;
import javax.annotation.PostConstruct;
import java.util.Date;
import java.util.List;
public class OldAuditEventPurgeService {
private static final Logger ourLog = LoggerFactory.getLogger(OldAuditEventPurgeService.class);
@Autowired
private ISchedulerService mySchedulerSvc;
@Autowired
private IDeleteExpungeJobSubmitter myDeleteExpungeSubmitter;
@Autowired
private JpaStorageSettings myStorageSettings;
@EventListener(ContextRefreshedEvent.class)
public void start() {
myStorageSettings.setAllowMultipleDelete(true);
myStorageSettings.setExpungeEnabled(true);
myStorageSettings.setDeleteExpungeEnabled(true);
ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
jobDetail.setId(OldAuditEventPurgeServiceJob.class.getName());
jobDetail.setJobClass(OldAuditEventPurgeServiceJob.class);
mySchedulerSvc.scheduleLocalJob(DateUtils.MILLIS_PER_DAY, jobDetail);
}
private void doPass() {
Date cutoff = DateUtils.addDays(new Date(), -7);
String cutoffString = new DateType(cutoff).getValueAsString();
String url = "AuditEvent?_lastUpdated=lt" + cutoffString;
ourLog.info("Submitting an AuditEvent purge job with URL: {}", url);
myDeleteExpungeSubmitter.submitJob(1000, List.of(url), new SystemRequestDetails());
}
public static class OldAuditEventPurgeServiceJob implements HapiJob {
@Autowired
private OldAuditEventPurgeService mySvc;
@Override
public void execute(JobExecutionContext context) {
mySvc.doPass();
}
}
}
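The purge pass builds a date-bounded search URL and hands it to the delete-expunge job submitter, which then removes matching AuditEvents in a batch job. The URL construction on its own looks like the sketch below; the printed value depends on the current date.

import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.r4.model.DateType;

import java.util.Date;

public class PurgeUrlExample {
	public static void main(String[] args) {
		// Keep one week of audit history; everything older matches the URL below
		Date cutoff = DateUtils.addDays(new Date(), -7);
		String url = "AuditEvent?_lastUpdated=lt" + new DateType(cutoff).getValueAsString();
		// Prints something like "AuditEvent?_lastUpdated=lt2023-05-08"
		System.out.println(url);
	}
}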

View File

@ -6,34 +6,20 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.config.HapiJpaConfig; import ca.uhn.fhir.jpa.config.HapiJpaConfig;
import ca.uhn.fhir.jpa.config.r4.JpaR4Config; import ca.uhn.fhir.jpa.config.r4.JpaR4Config;
import ca.uhn.fhir.jpa.config.util.HapiEntityManagerFactoryUtil; import ca.uhn.fhir.jpa.config.util.HapiEntityManagerFactoryUtil;
import ca.uhn.fhir.jpa.ips.api.IIpsGenerationStrategy;
import ca.uhn.fhir.jpa.ips.generator.IIpsGeneratorSvc;
import ca.uhn.fhir.jpa.ips.generator.IpsGeneratorSvcImpl;
import ca.uhn.fhir.jpa.ips.provider.IpsOperationProvider;
import ca.uhn.fhir.jpa.ips.strategy.DefaultIpsGenerationStrategy;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect; import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgres94Dialect; import ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgres94Dialect;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.HapiHSearchAnalysisConfigurers;
import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
import ca.uhn.fhir.jpa.validation.ValidationSettings; import ca.uhn.fhir.jpa.validation.ValidationSettings;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.validation.IInstanceValidatorModule;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import ca.uhn.fhirtest.interceptor.PublicSecurityInterceptor; import ca.uhn.fhirtest.interceptor.PublicSecurityInterceptor;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder; import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource; import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings;
import org.hl7.fhir.dstu2.model.Subscription;
import org.hl7.fhir.r5.utils.validation.constants.ReferenceValidationPolicy; import org.hl7.fhir.r5.utils.validation.constants.ReferenceValidationPolicy;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Lazy;
import org.springframework.context.annotation.Primary; import org.springframework.context.annotation.Primary;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer; import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.orm.jpa.JpaTransactionManager; import org.springframework.orm.jpa.JpaTransactionManager;
@ -167,6 +153,11 @@ public class TestAuditConfig {
} }
@Bean
public OldAuditEventPurgeService oldEventPurgeService() {
return new OldAuditEventPurgeService();
}
@Bean @Bean
public DaoRegistryConfigurer daoRegistryConfigurer() { public DaoRegistryConfigurer daoRegistryConfigurer() {
return new DaoRegistryConfigurer(); return new DaoRegistryConfigurer();
@ -177,7 +168,6 @@ public class TestAuditConfig {
@Autowired @Autowired
private DaoRegistry myDaoRegistry; private DaoRegistry myDaoRegistry;
@PostConstruct @PostConstruct
public void start() { public void start() {
myDaoRegistry.setSupportedResourceTypes("AuditEvent", "SearchParameter", "Subscription"); myDaoRegistry.setSupportedResourceTypes("AuditEvent", "SearchParameter", "Subscription");

View File

@ -261,7 +261,9 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao {
* *
* @param theResource The FHIR resource object corresponding to the entity to reindex * @param theResource The FHIR resource object corresponding to the entity to reindex
* @param theEntity The storage entity to reindex * @param theEntity The storage entity to reindex
* @deprecated Use {@link #reindex(IResourcePersistentId, ReindexParameters, RequestDetails, TransactionDetails)}
*/ */
@Deprecated
void reindex(T theResource, IBasePersistedResource theEntity); void reindex(T theResource, IBasePersistedResource theEntity);
/** /**
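The deprecation points callers at the newer overload that works from a persistent ID plus explicit reindex parameters. A hedged sketch of a migrated call is below; the parameter names are illustrative and the import packages are assumptions, so check them against the actual interfaces when porting code.

import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.dao.ReindexParameters;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import org.hl7.fhir.r4.model.Observation;

public class ReindexMigrationExample {
	// Replaces a call to the deprecated reindex(theResource, theEntity) overload
	static void reindexOne(IFhirResourceDao<Observation> theDao,
			IResourcePersistentId<?> thePid,
			RequestDetails theRequestDetails,
			TransactionDetails theTransactionDetails) {
		theDao.reindex(thePid, new ReindexParameters(), theRequestDetails, theTransactionDetails);
	}
}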

View File

@ -21,17 +21,21 @@ package ca.uhn.fhir.storage.interceptor.balp;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.util.BundleBuilder;
import ca.uhn.fhir.util.ThreadPoolUtil;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import javax.annotation.Nonnull; import javax.annotation.Nonnull;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy; import javax.annotation.PreDestroy;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicLong;
/** /**
@ -41,16 +45,17 @@ import java.util.concurrent.atomic.AtomicLong;
* events will be converted automatically prior to sending. * events will be converted automatically prior to sending.
* <p> * <p>
* This sink transmits events asynchronously using an in-memory queue. This means that * This sink transmits events asynchronously using an in-memory queue. This means that
* in the event of a server shutdown data could be lost. * in the event of a server shutdown or unavailability of the target server <b>data could be lost</b>.
* </p> * </p>
*/ */
public class AsyncMemoryQueueBackedFhirClientBalpSink extends FhirClientBalpSink implements IBalpAuditEventSink { public class AsyncMemoryQueueBackedFhirClientBalpSink extends FhirClientBalpSink implements IBalpAuditEventSink {
private static final Logger ourLog = LoggerFactory.getLogger(AsyncMemoryQueueBackedFhirClientBalpSink.class); public static final IBaseResource[] EMPTY_RESOURCE_ARRAY = new IBaseResource[0];
private final ArrayBlockingQueue<IBaseResource> myQueue;
private static final AtomicLong ourNextThreadId = new AtomicLong(0); private static final AtomicLong ourNextThreadId = new AtomicLong(0);
private boolean myRunning; private static final Logger ourLog = LoggerFactory.getLogger(AsyncMemoryQueueBackedFhirClientBalpSink.class);
private TransmitterThread myThread; private final List<IBaseResource> myQueue = new ArrayList<>(100);
private final ThreadPoolTaskExecutor myThreadPool;
private final Runnable myTransmitterTask = new TransmitterTask();
/** /**
* Sets the FhirContext to use when initiating outgoing connections * Sets the FhirContext to use when initiating outgoing connections
@ -66,6 +71,7 @@ public class AsyncMemoryQueueBackedFhirClientBalpSink extends FhirClientBalpSink
this(theFhirContext, theTargetBaseUrl, null); this(theFhirContext, theTargetBaseUrl, null);
} }
/** /**
* Sets the FhirContext to use when initiating outgoing connections * Sets the FhirContext to use when initiating outgoing connections
* *
@ -82,7 +88,6 @@ public class AsyncMemoryQueueBackedFhirClientBalpSink extends FhirClientBalpSink
this(createClient(theFhirContext, theTargetBaseUrl, theClientInterceptors)); this(createClient(theFhirContext, theTargetBaseUrl, theClientInterceptors));
} }
/** /**
* Constructor * Constructor
* *
@ -90,76 +95,61 @@ public class AsyncMemoryQueueBackedFhirClientBalpSink extends FhirClientBalpSink
*/ */
public AsyncMemoryQueueBackedFhirClientBalpSink(IGenericClient theClient) { public AsyncMemoryQueueBackedFhirClientBalpSink(IGenericClient theClient) {
super(theClient); super(theClient);
myQueue = new ArrayBlockingQueue<>(100); myThreadPool = ThreadPoolUtil.newThreadPool(1, 1, "BalpClientSink-" + ourNextThreadId.getAndIncrement() + "-", 100);
} }
@Override @Override
protected void recordAuditEvent(IBaseResource theAuditEvent) { protected void recordAuditEvent(IBaseResource theAuditEvent) {
myQueue.add(theAuditEvent); synchronized (myQueue) {
} myQueue.add(theAuditEvent);
@PostConstruct
public void start() {
if (!myRunning) {
myRunning = true;
myThread = new TransmitterThread();
myThread.start();
} }
myThreadPool.submit(myTransmitterTask);
} }
@PreDestroy @PreDestroy
public void stop() { public void stop() {
if (myRunning) { myThreadPool.shutdown();
myRunning = false;
myThread.interrupt();
}
} }
public boolean isRunning() { private class TransmitterTask implements Runnable {
return myThread != null && myThread.isRunning();
}
private class TransmitterThread extends Thread {
private boolean myThreadRunning;
public TransmitterThread() {
setName("BalpClientSink-" + ourNextThreadId.getAndIncrement());
}
@Override @Override
public void run() { public void run() {
ourLog.info("Starting BALP Client Sink Transmitter"); IBaseResource[] queue;
myThreadRunning = true; synchronized (myQueue) {
while (myRunning) { if (myQueue.isEmpty()) {
IBaseResource next = null; queue = EMPTY_RESOURCE_ARRAY;
try { } else {
next = myQueue.poll(10, TimeUnit.SECONDS); queue = myQueue.toArray(EMPTY_RESOURCE_ARRAY);
} catch (InterruptedException e) { myQueue.clear();
Thread.currentThread().interrupt();
} }
// TODO: Currently we transmit events one by one, but a nice optimization
// would be to batch them into FHIR transaction Bundles. If we do this, we
// would get better performance, but we'd also want to have some retry
// logic that submits events individually if a transaction fails.
if (next != null) {
try {
transmitEventToClient(next);
} catch (Exception e) {
ourLog.warn("Failed to transmit AuditEvent to sink: {}", e.toString());
myQueue.add(next);
}
}
} }
ourLog.info("Stopping BALP Client Sink Transmitter");
myThreadRunning = false;
}
public boolean isRunning() { if (queue.length == 0) {
return myThreadRunning; return;
}
BundleBuilder bundleBuilder = new BundleBuilder(myClient.getFhirContext());
for (IBaseResource next : queue) {
bundleBuilder.addTransactionCreateEntry(next);
}
IBaseBundle transactionBundle = bundleBuilder.getBundle();
try {
myClient.transaction().withBundle(transactionBundle).execute();
return;
} catch (BaseServerResponseException e) {
ourLog.error("Failed to transmit AuditEvent items to target. Will re-attempt {} failed events once. Error: {}", queue.length, e.toString());
}
// Retry once then give up
for (IBaseResource next : queue) {
try {
myClient.create().resource(next).execute();
} catch (BaseServerResponseException e) {
ourLog.error("Second failure uploading AuditEvent. Error: {}", e.toString());
}
}
} }
} }
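The reworked transmitter drains the in-memory queue and sends everything in a single FHIR transaction, then falls back to one retry of individual creates if the transaction fails. A standalone sketch of the batching part, using the same BundleBuilder API the sink now relies on; the target base URL is a placeholder.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r4.model.AuditEvent;

import java.util.List;

public class TransactionBatchExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4Cached();
		// Placeholder endpoint for this sketch
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8000/fhir");

		// Batch several AuditEvents into one transaction Bundle, mirroring what the
		// sink does with the resources drained from its queue
		List<AuditEvent> events = List.of(new AuditEvent(), new AuditEvent());
		BundleBuilder builder = new BundleBuilder(ctx);
		events.forEach(builder::addTransactionCreateEntry);
		IBaseBundle transaction = builder.getBundle();

		client.transaction().withBundle(transaction).execute();
	}
}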

View File

@ -32,7 +32,7 @@ import java.util.List;
public class FhirClientBalpSink implements IBalpAuditEventSink { public class FhirClientBalpSink implements IBalpAuditEventSink {
private final IGenericClient myClient; protected final IGenericClient myClient;
private final VersionCanonicalizer myVersionCanonicalizer; private final VersionCanonicalizer myVersionCanonicalizer;
/** /**

View File

@ -5,12 +5,15 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor; import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
import ca.uhn.fhir.interceptor.api.IPointcut; import ca.uhn.fhir.interceptor.api.IPointcut;
import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.annotation.Transaction;
import ca.uhn.fhir.rest.annotation.TransactionParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.storage.interceptor.balp.AsyncMemoryQueueBackedFhirClientBalpSink;
import ca.uhn.fhir.test.utilities.server.HashMapResourceProviderExtension; import ca.uhn.fhir.test.utilities.server.HashMapResourceProviderExtension;
import ca.uhn.fhir.test.utilities.server.RestfulServerExtension; import ca.uhn.fhir.test.utilities.server.RestfulServerExtension;
import org.hl7.fhir.dstu3.model.AuditEvent; import org.hl7.fhir.dstu3.model.AuditEvent;
import org.hl7.fhir.r4.model.Reference; import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.api.extension.RegisterExtension;
@ -23,26 +26,48 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThan;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class AsyncMemoryQueueBackedFhirClientBalpSinkTest { public class AsyncMemoryQueueBackedFhirClientBalpSinkTest {
@RegisterExtension @RegisterExtension
@Order(0) @Order(0)
private RestfulServerExtension myServer = new RestfulServerExtension(FhirVersionEnum.DSTU3); private RestfulServerExtension myServer = new RestfulServerExtension(FhirVersionEnum.DSTU3)
.withServer(t->t.registerProvider(new MySystemProvider()));
@RegisterExtension @RegisterExtension
@Order(1) @Order(1)
private HashMapResourceProviderExtension<AuditEvent> myAuditEventProvider = new HashMapResourceProviderExtension<>(myServer, AuditEvent.class); private HashMapResourceProviderExtension<AuditEvent> myAuditEventProvider = new HashMapResourceProviderExtension<>(myServer, AuditEvent.class);
@Test
public void testStressTest() {
AsyncMemoryQueueBackedFhirClientBalpSink sink = new AsyncMemoryQueueBackedFhirClientBalpSink(myServer.getFhirContext(), myServer.getBaseUrl());
try {
for (int i = 0; i < 10; i++) {
for (int j = 0; j < 100; j++) {
AuditEvent auditEvent = new AuditEvent();
auditEvent.setAction(AuditEvent.AuditEventAction.C);
sink.recordAuditEvent(auditEvent);
}
}
await().until(() -> myAuditEventProvider.getStoredResources().size(), equalTo(1000));
} finally {
sink.stop();
}
}
@Test @Test
public void recordAuditEvent() { public void recordAuditEvent() {
// Setup // Setup
AsyncMemoryQueueBackedFhirClientBalpSink sink = new AsyncMemoryQueueBackedFhirClientBalpSink(myServer.getFhirContext(), myServer.getBaseUrl()); AsyncMemoryQueueBackedFhirClientBalpSink sink = new AsyncMemoryQueueBackedFhirClientBalpSink(myServer.getFhirContext(), myServer.getBaseUrl());
sink.start();
try { try {
org.hl7.fhir.r4.model.AuditEvent auditEvent1 = new org.hl7.fhir.r4.model.AuditEvent(); org.hl7.fhir.r4.model.AuditEvent auditEvent1 = new org.hl7.fhir.r4.model.AuditEvent();
auditEvent1.addEntity().setWhat(new Reference("Patient/123")); auditEvent1.addEntity().setWhat(new org.hl7.fhir.r4.model.Reference("Patient/123"));
org.hl7.fhir.r4.model.AuditEvent auditEvent2 = new org.hl7.fhir.r4.model.AuditEvent(); org.hl7.fhir.r4.model.AuditEvent auditEvent2 = new org.hl7.fhir.r4.model.AuditEvent();
auditEvent2.addEntity().setWhat(new Reference("Patient/456")); auditEvent2.addEntity().setWhat(new org.hl7.fhir.r4.model.Reference("Patient/456"));
// Test // Test
sink.recordAuditEvent(auditEvent1); sink.recordAuditEvent(auditEvent1);
@ -58,7 +83,6 @@ public class AsyncMemoryQueueBackedFhirClientBalpSinkTest {
assertThat(whats, containsInAnyOrder("Patient/123", "Patient/456")); assertThat(whats, containsInAnyOrder("Patient/123", "Patient/456"));
} finally { } finally {
sink.stop(); sink.stop();
await().until(sink::isRunning, equalTo(false));
} }
} }
@ -66,18 +90,17 @@ public class AsyncMemoryQueueBackedFhirClientBalpSinkTest {
public void recordAuditEvent_AutoRetry() { public void recordAuditEvent_AutoRetry() {
// Setup // Setup
AsyncMemoryQueueBackedFhirClientBalpSink sink = new AsyncMemoryQueueBackedFhirClientBalpSink(myServer.getFhirContext(), myServer.getBaseUrl()); AsyncMemoryQueueBackedFhirClientBalpSink sink = new AsyncMemoryQueueBackedFhirClientBalpSink(myServer.getFhirContext(), myServer.getBaseUrl());
sink.start();
try { try {
org.hl7.fhir.r4.model.AuditEvent auditEvent1 = new org.hl7.fhir.r4.model.AuditEvent(); org.hl7.fhir.r4.model.AuditEvent auditEvent1 = new org.hl7.fhir.r4.model.AuditEvent();
auditEvent1.addEntity().setWhat(new Reference("Patient/123")); auditEvent1.addEntity().setWhat(new org.hl7.fhir.r4.model.Reference("Patient/123"));
org.hl7.fhir.r4.model.AuditEvent auditEvent2 = new org.hl7.fhir.r4.model.AuditEvent(); org.hl7.fhir.r4.model.AuditEvent auditEvent2 = new org.hl7.fhir.r4.model.AuditEvent();
auditEvent2.addEntity().setWhat(new Reference("Patient/456")); auditEvent2.addEntity().setWhat(new org.hl7.fhir.r4.model.Reference("Patient/456"));
AtomicInteger counter = new AtomicInteger(10); AtomicInteger counter = new AtomicInteger(1);
myServer.registerAnonymousInterceptor(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, new IAnonymousInterceptor() { myServer.registerAnonymousInterceptor(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, new IAnonymousInterceptor() {
@Override @Override
public void invoke(IPointcut thePointcut, HookParams theArgs) { public void invoke(IPointcut thePointcut, HookParams theArgs) {
if (counter.decrementAndGet() > 0) { if (counter.getAndDecrement() > 0) {
throw new InternalErrorException("Intentional error for unit test"); throw new InternalErrorException("Intentional error for unit test");
} }
} }
@ -98,9 +121,30 @@ public class AsyncMemoryQueueBackedFhirClientBalpSinkTest {
assertThat(whats.toString(), whats, containsInAnyOrder("Patient/123", "Patient/456")); assertThat(whats.toString(), whats, containsInAnyOrder("Patient/123", "Patient/456"));
} finally { } finally {
sink.stop(); sink.stop();
await().until(sink::isRunning, equalTo(false));
} }
} }
public class MySystemProvider {
@Transaction
public Bundle transaction(@TransactionParam Bundle theInput, RequestDetails theRequestDetails) {
Bundle retVal = new Bundle();
retVal.setType(Bundle.BundleType.TRANSACTIONRESPONSE);
for (Bundle.BundleEntryComponent next : theInput.getEntry()) {
assertEquals(Bundle.HTTPVerb.POST, next.getRequest().getMethod());
assertEquals("AuditEvent", next.getRequest().getUrl());
AuditEvent resource = (AuditEvent) next.getResource();
IIdType outcome = myAuditEventProvider.create(resource, theRequestDetails).getId();
Bundle.BundleEntryComponent respEntry = retVal.addEntry();
respEntry.getResponse().setStatus("201 Created");
respEntry.getResponse().setLocation(outcome.getValue());
}
return retVal;
}
}
} }

View File

@ -21,6 +21,7 @@ import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;

import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletHandler; import org.eclipse.jetty.servlet.ServletHandler;
import org.eclipse.jetty.servlet.ServletHolder; import org.eclipse.jetty.servlet.ServletHolder;
@ -59,7 +60,7 @@ public class DateRangeParamR4Test {
private static final SimpleDateFormat ourFmtUpperForTime; private static final SimpleDateFormat ourFmtUpperForTime;
private static final Logger ourLog = LoggerFactory.getLogger(DateRangeParamR4Test.class); private static final Logger ourLog = LoggerFactory.getLogger(DateRangeParamR4Test.class);
private static CloseableHttpClient ourClient; private static CloseableHttpClient ourClient;
private static FhirContext ourCtx = FhirContext.forR4(); private static FhirContext ourCtx = FhirContext.forR4Cached();
private static DateRangeParam ourLastDateRange; private static DateRangeParam ourLastDateRange;
private static int ourPort; private static int ourPort;
private static Server ourServer; private static Server ourServer;
@ -315,8 +316,8 @@ public class DateRangeParamR4Test {
@Test @Test
public void testSetBoundsWithDatesExclusive() { public void testSetBoundsWithDatesExclusive() {
DateRangeParam range = new DateRangeParam(); DateRangeParam range = new DateRangeParam();
range.setLowerBoundExclusive(new Date()); range.setLowerBoundExclusive(new Date(System.currentTimeMillis()));
range.setUpperBoundExclusive(new Date()); range.setUpperBoundExclusive(new Date(System.currentTimeMillis() + 1000));
assertEquals(ParamPrefixEnum.GREATERTHAN, range.getLowerBound().getPrefix()); assertEquals(ParamPrefixEnum.GREATERTHAN, range.getLowerBound().getPrefix());
assertEquals(ParamPrefixEnum.LESSTHAN, range.getUpperBound().getPrefix()); assertEquals(ParamPrefixEnum.LESSTHAN, range.getUpperBound().getPrefix());
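The two bounds are now a second apart, presumably so the lower bound stays strictly before the upper one; the exclusive setters themselves simply translate into gt/lt prefixes. A small sketch of that behaviour outside the test harness:

import ca.uhn.fhir.rest.param.DateRangeParam;

import java.util.Date;

public class ExclusiveBoundsExample {
	public static void main(String[] args) {
		DateRangeParam range = new DateRangeParam();
		range.setLowerBoundExclusive(new Date(System.currentTimeMillis()));
		range.setUpperBoundExclusive(new Date(System.currentTimeMillis() + 1000));

		// Exclusive bounds are rendered as gt / lt rather than ge / le
		System.out.println(range.getLowerBound().getPrefix()); // GREATERTHAN
		System.out.println(range.getUpperBound().getPrefix()); // LESSTHAN
	}
}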

View File

@ -5,6 +5,7 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.model.api.Include; import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum; import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import ca.uhn.fhir.narrative.DefaultThymeleafNarrativeGenerator;
import ca.uhn.fhir.rest.annotation.IncludeParam; import ca.uhn.fhir.rest.annotation.IncludeParam;
import ca.uhn.fhir.rest.annotation.OptionalParam; import ca.uhn.fhir.rest.annotation.OptionalParam;
import ca.uhn.fhir.rest.annotation.Search; import ca.uhn.fhir.rest.annotation.Search;
@ -77,6 +78,7 @@ public class SearchR4Test {
ourLastMethod = null; ourLastMethod = null;
ourIdentifiers = null; ourIdentifiers = null;
myPort = myRestfulServerExtension.getPort(); myPort = myRestfulServerExtension.getPort();
myCtx.setNarrativeGenerator(new DefaultThymeleafNarrativeGenerator());
} }
private Bundle executeSearchAndValidateHasLinkNext(HttpGet httpGet, EncodingEnum theExpectEncoding) throws IOException { private Bundle executeSearchAndValidateHasLinkNext(HttpGet httpGet, EncodingEnum theExpectEncoding) throws IOException {
@ -139,13 +141,13 @@ public class SearchR4Test {
String linkSelf; String linkSelf;
// Initial search // Initial search
httpGet = new HttpGet("http://localhost:" + myPort + "/Patient?identifier=foo%7Cbar&_elements=name&_elements:exclude=birthDate,active"); httpGet = new HttpGet("http://localhost:" + myPort + "/Patient?identifier=foo%7Cbar&_elements=identifier,name&_elements:exclude=birthDate,active");
bundle = executeSearchAndValidateHasLinkNext(httpGet, EncodingEnum.JSON); bundle = executeSearchAndValidateHasLinkNext(httpGet, EncodingEnum.JSON);
assertThat(toJson(bundle), not(containsString("\"active\""))); assertThat(toJson(bundle), not(containsString("\"active\"")));
linkSelf = bundle.getLink(Constants.LINK_SELF).getUrl(); linkSelf = bundle.getLink(Constants.LINK_SELF).getUrl();
assertThat(linkSelf, containsString("_elements=name")); assertThat(linkSelf, containsString("_elements=identifier%2Cname"));
linkNext = bundle.getLink(Constants.LINK_NEXT).getUrl(); linkNext = bundle.getLink(Constants.LINK_NEXT).getUrl();
assertThat(linkNext, containsString("_elements=name")); assertThat(linkNext, containsString("_elements=identifier,name"));
ourLog.info(toJson(bundle)); ourLog.info(toJson(bundle));
@ -154,7 +156,7 @@ public class SearchR4Test {
bundle = executeSearchAndValidateHasLinkNext(httpGet, EncodingEnum.JSON); bundle = executeSearchAndValidateHasLinkNext(httpGet, EncodingEnum.JSON);
assertThat(toJson(bundle), not(containsString("\"active\""))); assertThat(toJson(bundle), not(containsString("\"active\"")));
linkNext = bundle.getLink(Constants.LINK_NEXT).getUrl(); linkNext = bundle.getLink(Constants.LINK_NEXT).getUrl();
assertThat(linkNext, containsString("_elements=name")); assertThat(linkNext, containsString("_elements=identifier,name"));
assertThat(linkNext, containsString("_elements:exclude=active,birthDate")); assertThat(linkNext, containsString("_elements:exclude=active,birthDate"));
// Fetch the next page // Fetch the next page
@ -162,7 +164,7 @@ public class SearchR4Test {
bundle = executeSearchAndValidateHasLinkNext(httpGet, EncodingEnum.JSON); bundle = executeSearchAndValidateHasLinkNext(httpGet, EncodingEnum.JSON);
assertThat(toJson(bundle), not(containsString("\"active\""))); assertThat(toJson(bundle), not(containsString("\"active\"")));
linkNext = bundle.getLink(Constants.LINK_NEXT).getUrl(); linkNext = bundle.getLink(Constants.LINK_NEXT).getUrl();
assertThat(linkNext, containsString("_elements=name")); assertThat(linkNext, containsString("_elements=identifier,name"));
assertThat(linkNext, containsString("_elements:exclude=active,birthDate")); assertThat(linkNext, containsString("_elements:exclude=active,birthDate"));
// Fetch the next page // Fetch the next page
@ -170,7 +172,7 @@ public class SearchR4Test {
bundle = executeSearchAndValidateHasLinkNext(httpGet, EncodingEnum.JSON); bundle = executeSearchAndValidateHasLinkNext(httpGet, EncodingEnum.JSON);
assertThat(toJson(bundle), not(containsString("\"active\""))); assertThat(toJson(bundle), not(containsString("\"active\"")));
linkNext = bundle.getLink(Constants.LINK_NEXT).getUrl(); linkNext = bundle.getLink(Constants.LINK_NEXT).getUrl();
assertThat(linkNext, containsString("_elements=name")); assertThat(linkNext, containsString("_elements=identifier,name"));
assertThat(linkNext, containsString("_elements:exclude=active,birthDate")); assertThat(linkNext, containsString("_elements:exclude=active,birthDate"));
} }
@ -519,6 +521,7 @@ public class SearchR4Test {
patient.getIdElement().setValue("Patient/" + i + "/_history/222"); patient.getIdElement().setValue("Patient/" + i + "/_history/222");
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(patient, BundleEntrySearchModeEnum.INCLUDE); ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(patient, BundleEntrySearchModeEnum.INCLUDE);
patient.addName(new HumanName().setFamily("FAMILY")); patient.addName(new HumanName().setFamily("FAMILY"));
patient.addIdentifier().setSystem("http://foo").setValue("bar");
patient.setActive(true); patient.setActive(true);
retVal.add(patient); retVal.add(patient);
} }
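The updated assertions exercise _elements with multiple values together with _elements:exclude and check how both are echoed back in the self and next paging links (note the URL-encoded comma, %2C, in the self link). A hedged sketch of the request shape being tested, with a placeholder host and port:

import org.apache.http.client.methods.HttpGet;

public class ElementsQueryExample {
	public static void main(String[] args) {
		// Placeholder base URL; the test gets its port from the server extension
		String base = "http://localhost:8000";

		// Return only Patient.identifier and Patient.name, and explicitly strip
		// birthDate and active from the resources in the response
		HttpGet get = new HttpGet(base
				+ "/Patient?identifier=foo%7Cbar"
				+ "&_elements=identifier,name"
				+ "&_elements:exclude=birthDate,active");
		System.out.println(get.getRequestLine());
	}
}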

View File

@ -5,6 +5,7 @@ import ca.uhn.fhir.context.api.BundleInclusionRule;
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor; import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.narrative.DefaultThymeleafNarrativeGenerator;
import ca.uhn.fhir.rest.annotation.GraphQL; import ca.uhn.fhir.rest.annotation.GraphQL;
import ca.uhn.fhir.rest.annotation.GraphQLQueryUrl; import ca.uhn.fhir.rest.annotation.GraphQLQueryUrl;
import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.IdParam;
@ -105,6 +106,7 @@ public class ResponseHighlightingInterceptorTest {
ourInterceptor.setShowRequestHeaders(defaults.isShowRequestHeaders()); ourInterceptor.setShowRequestHeaders(defaults.isShowRequestHeaders());
ourInterceptor.setShowResponseHeaders(defaults.isShowResponseHeaders()); ourInterceptor.setShowResponseHeaders(defaults.isShowResponseHeaders());
ourInterceptor.setShowNarrative(defaults.isShowNarrative()); ourInterceptor.setShowNarrative(defaults.isShowNarrative());
ourCtx.setNarrativeGenerator(new DefaultThymeleafNarrativeGenerator());
} }
/** /**
@ -310,7 +312,7 @@ public class ResponseHighlightingInterceptorTest {
status.close(); status.close();
assertEquals(200, status.getStatusLine().getStatusCode()); assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals(Constants.CT_FHIR_JSON_NEW + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase()); assertEquals(Constants.CT_FHIR_JSON_NEW + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
assertThat(responseContent, not(containsString("html"))); assertThat(responseContent, not(containsString("<html")));
} }
@Test @Test
@ -323,7 +325,7 @@ public class ResponseHighlightingInterceptorTest {
status.close(); status.close();
assertEquals(200, status.getStatusLine().getStatusCode()); assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals(Constants.CT_FHIR_JSON + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase()); assertEquals(Constants.CT_FHIR_JSON + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
assertThat(responseContent, not(containsString("html"))); assertThat(responseContent, not(containsString("<html")));
} }
@Test @Test
@ -336,7 +338,7 @@ public class ResponseHighlightingInterceptorTest {
status.close(); status.close();
assertEquals(200, status.getStatusLine().getStatusCode()); assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals(Constants.CT_FHIR_JSON + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase()); assertEquals(Constants.CT_FHIR_JSON + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
assertThat(responseContent, not(containsString("html"))); assertThat(responseContent, not(containsString("<html")));
} }
@Test @Test
@ -349,7 +351,7 @@ public class ResponseHighlightingInterceptorTest {
status.close(); status.close();
assertEquals(200, status.getStatusLine().getStatusCode()); assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals(Constants.CT_FHIR_XML_NEW + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase()); assertEquals(Constants.CT_FHIR_XML_NEW + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
assertThat(responseContent, not(containsString("html"))); assertThat(responseContent, not(containsString("<html")));
} }
@Test @Test
@ -362,7 +364,7 @@ public class ResponseHighlightingInterceptorTest {
status.close(); status.close();
assertEquals(200, status.getStatusLine().getStatusCode()); assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals(Constants.CT_FHIR_XML + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase()); assertEquals(Constants.CT_FHIR_XML + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
assertThat(responseContent, not(containsString("html"))); assertThat(responseContent, not(containsString("<html")));
} }
@Test @Test
@ -375,7 +377,7 @@ public class ResponseHighlightingInterceptorTest {
status.close(); status.close();
assertEquals(200, status.getStatusLine().getStatusCode()); assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals(Constants.CT_FHIR_XML + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase()); assertEquals(Constants.CT_FHIR_XML + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
assertThat(responseContent, not(containsString("html"))); assertThat(responseContent, not(containsString("<html")));
} }
@Test @Test
@ -390,9 +392,8 @@ public class ResponseHighlightingInterceptorTest {
assertEquals(200, status.getStatusLine().getStatusCode()); assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase()); assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
assertThat(responseContent, containsString("html")); assertThat(responseContent, containsString("<html"));
assertThat(responseContent, containsString(">{<")); assertThat(responseContent, containsString(">{<"));
assertThat(responseContent, not(containsString("&lt;")));
assertThat(responseContent, containsString(Constants.HEADER_REQUEST_ID)); assertThat(responseContent, containsString(Constants.HEADER_REQUEST_ID));
} }
@ -410,7 +411,7 @@ public class ResponseHighlightingInterceptorTest {
assertEquals(200, status.getStatusLine().getStatusCode()); assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase()); assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
assertThat(responseContent, containsString("html")); assertThat(responseContent, containsString("<html"));
assertThat(responseContent, containsString("<span class='hlQuot'>&quot;urn:hapitest:mrns&quot;</span>")); assertThat(responseContent, containsString("<span class='hlQuot'>&quot;urn:hapitest:mrns&quot;</span>"));
assertThat(responseContent, containsString(Constants.HEADER_REQUEST_ID)); assertThat(responseContent, containsString(Constants.HEADER_REQUEST_ID));
@ -426,9 +427,8 @@ public class ResponseHighlightingInterceptorTest {
status.close(); status.close();
assertEquals(200, status.getStatusLine().getStatusCode()); assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase()); assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
assertThat(responseContent, containsString("html")); assertThat(responseContent, containsString("<html"));
assertThat(responseContent, containsString(">{<")); assertThat(responseContent, containsString(">{<"));
assertThat(responseContent, not(containsString("&lt;")));
ourLog.info(responseContent); ourLog.info(responseContent);
} }
@ -443,7 +443,7 @@ public class ResponseHighlightingInterceptorTest {
status.close(); status.close();
assertEquals(200, status.getStatusLine().getStatusCode()); assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase()); assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
assertThat(responseContent, containsString("html")); assertThat(responseContent, containsString("<html"));
assertThat(responseContent, not(containsString(">{<"))); assertThat(responseContent, not(containsString(">{<")));
assertThat(responseContent, containsString("&lt;")); assertThat(responseContent, containsString("&lt;"));
} }
@ -458,7 +458,7 @@ public class ResponseHighlightingInterceptorTest {
status.close(); status.close();
assertEquals(200, status.getStatusLine().getStatusCode()); assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals(Constants.CT_FHIR_JSON_NEW + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase()); assertEquals(Constants.CT_FHIR_JSON_NEW + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
assertThat(responseContent, not(containsString("html"))); assertThat(responseContent, not(containsString("<html")));
} }
@Test @Test
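The assertion changes above tighten the checks from the bare substring "html" to the opening tag "<html", so a JSON or XML payload that merely mentions "html" (for example in a text/html media-type string) no longer satisfies, or falsely violates, the matcher. A minimal sketch of the distinction, kept to plain JDK string checks so it runs standalone; the class name and sample payloads are illustrative and not taken from this change, and the real tests use Hamcrest's containsString:

// Illustrative sketch: why matching "<html" is stricter than matching "html".
// The real tests use Hamcrest's containsString(...); plain String.contains is
// used here so the example runs with nothing but the JDK.
public class HtmlAssertionSketch {

	public static void main(String[] args) {
		// A raw FHIR JSON response that mentions "html" without being an HTML page
		// (hypothetical payload, invented for this sketch).
		String jsonResponse = "{\"resourceType\":\"Patient\",\"narrativeFormat\":\"text/html\"}";

		// An HTML-highlighted response of the kind produced for browser clients.
		String htmlResponse = "<html><body><span class='hlQuot'>&quot;Patient&quot;</span></body></html>";

		// Old-style check: the substring "html" appears in both payloads, so an
		// assertion like not(containsString("html")) can fail on valid JSON output.
		System.out.println(jsonResponse.contains("html"));  // true  (false positive)
		System.out.println(htmlResponse.contains("html"));  // true

		// Tightened check: only a real HTML document contains the opening tag.
		System.out.println(jsonResponse.contains("<html")); // false
		System.out.println(htmlResponse.contains("<html")); // true
	}
}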

View File

@ -112,12 +112,12 @@
<module>hapi-fhir-jpaserver-ips</module> <module>hapi-fhir-jpaserver-ips</module>
<module>hapi-fhir-jpaserver-mdm</module> <module>hapi-fhir-jpaserver-mdm</module>
<module>hapi-fhir-testpage-overlay</module> <module>hapi-fhir-testpage-overlay</module>
<!-- <module>hapi-fhir-jpaserver-uhnfhirtest</module>--> <module>hapi-fhir-jpaserver-uhnfhirtest</module>
<module>hapi-fhir-client-okhttp</module> <module>hapi-fhir-client-okhttp</module>
<module>hapi-fhir-android</module> <module>hapi-fhir-android</module>
<module>hapi-fhir-cli</module> <module>hapi-fhir-cli</module>
<module>hapi-fhir-dist</module> <module>hapi-fhir-dist</module>
<!-- <module>tests/hapi-fhir-base-test-jaxrsserver-kotlin</module>--> <module>tests/hapi-fhir-base-test-jaxrsserver-kotlin</module>
<module>tests/hapi-fhir-base-test-mindeps-client</module> <module>tests/hapi-fhir-base-test-mindeps-client</module>
<module>tests/hapi-fhir-base-test-mindeps-server</module> <module>tests/hapi-fhir-base-test-mindeps-server</module>
<module>hapi-fhir-spring-boot</module> <module>hapi-fhir-spring-boot</module>

View File

@ -1,4 +1,5 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<!-- Note: HAPI projects use the "hapi-fhir" POM as their base to provide easy management. You do not need to use this in your own projects, so the "parent" tag and it's contents below may be removed <!-- Note: HAPI projects use the "hapi-fhir" POM as their base to provide easy management. You do not need to use this in your own projects, so the "parent" tag and it's contents below may be removed
@ -12,7 +13,7 @@
</parent> </parent>
<artifactId>hapi-fhir-base-test-jaxrsserver-kotlin</artifactId> <artifactId>hapi-fhir-base-test-jaxrsserver-kotlin</artifactId>
<packaging>war</packaging> <packaging>jar</packaging>
<name>HAPI FHIR JAX-RS Server Kotlin test</name> <name>HAPI FHIR JAX-RS Server Kotlin test</name>
@ -80,12 +81,12 @@
<groupId>ch.qos.logback</groupId> <groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId> <artifactId>logback-classic</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-test-utilities</artifactId> <artifactId>hapi-fhir-test-utilities</artifactId>
<version>${project.version}</version> <version>${project.version}</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.jetbrains.kotlin</groupId> <groupId>org.jetbrains.kotlin</groupId>
<artifactId>kotlin-maven-allopen</artifactId> <artifactId>kotlin-maven-allopen</artifactId>
@ -132,7 +133,9 @@
<executions> <executions>
<execution> <execution>
<id>compile</id> <id>compile</id>
<goals> <goal>compile</goal> </goals> <goals>
<goal>compile</goal>
</goals>
<configuration> <configuration>
<sourceDirs> <sourceDirs>
<sourceDir>${project.basedir}/src/main/kotlin</sourceDir> <sourceDir>${project.basedir}/src/main/kotlin</sourceDir>
@ -142,7 +145,9 @@
</execution> </execution>
<execution> <execution>
<id>test-compile</id> <id>test-compile</id>
<goals> <goal>test-compile</goal> </goals> <goals>
<goal>test-compile</goal>
</goals>
<configuration> <configuration>
<sourceDirs> <sourceDirs>
<sourceDir>${project.basedir}/src/test/kotlin</sourceDir> <sourceDir>${project.basedir}/src/test/kotlin</sourceDir>
@ -155,6 +160,43 @@
</plugins> </plugins>
</build> </build>
<profiles>
<profile>
<id>DIST</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-war-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>war</goal>
</goals>
</execution>
</executions>
<configuration>
<archive>
<manifestEntries>
<Build-Time>${maven.build.timestamp}</Build-Time>
</manifestEntries>
</archive>
<overlays>
<overlay>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-testpage-overlay</artifactId>
<excludes>
<exclude>WEB-INF/classes/**/*.class</exclude>
</excludes>
</overlay>
</overlays>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
<properties> <properties>
<!--<kotlin.compiler.incremental>true</kotlin.compiler.incremental>--> <!--<kotlin.compiler.incremental>true</kotlin.compiler.incremental>-->
<kotlin.version>1.6.21</kotlin.version> <kotlin.version>1.6.21</kotlin.version>