Compare commits
7 commits: 5835f4fa67 ... 5e26450fd9

Author | SHA1 | Date |
---|---|---|
TipzCM | 5e26450fd9 | |
leif stawnyczy | fd56f1462f | |
leif stawnyczy | 68da721033 | |
leif stawnyczy | ff28eae43c | |
volodymyr-korzh | a5f3878392 | |
Brenin Rhodes | ffd1287a06 | |
volodymyr-korzh | c0de4cd88b | |
@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 6451
+jira: SMILE-9089
+title: "Previously, activating a `BulkDataImport` job would not change the job's status to `RUNNING`,
+  causing it to be processed multiple times instead of once. This has been fixed."
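The hunk below contains the actual fix. As a lead-in, here is a self-contained sketch (illustrative only, not HAPI FHIR code; every name in it is invented) of the failure mode the changelog describes: a poller that selects READY jobs but never advances their status hands the same job to the pipeline on every pass.

import java.util.List;

// Toy model of the bug: the activator never moves the job out of READY,
// so every scheduler pass re-submits it.
public class ReadyJobPollerSketch {

    enum Status { READY, RUNNING }

    static class Job {
        Status status = Status.READY;
        int timesSubmitted = 0;
    }

    public static void main(String[] args) {
        Job job = new Job();
        List<Job> store = List.of(job);

        for (int pass = 0; pass < 3; pass++) {
            for (Job candidate : store) {
                if (candidate.status == Status.READY) {
                    candidate.timesSubmitted++;
                    // The real fix (next hunk) does the equivalent of
                    // candidate.status = Status.RUNNING inside a transaction,
                    // so that later passes skip the job.
                }
            }
        }
        System.out.println("submitted " + job.timesSubmitted + " times"); // 3 without the fix, 1 with it
    }
}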
@@ -217,6 +217,12 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJ
        String biJobId = null;
        try {
            biJobId = processJob(bulkImportJobEntity);
+           // set job status to RUNNING so it would not be processed again
+           myTxTemplate.execute(t -> {
+               bulkImportJobEntity.setStatus(BulkImportJobStatusEnum.RUNNING);
+               myJobDao.save(bulkImportJobEntity);
+               return null;
+           });
        } catch (Exception e) {
            ourLog.error("Failure while preparing bulk export extract", e);
            myTxTemplate.execute(t -> {

@@ -256,6 +262,7 @@
    }

    @Override
    @Transactional
    public JobInfo getJobStatus(String theBiJobId) {
        BulkImportJobEntity theJob = findJobByBiJobId(theBiJobId);
        return new JobInfo()
@@ -1162,10 +1162,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
        }

        if (myFulltextSearchSvc != null && !myFulltextSearchSvc.isDisabled() && changed.isChanged()) {
-           // set the lastUpdated dates so we can use them to search in lucene
-           if (theResource.getMeta().getLastUpdated() == null) {
-               theResource.getMeta().setLastUpdated(theTransactionDetails.getTransactionDate());
-           }
            populateFullTextFields(myContext, theResource, entity, newParams);
        }
    } else {

@@ -1200,16 +1196,19 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
     * Save the resource itself
     */
    if (entity.getId() == null) {
        // create
        myEntityManager.persist(entity);

        postPersist(entity, (T) theResource, theRequest);

    } else if (entity.getDeleted() != null) {
        // delete
        entity = myEntityManager.merge(entity);

        postDelete(entity);

    } else {
        // update
        entity = myEntityManager.merge(entity);

        postUpdate(entity, (T) theResource, theRequest);

@@ -1863,7 +1862,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
    theEntity.setContentText(parseContentTextIntoWords(theContext, theResource));
    if (myStorageSettings.isAdvancedHSearchIndexing()) {
        ExtendedHSearchIndexData hSearchIndexData =
-               myFulltextSearchSvc.extractLuceneIndexData(theResource, theNewParams);
+               myFulltextSearchSvc.extractLuceneIndexData(theResource, theEntity, theNewParams);
        theEntity.setLuceneIndexData(hSearchIndexData);
    }
}
@@ -135,12 +135,12 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {

    @Override
    public ExtendedHSearchIndexData extractLuceneIndexData(
-           IBaseResource theResource, ResourceIndexedSearchParams theNewParams) {
+           IBaseResource theResource, ResourceTable theEntity, ResourceIndexedSearchParams theNewParams) {
        String resourceType = myFhirContext.getResourceType(theResource);
        ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(resourceType);
        ExtendedHSearchIndexExtractor extractor = new ExtendedHSearchIndexExtractor(
                myStorageSettings, myFhirContext, activeSearchParams, mySearchParamExtractor);
-       return extractor.extract(theResource, theNewParams);
+       return extractor.extract(theResource, theEntity, theNewParams);
    }

    @Override
@@ -89,7 +89,7 @@ public interface IFulltextSearchSvc {
    boolean isDisabled();

    ExtendedHSearchIndexData extractLuceneIndexData(
-           IBaseResource theResource, ResourceIndexedSearchParams theNewParams);
+           IBaseResource theResource, ResourceTable theEntity, ResourceIndexedSearchParams theNewParams);

    /**
     * Returns true if the parameter map can be handled for hibernate search.
@@ -25,6 +25,8 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
+import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.model.search.CompositeSearchIndexData;
import ca.uhn.fhir.jpa.model.search.DateSearchIndexData;
import ca.uhn.fhir.jpa.model.search.ExtendedHSearchIndexData;

@@ -37,6 +39,7 @@ import ca.uhn.fhir.rest.server.util.ResourceSearchParams;
import ca.uhn.fhir.util.MetaUtil;
import com.google.common.base.Strings;
import jakarta.annotation.Nonnull;
import org.apache.commons.lang3.ObjectUtils;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseResource;
@@ -73,9 +76,10 @@ public class ExtendedHSearchIndexExtractor {
        mySearchParamExtractor = theSearchParamExtractor;
    }

    @Nonnull
-   public ExtendedHSearchIndexData extract(IBaseResource theResource, ResourceIndexedSearchParams theNewParams) {
-       ExtendedHSearchIndexData retVal = new ExtendedHSearchIndexData(myContext, myJpaStorageSettings, theResource);
+   public ExtendedHSearchIndexData extract(
+           IBaseResource theResource, ResourceTable theEntity, ResourceIndexedSearchParams theNewParams) {
+       ExtendedHSearchIndexData retVal =
+               new ExtendedHSearchIndexData(myContext, myJpaStorageSettings, theResource, theEntity);

        if (myJpaStorageSettings.isStoreResourceInHSearchIndex()) {
            retVal.setRawResourceData(myContext.newJsonParser().encodeResourceToString(theResource));
@@ -113,11 +117,27 @@
                .filter(nextParam -> !nextParam.isMissing())
                .forEach(nextParam -> retVal.addUriIndexData(nextParam.getParamName(), nextParam.getUri()));

-       theResource.getMeta().getTag().forEach(tag -> retVal.addTokenIndexData("_tag", tag));
-
-       theResource.getMeta().getSecurity().forEach(sec -> retVal.addTokenIndexData("_security", sec));
-
-       theResource.getMeta().getProfile().forEach(prof -> retVal.addUriIndexData("_profile", prof.getValue()));
+       theEntity.getTags().forEach(tag -> {
+           TagDefinition td = tag.getTag();
+
+           IBaseCoding coding = (IBaseCoding) myContext.getVersion().newCodingDt();
+           coding.setVersion(td.getVersion());
+           coding.setDisplay(td.getDisplay());
+           coding.setCode(td.getCode());
+           coding.setSystem(td.getSystem());
+           coding.setUserSelected(ObjectUtils.defaultIfNull(td.getUserSelected(), false));
+           switch (td.getTagType()) {
+               case TAG:
+                   retVal.addTokenIndexData("_tag", coding);
+                   break;
+               case PROFILE:
+                   retVal.addUriIndexData("_profile", coding.getCode());
+                   break;
+               case SECURITY_LABEL:
+                   retVal.addTokenIndexData("_security", coding);
+                   break;
+           }
+       });

        String source = MetaUtil.getSource(myContext, theResource.getMeta());
        if (isNotBlank(source)) {
@@ -127,20 +147,14 @@
        theNewParams.myCompositeParams.forEach(nextParam ->
                retVal.addCompositeIndexData(nextParam.getSearchParamName(), buildCompositeIndexData(nextParam)));

-       if (theResource.getMeta().getLastUpdated() != null) {
-           int ordinal = ResourceIndexedSearchParamDate.calculateOrdinalValue(
-                           theResource.getMeta().getLastUpdated())
+       if (theEntity.getUpdated() != null && !theEntity.getUpdated().isEmpty()) {
+           int ordinal = ResourceIndexedSearchParamDate.calculateOrdinalValue(theEntity.getUpdatedDate())
                    .intValue();
            retVal.addDateIndexData(
-                   "_lastUpdated",
-                   theResource.getMeta().getLastUpdated(),
-                   ordinal,
-                   theResource.getMeta().getLastUpdated(),
-                   ordinal);
+                   "_lastUpdated", theEntity.getUpdatedDate(), ordinal, theEntity.getUpdatedDate(), ordinal);
        }

        if (!theNewParams.myLinks.isEmpty()) {

            // awkwardly, links are indexed by jsonpath, not by search param.
            // so we re-build the linkage.
            Map<String, List<String>> linkPathToParamName = new HashMap<>();
@@ -146,8 +146,7 @@ public class ExtendedHSearchSearchBuilder {

        // not yet supported in HSearch
        myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE
-               &&
-               supportsLastUpdated(myParams)
+               && supportsLastUpdated(myParams)
                && // ???
                myParams.entrySet().stream()
                        .filter(e -> !ourUnsafeSearchParmeters.contains(e.getKey()))
@@ -164,8 +163,10 @@ public class ExtendedHSearchSearchBuilder {

        DateRangeParam lastUpdated = theMap.getLastUpdated();

-       return lastUpdated.getLowerBound() != null && isParamTypeSupported(lastUpdated.getLowerBound())
-               && lastUpdated.getUpperBound() != null && isParamTypeSupported(lastUpdated.getUpperBound());
+       return lastUpdated.getLowerBound() != null
+               && isParamTypeSupported(lastUpdated.getLowerBound())
+               && lastUpdated.getUpperBound() != null
+               && isParamTypeSupported(lastUpdated.getUpperBound());
    }

    /**
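As a hedged illustration of the predicate reformatted above: a _lastUpdated range is only treated as HSearch-eligible when both a lower and an upper bound are present (and both use a supported prefix). The sketch below only builds the two kinds of ranges; the class name is invented, and the predicate itself is internal to ExtendedHSearchSearchBuilder and is not called here.

import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;

public class LastUpdatedSupportSketch {

    public static void main(String[] args) {
        // Both bounds present: the kind of range supportsLastUpdated(...) accepts.
        DateRangeParam bounded =
                new DateRangeParam(new DateParam("ge2024-01-01"), new DateParam("le2024-12-31"));

        // Lower bound only: getUpperBound() is null, so the predicate returns false
        // and the query falls back to the regular JPA path.
        DateRangeParam openEnded = new DateRangeParam(new DateParam("ge2024-01-01"), null);

        System.out.println(bounded.getUpperBound() != null);   // true
        System.out.println(openEnded.getUpperBound() != null); // false
    }
}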
@@ -108,7 +108,6 @@ public class ValueSetExpansionR4ElasticsearchIT extends BaseJpaTest implements I
        when(mySrd.getUserData().getOrDefault(MAKE_LOADING_VERSION_CURRENT, Boolean.TRUE)).thenReturn(Boolean.TRUE);
    }


    @AfterEach
    public void after() {
        myStorageSettings.setMaximumExpansionSize(JpaStorageSettings.DEFAULT_MAX_EXPANSION_SIZE);

@@ -217,7 +216,6 @@ public class ValueSetExpansionR4ElasticsearchIT extends BaseJpaTest implements I
        }
        myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd(CS_URL, additions);


        // Codes available exceeds the max
        myStorageSettings.setMaximumExpansionSize(50);
        ValueSet vs = new ValueSet();

@@ -236,7 +234,6 @@ public class ValueSetExpansionR4ElasticsearchIT extends BaseJpaTest implements I
        include.setSystem(CS_URL);
        ValueSet outcome = myTermSvc.expandValueSet(null, vs);
        assertThat(outcome.getExpansion().getContains()).hasSize(109);

    }

    @Test
@@ -10,15 +10,15 @@
    </root>
    <!--Uncomment the below if you are doing Elasticsearch debugging -->

-   <logger name="org.hibernate.search.elasticsearch.request" additivity="false" level="trace">
-       <appender-ref ref="STDOUT" />
-   </logger>
-   <logger name="org.elasticsearch.client" level="debug" additivity="false">
-       <appender-ref ref="STDOUT" />
-   </logger>
-   <logger name="tracer" level="TRACE" additivity="false">
-       <appender-ref ref="STDOUT" />
-   </logger>
+   <!-- <logger name="org.hibernate.search.elasticsearch.request" additivity="false" level="trace">-->
+   <!--     <appender-ref ref="STDOUT" />-->
+   <!-- </logger>-->
+   <!-- <logger name="org.elasticsearch.client" level="debug" additivity="false">-->
+   <!--     <appender-ref ref="STDOUT" />-->
+   <!-- </logger>-->
+   <!-- <logger name="tracer" level="TRACE" additivity="false">-->
+   <!--     <appender-ref ref="STDOUT" />-->
+   <!-- </logger>-->


</configuration>
@@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.model.search;

import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.model.dstu2.composite.CodingDt;
import com.google.common.collect.HashMultimap;

@@ -57,12 +58,17 @@ public class ExtendedHSearchIndexData {
    private String myForcedId;
    private String myResourceJSON;
    private IBaseResource myResource;
+   private ResourceTable myEntity;

    public ExtendedHSearchIndexData(
-           FhirContext theFhirContext, StorageSettings theStorageSettings, IBaseResource theResource) {
+           FhirContext theFhirContext,
+           StorageSettings theStorageSettings,
+           IBaseResource theResource,
+           ResourceTable theEntity) {
        this.myFhirContext = theFhirContext;
        this.myStorageSettings = theStorageSettings;
        myResource = theResource;
+       myEntity = theEntity;
    }

    private <V> BiConsumer<String, V> ifNotContained(BiConsumer<String, V> theIndexWriter) {
@@ -17,6 +17,7 @@ import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.ActivateJobResult;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
+import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.subscription.channel.api.ChannelConsumerSettings;

@@ -166,6 +167,8 @@ public class BulkDataImportR4Test extends BaseJpaR4Test implements ITestDataBuil

        ActivateJobResult activateJobOutcome = mySvc.activateNextReadyJob();
        assertTrue(activateJobOutcome.isActivated);
+       // validate that job changed status from READY to RUNNING
+       assertEquals(BulkImportJobStatusEnum.RUNNING, mySvc.getJobStatus(jobId).getStatus());

        JobInstance instance = myBatch2JobHelper.awaitJobCompletion(activateJobOutcome.jobId, 60);
        assertNotNull(instance);

@@ -196,6 +199,8 @@ public class BulkDataImportR4Test extends BaseJpaR4Test implements ITestDataBuil

        ActivateJobResult activateJobOutcome = mySvc.activateNextReadyJob();
        assertTrue(activateJobOutcome.isActivated);
+       // validate that job changed status from READY to RUNNING
+       assertEquals(BulkImportJobStatusEnum.RUNNING, mySvc.getJobStatus(jobId).getStatus());

        JobInstance instance = myBatch2JobHelper.awaitJobCompletion(activateJobOutcome.jobId);
        assertNotNull(instance);
@@ -31,7 +31,7 @@
    <logger name="ca.uhn.fhir.jpa.bulk" level="info"/>

    <!-- more debugging -->
-   <!-- <!–-->
+   <!--
    <logger name="org.elasticsearch.client" level="trace"/>
    <logger name="org.hibernate.search.elasticsearch.request" level="TRACE"/>
    <logger name="ca.uhn.fhir.jpa.model.search" level="debug"/>

@@ -39,7 +39,7 @@
    <logger name="org.hibernate.search" level="debug"/>
    <logger name="org.hibernate.search.query" level="TRACE"/>
    <logger name="org.hibernate.search.elasticsearch.request" level="TRACE"/>
-   <!-- –>-->
+   -->
    <!-- See https://docs.jboss.org/hibernate/stable/search/reference/en-US/html_single/#backend-lucene-io-writer-infostream for lucene logging
    <logger name="org.hibernate.search.backend.lucene.infostream" level="TRACE"/> -->
@@ -51,3 +51,33 @@ INSERT INTO TRM_CONCEPT_DESIG (
   54,
   150
);
+
+INSERT INTO HFJ_RES_LINK (
+   PID,
+   PARTITION_DATE,
+   PARTITION_ID,
+   SRC_PATH,
+   SRC_RESOURCE_ID,
+   SOURCE_RESOURCE_TYPE,
+   TARGET_RESOURCE_ID,
+   TARGET_RESOURCE_TYPE,
+   TARGET_RESOURCE_URL,
+   TARGET_RESOURCE_VERSION,
+   SP_UPDATED,
+   TARGET_RES_PARTITION_ID,
+   TARGET_RES_PARTITION_DATE
+) VALUES (
+   702,
+   '2024-11-05',
+   1,
+   'Observation.subject.where(resolve() is Patient)',
+   1656,
+   'Observation',
+   1906,
+   'Patient',
+   'http://localhost:8000/Patient/123',
+   1,
+   '2024-11-01 18:01:12.921',
+   1,
+   '2024-11-05'
+);
@@ -51,3 +51,33 @@ INSERT INTO TRM_CONCEPT_DESIG (
   54,
   150
);
+
+INSERT INTO HFJ_RES_LINK (
+   PID,
+   PARTITION_DATE,
+   PARTITION_ID,
+   SRC_PATH,
+   SRC_RESOURCE_ID,
+   SOURCE_RESOURCE_TYPE,
+   TARGET_RESOURCE_ID,
+   TARGET_RESOURCE_TYPE,
+   TARGET_RESOURCE_URL,
+   TARGET_RESOURCE_VERSION,
+   SP_UPDATED,
+   TARGET_RES_PARTITION_ID,
+   TARGET_RES_PARTITION_DATE
+) VALUES (
+   702,
+   '2024-11-05',
+   1,
+   'Observation.subject.where(resolve() is Patient)',
+   1653,
+   'Observation',
+   1906,
+   'Patient',
+   'http://localhost:8000/Patient/123',
+   1,
+   '2024-11-01 18:01:12.921',
+   1,
+   '2024-11-05'
+);
@@ -51,3 +51,33 @@ INSERT INTO TRM_CONCEPT_DESIG (
   54,
   150
);
+
+INSERT INTO HFJ_RES_LINK (
+   PID,
+   PARTITION_DATE,
+   PARTITION_ID,
+   SRC_PATH,
+   SRC_RESOURCE_ID,
+   SOURCE_RESOURCE_TYPE,
+   TARGET_RESOURCE_ID,
+   TARGET_RESOURCE_TYPE,
+   TARGET_RESOURCE_URL,
+   TARGET_RESOURCE_VERSION,
+   SP_UPDATED,
+   TARGET_RES_PARTITION_ID,
+   TARGET_RES_PARTITION_DATE
+) VALUES (
+   702,
+   SYSDATE,
+   1,
+   'Observation.subject.where(resolve() is Patient)',
+   1653,
+   'Observation',
+   1906,
+   'Patient',
+   'http://localhost:8000/Patient/123',
+   1,
+   SYSDATE,
+   1,
+   SYSDATE
+);
@@ -51,3 +51,33 @@ INSERT INTO TRM_CONCEPT_DESIG (
   54,
   150
);
+
+INSERT INTO HFJ_RES_LINK (
+   PID,
+   PARTITION_DATE,
+   PARTITION_ID,
+   SRC_PATH,
+   SRC_RESOURCE_ID,
+   SOURCE_RESOURCE_TYPE,
+   TARGET_RESOURCE_ID,
+   TARGET_RESOURCE_TYPE,
+   TARGET_RESOURCE_URL,
+   TARGET_RESOURCE_VERSION,
+   SP_UPDATED,
+   TARGET_RES_PARTITION_ID,
+   TARGET_RES_PARTITION_DATE
+) VALUES (
+   702,
+   '2024-11-05',
+   1,
+   'Observation.subject.where(resolve() is Patient)',
+   1653,
+   'Observation',
+   1906,
+   'Patient',
+   'http://localhost:8000/Patient/123',
+   1,
+   '2024-11-01 18:01:12.921',
+   1,
+   '2024-11-05'
+);
@@ -74,6 +74,11 @@ public interface ILuceneSearchR4Test {
        Patient patient = new Patient();
        patient.getMeta()
                .addTag(system, code, "");
        patient.addName().addGiven("homer")
                .setFamily("simpson");
        patient.addAddress()
                .setCity("springfield")
                .addLine("742 evergreen terrace");
        Long id = patientDao.create(patient, requestDeatils).getId().toUnqualifiedVersionless().getIdPartAsLong();

        // create base search map
@@ -7,6 +7,7 @@ import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
+import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.CompositeSearchIndexData;
import ca.uhn.fhir.jpa.model.search.DateSearchIndexData;
import ca.uhn.fhir.jpa.model.search.ExtendedHSearchIndexData;

@@ -54,7 +55,7 @@ class ExtendedHSearchIndexExtractorTest implements ITestDataBuilder.WithSupport
        ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams("Observation");
        ExtendedHSearchIndexExtractor extractor = new ExtendedHSearchIndexExtractor(
                myJpaStorageSettings, myFhirContext, activeSearchParams, mySearchParamExtractor);
-       ExtendedHSearchIndexData indexData = extractor.extract(new Observation(), extractedParams);
+       ExtendedHSearchIndexData indexData = extractor.extract(new Observation(), new ResourceTable(), extractedParams);

        // validate
        Set<CompositeSearchIndexData> spIndexData = indexData.getSearchParamComposites().get("component-code-value-concept");

@@ -77,14 +78,13 @@ class ExtendedHSearchIndexExtractorTest implements ITestDataBuilder.WithSupport
        ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams("Patient");
        ExtendedHSearchIndexExtractor extractor = new ExtendedHSearchIndexExtractor(
                myJpaStorageSettings, myFhirContext, activeSearchParams, mySearchParamExtractor);
-       ExtendedHSearchIndexData indexData = extractor.extract(new SearchParameter(), searchParams);
+       ExtendedHSearchIndexData indexData = extractor.extract(new SearchParameter(), new ResourceTable(), searchParams);

        // validate
        Set<DateSearchIndexData> dIndexData = indexData.getDateIndexData().get("Date");
        assertThat(dIndexData).hasSize(0);
        Set<QuantitySearchIndexData> qIndexData = indexData.getQuantityIndexData().get("Quantity");
        assertThat(qIndexData).hasSize(0);
    }

    @Override
@@ -21,15 +21,20 @@ package ca.uhn.fhir.cr.config.dstu3;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.cr.config.CrProcessorConfig;
import ca.uhn.fhir.cr.config.ProviderLoader;
import ca.uhn.fhir.cr.config.ProviderSelector;
import ca.uhn.fhir.rest.server.RestfulServer;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;

import java.util.Arrays;
import java.util.Map;

@Configuration
@Import(CrProcessorConfig.class)
public class EvaluateOperationConfig {
    @Bean
    ca.uhn.fhir.cr.dstu3.library.LibraryEvaluateProvider dstu3LibraryEvaluateProvider() {
@@ -21,15 +21,20 @@ package ca.uhn.fhir.cr.config.r4;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.cr.config.CrProcessorConfig;
import ca.uhn.fhir.cr.config.ProviderLoader;
import ca.uhn.fhir.cr.config.ProviderSelector;
import ca.uhn.fhir.rest.server.RestfulServer;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;

import java.util.Arrays;
import java.util.Map;

@Configuration
@Import(CrProcessorConfig.class)
public class EvaluateOperationConfig {
    @Bean
    ca.uhn.fhir.cr.r4.library.LibraryEvaluateProvider r4LibraryEvaluateProvider() {
@@ -24,6 +24,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
+import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.test.utilities.ITestDataBuilder;
import ca.uhn.fhir.util.BundleBuilder;
import com.google.common.collect.HashMultimap;

@@ -66,18 +67,42 @@ public class DaoTestDataBuilder implements ITestDataBuilder.WithSupport, ITestDa
        }
        //noinspection rawtypes
        IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResource.getClass());

+       // manipulate the transaction details to provide a fake transaction date
+       TransactionDetails details = null;
+       if (theResource.getMeta() != null && theResource.getMeta().getLastUpdated() != null) {
+           details = new TransactionDetails(theResource.getMeta().getLastUpdated());
+       } else {
+           details = new TransactionDetails();
+       }

        //noinspection unchecked
-       IIdType id = dao.create(theResource, mySrd).getId().toUnqualifiedVersionless();
+       IIdType id = dao.create(theResource, null, true, mySrd, details)
+               .getId().toUnqualifiedVersionless();
        myIds.put(theResource.fhirType(), id);
        return id;
    }

    @Override
    public IIdType doUpdateResource(IBaseResource theResource) {
+       // manipulate the transaction details to provide a fake transaction date
+       TransactionDetails details = null;
+       if (theResource.getMeta() != null && theResource.getMeta().getLastUpdated() != null) {
+           details = new TransactionDetails(theResource.getMeta().getLastUpdated());
+       } else {
+           details = new TransactionDetails();
+       }

        //noinspection rawtypes
        IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResource.getClass());

        //noinspection unchecked
-       IIdType id = dao.update(theResource, mySrd).getId().toUnqualifiedVersionless();
+       IIdType id = dao.update(theResource,
+               null,
+               true,
+               false,
+               mySrd,
+               details).getId().toUnqualifiedVersionless();
        myIds.put(theResource.fhirType(), id);
        return id;
    }
@@ -209,6 +209,12 @@ public interface ITestDataBuilder {
        return t -> ((IBaseResource)t).getMeta().setLastUpdated(theLastUpdated);
    }

    /**
     * Sets a _lastUpdated value.
     *
     * This value will also be used to control the transaction time, which is what determines
     * what the Updated date is.
     */
    default ICreationArgument withLastUpdated(String theIsoDate) {
        return t -> ((IBaseResource)t).getMeta().setLastUpdated(new InstantType(theIsoDate).getValue());
    }
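A hedged usage sketch of how the new withLastUpdated(String) overload and the DaoTestDataBuilder change above fit together: the ISO date ends up both in the resource's meta.lastUpdated and in the TransactionDetails handed to the DAO, so the stored entity's updated timestamp matches the requested _lastUpdated. The class below is illustrative only and is not part of the diff.

import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import org.hl7.fhir.r4.model.InstantType;
import org.hl7.fhir.r4.model.Patient;

import java.util.Date;

public class WithLastUpdatedSketch {

    public static void main(String[] args) {
        // What withLastUpdated("2024-11-05T00:00:00Z") applies to the resource:
        Date lastUpdated = new InstantType("2024-11-05T00:00:00Z").getValue();
        Patient patient = new Patient();
        patient.getMeta().setLastUpdated(lastUpdated);

        // What DaoTestDataBuilder now builds from that date before calling the DAO:
        TransactionDetails details = new TransactionDetails(patient.getMeta().getLastUpdated());

        System.out.println(details.getTransactionDate().equals(lastUpdated)); // true
    }
}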
pom.xml (2 changes)

@@ -1054,7 +1054,7 @@
    <ucum_version>1.0.8</ucum_version>

    <!-- Clinical Reasoning & CQL Support -->
-   <clinical-reasoning.version>3.13.0</clinical-reasoning.version>
+   <clinical-reasoning.version>3.13.1</clinical-reasoning.version>

    <!-- Site properties -->
    <fontawesomeVersion>5.4.1</fontawesomeVersion>