merge master
commit c7ef0917a7
@@ -31,6 +31,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;

 import java.io.Serializable;
+import java.math.BigDecimal;
 import java.util.Date;
 import java.util.List;

@@ -80,6 +81,15 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
     */
    public static final ResourceMetadataKeyEnum<BundleEntrySearchModeEnum> ENTRY_SEARCH_MODE =
            new ResourceMetadataKeyEnum<>("ENTRY_SEARCH_MODE", BundleEntrySearchModeEnum.class) {};
+   /**
+    * If present and populated with a decimal value, contains the "bundle entry search score", which is the value of the score field in the Bundle entry containing this resource.
+    * The value for this key corresponds to field <code>Bundle.entry.search.score</code>. This value represents the search ranking score, where 1.0 is relevant and 0.0 is irrelevant.
+    * <p>
+    * Note that the search score is only used in FHIR DSTU2 and later.
+    * </p>
+    */
+   public static final ResourceMetadataKeyEnum<BigDecimal> ENTRY_SEARCH_SCORE =
+           new ResourceMetadataKeyEnum<>("ENTRY_SEARCH_SCORE", BigDecimal.class) {};
    /**
     * If present and populated with a {@link BundleEntryTransactionMethodEnum}, contains the "bundle entry transaction operation", which is the value of the status field in the Bundle entry
     * containing this resource. The value for this key corresponds to field <code>Bundle.entry.transaction.operation</code>. This value can be set in resources being transmitted to a server to

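For illustration, a minimal sketch of reading the new key back off a resource; this is not part of the commit, it assumes the standard ResourceMetadataKeyEnum get()/put() accessors, and the Patient instance is a stand-in:

import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import org.hl7.fhir.r4.model.Patient;
import java.math.BigDecimal;

public class SearchScoreMetadataExample {
    public static void main(String[] args) {
        Patient patient = new Patient();
        // A server (or test) can attach the score to the resource metadata...
        ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.put(patient, new BigDecimal("1.0"));
        // ...and a consumer reads it back off the same key.
        BigDecimal score = ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.get(patient);
        System.out.println("Bundle.entry.search.score = " + score);
    }
}
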
@@ -50,6 +50,7 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashSet;

@@ -578,42 +579,53 @@ public class BundleUtil {
        BaseRuntimeElementCompositeDefinition<?> searchChildContentsDef =
                (BaseRuntimeElementCompositeDefinition<?>) searchChildDef.getChildByName("search");
        BaseRuntimeChildDefinition searchModeChildDef = searchChildContentsDef.getChildByName("mode");
+       BaseRuntimeChildDefinition searchScoreChildDef = searchChildContentsDef.getChildByName("score");

        List<SearchBundleEntryParts> retVal = new ArrayList<>();
        for (IBase nextEntry : entries) {
            SearchBundleEntryParts parts = getSearchBundleEntryParts(
-                   fullUrlChildDef, resourceChildDef, searchChildDef, searchModeChildDef, nextEntry);
+                   fullUrlChildDef,
+                   resourceChildDef,
+                   searchChildDef,
+                   searchModeChildDef,
+                   searchScoreChildDef,
+                   nextEntry);
            retVal.add(parts);
        }
        return retVal;
    }

    private static SearchBundleEntryParts getSearchBundleEntryParts(
-           BaseRuntimeChildDefinition fullUrlChildDef,
-           BaseRuntimeChildDefinition resourceChildDef,
-           BaseRuntimeChildDefinition searchChildDef,
-           BaseRuntimeChildDefinition searchModeChildDef,
+           BaseRuntimeChildDefinition theFullUrlChildDef,
+           BaseRuntimeChildDefinition theResourceChildDef,
+           BaseRuntimeChildDefinition theSearchChildDef,
+           BaseRuntimeChildDefinition theSearchModeChildDef,
+           BaseRuntimeChildDefinition theSearchScoreChildDef,
            IBase entry) {
        IBaseResource resource = null;
        String matchMode = null;
+       BigDecimal searchScore = null;

-       String fullUrl = fullUrlChildDef
+       String fullUrl = theFullUrlChildDef
                .getAccessor()
                .getFirstValueOrNull(entry)
                .map(t -> ((IPrimitiveType<?>) t).getValueAsString())
                .orElse(null);

-       for (IBase nextResource : resourceChildDef.getAccessor().getValues(entry)) {
+       for (IBase nextResource : theResourceChildDef.getAccessor().getValues(entry)) {
            resource = (IBaseResource) nextResource;
        }

-       for (IBase nextSearch : searchChildDef.getAccessor().getValues(entry)) {
-           for (IBase nextUrl : searchModeChildDef.getAccessor().getValues(nextSearch)) {
+       for (IBase nextSearch : theSearchChildDef.getAccessor().getValues(entry)) {
+           for (IBase nextUrl : theSearchModeChildDef.getAccessor().getValues(nextSearch)) {
                matchMode = ((IPrimitiveType<?>) nextUrl).getValueAsString();
            }
+           for (IBase nextUrl : theSearchScoreChildDef.getAccessor().getValues(nextSearch)) {
+               searchScore = (BigDecimal) ((IPrimitiveType<?>) nextUrl).getValue();
+           }
        }

-       return new SearchBundleEntryParts(fullUrl, resource, matchMode);
+       return new SearchBundleEntryParts(fullUrl, resource, matchMode, searchScore);
    }

    /**

@@ -22,15 +22,20 @@ package ca.uhn.fhir.util.bundle;
 import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
 import org.hl7.fhir.instance.model.api.IBaseResource;

+import java.math.BigDecimal;
+
 public class SearchBundleEntryParts {
    private final IBaseResource myResource;
    private final BundleEntrySearchModeEnum mySearchMode;
+   private final BigDecimal mySearchScore;
    private final String myFullUrl;

-   public SearchBundleEntryParts(String theFullUrl, IBaseResource theResource, String theSearchMode) {
+   public SearchBundleEntryParts(
+           String theFullUrl, IBaseResource theResource, String theSearchMode, BigDecimal theSearchScore) {
        myFullUrl = theFullUrl;
        myResource = theResource;
        mySearchMode = BundleEntrySearchModeEnum.forCode(theSearchMode);
+       mySearchScore = theSearchScore;
    }

    public String getFullUrl() {

@@ -44,4 +49,8 @@ public class SearchBundleEntryParts {
    public BundleEntrySearchModeEnum getSearchMode() {
        return mySearchMode;
    }
+
+   public BigDecimal getSearchScore() {
+       return mySearchScore;
+   }
 }

@@ -0,0 +1,5 @@
+---
+type: add
+issue: 6010
+jira: SMILE-8214
+title: "When populated, the search score field will now be included in the entries of a response Bundle."

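For illustration, a minimal sketch (not part of this commit) of how a client could consume the new field; it assumes the existing public helper BundleUtil.getSearchBundleEntryParts(FhirContext, IBaseBundle):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.BundleUtil;
import ca.uhn.fhir.util.bundle.SearchBundleEntryParts;
import org.hl7.fhir.r4.model.Bundle;

public class ReadSearchScores {
    // Prints the ranking score of each search result, if the server populated it.
    static void printScores(FhirContext theContext, Bundle theSearchResult) {
        for (SearchBundleEntryParts parts : BundleUtil.getSearchBundleEntryParts(theContext, theSearchResult)) {
            System.out.printf("%s -> mode=%s score=%s%n",
                    parts.getFullUrl(), parts.getSearchMode(), parts.getSearchScore());
        }
    }
}
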
@@ -0,0 +1,7 @@
+---
+type: fix
+issue: 6033
+jira: SMILE-8429
+title: "Previously, attempting to store resources with common identifiers on different partitions would fail.
+This has been fixed by adding a new configuration key, defaulting to false, that allows storing resources with duplicate identifiers across partitions.
+This new feature can be activated by calling PartitionSettings.setConditionalCreateDuplicateIdentifiersEnabled()."

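A sketch of how the new setting would be enabled at startup; the setters are the ones on PartitionSettings (setConditionalCreateDuplicateIdentifiersEnabled is added by this commit), while the surrounding configuration class is hypothetical and will vary by deployment:

import ca.uhn.fhir.jpa.model.config.PartitionSettings;

public class ExamplePartitionConfig {
    // Opt in to storing the same conditional-create search URL on different
    // partitions; the default remains false, preserving the old behaviour.
    public PartitionSettings partitionSettings() {
        PartitionSettings settings = new PartitionSettings();
        settings.setPartitioningEnabled(true);
        settings.setConditionalCreateDuplicateIdentifiersEnabled(true);
        return settings;
    }
}
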
@@ -0,0 +1,8 @@
+---
+type: fix
+issue: 6040
+title: "The `meta.profile` element on resources was not being respected as
+canonical (ie, allowing for a version to be appended, `http://example.com/StructureDefinition/abc|1.0.0`),
+and was thus being ignored during validation.
+This has been fixed.
+"

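To illustrate the now-respected canonical form, a small sketch with illustrative values (Meta.addProfile is the standard HAPI API):

import org.hl7.fhir.r4.model.Observation;

public class VersionedProfileExample {
    static Observation tagWithVersionedProfile() {
        Observation obs = new Observation();
        // A canonical profile reference may carry a version after "|";
        // the validator now resolves this instead of ignoring the entry.
        obs.getMeta().addProfile("http://example.com/StructureDefinition/abc|1.0.0");
        return obs;
    }
}
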
@@ -80,6 +80,7 @@ import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
 import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices;
 import ca.uhn.fhir.jpa.interceptor.OverridePathBasedReferentialIntegrityForDeletesInterceptor;
 import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.dao.JpaPid;
 import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
 import ca.uhn.fhir.jpa.packages.IHapiPackageCacheManager;

@@ -852,12 +853,14 @@ public class JpaConfig {
            PersistenceContextProvider thePersistenceContextProvider,
            IResourceSearchUrlDao theResourceSearchUrlDao,
            MatchUrlService theMatchUrlService,
-           FhirContext theFhirContext) {
+           FhirContext theFhirContext,
+           PartitionSettings thePartitionSettings) {
        return new ResourceSearchUrlSvc(
                thePersistenceContextProvider.getEntityManager(),
                theResourceSearchUrlDao,
                theMatchUrlService,
-               theFhirContext);
+               theFhirContext,
+               thePartitionSettings);
    }

    @Bean

@@ -128,7 +128,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
    protected void init740() {
        // Start of migrations from 7.2 to 7.4

-       Builder version = forVersion(VersionEnum.V7_4_0);
+       final Builder version = forVersion(VersionEnum.V7_4_0);

        {
            version.onTable("HFJ_RES_SEARCH_URL")

@@ -348,6 +348,30 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
                .nullable()
                .withType(ColumnTypeEnum.STRING, 100)
                .failureAllowed();

+       {
+           // Please see https://github.com/hapifhir/hapi-fhir/issues/6033 for why we're doing this
+           version.onTable("HFJ_RES_SEARCH_URL")
+                   .addColumn("20240618.2", "PARTITION_ID", -1)
+                   .nullable()
+                   .type(ColumnTypeEnum.INT);
+
+           version.onTable("HFJ_RES_SEARCH_URL")
+                   .addColumn("20240618.3", "PARTITION_DATE")
+                   .nullable()
+                   .type(ColumnTypeEnum.DATE_ONLY);
+
+           version.executeRawSql("20240618.4", "UPDATE HFJ_RES_SEARCH_URL SET PARTITION_ID = -1");
+
+           version.onTable("HFJ_RES_SEARCH_URL")
+                   .modifyColumn("20240618.5", "PARTITION_ID")
+                   .nonNullable()
+                   .withType(ColumnTypeEnum.INT);
+
+           version.onTable("HFJ_RES_SEARCH_URL").dropPrimaryKey("20240618.6");
+
+           version.onTable("HFJ_RES_SEARCH_URL").addPrimaryKey("20240618.7", "RES_SEARCH_URL", "PARTITION_ID");
+       }
    }

    {

@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.search;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.searchparam.MatchUrlService;

@@ -51,16 +52,19 @@ public class ResourceSearchUrlSvc {
    private final MatchUrlService myMatchUrlService;

    private final FhirContext myFhirContext;
+   private final PartitionSettings myPartitionSettings;

    public ResourceSearchUrlSvc(
            EntityManager theEntityManager,
            IResourceSearchUrlDao theResourceSearchUrlDao,
            MatchUrlService theMatchUrlService,
-           FhirContext theFhirContext) {
+           FhirContext theFhirContext,
+           PartitionSettings thePartitionSettings) {
        myEntityManager = theEntityManager;
        myResourceSearchUrlDao = theResourceSearchUrlDao;
        myMatchUrlService = theMatchUrlService;
        myFhirContext = theFhirContext;
+       myPartitionSettings = thePartitionSettings;
    }

    /**

@@ -87,8 +91,10 @@ public class ResourceSearchUrlSvc {
            String theResourceName, String theMatchUrl, ResourceTable theResourceTable) {
        String canonicalizedUrlForStorage = createCanonicalizedUrlForStorage(theResourceName, theMatchUrl);

-       ResourceSearchUrlEntity searchUrlEntity =
-               ResourceSearchUrlEntity.from(canonicalizedUrlForStorage, theResourceTable);
+       ResourceSearchUrlEntity searchUrlEntity = ResourceSearchUrlEntity.from(
+               canonicalizedUrlForStorage,
+               theResourceTable,
+               myPartitionSettings.isConditionalCreateDuplicateIdentifiersEnabled());
        // calling dao.save performs a merge operation which implies a trip to
        // the database to see if the resource exists. Since we don't need the check, we avoid the trip by calling
        // em.persist.

@@ -32,6 +32,7 @@ public class PartitionSettings {
    private boolean myUnnamedPartitionMode;
    private Integer myDefaultPartitionId;
    private boolean myAlwaysOpenNewTransactionForDifferentPartition;
+   private boolean myConditionalCreateDuplicateIdentifiersEnabled = false;

    /**
     * Should we always open a new database transaction if the partition context changes

@@ -171,6 +172,15 @@ public class PartitionSettings {
                PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED);
    }

+   public boolean isConditionalCreateDuplicateIdentifiersEnabled() {
+       return myConditionalCreateDuplicateIdentifiersEnabled;
+   }
+
+   public void setConditionalCreateDuplicateIdentifiersEnabled(
+           boolean theConditionalCreateDuplicateIdentifiersEnabled) {
+       myConditionalCreateDuplicateIdentifiersEnabled = theConditionalCreateDuplicateIdentifiersEnabled;
+   }
+
    public enum CrossPartitionReferenceMode {

    /**

@@ -20,10 +20,10 @@
 package ca.uhn.fhir.jpa.model.entity;

 import jakarta.persistence.Column;
+import jakarta.persistence.EmbeddedId;
 import jakarta.persistence.Entity;
 import jakarta.persistence.FetchType;
 import jakarta.persistence.ForeignKey;
-import jakarta.persistence.Id;
 import jakarta.persistence.Index;
 import jakarta.persistence.JoinColumn;
 import jakarta.persistence.ManyToOne;

@@ -31,7 +31,9 @@ import jakarta.persistence.Table;
 import jakarta.persistence.Temporal;
 import jakarta.persistence.TemporalType;

+import java.time.LocalDate;
 import java.util.Date;
+import java.util.Optional;

 /**
  * This entity is used to enforce uniqueness on a given search URL being

@@ -52,12 +54,12 @@
 public class ResourceSearchUrlEntity {

    public static final String RES_SEARCH_URL_COLUMN_NAME = "RES_SEARCH_URL";
+   public static final String PARTITION_ID = "PARTITION_ID";

    public static final int RES_SEARCH_URL_LENGTH = 768;

-   @Id
-   @Column(name = RES_SEARCH_URL_COLUMN_NAME, length = RES_SEARCH_URL_LENGTH, nullable = false)
-   private String mySearchUrl;
+   @EmbeddedId
+   private ResourceSearchUrlEntityPK myPk;

    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(

@@ -70,17 +72,35 @@ public class ResourceSearchUrlEntity {
    @Column(name = "RES_ID", updatable = false, nullable = false, insertable = false)
    private Long myResourcePid;

+   @Column(name = "PARTITION_DATE", nullable = true, insertable = true, updatable = false)
+   private LocalDate myPartitionDate;
+
    @Column(name = "CREATED_TIME", nullable = false)
    @Temporal(TemporalType.TIMESTAMP)
    private Date myCreatedTime;

-   public static ResourceSearchUrlEntity from(String theUrl, ResourceTable theResourceTable) {
+   public static ResourceSearchUrlEntity from(
+           String theUrl, ResourceTable theResourceTable, boolean theSearchUrlDuplicateAcrossPartitionsEnabled) {
+
        return new ResourceSearchUrlEntity()
+               .setPk(ResourceSearchUrlEntityPK.from(
+                       theUrl, theResourceTable, theSearchUrlDuplicateAcrossPartitionsEnabled))
+               .setPartitionDate(Optional.ofNullable(theResourceTable.getPartitionId())
+                       .map(PartitionablePartitionId::getPartitionDate)
+                       .orElse(null))
                .setResourceTable(theResourceTable)
-               .setSearchUrl(theUrl)
                .setCreatedTime(new Date());
    }

+   public ResourceSearchUrlEntityPK getPk() {
+       return myPk;
+   }
+
+   public ResourceSearchUrlEntity setPk(ResourceSearchUrlEntityPK thePk) {
+       myPk = thePk;
+       return this;
+   }
+
    public Long getResourcePid() {
        if (myResourcePid != null) {
            return myResourcePid;

@@ -112,11 +132,19 @@ public class ResourceSearchUrlEntity {
    }

    public String getSearchUrl() {
-       return mySearchUrl;
+       return myPk.getSearchUrl();
    }

-   public ResourceSearchUrlEntity setSearchUrl(String theSearchUrl) {
-       mySearchUrl = theSearchUrl;
+   public Integer getPartitionId() {
+       return myPk.getPartitionId();
+   }
+
+   public LocalDate getPartitionDate() {
+       return myPartitionDate;
+   }
+
+   public ResourceSearchUrlEntity setPartitionDate(LocalDate thePartitionDate) {
+       myPartitionDate = thePartitionDate;
        return this;
    }
 }

@@ -0,0 +1,118 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Model
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.model.entity;
+
+import jakarta.persistence.Column;
+import jakarta.persistence.Embeddable;
+
+import java.io.Serializable;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.StringJoiner;
+
+/**
+ * Multi-column primary key for {@link ResourceSearchUrlEntity}
+ */
+@Embeddable
+public class ResourceSearchUrlEntityPK implements Serializable {
+   public static final String RES_SEARCH_URL_COLUMN_NAME = "RES_SEARCH_URL";
+   public static final String PARTITION_ID_COLUMN_NAME = "PARTITION_ID";
+
+   public static final int RES_SEARCH_URL_LENGTH = 768;
+
+   private static final long serialVersionUID = 1L;
+
+   private static final int PARTITION_ID_NULL_EQUIVALENT = -1;
+
+   @Column(name = RES_SEARCH_URL_COLUMN_NAME, length = RES_SEARCH_URL_LENGTH, nullable = false)
+   // Weird field name is to ensure that this is the first key in the index
+   private String my_A_SearchUrl;
+
+   @Column(name = PARTITION_ID_COLUMN_NAME, nullable = false, insertable = true, updatable = false)
+   // Weird field name is to ensure that this is the second key in the index
+   private Integer my_B_PartitionId;
+
+   public ResourceSearchUrlEntityPK() {}
+
+   public static ResourceSearchUrlEntityPK from(
+           String theSearchUrl, ResourceTable theResourceTable, boolean theSearchUrlDuplicateAcrossPartitionsEnabled) {
+       return new ResourceSearchUrlEntityPK(
+               theSearchUrl,
+               computePartitionIdOrNullEquivalent(theResourceTable, theSearchUrlDuplicateAcrossPartitionsEnabled));
+   }
+
+   public ResourceSearchUrlEntityPK(String theSearchUrl, int thePartitionId) {
+       my_A_SearchUrl = theSearchUrl;
+       my_B_PartitionId = thePartitionId;
+   }
+
+   public String getSearchUrl() {
+       return my_A_SearchUrl;
+   }
+
+   public void setSearchUrl(String theMy_A_SearchUrl) {
+       my_A_SearchUrl = theMy_A_SearchUrl;
+   }
+
+   public Integer getPartitionId() {
+       return my_B_PartitionId;
+   }
+
+   public void setPartitionId(Integer theMy_B_PartitionId) {
+       my_B_PartitionId = theMy_B_PartitionId;
+   }
+
+   @Override
+   public boolean equals(Object theO) {
+       if (this == theO) {
+           return true;
+       }
+       if (theO == null || getClass() != theO.getClass()) {
+           return false;
+       }
+       ResourceSearchUrlEntityPK that = (ResourceSearchUrlEntityPK) theO;
+       return Objects.equals(my_A_SearchUrl, that.my_A_SearchUrl)
+               && Objects.equals(my_B_PartitionId, that.my_B_PartitionId);
+   }
+
+   @Override
+   public int hashCode() {
+       return Objects.hash(my_A_SearchUrl, my_B_PartitionId);
+   }
+
+   @Override
+   public String toString() {
+       return new StringJoiner(", ", ResourceSearchUrlEntityPK.class.getSimpleName() + "[", "]")
+               .add("my_A_SearchUrl='" + my_A_SearchUrl + "'")
+               .add("my_B_PartitionId=" + my_B_PartitionId)
+               .toString();
+   }
+
+   private static int computePartitionIdOrNullEquivalent(
+           ResourceTable theResourceTable, boolean theSearchUrlDuplicateAcrossPartitionsEnabled) {
+       if (!theSearchUrlDuplicateAcrossPartitionsEnabled) {
+           return PARTITION_ID_NULL_EQUIVALENT;
+       }
+
+       return Optional.ofNullable(theResourceTable.getPartitionId())
+               .map(PartitionablePartitionId::getPartitionId)
+               .orElse(PARTITION_ID_NULL_EQUIVALENT);
+   }
+}

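A small sketch of the key semantics introduced above: two keys with the same URL but different partition IDs are distinct, which is what permits duplicate search URLs across partitions (values are illustrative):

import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntityPK;

public class PkSemanticsExample {
    public static void main(String[] args) {
        // Same canonicalized search URL, two different partitions:
        ResourceSearchUrlEntityPK pkPartition1 =
                new ResourceSearchUrlEntityPK("Task?identifier=http%3A%2F%2Ftempuri.org%7C1", 1);
        ResourceSearchUrlEntityPK pkPartition2 =
                new ResourceSearchUrlEntityPK("Task?identifier=http%3A%2F%2Ftempuri.org%7C1", 2);
        // Distinct composite keys, so both rows can coexist in HFJ_RES_SEARCH_URL.
        System.out.println(pkPartition1.equals(pkPartition2)); // false
        // With the feature flag off, every key collapses onto partition -1,
        // restoring the old uniqueness constraint on the URL alone.
    }
}
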
@@ -41,13 +41,14 @@ import java.util.stream.Collectors;
 import static java.util.Arrays.asList;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.jupiter.api.Assertions.fail;

 public class FhirResourceDaoR4ConcurrentCreateTest extends BaseJpaR4Test {

    private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoR4ConcurrentCreateTest.class);

+   private static final boolean IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE = false;
+
    ThreadGaterPointcutLatch myThreadGaterPointcutLatchInterceptor;
    UserRequestRetryVersionConflictsInterceptor myUserRequestRetryVersionConflictsInterceptor;
    ResourceConcurrentSubmitterSvc myResourceConcurrentSubmitterSvc;

@@ -132,12 +133,12 @@ public class FhirResourceDaoR4ConcurrentCreateTest extends BaseJpaR4Test {
        final ResourceTable resTable4 = myResourceTableDao.save(createResTable());

        Date tooOldBy10Minutes = cutOffTimeMinus(tenMinutes);
-       ResourceSearchUrlEntity tooOld1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.444", resTable1).setCreatedTime(tooOldBy10Minutes);
-       ResourceSearchUrlEntity tooOld2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.445", resTable2).setCreatedTime(tooOldBy10Minutes);
+       ResourceSearchUrlEntity tooOld1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.444", resTable1, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE).setCreatedTime(tooOldBy10Minutes);
+       ResourceSearchUrlEntity tooOld2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.445", resTable2, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE).setCreatedTime(tooOldBy10Minutes);

        Date tooNewBy10Minutes = cutOffTimePlus(tenMinutes);
-       ResourceSearchUrlEntity tooNew1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.446", resTable3).setCreatedTime(tooNewBy10Minutes);
-       ResourceSearchUrlEntity tooNew2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.447", resTable4).setCreatedTime(tooNewBy10Minutes);
+       ResourceSearchUrlEntity tooNew1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.446", resTable3, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE).setCreatedTime(tooNewBy10Minutes);
+       ResourceSearchUrlEntity tooNew2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.447", resTable4, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE).setCreatedTime(tooNewBy10Minutes);

        myResourceSearchUrlDao.saveAll(asList(tooOld1, tooOld2, tooNew1, tooNew2));

@@ -165,8 +166,8 @@ public class FhirResourceDaoR4ConcurrentCreateTest extends BaseJpaR4Test {
        final ResourceTable resTable1 = myResourceTableDao.save(createResTable());
        final ResourceTable resTable2 = myResourceTableDao.save(createResTable());

-       ResourceSearchUrlEntity entry1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.444", resTable1);
-       ResourceSearchUrlEntity entry2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.445", resTable2);
+       ResourceSearchUrlEntity entry1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.444", resTable1, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE);
+       ResourceSearchUrlEntity entry2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.445", resTable2, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE);
        myResourceSearchUrlDao.saveAll(asList(entry1, entry2));

        // when

@@ -1,9 +1,11 @@
 package ca.uhn.fhir.jpa.dao.r4;

 import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
+import ca.uhn.fhir.jpa.entity.PartitionEntity;
 import ca.uhn.fhir.jpa.model.dao.JpaPid;
 import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
 import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;

@@ -65,6 +67,7 @@ import org.springframework.transaction.support.TransactionTemplate;

 import java.math.BigDecimal;
 import java.time.Instant;
+import java.time.LocalDate;
 import java.time.temporal.ChronoUnit;
 import java.util.ArrayList;
 import java.util.Arrays;

@@ -79,6 +82,7 @@ import java.util.concurrent.Future;
 import java.util.stream.Collectors;

 import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertInstanceOf;

@@ -1335,6 +1339,62 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
        assertRemainingTasks();
    }

+   @ParameterizedTest
+   @ValueSource(booleans = {true, false})
+   void conditionalCreateSameIdentifierCrossPartition(boolean theIsSearchUrlDuplicateAcrossPartitionsEnabled) {
+       myPartitionSettings.setPartitioningEnabled(true);
+       myPartitionSettings.setConditionalCreateDuplicateIdentifiersEnabled(theIsSearchUrlDuplicateAcrossPartitionsEnabled);
+
+       final PartitionEntity partitionEntity1 = new PartitionEntity();
+       partitionEntity1.setId(1);
+       partitionEntity1.setName("Partition-A");
+       myPartitionDao.save(partitionEntity1);
+
+       final PartitionEntity partitionEntity2 = new PartitionEntity();
+       partitionEntity2.setId(2);
+       partitionEntity2.setName("Partition-B");
+       myPartitionDao.save(partitionEntity2);
+
+       final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext);
+       final String matchUrl = "identifier=http://tempuri.org|1";
+       bundleBuilder.addTransactionCreateEntry(myTask1, "urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea")
+               .conditional(matchUrl);
+
+       final RequestPartitionId requestPartitionId1 = RequestPartitionId.fromPartitionId(1, LocalDate.now());
+       final RequestPartitionId requestPartitionId2 = RequestPartitionId.fromPartitionId(2, LocalDate.now());
+
+       final List<Bundle.BundleEntryComponent> responseEntries1 = sendBundleAndGetResponse(bundleBuilder.getBundle(), requestPartitionId1);
+       assertEquals(1, responseEntries1.size());
+       final Bundle.BundleEntryComponent bundleEntry1 = responseEntries1.get(0);
+       assertEquals("201 Created", bundleEntry1.getResponse().getStatus());
+
+       if (!theIsSearchUrlDuplicateAcrossPartitionsEnabled) {
+           final IBaseBundle bundle = bundleBuilder.getBundle();
+           assertThatThrownBy(() -> sendBundleAndGetResponse(bundle, requestPartitionId2)).isInstanceOf(ResourceVersionConflictException.class);
+           return;
+       }
+
+       final List<Bundle.BundleEntryComponent> responseEntries2 = sendBundleAndGetResponse(bundleBuilder.getBundle(), requestPartitionId2);
+       assertEquals(1, responseEntries2.size());
+       final Bundle.BundleEntryComponent bundleEntry2 = responseEntries2.get(0);
+       assertEquals("201 Created", bundleEntry2.getResponse().getStatus());
+
+       final List<ResourceSearchUrlEntity> allSearchUrls = myResourceSearchUrlDao.findAll();
+
+       assertThat(allSearchUrls).hasSize(2);
+
+       final String resolvedSearchUrl = "Task?identifier=http%3A%2F%2Ftempuri.org%7C1";
+
+       final ResourceSearchUrlEntity resourceSearchUrlEntity1 = allSearchUrls.get(0);
+       final ResourceSearchUrlEntity resourceSearchUrlEntity2 = allSearchUrls.get(1);
+
+       assertThat(resourceSearchUrlEntity1.getSearchUrl()).isEqualTo(resolvedSearchUrl);
+       assertThat(resourceSearchUrlEntity1.getPartitionId()).isEqualTo(partitionEntity1.getId());
+
+       assertThat(resourceSearchUrlEntity2.getSearchUrl()).isEqualTo(resolvedSearchUrl);
+       assertThat(resourceSearchUrlEntity2.getPartitionId()).isEqualTo(partitionEntity2.getId());
+   }
+
    private void assertRemainingTasks(Task... theExpectedTasks) {
        final List<ResourceSearchUrlEntity> searchUrlsPreDelete = myResourceSearchUrlDao.findAll();

@@ -1352,6 +1412,14 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
        }
    }

+   private List<Bundle.BundleEntryComponent> sendBundleAndGetResponse(IBaseBundle theRequestBundle, RequestPartitionId thePartitionId) {
+       assertThat(theRequestBundle).isInstanceOf(Bundle.class);
+
+       final SystemRequestDetails requestDetails = new SystemRequestDetails();
+       requestDetails.setRequestPartitionId(thePartitionId);
+       return mySystemDao.transaction(requestDetails, (Bundle) theRequestBundle).getEntry();
+   }
+
    private List<Bundle.BundleEntryComponent> sendBundleAndGetResponse(IBaseBundle theRequestBundle) {
        assertTrue(theRequestBundle instanceof Bundle);

@@ -1,7 +1,5 @@
 package ca.uhn.fhir.jpa.dao.r4;

-import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.junit.jupiter.api.Assertions.assertFalse;
 import ca.uhn.fhir.context.support.IValidationSupport;
 import ca.uhn.fhir.context.support.ValidationSupportContext;
 import ca.uhn.fhir.context.support.ValueSetExpansionOptions;

@@ -20,6 +18,7 @@ import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet;
 import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
 import ca.uhn.fhir.jpa.validation.JpaValidationSupportChain;
 import ca.uhn.fhir.jpa.validation.ValidationSettings;
 import ca.uhn.fhir.parser.IParser;
 import ca.uhn.fhir.parser.LenientErrorHandler;
 import ca.uhn.fhir.parser.StrictErrorHandler;
 import ca.uhn.fhir.rest.api.EncodingEnum;

@@ -33,6 +32,8 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
 import ca.uhn.fhir.util.OperationOutcomeUtil;
 import ca.uhn.fhir.util.StopWatch;
 import ca.uhn.fhir.validation.IValidatorModule;
+import ch.qos.logback.classic.Level;
+import ch.qos.logback.classic.spi.ILoggingEvent;
 import org.apache.commons.io.IOUtils;
 import org.hl7.fhir.common.hapi.validation.support.InMemoryTerminologyServerValidationSupport;
 import org.hl7.fhir.common.hapi.validation.support.UnknownCodeSystemWarningValidationSupport;

@@ -40,13 +41,46 @@ import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
 import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
-import org.hl7.fhir.r4.model.*;
+import org.hl7.fhir.r4.model.AllergyIntolerance;
+import org.hl7.fhir.r4.model.Binary;
+import org.hl7.fhir.r4.model.Bundle;
+import org.hl7.fhir.r4.model.Bundle.BundleEntryComponent;
+import org.hl7.fhir.r4.model.CanonicalType;
+import org.hl7.fhir.r4.model.CapabilityStatement;
+import org.hl7.fhir.r4.model.CodeSystem;
+import org.hl7.fhir.r4.model.CodeType;
+import org.hl7.fhir.r4.model.CodeableConcept;
+import org.hl7.fhir.r4.model.Coding;
+import org.hl7.fhir.r4.model.Condition;
+import org.hl7.fhir.r4.model.DateTimeType;
+import org.hl7.fhir.r4.model.ElementDefinition;
+import org.hl7.fhir.r4.model.Enumerations;
+import org.hl7.fhir.r4.model.Group;
+import org.hl7.fhir.r4.model.IdType;
+import org.hl7.fhir.r4.model.IntegerType;
+import org.hl7.fhir.r4.model.Location;
+import org.hl7.fhir.r4.model.Narrative;
+import org.hl7.fhir.r4.model.Observation;
+import org.hl7.fhir.r4.model.Observation.ObservationStatus;
+import org.hl7.fhir.r4.model.OperationOutcome;
+import org.hl7.fhir.r4.model.Organization;
+import org.hl7.fhir.r4.model.Parameters;
+import org.hl7.fhir.r4.model.Patient;
+import org.hl7.fhir.r4.model.Practitioner;
+import org.hl7.fhir.r4.model.Quantity;
+import org.hl7.fhir.r4.model.Questionnaire;
+import org.hl7.fhir.r4.model.QuestionnaireResponse;
+import org.hl7.fhir.r4.model.Reference;
+import org.hl7.fhir.r4.model.SearchParameter;
+import org.hl7.fhir.r4.model.StringType;
+import org.hl7.fhir.r4.model.StructureDefinition;
+import org.hl7.fhir.r4.model.UriType;
+import org.hl7.fhir.r4.model.ValueSet;
 import org.hl7.fhir.r5.utils.validation.constants.BestPracticeWarningLevel;
 import org.hl7.fhir.r5.utils.validation.constants.ReferenceValidationPolicy;
 import org.hl7.fhir.utilities.i18n.I18nConstants;
 import org.hl7.fhir.utilities.xhtml.XhtmlNode;
+import org.intellij.lang.annotations.Language;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.Nested;

@@ -59,16 +93,19 @@ import org.springframework.test.util.AopTestUtils;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.List;
+import java.util.stream.Collectors;

 import static ca.uhn.fhir.rest.api.Constants.JAVA_VALIDATOR_DETAILS_SYSTEM;
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.jupiter.api.Assertions.fail;
 import static org.awaitility.Awaitility.await;
 import static org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTerminologyService.CURRENCIES_CODESYSTEM_URL;
 import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;

@@ -1452,6 +1489,181 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
        throw new IllegalStateException(); // shouldn't get here
    }

+   @Test
+   public void validateResource_withUnknownMetaProfileurl_validatesButLogsWarning() {
+       // setup
+       IParser parser = myFhirContext.newJsonParser();
+
+       myLogbackTestExtension.setUp(Level.WARN);
+
+       String obsStr = """
+           {
+             "resourceType": "Observation",
+             "meta": {
+               "profile": [
+                 "http://example.com/StructureDefinition|a|b|c"
+               ]
+             }
+           }
+           """;
+       Observation observation = parser.parseResource(Observation.class, obsStr);
+
+       // test
+       ValidationModeEnum mode = ValidationModeEnum.CREATE;
+       MethodOutcome outcome = myObservationDao.validate(observation, null, obsStr, EncodingEnum.JSON, mode, null, mySrd);
+
+       // verify
+       assertNotNull(outcome);
+       assertTrue(outcome.getOperationOutcome() instanceof OperationOutcome);
+       List<OperationOutcome.OperationOutcomeIssueComponent> issues = ((OperationOutcome) outcome.getOperationOutcome()).getIssue();
+       assertFalse(issues.isEmpty());
+       List<OperationOutcome.OperationOutcomeIssueComponent> errors = issues.stream()
+               .filter(i -> i.getSeverity() == OperationOutcome.IssueSeverity.ERROR)
+               .toList();
+       // we have errors
+       assertFalse(errors.isEmpty());
+
+       List<ILoggingEvent> events = myLogbackTestExtension.filterLoggingEventsWithPredicate(e -> e.getLevel() == Level.WARN);
+       // and we have warning logs
+       assertFalse(events.isEmpty());
+       assertTrue(events.stream().anyMatch(e -> e.getFormattedMessage().contains("Unrecognized profile uri")));
+   }
+
+   @Test
+   public void validateResource_withMetaProfileWithVersion_validatesAsExpected() {
+       // setup
+       IParser parser = myFhirContext.newJsonParser();
+
+       // create our structure definition
+       @Language("JSON")
+       String strDefStr = """
+           {
+             "resourceType": "StructureDefinition",
+             "id": "example-profile",
+             "url": "http://example.com/StructureDefinition",
+             "version": "1.0.0",
+             "name": "observation-example",
+             "title": "Example Profile",
+             "status": "active",
+             "experimental": false,
+             "date": "2016-03-25",
+             "description": "Example Profile",
+             "fhirVersion": "4.0.1",
+             "kind": "resource",
+             "abstract": false,
+             "type": "Observation",
+             "baseDefinition": "http://hl7.org/fhir/StructureDefinition/Observation",
+             "derivation": "constraint",
+             "differential": {
+               "element": [
+                 {
+                   "id": "Observation",
+                   "path": "Observation",
+                   "short": "Example Profile",
+                   "alias": [
+                     "Example"
+                   ],
+                   "min": 0,
+                   "max": "*"
+                 },
+                 {
+                   "id": "Observation.code",
+                   "path": "Observation.code",
+                   "short": "Coded Responses from C-CDA Vital Sign Results",
+                   "definition": "Coded Responses from C-CDA Vital Sign Results.",
+                   "requirements": "5. SHALL contain exactly one [1..1] code, where the @code SHOULD be selected from ValueSet Example",
+                   "min": 1,
+                   "max": "1",
+                   "type": [
+                     {
+                       "code": "CodeableConcept"
+                     }
+                   ],
+                   "mustSupport": true,
+                   "binding": {
+                     "strength": "required",
+                     "description": "This identifies the vital sign result type.",
+                     "valueSet": "http://example.com/valueset"
+                   }
+                 }
+               ]
+             }
+           }
+           """;
+       StructureDefinition sd = parser.parseResource(StructureDefinition.class, strDefStr);
+       myStructureDefinitionDao.create(sd, mySrd);
+
+       @Language("JSON")
+       String obsStr = """
+           {
+             "resourceType": "Observation",
+             "meta": {
+               "profile": [
+                 "http://example.com/StructureDefinition|1.0.0"
+               ]
+             },
+             "identifier": [
+               {
+                 "use": "official",
+                 "system": "http://www.bmc.nl/zorgportal/identifiers/observations",
+                 "value": "6323"
+               }
+             ],
+             "status": "final",
+             "code": {
+               "coding": [
+                 {
+                   "system": "http://example.com/codesystem",
+                   "code": "some-code",
+                   "display": "Some Code"
+                 }
+               ]
+             },
+             "subject": {
+               "reference": "Patient/1452"
+             },
+             "effectiveDateTime": "2013-05-02T09:30:10+01:00",
+             "issued": "2013-04-03T15:30:10+01:00",
+             "valueQuantity": {
+               "value": 6.3,
+               "unit": "mmol/l",
+               "system": "http://unitsofmeasure.org",
+               "code": "mmol/L"
+             },
+             "interpretation": [
+               {
+                 "coding": [
+                   {
+                     "system": "http://terminology.hl7.org/CodeSystem/v3-ObservationInterpretation",
+                     "code": "H",
+                     "display": "High"
+                   }
+                 ]
+               }
+             ]
+           }
+           """;
+       Observation observation = parser.parseResource(Observation.class, obsStr);
+
+       // test
+       ValidationModeEnum mode = ValidationModeEnum.CREATE;
+       MethodOutcome outcome = myObservationDao.validate(observation, null, obsStr, EncodingEnum.JSON, mode, null, mySrd);
+
+       // verify
+       assertNotNull(outcome);
+       assertTrue(outcome.getOperationOutcome() instanceof OperationOutcome);
+       List<OperationOutcome.OperationOutcomeIssueComponent> issues = ((OperationOutcome) outcome.getOperationOutcome()).getIssue();
+       assertFalse(issues.isEmpty());
+       List<OperationOutcome.OperationOutcomeIssueComponent> errors = issues.stream()
+               .filter(i -> i.getSeverity() == OperationOutcome.IssueSeverity.ERROR)
+               .toList();
+       // no errors - just warnings
+       assertTrue(errors.isEmpty(), errors.stream().map(OperationOutcome.OperationOutcomeIssueComponent::getDiagnostics).collect(Collectors.joining(",")));
+   }
+
    @Test
    public void testValidateResourceContainingProfileDeclarationInvalid() {
        String methodName = "testValidateResourceContainingProfileDeclarationInvalid";

@@ -113,8 +113,7 @@ public abstract class JpaEmbeddedDatabase {
    }

    public void executeSqlAsBatch(List<String> theStatements) {
-       try {
-           Statement statement = myConnection.createStatement();
+       try (final Statement statement = myConnection.createStatement()) {
            for (String sql : theStatements) {
                if (!StringUtils.isBlank(sql)) {
                    statement.addBatch(sql);

@@ -116,6 +116,7 @@ import ca.uhn.fhir.test.utilities.ITestDataBuilder;
 import ca.uhn.fhir.util.UrlUtil;
 import ca.uhn.fhir.validation.FhirValidator;
 import ca.uhn.fhir.validation.ValidationResult;
+import ca.uhn.test.util.LogbackTestExtension;
 import jakarta.persistence.EntityManager;
 import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport;
 import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;

@@ -566,6 +567,9 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
    @RegisterExtension
    private final PreventDanglingInterceptorsExtension myPreventDanglingInterceptorsExtension = new PreventDanglingInterceptorsExtension(() -> myInterceptorRegistry);

+   @RegisterExtension
+   public LogbackTestExtension myLogbackTestExtension = new LogbackTestExtension();
+
    @AfterEach()
    @Order(0)
    public void afterCleanupDao() {

@@ -10,6 +10,8 @@ import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
 import ca.uhn.fhir.system.HapiSystemProperties;
 import ca.uhn.fhir.test.utilities.docker.RequiresDocker;
 import ca.uhn.fhir.util.VersionEnum;
+import jakarta.annotation.Nonnull;
+import jakarta.annotation.Nullable;
 import org.apache.commons.dbcp2.BasicDataSource;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.Test;

@@ -21,8 +23,19 @@ import org.slf4j.LoggerFactory;
 import org.springframework.jdbc.core.JdbcTemplate;

 import javax.sql.DataSource;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
 import java.util.Properties;

 import static ca.uhn.fhir.jpa.embedded.HapiEmbeddedDatabasesExtension.FIRST_TESTED_VERSION;

@@ -38,6 +51,20 @@ public class HapiSchemaMigrationTest {
    private static final Logger ourLog = LoggerFactory.getLogger(HapiSchemaMigrationTest.class);
    public static final String TEST_SCHEMA_NAME = "test";

+   private static final String METADATA_COLUMN_NAME = "COLUMN_NAME";
+   private static final String METADATA_DATA_TYPE = "DATA_TYPE";
+   private static final String METADATA_IS_NULLABLE = "IS_NULLABLE";
+   private static final String METADATA_DEFAULT_VALUE = "COLUMN_DEF";
+   private static final String METADATA_IS_NULLABLE_NO = "NO";
+   private static final String METADATA_IS_NULLABLE_YES = "YES";
+
+   private static final String TABLE_HFJ_RES_SEARCH_URL = "HFJ_RES_SEARCH_URL";
+   private static final String COLUMN_RES_SEARCH_URL = "RES_SEARCH_URL";
+   private static final String COLUMN_PARTITION_ID = "PARTITION_ID";
+   private static final String COLUMN_PARTITION_DATE = "PARTITION_DATE";
+
+   private static final String NULL_PLACEHOLDER = "[NULL]";
+
    static {
        HapiSystemProperties.enableUnitTestMode();
    }

@@ -92,10 +119,131 @@ public class HapiSchemaMigrationTest {
        }

        verifyForcedIdMigration(dataSource);
+
+       verifyHfjResSearchUrlMigration(database, theDriverType);
    }

-   private static void migrate(DriverTypeEnum theDriverType, DataSource dataSource, HapiMigrationStorageSvc hapiMigrationStorageSvc, VersionEnum to) throws SQLException {
-       MigrationTaskList migrationTasks = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(new VersionEnum[]{to});
+   /**
+    * We start with a single record in HFJ_RES_SEARCH_URL:
+    * <p/>
+    * <ul>
+    * <li>Primary key: ONLY RES_SEARCH_URL</li>
+    * <li>PK: RES_SEARCH_URL: https://example.com</li>
+    * <li>CREATED_TIME: 2023-06-29 10:14:39.69</li>
+    * <li>RES_ID: 1678</li>
+    * </ul>
+    * <p/>
+    * Once the migration is complete, we should have:
+    * <ul>
+    * <li>Primary key: RES_SEARCH_URL, PARTITION_ID</li>
+    * <li>PK: RES_SEARCH_URL: https://example.com</li>
+    * <li>PK: PARTITION_ID: -1</li>
+    * <li>CREATED_TIME: 2023-06-29 10:14:39.69</li>
+    * <li>RES_ID: 1678</li>
+    * <li>PARTITION_DATE: null</li>
+    * </ul>
+    */
+   private void verifyHfjResSearchUrlMigration(JpaEmbeddedDatabase theDatabase, DriverTypeEnum theDriverType) throws SQLException {
+       final List<Map<String, Object>> allCount = theDatabase.query(String.format("SELECT count(*) FROM %s", TABLE_HFJ_RES_SEARCH_URL));
+       final List<Map<String, Object>> minusOnePartitionCount = theDatabase.query(String.format("SELECT count(*) FROM %s WHERE %s = -1", TABLE_HFJ_RES_SEARCH_URL, COLUMN_PARTITION_ID));
+
+       assertThat(minusOnePartitionCount).hasSize(1);
+       final Collection<Object> queryResultValues = minusOnePartitionCount.get(0).values();
+       assertThat(queryResultValues).hasSize(1);
+       final Object queryResultValue = queryResultValues.iterator().next();
+       assertThat(queryResultValue).isInstanceOf(Number.class);
+       if (queryResultValue instanceof Number queryResultNumber) {
+           assertThat(queryResultNumber.intValue()).isEqualTo(1);
+       }
+
+       final Object allCountValue = allCount.get(0).values().iterator().next();
+       if (allCountValue instanceof Number allCountNumber) {
+           assertThat(allCountNumber.intValue()).isEqualTo(1);
+       }
+
+       try (final Connection connection = theDatabase.getDataSource().getConnection()) {
+           final DatabaseMetaData tableMetaData = connection.getMetaData();
+
+           final List<Map<String, String>> actualColumnResults = new ArrayList<>();
+           try (final ResultSet columnsResultSet = tableMetaData.getColumns(null, null, TABLE_HFJ_RES_SEARCH_URL, null)) {
+               while (columnsResultSet.next()) {
+                   final Map<String, String> columnMap = new HashMap<>();
+                   actualColumnResults.add(columnMap);
+
+                   extractAndAddToMap(columnsResultSet, columnMap, METADATA_COLUMN_NAME);
+                   extractAndAddToMap(columnsResultSet, columnMap, METADATA_DATA_TYPE);
+                   extractAndAddToMap(columnsResultSet, columnMap, METADATA_IS_NULLABLE);
+                   extractAndAddToMap(columnsResultSet, columnMap, METADATA_DEFAULT_VALUE);
+               }
+           }
+
+           ourLog.info("6145: actualColumnResults: {}", actualColumnResults);
+
+           final List<Map<String, String>> actualPrimaryKeyResults = new ArrayList<>();
+
+           try (final ResultSet primaryKeyResultSet = tableMetaData.getPrimaryKeys(null, null, TABLE_HFJ_RES_SEARCH_URL)) {
+               while (primaryKeyResultSet.next()) {
+                   final Map<String, String> primaryKeyMap = new HashMap<>();
+                   actualPrimaryKeyResults.add(primaryKeyMap);
+                   extractAndAddToMap(primaryKeyResultSet, primaryKeyMap, METADATA_COLUMN_NAME);
+               }
+           }
+
+           final List<Map<String, String>> expectedPrimaryKeyResults = List.of(
+                   Map.of(METADATA_COLUMN_NAME, COLUMN_RES_SEARCH_URL),
+                   Map.of(METADATA_COLUMN_NAME, COLUMN_PARTITION_ID)
+           );
+
+           assertThat(expectedPrimaryKeyResults).containsAll(actualPrimaryKeyResults);
+
+           final List<Map<String, String>> expectedColumnResults = List.of(
+                   addExpectedColumnMetadata(COLUMN_RES_SEARCH_URL, Integer.toString(Types.VARCHAR), METADATA_IS_NULLABLE_NO, null),
+                   addExpectedColumnMetadata("RES_ID", getExpectedSqlTypeForResId(theDriverType), METADATA_IS_NULLABLE_NO, null),
+                   addExpectedColumnMetadata("CREATED_TIME", Integer.toString(Types.TIMESTAMP), METADATA_IS_NULLABLE_NO, null),
+                   addExpectedColumnMetadata(COLUMN_PARTITION_ID, getExpectedSqlTypeForPartitionId(theDriverType), METADATA_IS_NULLABLE_NO, "-1"),
+                   addExpectedColumnMetadata(COLUMN_PARTITION_DATE, getExpectedSqlTypeForPartitionDate(theDriverType), METADATA_IS_NULLABLE_YES, null)
+           );
+
+           assertThat(expectedColumnResults).containsAll(actualColumnResults);
+       }
+   }
+
+   @Nonnull
+   private Map<String, String> addExpectedColumnMetadata(String theColumnName, String theDataType, String theNullable, @Nullable String theDefaultValue) {
+       return Map.of(METADATA_COLUMN_NAME, theColumnName,
+               METADATA_DATA_TYPE, theDataType,
+               METADATA_IS_NULLABLE, theNullable,
+               METADATA_DEFAULT_VALUE, Optional.ofNullable(theDefaultValue)
+                       .orElse(NULL_PLACEHOLDER));
+   }
+
+   private String getExpectedSqlTypeForResId(DriverTypeEnum theDriverType) {
+       return DriverTypeEnum.ORACLE_12C == theDriverType
+               ? Integer.toString(Types.NUMERIC)
+               : Integer.toString(Types.BIGINT);
+   }
+
+   private String getExpectedSqlTypeForPartitionId(DriverTypeEnum theDriverType) {
+       return DriverTypeEnum.ORACLE_12C == theDriverType
+               ? Integer.toString(Types.NUMERIC)
+               : Integer.toString(Types.INTEGER);
+   }
+
+   private String getExpectedSqlTypeForPartitionDate(DriverTypeEnum theDriverType) {
+       return DriverTypeEnum.ORACLE_12C == theDriverType
+               ? Integer.toString(Types.TIMESTAMP)
+               : Integer.toString(Types.DATE);
+   }
+
+   private void extractAndAddToMap(ResultSet theResultSet, Map<String, String> theMap, String theColumn) throws SQLException {
+       theMap.put(theColumn, Optional.ofNullable(theResultSet.getString(theColumn))
+               .map(defaultValueNonNull -> defaultValueNonNull.equals("((-1))") ? "-1" : defaultValueNonNull) // MSSQL returns "((-1))" for the default value
+               .map(String::toUpperCase)
+               .orElse(NULL_PLACEHOLDER));
+   }
+
+   private static void migrate(DriverTypeEnum theDriverType, DataSource dataSource, HapiMigrationStorageSvc hapiMigrationStorageSvc, VersionEnum to) {
+       MigrationTaskList migrationTasks = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(to);
        SchemaMigrator schemaMigrator = new SchemaMigrator(TEST_SCHEMA_NAME, HAPI_FHIR_MIGRATION_TABLENAME, dataSource, new Properties(), migrationTasks, hapiMigrationStorageSvc);
        schemaMigrator.setDriverType(theDriverType);
        schemaMigrator.createMigrationTableIfRequired();

@@ -103,7 +251,7 @@ public class HapiSchemaMigrationTest {
    }

    /**
-    * For bug https://github.com/hapifhir/hapi-fhir/issues/5546
+    * For bug <a href="https://github.com/hapifhir/hapi-fhir/issues/5546">https://github.com/hapifhir/hapi-fhir/issues/5546</a>
     */
    private void verifyForcedIdMigration(DataSource theDataSource) throws SQLException {
        JdbcTemplate jdbcTemplate = new JdbcTemplate(theDataSource);

@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;

 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.jpa.migrate.JdbcUtils;
+import jakarta.annotation.Nonnull;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -74,17 +75,20 @@ public class AddColumnTask extends BaseTableColumnTypeTask {
            case MYSQL_5_7:
            case MARIADB_10_1:
                // Quote the column name as "SYSTEM" is a reserved word in MySQL
-               sql = "alter table " + getTableName() + " add column `" + getColumnName() + "` " + typeStatement;
+               sql = "alter table " + getTableName() + " add column `" + getColumnName() + "` " + typeStatement
+                       + buildDefaultClauseIfApplicable();
                break;
            case DERBY_EMBEDDED:
            case POSTGRES_9_4:
            case COCKROACHDB_21_1:
-               sql = "alter table " + getTableName() + " add column " + getColumnName() + " " + typeStatement;
+               sql = "alter table " + getTableName() + " add column " + getColumnName() + " " + typeStatement
+                       + buildDefaultClauseIfApplicable();
                break;
            case MSSQL_2012:
            case ORACLE_12C:
            case H2_EMBEDDED:
-               sql = "alter table " + getTableName() + " add " + getColumnName() + " " + typeStatement;
+               sql = "alter table " + getTableName() + " add " + getColumnName() + " " + typeStatement
+                       + buildDefaultClauseIfApplicable();
                break;
            default:
                throw new IllegalStateException(Msg.code(60));

@@ -94,6 +98,11 @@ public class AddColumnTask extends BaseTableColumnTypeTask {
        executeSql(getTableName(), sql);
    }

+   @Nonnull
+   private String buildDefaultClauseIfApplicable() {
+       return buildString(getDefaultValue(), (obj -> " default " + obj), "");
+   }
+
    public String getTypeStatement() {
        String type = getSqlType();
        String nullable = getSqlNotNull();

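For illustration, a stand-alone sketch mirroring the default-clause logic above (table, column, and type names are illustrative; the real type statement comes from the ColumnTypeEnum-to-driver mapping):

import java.util.Optional;
import java.util.function.Function;

public class DefaultClauseDemo {
    // Same Optional-based helper shape as BaseTableColumnTypeTask.buildString().
    static String buildString(Object theValue, Function<Object, String> theMapper, String theDefaultResult) {
        return Optional.ofNullable(theValue).map(theMapper).orElse(theDefaultResult);
    }

    public static void main(String[] args) {
        // With a default value configured, the task appends " default <value>":
        System.out.println("alter table HFJ_RES_SEARCH_URL add column PARTITION_ID int"
                + buildString(-1, obj -> " default " + obj, ""));
        // Without one, the clause is empty and the DDL is unchanged:
        System.out.println("alter table FOO add column BAR int"
                + buildString(null, obj -> " default " + obj, ""));
    }
}
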
@ -0,0 +1,78 @@
/*-
 * #%L
 * HAPI FHIR Server - SQL Migration
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.i18n.Msg;
import jakarta.annotation.Nonnull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;

/**
 * Migration task that handles cross-database logic for adding a new primary key.
 */
public class AddPrimaryKeyTask extends BaseTableTask {
	private static final Logger ourLog = LoggerFactory.getLogger(AddPrimaryKeyTask.class);

	private final List<String> myPrimaryKeyColumnsInOrder;

	public AddPrimaryKeyTask(
			String theProductVersion, String theSchemaVersion, String theTableName, String... theColumnsInOrder) {
		super(theProductVersion, theSchemaVersion);
		setTableName(theTableName);

		myPrimaryKeyColumnsInOrder = Arrays.asList(theColumnsInOrder);
	}

	@Nonnull
	private String generateSql() {
		switch (getDriverType()) {
			case MYSQL_5_7:
			case MARIADB_10_1:
			case POSTGRES_9_4:
			case DERBY_EMBEDDED:
			case H2_EMBEDDED:
			case ORACLE_12C:
			case MSSQL_2012:
			case COCKROACHDB_21_1:
				return String.format(
						"ALTER TABLE %s ADD PRIMARY KEY (%s)",
						getTableName(), String.join(", ", myPrimaryKeyColumnsInOrder));
			default:
				throw new IllegalStateException(String.format(
						"%s Unknown driver type. Cannot add primary key for task %s",
						Msg.code(2531), getMigrationVersion()));
		}
	}

	@Override
	protected void doExecute() throws SQLException {
		logInfo(
				ourLog,
				"Going to add a primary key on table {} for columns {}",
				getTableName(),
				myPrimaryKeyColumnsInOrder);

		executeSql(getTableName(), generateSql());
	}
}
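Since the ADD PRIMARY KEY syntax is identical across all supported databases, generateSql() needs no per-driver branches beyond the unknown-driver guard. A small standalone sketch of the statement it builds, with a hypothetical table and columns:

import java.util.List;

class AddPrimaryKeySqlSketch {
	public static void main(String[] args) {
		// Same String.format shape as generateSql() above; names are made up
		String sql = String.format(
				"ALTER TABLE %s ADD PRIMARY KEY (%s)",
				"HFJ_EXAMPLE", String.join(", ", List.of("PARTITION_ID", "RES_ID")));
		System.out.println(sql); // ALTER TABLE HFJ_EXAMPLE ADD PRIMARY KEY (PARTITION_ID, RES_ID)
	}
}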
@ -19,18 +19,24 @@
 */
package ca.uhn.fhir.jpa.migrate.taskdef;

import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;

import java.util.Optional;
import java.util.Set;
import java.util.function.Function;

public abstract class BaseTableColumnTypeTask extends BaseTableColumnTask {
	private ColumnTypeEnum myColumnType;
	private Boolean myNullable;
	private Long myColumnLength;

	@Nullable
	private Object myDefaultValue;

	/**
	 * Constructor
	 */

@ -99,6 +105,21 @@ public abstract class BaseTableColumnTypeTask extends BaseTableColumnTask {
		return this;
	}

	@Nullable
	public Object getDefaultValue() {
		return myDefaultValue;
	}

	@Nonnull
	String buildString(@Nullable Object theValue, Function<Object, String> doIfNull, String theDefaultResult) {
		return Optional.ofNullable(theValue).map(doIfNull).orElse(theDefaultResult);
	}

	public BaseTableColumnTypeTask setDefaultValue(Object theDefaultValue) {
		myDefaultValue = theDefaultValue;
		return this;
	}

	@Override
	protected void generateHashCode(HashCodeBuilder theBuilder) {
		super.generateHashCode(theBuilder);
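buildString() is a small Optional fold: the mapper runs only when the value is non-null (despite the doIfNull parameter name), otherwise the default result is returned. A self-contained illustration of the same logic with invented inputs:

import java.util.Optional;
import java.util.function.Function;

class BuildStringSketch {
	// Same logic as buildString() above, lifted out for illustration
	static String buildString(Object theValue, Function<Object, String> theMapper, String theDefault) {
		return Optional.ofNullable(theValue).map(theMapper).orElse(theDefault);
	}

	public static void main(String[] args) {
		Function<Object, String> toDefaultClause = obj -> " default " + obj;
		System.out.println("[" + buildString(0, toDefaultClause, "") + "]"); // [ default 0]
		System.out.println("[" + buildString(null, toDefaultClause, "") + "]"); // []
	}
}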
@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.migrate.HapiMigrationException;
import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import ca.uhn.fhir.system.HapiSystemProperties;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;

@ -36,6 +37,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.dao.DataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.ResultSetExtractor;
import org.springframework.transaction.support.TransactionTemplate;

import java.sql.SQLException;

@ -174,6 +176,17 @@ public abstract class BaseTask {
		captureExecutedStatement(theTableName, theSql, theArguments);
	}

	protected <T> T executeSqlWithResult(
			@Language("SQL") String theSql, ResultSetExtractor<T> theResultSetExtractor, Object... theArguments) {
		if (myTransactional) {
			return getConnectionProperties()
					.getTxTemplate()
					.execute(t -> doExecuteSqlWithResult(theSql, theResultSetExtractor, theArguments));
		}

		return doExecuteSqlWithResult(theSql, theResultSetExtractor, theArguments);
	}

	protected void executeSqlListInTransaction(String theTableName, List<String> theSqlStatements) {
		if (!isDryRun()) {
			Integer changes;
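executeSqlWithResult() mirrors executeSql() but returns a value, wrapping the query in the transaction template when the task is transactional. Callers supply Spring's ResultSetExtractor callback; a minimal sketch of the extractor shape (the same single-value shape DropPrimaryKeyTask passes in later in this commit):

import org.springframework.jdbc.core.ResultSetExtractor;

class FirstColumnExtractorSketch {
	// First column of the first row, or null when the query matched nothing
	static final ResultSetExtractor<String> FIRST_STRING = rs -> rs.next() ? rs.getString(1) : null;
}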
@ -207,7 +220,9 @@ public abstract class BaseTask {
			return changesCount;
		} catch (DataAccessException e) {
			if (myFlags.contains(TaskFlagEnum.FAILURE_ALLOWED)) {
				ourLog.info("Task {} did not exit successfully, but task is allowed to fail", getMigrationVersion());
				ourLog.info(
						"Task {} did not exit successfully on doExecuteSql(), but task is allowed to fail",
						getMigrationVersion());
				ourLog.debug("Error was: {}", e.getMessage(), e);
				return 0;
			} else {

@ -217,6 +232,32 @@ public abstract class BaseTask {
		}
	}

	@Nullable
	private <T> T doExecuteSqlWithResult(
			@Language("SQL") String theSql, ResultSetExtractor<T> theResultSetExtractor, Object... theArguments) {
		final JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
		// 0 means no timeout -- we use this for index rebuilds that may take time.
		jdbcTemplate.setQueryTimeout(0);
		try {
			T result = jdbcTemplate.query(theSql, theResultSetExtractor, theArguments);
			if (!HapiSystemProperties.isUnitTestModeEnabled()) {
				logInfo(ourLog, "SQL \"{}\" returned result {}", theSql, result);
			}
			return result;
		} catch (DataAccessException e) {
			if (myFlags.contains(TaskFlagEnum.FAILURE_ALLOWED)) {
				ourLog.info(
						"Task {} did not exit successfully on doExecuteSqlWithResult(), but task is allowed to fail",
						getMigrationVersion());
				ourLog.debug("Error was: {}", e.getMessage(), e);
				return null;
			} else {
				throw new HapiMigrationException(
						Msg.code(2532) + "Failed during task " + getMigrationVersion() + ": " + e, e);
			}
		}
	}

	protected void captureExecutedStatement(
			String theTableName, @Language("SQL") String theSql, Object... theArguments) {
		myExecutedStatements.add(new ExecutedStatement(mySchemaVersion, theTableName, theSql, theArguments));
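Both execute paths share the same error policy: tasks carrying the FAILURE_ALLOWED flag log and swallow the failure, everything else escalates. A standalone model of that control flow (no HAPI types, purely illustrative):

class FailureAllowedSketch {
	// When failure is allowed, a failed statement is absorbed instead of
	// aborting the migration, mirroring the "return 0" / "return null" branches above
	static Integer run(boolean theFailureAllowed, Runnable theStatement) {
		try {
			theStatement.run();
			return 1;
		} catch (RuntimeException e) {
			if (theFailureAllowed) {
				return 0;
			}
			throw e; // mirrors the HapiMigrationException rethrow
		}
	}

	public static void main(String[] args) {
		System.out.println(run(true, () -> {
			throw new RuntimeException("simulated SQL failure");
		})); // prints 0
	}
}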
@ -0,0 +1,151 @@
/*-
 * #%L
 * HAPI FHIR Server - SQL Migration
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.ResultSetExtractor;

import java.sql.SQLException;

/**
 * Migration task that handles cross-database logic for dropping a primary key.
 * <p>
 * The process involves 2 steps for most databases:
 * <ol>
 * <li>Running SQL to introspect the metadata tables to determine the name of the primary key.</li>
 * <li>Running an ALTER TABLE to drop the constraint found above by name.</li>
 * </ol>
 */
public class DropPrimaryKeyTask extends BaseTableTask {
	private static final Logger ourLog = LoggerFactory.getLogger(DropPrimaryKeyTask.class);

	public DropPrimaryKeyTask(String theProductVersion, String theSchemaVersion, String theTableName) {
		super(theProductVersion, theSchemaVersion);
		setTableName(theTableName);
	}

	@Nonnull
	private String generateSql() {
		ourLog.debug("DropPrimaryKeyTask.generateSql()");

		final ResultSetExtractor<String> resultSetExtractor = rs -> {
			if (rs.next()) {
				final String singleResult = rs.getString(1);

				if (rs.next()) {
					throw new IllegalArgumentException(Msg.code(2533)
							+ "Expecting only a single result for the table primary key but got multiple for task: "
							+ getMigrationVersion());
				}

				return singleResult;
			}
			return null;
		};

		@Nullable
		@Language("SQL")
		final String primaryKeyNameSql = generatePrimaryKeyNameSql();

		@Nullable
		final String primaryKeyName = primaryKeyNameSql != null
				? executeSqlWithResult(primaryKeyNameSql, resultSetExtractor, getTableNameWithDatabaseExpectedCase())
				: null;

		ourLog.debug("primaryKeyName: {} for driver: {}", primaryKeyName, getDriverType());

		return generateDropPrimaryKeySql(primaryKeyName);
	}

	private String getTableNameWithDatabaseExpectedCase() {
		if (DriverTypeEnum.ORACLE_12C == getDriverType()) {
			return getTableName().toUpperCase();
		}

		return getTableName().toLowerCase();
	}

	@Override
	protected void doExecute() throws SQLException {
		logInfo(ourLog, "Going to DROP the PRIMARY KEY on table {}", getTableName());

		executeSql(getTableName(), generateSql());
	}

	private String generateDropPrimaryKeySql(@Nullable String thePrimaryKeyName) {
		switch (getDriverType()) {
			case MARIADB_10_1:
			case DERBY_EMBEDDED:
			case H2_EMBEDDED:
				@Language("SQL")
				final String sqlH2 = "ALTER TABLE %s DROP PRIMARY KEY";
				return String.format(sqlH2, getTableName());
			case POSTGRES_9_4:
			case ORACLE_12C:
			case MSSQL_2012:
			case MYSQL_5_7:
				assert thePrimaryKeyName != null;
				@Language("SQL")
				final String sql = "ALTER TABLE %s DROP CONSTRAINT %s";
				return String.format(sql, getTableName(), thePrimaryKeyName);
			default:
				throw new IllegalStateException(String.format(
						"%s Unknown driver type: %s. Cannot drop primary key: %s for task %s",
						Msg.code(2529), getDriverType(), getTableName(), getMigrationVersion()));
		}
	}

	@Language("SQL")
	@Nullable
	private String generatePrimaryKeyNameSql() {
		switch (getDriverType()) {
			case MYSQL_5_7:
			case MARIADB_10_1:
			case DERBY_EMBEDDED:
			case COCKROACHDB_21_1:
			case H2_EMBEDDED:
				return null; // Irrelevant: We don't need to run the SQL for these databases.
			case POSTGRES_9_4:
				return "SELECT constraint_name " + "FROM information_schema.table_constraints "
						+ "WHERE table_schema = 'public' "
						+ "AND constraint_type = 'PRIMARY KEY' "
						+ "AND table_name = ?";
			case ORACLE_12C:
				return "SELECT constraint_name " + "FROM user_constraints "
						+ "WHERE constraint_type = 'P' "
						+ "AND table_name = ?";
			case MSSQL_2012:
				return "SELECT tc.constraint_name " + "FROM information_schema.table_constraints tc "
						+ "JOIN information_schema.constraint_column_usage ccu ON tc.constraint_name = ccu.constraint_name "
						+ "WHERE tc.constraint_type = 'PRIMARY KEY' "
						+ "AND tc.table_name = ?";
			default:
				throw new IllegalStateException(String.format(
						"%s Unknown driver type: %s Cannot find primary key to drop for task %s",
						Msg.code(2530), getDriverType(), getMigrationVersion()));
		}
	}
}
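To make the two-step flow concrete, here is a standalone sketch (not part of the commit) of what the task effectively runs on Postgres, for a hypothetical table hfj_example whose introspection query returns a constraint named hfj_example_pkey:

class DropPrimaryKeySqlSketch {
	public static void main(String[] args) {
		// Step 1: the introspection query from generatePrimaryKeyNameSql(), with the
		// table name bound to the ? parameter (lower-cased for Postgres)
		String step1 = "SELECT constraint_name FROM information_schema.table_constraints "
				+ "WHERE table_schema = 'public' AND constraint_type = 'PRIMARY KEY' "
				+ "AND table_name = 'hfj_example'";
		// Step 2: drop the constraint discovered in step 1, by name
		String step2 = String.format("ALTER TABLE %s DROP CONSTRAINT %s", "HFJ_EXAMPLE", "hfj_example_pkey");
		System.out.println(step1);
		System.out.println(step2);
	}
}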
@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.migrate.taskdef.AddColumnTask;
import ca.uhn.fhir.jpa.migrate.taskdef.AddForeignKeyTask;
import ca.uhn.fhir.jpa.migrate.taskdef.AddIdGeneratorTask;
import ca.uhn.fhir.jpa.migrate.taskdef.AddIndexTask;
import ca.uhn.fhir.jpa.migrate.taskdef.AddPrimaryKeyTask;
import ca.uhn.fhir.jpa.migrate.taskdef.AddTableByColumnTask;
import ca.uhn.fhir.jpa.migrate.taskdef.AddTableRawSqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTableTask;

@ -35,6 +36,7 @@ import ca.uhn.fhir.jpa.migrate.taskdef.DropColumnTask;
import ca.uhn.fhir.jpa.migrate.taskdef.DropForeignKeyTask;
import ca.uhn.fhir.jpa.migrate.taskdef.DropIdGeneratorTask;
import ca.uhn.fhir.jpa.migrate.taskdef.DropIndexTask;
import ca.uhn.fhir.jpa.migrate.taskdef.DropPrimaryKeyTask;
import ca.uhn.fhir.jpa.migrate.taskdef.DropTableTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ExecuteRawSqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ExecuteTaskPrecondition;

@ -47,6 +49,7 @@ import ca.uhn.fhir.jpa.migrate.taskdef.NopTask;
import ca.uhn.fhir.jpa.migrate.taskdef.RenameColumnTask;
import ca.uhn.fhir.jpa.migrate.taskdef.RenameIndexTask;
import ca.uhn.fhir.jpa.migrate.taskdef.RenameTableTask;
import jakarta.annotation.Nullable;
import org.apache.commons.lang3.Validate;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;

@ -260,7 +263,13 @@ public class Builder {
		}

		public BuilderWithTableName.BuilderAddColumnWithName addColumn(String theVersion, String theColumnName) {
			return new BuilderWithTableName.BuilderAddColumnWithName(myRelease, theVersion, theColumnName, this);
			return new BuilderWithTableName.BuilderAddColumnWithName(myRelease, theVersion, theColumnName, null, this);
		}

		public BuilderWithTableName.BuilderAddColumnWithName addColumn(
				String theVersion, String theColumnName, Object theDefaultValue) {
			return new BuilderWithTableName.BuilderAddColumnWithName(
					myRelease, theVersion, theColumnName, theDefaultValue, this);
		}

		public BuilderCompleteTask dropColumn(String theVersion, String theColumnName) {
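A hedged usage sketch of the new overload from a migration task definition. The version label, table, and column are invented, and the nullable()/type() continuation is assumed to be the usual builder chain:

import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeEnum;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;

class AddColumnUsageSketch {
	// theTable would come from version.onTable("HFJ_EXAMPLE") in a BaseMigrationTasks subclass
	static void addColumnWithDefault(Builder.BuilderWithTableName theTable) {
		theTable.addColumn("20240101.10", "NEW_COL", 0) // 0 is rendered as "default 0" in the DDL
				.nullable()
				.type(ColumnTypeEnum.INT);
	}
}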
@ -317,6 +326,10 @@ public class Builder {
			return Optional.ofNullable(myLastAddedTask);
		}

		public void addPrimaryKey(String theVersion, String... theColumnsInOrder) {
			addTask(new AddPrimaryKeyTask(myRelease, theVersion, myTableName, theColumnsInOrder));
		}

		/**
		 * @param theFkName the name of the foreign key
		 * @param theParentTableName the name of the table that exports the foreign key

@ -362,6 +375,11 @@ public class Builder {
			return new BuilderCompleteTask(task);
		}

		public void dropPrimaryKey(String theVersion) {
			final DropPrimaryKeyTask task = new DropPrimaryKeyTask(myRelease, theVersion, myTableName);
			addTask(task);
		}

		public class BuilderAddIndexWithName {
			private final String myVersion;
			private final String myIndexName;
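Together, the two new builder hooks let a migration replace a table's key in place. A hypothetical sketch (version labels and column names invented; both methods come straight from this commit):

import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;

class PrimaryKeyMigrationSketch {
	// theTable would come from version.onTable("HFJ_EXAMPLE") in a BaseMigrationTasks subclass
	static void swapPrimaryKey(Builder.BuilderWithTableName theTable) {
		theTable.dropPrimaryKey("20240101.20"); // introspect, then drop the current PK
		theTable.addPrimaryKey("20240101.21", "PARTITION_ID", "RES_ID"); // composite replacement
	}
}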
@ -547,16 +565,22 @@ public class Builder {
			private final String myRelease;
			private final String myVersion;
			private final String myColumnName;

			@Nullable
			private final Object myDefaultValue;

			private final BaseMigrationTasks.IAcceptsTasks myTaskSink;

			public BuilderAddColumnWithName(
					String theRelease,
					String theVersion,
					String theColumnName,
					@Nullable Object theDefaultValue,
					BaseMigrationTasks.IAcceptsTasks theTaskSink) {
				myRelease = theRelease;
				myVersion = theVersion;
				myColumnName = theColumnName;
				myDefaultValue = theDefaultValue;
				myTaskSink = theTaskSink;
			}

@ -593,6 +617,7 @@ public class Builder {
				if (theLength != null) {
					task.setColumnLength(theLength);
				}
				task.setDefaultValue(myDefaultValue);
				myTaskSink.addTask(task);

				return new BuilderCompleteTask(task);

@ -719,7 +744,7 @@ public class Builder {
		}

		public BuilderAddColumnWithName addColumn(String theColumnName) {
			return new BuilderAddColumnWithName(myRelease, myVersion, theColumnName, this);
			return new BuilderAddColumnWithName(myRelease, myVersion, theColumnName, null, this);
		}

		@Override
@ -43,6 +43,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;

@ -148,10 +149,15 @@ public class Dstu2_1BundleFactory implements IVersionSpecificBundleFactory {
					}
				}

				// Populate Bundle.entry.search
				BundleEntrySearchModeEnum searchMode = ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.get(nextAsResource);
				if (searchMode != null) {
					entry.getSearch().getModeElement().setValueAsString(searchMode.getCode());
				}
				BigDecimal searchScore = ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.get(nextAsResource);
				if (searchScore != null) {
					entry.getSearch().getScoreElement().setValue(searchScore);
				}
			}

			/*
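The bundle factories only copy the score; something upstream must have attached it to the resource first. A hedged producer-side sketch using the ResourceMetadataKeyEnum API (the resource and score values are arbitrary):

import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import org.hl7.fhir.instance.model.api.IBaseResource;

import java.math.BigDecimal;

class SearchScoreProducerSketch {
	// Tag a matched resource so the factories copy mode and score into
	// Bundle.entry.search when the search set is assembled
	static void markAsMatch(IBaseResource theResource, double theScore) {
		ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(theResource, BundleEntrySearchModeEnum.MATCH);
		ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.put(theResource, BigDecimal.valueOf(theScore));
	}
}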
@ -41,6 +41,7 @@ import jakarta.annotation.Nonnull;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;

@ -138,10 +139,15 @@ public class Dstu2BundleFactory implements IVersionSpecificBundleFactory {
				}
				populateBundleEntryFullUrl(next, entry);

				// Populate Bundle.entry.search
				BundleEntrySearchModeEnum searchMode = ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.get(next);
				if (searchMode != null) {
					entry.getSearch().getModeElement().setValue(searchMode.getCode());
				}
				BigDecimal searchScore = ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.get(next);
				if (searchScore != null) {
					entry.getSearch().getScoreElement().setValue(searchScore);
				}
			}

			/*
@ -44,6 +44,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;

@ -173,6 +174,10 @@ public class Dstu3BundleFactory implements IVersionSpecificBundleFactory {
				if (searchMode != null) {
					entry.getSearch().getModeElement().setValueAsString(searchMode.getCode());
				}
				BigDecimal searchScore = ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.get(nextAsResource);
				if (searchScore != null) {
					entry.getSearch().getScoreElement().setValue(searchScore);
				}
			}

			/*
@ -45,6 +45,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;

@ -178,6 +179,10 @@ public class Dstu2Hl7OrgBundleFactory implements IVersionSpecificBundleFactory {
				if (searchMode != null) {
					entry.getSearch().getModeElement().setValueAsString(searchMode.getCode());
				}
				BigDecimal searchScore = ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.get(nextAsResource);
				if (searchScore != null) {
					entry.getSearch().getScoreElement().setValue(searchScore);
				}
			}

			/*
@ -45,6 +45,7 @@ import org.hl7.fhir.r4.model.DomainResource;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Resource;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;

@ -181,6 +182,10 @@ public class R4BundleFactory implements IVersionSpecificBundleFactory {
				if (searchMode != null) {
					entry.getSearch().getModeElement().setValueAsString(searchMode.getCode());
				}
				BigDecimal searchScore = ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.get(nextAsResource);
				if (searchScore != null) {
					entry.getSearch().getScoreElement().setValue(searchScore);
				}
			}

			/*
@ -30,6 +30,7 @@ import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;

import java.math.BigDecimal;
import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;

@ -550,6 +551,55 @@ public class BundleUtilTest {
		assertNotNull(searchBundleEntryParts.get(0).getResource());
	}

	@Test
	public void testConvertingToSearchBundleEntryPartsReturnsScore() {

		//Given
		String bundleString = """
			{
				"resourceType": "Bundle",
				"id": "bd194b7f-ac1e-429a-a206-ee2c470f23b5",
				"type": "searchset",
				"total": 1,
				"link": [
					{
						"relation": "self",
						"url": "http://localhost:8000/Condition?_count=1"
					}
				],
				"entry": [
					{
						"fullUrl": "http://localhost:8000/Condition/1626",
						"resource": {
							"resourceType": "Condition",
							"id": "1626",
							"identifier": [
								{
									"system": "urn:hssc:musc:conditionid",
									"value": "1064115000.1.5"
								}
							]
						},
						"search": {
							"mode": "match",
							"score": 1
						}
					}
				]
			}""";
		Bundle bundle = ourCtx.newJsonParser().parseResource(Bundle.class, bundleString);

		//When
		List<SearchBundleEntryParts> searchBundleEntryParts = BundleUtil.getSearchBundleEntryParts(ourCtx, bundle);

		//Then
		assertThat(searchBundleEntryParts).hasSize(1);
		assertEquals(BundleEntrySearchModeEnum.MATCH, searchBundleEntryParts.get(0).getSearchMode());
		assertEquals(new BigDecimal(1), searchBundleEntryParts.get(0).getSearchScore());
		assertThat(searchBundleEntryParts.get(0).getFullUrl()).contains("Condition/1626");
		assertNotNull(searchBundleEntryParts.get(0).getResource());
	}

	@Test
	public void testTransactionSorterReturnsDeletesInCorrectProcessingOrder() {
		Bundle b = new Bundle();
@ -44,6 +44,7 @@ import org.hl7.fhir.r4b.model.DomainResource;
import org.hl7.fhir.r4b.model.IdType;
import org.hl7.fhir.r4b.model.Resource;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;

@ -177,6 +178,10 @@ public class R4BBundleFactory implements IVersionSpecificBundleFactory {
				if (searchMode != null) {
					entry.getSearch().getModeElement().setValueAsString(searchMode.getCode());
				}
				BigDecimal searchScore = ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.get(nextAsResource);
				if (searchScore != null) {
					entry.getSearch().getScoreElement().setValue(searchScore);
				}
			}

			/*
@ -44,6 +44,7 @@ import org.hl7.fhir.r5.model.DomainResource;
import org.hl7.fhir.r5.model.IdType;
import org.hl7.fhir.r5.model.Resource;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;

@ -177,6 +178,10 @@ public class R5BundleFactory implements IVersionSpecificBundleFactory {
				if (searchMode != null) {
					entry.getSearch().getModeElement().setValueAsString(searchMode.getCode());
				}
				BigDecimal searchScore = ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.get(nextAsResource);
				if (searchScore != null) {
					entry.getSearch().getScoreElement().setValue(searchScore);
				}
			}

			/*
@ -51,7 +51,11 @@ import org.slf4j.LoggerFactory;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

@ -486,12 +490,22 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo
	}

	@Override
	public <T extends Resource> T fetchResource(Class<T> class_, String uri) {

		if (isBlank(uri)) {
	public <T extends Resource> T fetchResource(Class<T> class_, String theUri) {
		if (isBlank(theUri)) {
			return null;
		}

		String uri = theUri;
		// handle profile version, if present
		if (theUri.contains("|")) {
			String[] parts = theUri.split("\\|");
			if (parts.length == 2) {
				uri = parts[0];
			} else {
				ourLog.warn("Unrecognized profile uri: {}", theUri);
			}
		}

		ResourceKey key = new ResourceKey(class_.getSimpleName(), uri);
		@SuppressWarnings("unchecked")
		T retVal = (T) myFetchResourceCache.get(key);
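The version-stripping logic is plain string handling, so it can be exercised in isolation. A self-contained sketch using a hypothetical canonical URL:

class CanonicalUriSketch {
	public static void main(String[] args) {
		// "url|version" loses its version before the cache lookup, matching the change above
		String theUri = "http://example.org/StructureDefinition/my-profile|1.2.0";
		String uri = theUri;
		if (theUri.contains("|")) {
			String[] parts = theUri.split("\\|");
			if (parts.length == 2) {
				uri = parts[0];
			}
		}
		System.out.println(uri); // http://example.org/StructureDefinition/my-profile
	}
}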