Preserve tags on delete ()

* Preserve tags on delete

* Add changelog

* Add changelog

* Test fix

* Build fix

* Address review comments
James Agnew 2022-10-12 07:01:47 -04:00 committed by GitHub
parent 5831cbedf5
commit 108e357ca7
17 changed files with 336 additions and 24 deletions
hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_2_0
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao
hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2
hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4
hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate


@@ -0,0 +1,7 @@
---
type: fix
issue: 4128
title: "In the JPA server, when deleting a resource the associated tags were previously deleted even though
the FHIR specification states that tags are independent of FHIR versioning. After this fix, resource tags
and security labels will remain when a resource is deleted. They can be fetched using the `$meta` operation
against the deleted resource, and will remain if the resource is brought back in a subsequent update."
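For illustration only (not part of this commit): a minimal sketch of how the new behavior could be observed through a HAPI FHIR generic client, assuming an R4 JPA server at a hypothetical base URL. After the delete, the `$meta` operation is expected to still return the tags that were on the resource.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Meta;
import org.hl7.fhir.r4.model.Patient;

public class TagsSurviveDeleteSketch {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forR4();
        // Hypothetical server URL, for illustration only
        IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

        // Create a patient that carries a tag
        Patient patient = new Patient();
        patient.getMeta().addTag("http://example.org/tags", "sample-tag", "Sample tag");
        patient.setActive(true);
        IIdType id = client.create().resource(patient).execute().getId().toUnqualifiedVersionless();

        // Delete the resource
        client.delete().resourceById(id).execute();

        // After this fix, the tag should still be retrievable via $meta against the deleted resource
        Meta meta = client.meta().get(Meta.class).fromResource(id).execute();
        System.out.println("Tags after delete: " + meta.getTag().size());
    }
}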


@@ -0,0 +1,6 @@
---
type: fix
issue: 4128
title: "In the JPA server, when a resource is being updated, the response will now include any tags or
security labels which were not present in the request but were carried forward from the previous
version of the resource."
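Again for illustration only (not part of this commit): a sketch of the update behavior over the REST client, assuming the same hypothetical server and an existing resource id "Patient/example-a" whose earlier version carries a tag. With `Prefer: return=representation`, the returned resource should include the carried-forward tag even though none was sent in the request.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.PreferReturnEnum;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Patient;

public class TagsCarriedForwardOnUpdateSketch {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forR4();
        // Hypothetical server URL and resource id, for illustration only
        IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

        // Update the resource without supplying any tags in the request body
        Patient patient = new Patient();
        patient.setId("Patient/example-a");
        patient.addIdentifier().setSystem("urn:system").setValue("3");

        MethodOutcome outcome = client.update()
            .resource(patient)
            .prefer(PreferReturnEnum.REPRESENTATION)
            .execute();

        // After this fix, tags from the previous version are carried forward into the response
        Patient returned = (Patient) outcome.getResource();
        System.out.println("Tags carried forward: " + returned.getMeta().getTag().size());
    }
}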


@@ -1178,13 +1178,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
        }
        // 5. fill MetaData
        if (retVal instanceof IResource) {
            IResource res = (IResource) retVal;
            retVal = populateResourceMetadataHapi(resourceType, theEntity, tagList, theForHistoryOperation, res, version);
        } else {
            IAnyResource res = (IAnyResource) retVal;
            retVal = populateResourceMetadataRi(resourceType, theEntity, tagList, theForHistoryOperation, res, version);
        }
        retVal = populateResourceMetadata(theEntity, theForHistoryOperation, tagList, version, resourceType, retVal);

        // 6. Handle source (provenance)
        if (isNotBlank(provenanceRequestId) || isNotBlank(provenanceSourceUri)) {
@@ -1209,6 +1203,17 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
        return retVal;
    }

    protected <R extends IBaseResource> R populateResourceMetadata(IBaseResourceEntity theEntity, boolean theForHistoryOperation, @Nullable Collection<? extends BaseTag> tagList, long theVersion, Class<R> theResourceType, R theResource) {
        if (theResource instanceof IResource) {
            IResource res = (IResource) theResource;
            theResource = populateResourceMetadataHapi(theResourceType, theEntity, tagList, theForHistoryOperation, res, theVersion);
        } else {
            IAnyResource res = (IAnyResource) theResource;
            theResource = populateResourceMetadataRi(theResourceType, theEntity, tagList, theForHistoryOperation, res, theVersion);
        }
        return theResource;
    }

    public String toResourceName(Class<? extends IBaseResource> theResourceType) {
        return myContext.getResourceType(theResourceType);
    }


@@ -1808,6 +1808,16 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
         * Otherwise, we're not in a transaction
         */
        ResourceTable savedEntity = updateInternal(theRequest, resource, thePerformIndexing, theForceUpdateVersion, entity, resourceId, oldResource, theTransactionDetails);

        if (thePerformIndexing) {
            Collection<? extends BaseTag> tagList = Collections.emptyList();
            if (entity.isHasTags()) {
                tagList = entity.getTags();
            }
            long version = entity.getVersion();
            populateResourceMetadata(entity, false, tagList, version, getResourceType(), resource);
        }

        DaoMethodOutcome outcome = toMethodOutcome(theRequest, savedEntity, resource).setCreated(wasDeleted);

        if (!thePerformIndexing) {


@@ -311,10 +311,6 @@ public class ResourceExpungeService implements IResourceExpungeService {
        if (resource == null || resource.isHasLinks()) {
            myResourceLinkDao.deleteByResourceId(theResourceId.getIdAsLong());
        }
        if (resource == null || resource.isHasTags()) {
            myResourceTagDao.deleteByResourceId(theResourceId.getIdAsLong());
        }
    }

    private void expungeHistoricalVersionsOfId(RequestDetails theRequestDetails, Long myResourceId, AtomicInteger theRemainingCount) {


@@ -10,6 +10,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamStringDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamTokenDao;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;


@@ -1035,14 +1035,6 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
        myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);
        String methodName = "testHistoryOverMultiplePages";
        /*
         * for (int i = 0; i < 1000; i++) {
         * Patient patient = new Patient();
         * patient.addName().addFamily(methodName + "__" + i);
         * myPatientDao.create(patient).getId().toUnqualifiedVersionless();
         * }
         */
        Patient patient = new Patient();
        patient.addName().addFamily(methodName);
        IIdType id = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();


@@ -26,6 +26,7 @@ import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor;
import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum;
import ca.uhn.fhir.util.BundleBuilder;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.CareTeam;
@@ -91,6 +92,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
        myDaoConfig.setAutoCreatePlaceholderReferenceTargets(new DaoConfig().isAutoCreatePlaceholderReferenceTargets());
        myDaoConfig.setPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets(new DaoConfig().isPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets());
        myDaoConfig.setResourceClientIdStrategy(new DaoConfig().getResourceClientIdStrategy());
        myDaoConfig.setTagStorageMode(new DaoConfig().getTagStorageMode());
        BaseTermReadSvcImpl.setForceDisableHibernateSearchForUnitTest(false);
    }
@@ -153,6 +155,40 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
    }

    @Test
    public void testUpdateWithChangesAndTags() {
        myDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.NON_VERSIONED);

        IIdType id = runInTransaction(() -> {
            Patient p = new Patient();
            p.getMeta().addTag("http://system", "foo", "display");
            p.addIdentifier().setSystem("urn:system").setValue("2");
            return myPatientDao.create(p).getId().toUnqualified();
        });

        runInTransaction(() -> {
            assertEquals(1, myResourceTagDao.count());
        });

        myCaptureQueriesListener.clear();
        runInTransaction(() -> {
            Patient p = new Patient();
            p.setId(id.getIdPart());
            p.addIdentifier().setSystem("urn:system").setValue("3");
            IBaseResource newRes = myPatientDao.update(p).getResource();
            assertEquals(1, newRes.getMeta().getTag().size());
        });

        myCaptureQueriesListener.logSelectQueriesForCurrentThread();
        assertEquals(4, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
        myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
        assertEquals(2, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
        myCaptureQueriesListener.logInsertQueriesForCurrentThread();
        assertEquals(1, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
        myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
        assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
    }

    @Test
    public void testRead() {
        IIdType id = runInTransaction(() -> {


@@ -6,9 +6,11 @@ import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.gclient.TokenClientParam;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Meta;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.SearchParameter;
import org.junit.jupiter.api.AfterEach;
@@ -36,6 +38,88 @@ public class FhirResourceDaoR4TagsTest extends BaseResourceProviderR4Test {
    }

    /**
     * Make sure tags are preserved
     */
    @Test
    public void testDeleteResourceWithTags_NonVersionedTags() {
        initializeNonVersioned();

        // Delete
        runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));
        IIdType outcomeId = myPatientDao.delete(new IdType("Patient/A"), mySrd).getId();
        assertEquals("3", outcomeId.getVersionIdPart());
        runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));

        // Make sure $meta-get can fetch the tags of the deleted resource
        Meta meta = myPatientDao.metaGetOperation(Meta.class, new IdType("Patient/A"), mySrd);
        assertThat(toProfiles(meta).toString(), toProfiles(meta), contains("http://profile2"));
        assertThat(toTags(meta).toString(), toTags(meta), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
        assertEquals("3", meta.getVersionId());

        // Revive and verify
        Patient patient = new Patient();
        patient.setId("A");
        patient.getMeta().addProfile("http://profile3");
        patient.setActive(true);
        myCaptureQueriesListener.clear();
        patient = (Patient) myPatientDao.update(patient, mySrd).getResource();
        assertThat(toProfiles(patient).toString(), toProfiles(patient), containsInAnyOrder("http://profile3"));
        assertThat(toTags(patient).toString(), toTags(patient), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
        myCaptureQueriesListener.logAllQueries();
        runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));

        // Read it back
        patient = myPatientDao.read(new IdType("Patient/A"), mySrd);
        assertThat(toProfiles(patient).toString(), toProfiles(patient), containsInAnyOrder("http://profile3"));
        assertThat(toTags(patient).toString(), toTags(patient), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
    }

    /**
     * Make sure tags are preserved
     */
    @Test
    public void testDeleteResourceWithTags_VersionedTags() {
        initializeVersioned();

        // Delete
        runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));
        myPatientDao.delete(new IdType("Patient/A"), mySrd);
        runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));

        // Make sure $meta-get can fetch the tags of the deleted resource
        Meta meta = myPatientDao.metaGetOperation(Meta.class, new IdType("Patient/A"), mySrd);
        assertThat(toProfiles(meta).toString(), toProfiles(meta), contains("http://profile2"));
        assertThat(toTags(meta).toString(), toTags(meta), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));

        // Revive and verify
        Patient patient = new Patient();
        patient.setId("A");
        patient.getMeta().addProfile("http://profile3");
        patient.setActive(true);
        myCaptureQueriesListener.clear();
        patient = (Patient) myPatientDao.update(patient, mySrd).getResource();
        myCaptureQueriesListener.logAllQueries();
        runInTransaction(() -> assertEquals(3, myResourceTagDao.count()));

        // Read it back
        patient = myPatientDao.read(new IdType("Patient/A"), mySrd);
        assertThat(toProfiles(patient).toString(), toProfiles(patient), containsInAnyOrder("http://profile3"));
        assertThat(toTags(patient).toString(), toTags(patient), containsInAnyOrder("http://tag1|vtag1|dtag1", "http://tag2|vtag2|dtag2"));
    }

    @Test
    public void testStoreAndRetrieveNonVersionedTags_Read() {
        initializeNonVersioned();
@@ -163,7 +247,7 @@ public class FhirResourceDaoR4TagsTest extends BaseResourceProviderR4Test {
patient.setActive(true);
myPatientDao.update(patient, mySrd);
runInTransaction(()->{
runInTransaction(() -> {
assertEquals(0, myResourceTagDao.count());
assertEquals(0, myResourceHistoryTagDao.count());
assertEquals(0, myTagDefinitionDao.count());
@@ -184,7 +268,7 @@ public class FhirResourceDaoR4TagsTest extends BaseResourceProviderR4Test {
patient.setActive(true);
myPatientDao.update(patient, mySrd);
runInTransaction(()->{
runInTransaction(() -> {
assertEquals(0, myResourceTagDao.count());
assertEquals(0, myResourceHistoryTagDao.count());
assertEquals(0, myTagDefinitionDao.count());
@@ -350,17 +434,32 @@ public class FhirResourceDaoR4TagsTest extends BaseResourceProviderR4Test {
@Nonnull
private List<String> toTags(Patient patient) {
return patient.getMeta().getTag().stream().map(t -> t.getSystem() + "|" + t.getCode() + "|" + t.getDisplay()).collect(Collectors.toList());
return toTags(patient.getMeta());
}
@Nonnull
private List<String> toSecurityLabels(Patient patient) {
return patient.getMeta().getSecurity().stream().map(t -> t.getSystem() + "|" + t.getCode() + "|" + t.getDisplay()).collect(Collectors.toList());
return toSecurityLabels(patient.getMeta());
}
@Nonnull
private List<String> toProfiles(Patient patient) {
return patient.getMeta().getProfile().stream().map(t -> t.getValue()).collect(Collectors.toList());
return toProfiles(patient.getMeta());
}
    @Nonnull
    private static List<String> toTags(Meta meta) {
        return meta.getTag().stream().map(t -> t.getSystem() + "|" + t.getCode() + "|" + t.getDisplay()).collect(Collectors.toList());
    }

    @Nonnull
    private static List<String> toSecurityLabels(Meta meta) {
        return meta.getSecurity().stream().map(t -> t.getSystem() + "|" + t.getCode() + "|" + t.getDisplay()).collect(Collectors.toList());
    }

    @Nonnull
    private static List<String> toProfiles(Meta meta) {
        return meta.getProfile().stream().map(t -> t.getValue()).collect(Collectors.toList());
    }

    @Nonnull


@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public class HapiMigrationException extends RuntimeException {
private MigrationResult myResult;


@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;


@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
public interface IHapiMigrationCallback {


@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import java.util.ArrayList;


@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.flywaydb.core.api.MigrationVersion;


@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.migrate.dao;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;


@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.migrate.dao;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeEnum;


@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.migrate.entity;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.springframework.jdbc.core.PreparedStatementSetter;
import org.springframework.jdbc.core.RowMapper;