Allow updating externalized binary (#1556)
* Add non-working test
* Add a test
* Fix test
* Allow updating existing externalized binary data
* Add test
* Remove unneeded test
* Fix test
parent 9e44049847
commit 19247ffb46
@@ -66,7 +66,7 @@ ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFail
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index.
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected.
 
-ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.externalizedBinaryStorageExtensionFoundInRequestBody=Illegal extension found in request payload: {0}
+ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.externalizedBinaryStorageExtensionFoundInRequestBody=Illegal extension found in request payload - URL "{0}" and value "{1}"
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.incomingNoopInTransaction=Transaction contains resource with operation NOOP. This is only valid as a response operation, not in a request
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlInvalidResourceType=Invalid match URL "{0}" - Unknown resource type: "{1}"
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlNoMatches=Invalid match URL "{0}" - No resources match this search
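The reworded message now carries two placeholders: {0} for the extension URL and {1} for the binary ID the client submitted. For context, resolving it through HAPI's localizer looks roughly like this (a minimal sketch, not part of the commit; the wrapper class and sample value are illustrative):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;

class LocalizerSketch {
   static void rejectIllegalBinaryId(FhirContext theCtx, String theBinaryId) {
      // {0} <- extension URL, {1} <- offending binary ID
      String msg = theCtx.getLocalizer().getMessage(
         BaseHapiFhirDao.class,
         "externalizedBinaryStorageExtensionFoundInRequestBody",
         JpaConstants.EXT_EXTERNALIZED_BINARY_ID,
         theBinaryId);
      // Surfaces to the client as an HTTP 400
      throw new InvalidRequestException(msg);
   }
}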
@@ -26,6 +26,7 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.interceptor.api.Hook;
 import ca.uhn.fhir.interceptor.api.Interceptor;
 import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
 import ca.uhn.fhir.jpa.model.util.JpaConstants;
 import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
@@ -43,12 +44,13 @@ import javax.annotation.PostConstruct;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Optional;
+import java.util.*;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.stream.Collectors;
 
+import static ca.uhn.fhir.jpa.model.util.JpaConstants.EXT_EXTERNALIZED_BINARY_ID;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
 
 @Interceptor
 public class BinaryStorageInterceptor {
@@ -59,7 +61,6 @@ public class BinaryStorageInterceptor {
    private FhirContext myCtx;
    @Autowired
    private BinaryAccessProvider myBinaryAccessProvider;
-
    private Class<? extends IPrimitiveType<byte[]>> myBinaryType;
    private String myDeferredListKey;
    private long myAutoDeExternalizeMaximumBytes = 10 * FileUtils.ONE_MB;
@@ -109,16 +110,65 @@ public class BinaryStorageInterceptor {
    }
 
    @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED)
-   public void extractLargeBinariesBeforeCreate(ServletRequestDetails theRequestDetails, IBaseResource theResource, Pointcut thePoincut) throws IOException {
-      extractLargeBinaries(theRequestDetails, theResource, thePoincut);
+   public void extractLargeBinariesBeforeCreate(ServletRequestDetails theRequestDetails, IBaseResource theResource, Pointcut thePointcut) throws IOException {
+      extractLargeBinaries(theRequestDetails, theResource, thePointcut);
    }
 
    @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED)
-   public void extractLargeBinariesBeforeUpdate(ServletRequestDetails theRequestDetails, IBaseResource theResource, Pointcut thePoincut) throws IOException {
-      extractLargeBinaries(theRequestDetails, theResource, thePoincut);
+   public void extractLargeBinariesBeforeUpdate(ServletRequestDetails theRequestDetails, IBaseResource thePreviousResource, IBaseResource theResource, Pointcut thePointcut) throws IOException {
+      blockIllegalExternalBinaryIds(thePreviousResource, theResource);
+      extractLargeBinaries(theRequestDetails, theResource, thePointcut);
    }
 
-   private void extractLargeBinaries(ServletRequestDetails theRequestDetails, IBaseResource theResource, Pointcut thePoincut) throws IOException {
+   /**
+    * Don't allow clients to submit resources with binary storage attachments declared unless the ID was already in the
+    * resource. In other words, only HAPI itself may add a binary storage ID extension to a resource unless that
+    * extension was already present.
+    */
+   private void blockIllegalExternalBinaryIds(IBaseResource thePreviousResource, IBaseResource theResource) {
+      Set<String> existingBinaryIds = new HashSet<>();
+      if (thePreviousResource != null) {
+         List<? extends IPrimitiveType<byte[]>> base64fields = myCtx.newTerser().getAllPopulatedChildElementsOfType(thePreviousResource, myBinaryType);
+         for (IPrimitiveType<byte[]> nextBase64 : base64fields) {
+            if (nextBase64 instanceof IBaseHasExtensions) {
+               ((IBaseHasExtensions) nextBase64)
+                  .getExtension()
+                  .stream()
+                  .filter(t -> t.getUserData(JpaConstants.EXTENSION_EXT_SYSTEMDEFINED) == null)
+                  .filter(t -> EXT_EXTERNALIZED_BINARY_ID.equals(t.getUrl()))
+                  .map(t -> (IPrimitiveType) t.getValue())
+                  .map(t -> t.getValueAsString())
+                  .filter(t -> isNotBlank(t))
+                  .forEach(t -> existingBinaryIds.add(t));
+            }
+         }
+      }
+
+      List<? extends IPrimitiveType<byte[]>> base64fields = myCtx.newTerser().getAllPopulatedChildElementsOfType(theResource, myBinaryType);
+      for (IPrimitiveType<byte[]> nextBase64 : base64fields) {
+         if (nextBase64 instanceof IBaseHasExtensions) {
+            Optional<String> hasExternalizedBinaryReference = ((IBaseHasExtensions) nextBase64)
+               .getExtension()
+               .stream()
+               .filter(t -> t.getUserData(JpaConstants.EXTENSION_EXT_SYSTEMDEFINED) == null)
+               .filter(t -> t.getUrl().equals(EXT_EXTERNALIZED_BINARY_ID))
+               .map(t -> (IPrimitiveType) t.getValue())
+               .map(t -> t.getValueAsString())
+               .filter(t -> isNotBlank(t))
+               .filter(t -> !existingBinaryIds.contains(t))
+               .findFirst();
+
+            if (hasExternalizedBinaryReference.isPresent()) {
+               String msg = myCtx.getLocalizer().getMessage(BaseHapiFhirDao.class, "externalizedBinaryStorageExtensionFoundInRequestBody", EXT_EXTERNALIZED_BINARY_ID, hasExternalizedBinaryReference.get());
+               throw new InvalidRequestException(msg);
+            }
+         }
+      }
+   }
+
+   private void extractLargeBinaries(ServletRequestDetails theRequestDetails, IBaseResource theResource, Pointcut thePointcut) throws IOException {
       if (theRequestDetails == null) {
          // RequestDetails will only be null for internal HAPI events. If externalization is required for them it will need to be done in a different way.
         return;
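The new guard collects the externalized-binary IDs already present on the previous version of the resource, then rejects any ID in the incoming resource that is not in that set. The attachment shape it inspects can be built like this (a sketch assembled from the tests further down; the helper and the ID value are illustrative):

import ca.uhn.fhir.jpa.model.util.JpaConstants;
import org.hl7.fhir.r4.model.Attachment;
import org.hl7.fhir.r4.model.DocumentReference;
import org.hl7.fhir.r4.model.StringType;

class PayloadSketch {
   static DocumentReference withExternalizedBinaryId(String theBinaryId) {
      DocumentReference docRef = new DocumentReference();
      Attachment attachment = docRef.addContent().getAttachment();
      attachment.setContentType("application/octet-stream");
      // Declares "use the already-stored blob theBinaryId as this attachment's data".
      // On update this passes only if the previous version carried the same ID;
      // anything else now fails with the two-part message defined above.
      attachment.getDataElement().addExtension(
         JpaConstants.EXT_EXTERNALIZED_BINARY_ID,
         new StringType(theBinaryId));
      return docRef;
   }
}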
@@ -145,7 +195,7 @@
             StoredDetails storedDetails = myBinaryStorageSvc.storeBlob(resourceId, null, nextContentType, inputStream);
             newBlobId = storedDetails.getBlobId();
          } else {
-            assert thePoincut == Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED : thePoincut.name();
+            assert thePointcut == Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED : thePointcut.name();
             newBlobId = myBinaryStorageSvc.newBlobId();
             List<DeferredBinaryTarget> deferredBinaryTargets = getOrCreateDeferredBinaryStorageMap(theRequestDetails);
             DeferredBinaryTarget newDeferredBinaryTarget = new DeferredBinaryTarget(newBlobId, nextTarget, data);
@@ -95,8 +95,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 
    private String myResourceName;
    private Class<T> myResourceType;
-   private String mySecondaryPrimaryKeyParamName;
-   private Class<? extends IPrimitiveType<byte[]>> myBase64Type;
 
    @Override
    public void addTag(IIdType theId, TagTypeEnum theTagType, String theScheme, String theTerm, String theLabel, RequestDetails theRequest) {
@@ -824,21 +822,17 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
       return update(destinationCasted, null, true, theRequest);
    }
 
+   @PostConstruct
+   @Override
+   public void start() {
+      ourLog.debug("Starting resource DAO for type: {}", getResourceName());
+      super.start();
+   }
+
    @PostConstruct
    public void postConstruct() {
       RuntimeResourceDefinition def = getContext().getResourceDefinition(myResourceType);
       myResourceName = def.getName();
-
-      if (mySecondaryPrimaryKeyParamName != null) {
-         RuntimeSearchParam sp = mySearchParamRegistry.getSearchParamByName(def, mySecondaryPrimaryKeyParamName);
-         if (sp == null) {
-            throw new ConfigurationException("Unknown search param on resource[" + myResourceName + "] for secondary key[" + mySecondaryPrimaryKeyParamName + "]");
-         }
-         if (sp.getParamType() != RestSearchParameterTypeEnum.TOKEN) {
-            throw new ConfigurationException("Search param on resource[" + myResourceName + "] for secondary key[" + mySecondaryPrimaryKeyParamName + "] is not a token type, only token is supported");
-         }
-      }
    }
 
    /**
@@ -883,25 +877,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
          }
       }
 
-      /*
-       * Don't allow clients to submit resources with binary storage attachments present.
-       */
-      List<? extends IPrimitiveType<byte[]>> base64fields = getContext().newTerser().getAllPopulatedChildElementsOfType(theResource, myBase64Type);
-      for (IPrimitiveType<byte[]> nextBase64 : base64fields) {
-         if (nextBase64 instanceof IBaseHasExtensions) {
-            boolean hasExternalizedBinaryReference = ((IBaseHasExtensions) nextBase64)
-               .getExtension()
-               .stream()
-               .filter(t -> t.getUserData(JpaConstants.EXTENSION_EXT_SYSTEMDEFINED) == null)
-               .anyMatch(t -> t.getUrl().equals(EXT_EXTERNALIZED_BINARY_ID));
-            if (hasExternalizedBinaryReference) {
-               String msg = getContext().getLocalizer().getMessage(BaseHapiFhirDao.class, "externalizedBinaryStorageExtensionFoundInRequestBody", EXT_EXTERNALIZED_BINARY_ID);
-               throw new InvalidRequestException(msg);
-            }
-         }
-      }
-
-
    }
 
    @Override
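The block removed here was the old blanket check: it rejected any client-supplied externalized-binary-id, even when a client merely echoed back what the server itself had written, so round-trip updates were impossible. A sketch of the workflow the relocated, update-aware check permits (generic-client calls; the base URL and resource ID are assumptions):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.DocumentReference;
import org.hl7.fhir.r4.model.Enumerations;

class RoundTripSketch {
   static void updateUnrelatedField() {
      IGenericClient client = FhirContext.forR4().newRestfulGenericClient("http://localhost:8080/fhir"); // illustrative URL
      // The fetched payload may carry the server-written externalized-binary-id extension
      DocumentReference fetched = client.read().resource(DocumentReference.class).withId("123").execute();
      // Change an unrelated field and PUT the resource back otherwise untouched
      fetched.setStatus(Enumerations.DocumentReferenceStatus.SUPERSEDED);
      // Previously rejected outright; now accepted because the binary ID matches the stored version
      client.update().resource(fetched).execute();
   }
}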
@@ -1197,21 +1172,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
       return retVal;
    }
 
-   /**
-    * If set, the given param will be treated as a secondary primary key, and multiple resources will not be able to share the same value.
-    */
-   public void setSecondaryPrimaryKeyParamName(String theSecondaryPrimaryKeyParamName) {
-      mySecondaryPrimaryKeyParamName = theSecondaryPrimaryKeyParamName;
-   }
-
-   @Override
-   @PostConstruct
-   public void start() {
-      super.start();
-      ourLog.debug("Starting resource DAO for type: {}", getResourceName());
-      myBase64Type = (Class<? extends IPrimitiveType<byte[]>>) getContext().getElementDefinition("base64Binary").getImplementingClass();
-   }
-
    protected <MT extends IBaseMetaType> MT toMetaDt(Class<MT> theType, Collection<TagDefinition> tagDefinitions) {
       MT retVal;
       try {
@@ -236,6 +236,9 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
    @Qualifier("myBinaryDaoR4")
    protected IFhirResourceDao<Binary> myBinaryDao;
    @Autowired
+   @Qualifier("myDocumentReferenceDaoR4")
+   protected IFhirResourceDao<DocumentReference> myDocumentReferenceDao;
+   @Autowired
    @Qualifier("myPatientDaoR4")
    protected IFhirResourceDaoPatient<Patient> myPatientDao;
    @Autowired
@@ -26,6 +26,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 
+import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 
@@ -278,23 +279,20 @@ public class BinaryAccessProviderR4Test extends BaseResourceProviderR4Test {
 
    }
 
-   /**
-    * Stores a binary large enough that it should live in binary storage
-    */
    @Test
-   public void testDontAllowUpdateWithAttachmentId() {
+   public void testDontAllowUpdateWithAttachmentId_NoneExists() {
 
       DocumentReference dr = new DocumentReference();
       dr.addContent()
         .getAttachment()
         .getDataElement()
-        .addExtension(JpaConstants.EXT_EXTERNALIZED_BINARY_ID, new StringType("XUcR1qL9p4i8Ck6L2RzVbSpHv1ZPHYuBSd50LR39nzhDDR4N4efJ7Q0ywZLyzcLeLIPi60UvCCNmdZBlbyvT1KfNaDeHV1zBHbDp"));
+        .addExtension(JpaConstants.EXT_EXTERNALIZED_BINARY_ID, new StringType("0000-1111"));
 
       try {
         ourClient.create().resource(dr).execute();
         fail();
      } catch (InvalidRequestException e) {
-        assertThat(e.getMessage(), containsString("Illegal extension found in request payload: http://hapifhir.io/fhir/StructureDefinition/externalized-binary-id"));
+        assertThat(e.getMessage(), containsString("Can not find the requested binary content. It may have been deleted."));
      }
   }
@@ -6,8 +6,13 @@ import ca.uhn.fhir.jpa.binstore.MemoryBinaryStorageSvcImpl;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.jpa.dao.DaoMethodOutcome;
 import ca.uhn.fhir.jpa.model.util.JpaConstants;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import org.hamcrest.Matchers;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.r4.model.Binary;
+import org.hl7.fhir.r4.model.DocumentReference;
+import org.hl7.fhir.r4.model.Enumerations;
+import org.hl7.fhir.r4.model.StringType;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;

@@ -15,8 +20,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.*;
 import static org.junit.Assert.*;
 
 public class BinaryStorageInterceptorR4Test extends BaseResourceProviderR4Test {
@@ -183,6 +187,102 @@ public class BinaryStorageInterceptorR4Test extends BaseResourceProviderR4Test {
    }
 
 
+   @Test
+   public void testUpdatePreservingExistingExternalizedBinary() {
+
+      // Create a resource with a big enough docRef
+      DocumentReference docRef = new DocumentReference();
+      DocumentReference.DocumentReferenceContentComponent content = docRef.addContent();
+      content.getAttachment().setContentType("application/octet-stream");
+      content.getAttachment().setData(SOME_BYTES);
+      DocumentReference.DocumentReferenceContentComponent content2 = docRef.addContent();
+      content2.getAttachment().setContentType("application/octet-stream");
+      content2.getAttachment().setData(SOME_BYTES_2);
+      DaoMethodOutcome outcome = myDocumentReferenceDao.create(docRef, mySrd);
+
+      // Make sure it was externalized
+      IIdType id = outcome.getId().toUnqualifiedVersionless();
+      String encoded = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome.getResource());
+      ourLog.info("Encoded: {}", encoded);
+      assertThat(encoded, containsString(JpaConstants.EXT_EXTERNALIZED_BINARY_ID));
+      assertThat(encoded, not(containsString("\"data\"")));
+      String binaryId = docRef.getContentFirstRep().getAttachment().getDataElement().getExtensionString(JpaConstants.EXT_EXTERNALIZED_BINARY_ID);
+      assertThat(binaryId, not(blankOrNullString()));
+
+      // Now update
+      docRef = new DocumentReference();
+      docRef.setId(id.toUnqualifiedVersionless());
+      docRef.setStatus(Enumerations.DocumentReferenceStatus.CURRENT);
+      docRef.getContentFirstRep().getAttachment().setContentType("application/octet-stream");
+      docRef.getContentFirstRep().getAttachment().getDataElement().addExtension(
+         JpaConstants.EXT_EXTERNALIZED_BINARY_ID,
+         new StringType(binaryId)
+      );
+      outcome = myDocumentReferenceDao.update(docRef, mySrd);
+      assertEquals("2", outcome.getId().getVersionIdPart());
+
+      // Now read back the first version
+      DocumentReference output = myDocumentReferenceDao.read(id.withVersion("1"), mySrd);
+      assertEquals("application/octet-stream", output.getContentFirstRep().getAttachment().getContentType());
+      assertArrayEquals(SOME_BYTES, output.getContentFirstRep().getAttachment().getData());
+
+      // Now read back the second version
+      output = myDocumentReferenceDao.read(id.withVersion("2"), mySrd);
+      assertEquals("application/octet-stream", output.getContentFirstRep().getAttachment().getContentType());
+      assertArrayEquals(SOME_BYTES, output.getContentFirstRep().getAttachment().getData());
+
+   }
+
+
+   @Test
+   public void testUpdateRejectsIncorrectBinary() {
+
+      // Create a resource with a big enough docRef
+      DocumentReference docRef = new DocumentReference();
+      DocumentReference.DocumentReferenceContentComponent content = docRef.addContent();
+      content.getAttachment().setContentType("application/octet-stream");
+      content.getAttachment().setData(SOME_BYTES);
+      DocumentReference.DocumentReferenceContentComponent content2 = docRef.addContent();
+      content2.getAttachment().setContentType("application/octet-stream");
+      content2.getAttachment().setData(SOME_BYTES_2);
+      DaoMethodOutcome outcome = myDocumentReferenceDao.create(docRef, mySrd);
+
+      // Make sure it was externalized
+      IIdType id = outcome.getId().toUnqualifiedVersionless();
+      String encoded = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome.getResource());
+      ourLog.info("Encoded: {}", encoded);
+      assertThat(encoded, containsString(JpaConstants.EXT_EXTERNALIZED_BINARY_ID));
+      assertThat(encoded, not(containsString("\"data\"")));
+      String binaryId = docRef.getContentFirstRep().getAttachment().getDataElement().getExtensionString(JpaConstants.EXT_EXTERNALIZED_BINARY_ID);
+      assertThat(binaryId, not(blankOrNullString()));
+
+      // Now update
+      docRef = new DocumentReference();
+      docRef.setId(id.toUnqualifiedVersionless());
+      docRef.setStatus(Enumerations.DocumentReferenceStatus.CURRENT);
+      content = docRef.addContent();
+      content.getAttachment().setContentType("application/octet-stream");
+      content.getAttachment().getDataElement().addExtension(
+         JpaConstants.EXT_EXTERNALIZED_BINARY_ID,
+         new StringType(binaryId)
+      );
+      content2 = docRef.addContent();
+      content2.getAttachment().setContentType("application/octet-stream");
+      content2.getAttachment().getDataElement().addExtension(
+         JpaConstants.EXT_EXTERNALIZED_BINARY_ID,
+         new StringType("12345-67890")
+      );
+
+      try {
+         myDocumentReferenceDao.update(docRef, mySrd);
+         fail();
+      } catch (InvalidRequestException e) {
+         assertEquals("Illegal extension found in request payload - URL \"http://hapifhir.io/fhir/StructureDefinition/externalized-binary-id\" and value \"12345-67890\"", e.getMessage());
+      }
+   }
+
+
    @Test
    public void testRetrieveBinaryAboveRetrievalThreshold() {
       myBinaryStorageInterceptor.setAutoDeExternalizeMaximumBytes(5);
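The DAO-level rejection exercised by testUpdateRejectsIncorrectBinary surfaces over REST as an HTTP 400. A rough client-side equivalent (the client, resource ID, and bogus binary ID are assumptions, not part of the commit):

import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.r4.model.DocumentReference;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.StringType;

class RestRejectionSketch {
   static void attemptIllegalUpdate(IGenericClient theClient) {
      DocumentReference docRef = new DocumentReference();
      docRef.setId("DocumentReference/123"); // hypothetical ID
      docRef.setStatus(Enumerations.DocumentReferenceStatus.CURRENT);
      docRef.getContentFirstRep().getAttachment().getDataElement().addExtension(
         JpaConstants.EXT_EXTERNALIZED_BINARY_ID,
         new StringType("12345-67890")); // never stored for this resource
      try {
         theClient.update().resource(docRef).execute();
      } catch (InvalidRequestException e) {
         // HTTP 400: Illegal extension found in request payload - URL
         // "http://hapifhir.io/fhir/StructureDefinition/externalized-binary-id" and value "12345-67890"
      }
   }
}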
@@ -135,6 +135,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
          assertThat(output, containsString("Invalid parameter chain: focalAccess.a ne e"));
          assertEquals(400, resp.getStatusLine().getStatusCode());
       }
+
    }
 
    @Test