Merging master into working branch.

# Conflicts:
#	hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java

Commit 97a235ddfc
@@ -21,6 +21,7 @@ package ca.uhn.fhir.rest.api;
*/
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.*;

public class Constants {

@@ -168,6 +169,7 @@ public class Constants {
public static final String PARAM_SORT = "_sort";
public static final String PARAM_SORT_ASC = "_sort:asc";
public static final String PARAM_SORT_DESC = "_sort:desc";
public static final String PARAM_SOURCE = "_source";
public static final String PARAM_SUMMARY = "_summary";
public static final String PARAM_TAG = "_tag";
public static final String PARAM_TAGS = "_tags";

@@ -220,10 +222,14 @@ public class Constants {
public static final String CACHE_CONTROL_PRIVATE = "private";
public static final int STATUS_HTTP_412_PAYLOAD_TOO_LARGE = 413;
public static final String OPERATION_NAME_GRAPHQL = "$graphql";
/**
 * Note that this constant is used in a number of places including DB column lengths! Be careful if you decide to change it.
 */
public static final int REQUEST_ID_LENGTH = 16;

static {
CHARSET_UTF8 = Charset.forName(CHARSET_NAME_UTF8);
CHARSET_US_ASCII = Charset.forName("ISO-8859-1");
CHARSET_UTF8 = StandardCharsets.UTF_8;
CHARSET_US_ASCII = StandardCharsets.ISO_8859_1;

HashMap<Integer, String> statusNames = new HashMap<>();
statusNames.put(200, "OK");

@@ -239,7 +245,6 @@ public class Constants {
statusNames.put(300, "Multiple Choices");
statusNames.put(301, "Moved Permanently");
statusNames.put(302, "Found");
statusNames.put(302, "Moved Temporarily");
statusNames.put(303, "See Other");
statusNames.put(304, "Not Modified");
statusNames.put(305, "Use Proxy");

@@ -259,9 +264,7 @@ public class Constants {
statusNames.put(411, "Length Required");
statusNames.put(412, "Precondition Failed");
statusNames.put(413, "Payload Too Large");
statusNames.put(413, "Request Entity Too Large");
statusNames.put(414, "URI Too Long");
statusNames.put(414, "Request-URI Too Long");
statusNames.put(415, "Unsupported Media Type");
statusNames.put(416, "Requested range not satisfiable");
statusNames.put(417, "Expectation Failed");

@@ -0,0 +1,43 @@
package ca.uhn.fhir.util;

import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
import ca.uhn.fhir.context.FhirContext;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseMetaType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;

import java.util.List;

public class MetaUtil {

private MetaUtil() {
// non-instantiable
}

public static String getSource(FhirContext theContext, IBaseMetaType theMeta) {
BaseRuntimeElementCompositeDefinition<?> elementDef = (BaseRuntimeElementCompositeDefinition<?>) theContext.getElementDefinition(theMeta.getClass());
BaseRuntimeChildDefinition sourceChild = elementDef.getChildByName("source");
List<IBase> sourceValues = sourceChild.getAccessor().getValues(theMeta);
String retVal = null;
if (sourceValues.size() > 0) {
retVal = ((IPrimitiveType<?>) sourceValues.get(0)).getValueAsString();
}
return retVal;
}

public static void setSource(FhirContext theContext, IBaseMetaType theMeta, String theValue) {
BaseRuntimeElementCompositeDefinition<?> elementDef = (BaseRuntimeElementCompositeDefinition<?>) theContext.getElementDefinition(theMeta.getClass());
BaseRuntimeChildDefinition sourceChild = elementDef.getChildByName("source");
List<IBase> sourceValues = sourceChild.getAccessor().getValues(theMeta);
IPrimitiveType<?> sourceElement;
if (sourceValues.size() > 0) {
sourceElement = ((IPrimitiveType<?>) sourceValues.get(0));
} else {
sourceElement = (IPrimitiveType<?>) theContext.getElementDefinition("uri").newInstance();
sourceChild.getMutator().setValue(theMeta, sourceElement);
}
sourceElement.setValueAsString(theValue);
}

}

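For context, a minimal usage sketch of the new MetaUtil helper. The R4 context, the Patient resource, and the example source URI are illustrative assumptions, not part of the commit:

```java
// Sketch: reading and writing Resource.meta.source via MetaUtil (assumes an R4 context).
FhirContext ctx = FhirContext.forR4();
org.hl7.fhir.r4.model.Patient patient = new org.hl7.fhir.r4.model.Patient();

// Write a source URI into Patient.meta.source
MetaUtil.setSource(ctx, patient.getMeta(), "urn:source:example");

// Read it back; returns null if no source has been set
String source = MetaUtil.getSource(ctx, patient.getMeta());
```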
@@ -57,4 +57,5 @@ public interface IBaseMetaType extends ICompositeType {
*/
IBaseCoding getSecurity(String theSystem, String theCode);


}

@@ -105,6 +105,7 @@ ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor.failedToExtractPa
ca.uhn.fhir.jpa.dao.SearchBuilder.invalidQuantityPrefix=Unable to handle quantity prefix "{0}" for value: {1}
ca.uhn.fhir.jpa.dao.SearchBuilder.invalidNumberPrefix=Unable to handle number prefix "{0}" for value: {1}
ca.uhn.fhir.jpa.dao.SearchBuilder.sourceParamDisabled=The _source parameter is disabled on this server

ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoConceptMapDstu3.matchesFound=Matches found!
ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoConceptMapDstu3.noMatchesFound=No matches found!

@@ -113,7 +114,7 @@ ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoConceptMapR4.noMatchesFound=No matches fou
ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoSearchParameterR4.invalidSearchParamExpression=The expression "{0}" can not be evaluated and may be invalid: {1}

ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor.successMsg=Cascaded delete to {0} resources: {1}
ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor.noParam=Note that cascading deletes are not active for this request. You can enable cascading deletes by using the "_cascade=true" URL parameter.
ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor.noParam=Note that cascading deletes are not active for this request. You can enable cascading deletes by using the "_cascade=delete" URL parameter.

ca.uhn.fhir.jpa.provider.BaseJpaProvider.cantCombintAtAndSince=Unable to combine _at and _since parameters for history operation
ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.noAttachmentDataPresent=The resource with ID {0} has no data at path: {1}

@@ -69,6 +69,11 @@ public class VersionedApiConverterInterceptor extends InterceptorAdapter {
@Override
public boolean outgoingResponse(RequestDetails theRequestDetails, ResponseDetails theResponseDetails, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) throws AuthenticationException {
IBaseResource responseResource = theResponseDetails.getResponseResource();
if (responseResource == null) {
return true;
}

String[] formatParams = theRequestDetails.getParameters().get(Constants.PARAM_FORMAT);
String accept = null;
if (formatParams != null && formatParams.length > 0) {

@@ -92,7 +97,6 @@ public class VersionedApiConverterInterceptor extends InterceptorAdapter {
wantVersion = FhirVersionEnum.forVersionString(wantVersionString);
}

IBaseResource responseResource = theResponseDetails.getResponseResource();
FhirVersionEnum haveVersion = responseResource.getStructureFhirVersionEnum();

IBaseResource converted = null;

@@ -146,7 +146,11 @@
<artifactId>logback-classic</artifactId>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.awaitility</groupId>
<artifactId>awaitility</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.javassist</groupId>
<artifactId>javassist</artifactId>

@@ -51,6 +51,7 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.MetaUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.XmlUtil;
import com.google.common.annotations.VisibleForTesting;

@@ -130,6 +131,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
@Autowired
protected IForcedIdDao myForcedIdDao;
@Autowired
protected IResourceProvenanceDao myResourceProvenanceDao;
@Autowired
protected ISearchCoordinatorSvc mySearchCoordinatorSvc;
@Autowired
protected ISearchParamRegistry mySerarchParamRegistry;

@@ -282,6 +285,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
}
}

}

private void findMatchingTagIds(RequestDetails theRequest, String theResourceName, IIdType theResourceId, Set<Long> tagIds, Class<? extends BaseTag> entityClass) {

@@ -611,7 +615,10 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
if (theEntity.getId() == null) {
changed = true;
} else {
ResourceHistoryTable currentHistoryVersion = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
ResourceHistoryTable currentHistoryVersion = theEntity.getCurrentVersionEntity();
if (currentHistoryVersion == null) {
currentHistoryVersion = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), theEntity.getVersion());
}
if (currentHistoryVersion == null || currentHistoryVersion.getResource() == null) {
changed = true;
} else {

@@ -832,11 +839,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
metaSnapshotModeTokens = Collections.singleton(TagTypeEnum.PROFILE);
}

if (metaSnapshotModeTokens.contains(theTag.getTag().getTagType())) {
return true;
}

return false;
return metaSnapshotModeTokens.contains(theTag.getTag().getTagType());
}

@Override

@@ -851,10 +854,12 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
public <R extends IBaseResource> R toResource(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<ResourceTag> theTagList, boolean theForHistoryOperation) {

// 1. get resource, it's encoding and the tags if any
byte[] resourceBytes = null;
ResourceEncodingEnum resourceEncoding = null;
Collection<? extends BaseTag> myTagList = null;
Long version = null;
byte[] resourceBytes;
ResourceEncodingEnum resourceEncoding;
Collection<? extends BaseTag> myTagList;
Long version;
String provenanceSourceUri = null;
String provenanceRequestId = null;

if (theEntity instanceof ResourceHistoryTable) {
ResourceHistoryTable history = (ResourceHistoryTable) theEntity;

@@ -862,14 +867,20 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
resourceEncoding = history.getEncoding();
myTagList = history.getTags();
version = history.getVersion();
if (history.getProvenance() != null) {
provenanceRequestId = history.getProvenance().getRequestId();
provenanceSourceUri = history.getProvenance().getSourceUri();
}
} else if (theEntity instanceof ResourceTable) {
ResourceTable resource = (ResourceTable) theEntity;
version = theEntity.getVersion();
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), version);
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version);
((ResourceTable)theEntity).setCurrentVersionEntity(history);

while (history == null) {
if (version > 1L) {
version--;
history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), version);
history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version);
} else {
return null;
}

@@ -878,12 +889,18 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
resourceEncoding = history.getEncoding();
myTagList = resource.getTags();
version = history.getVersion();
if (history.getProvenance() != null) {
provenanceRequestId = history.getProvenance().getRequestId();
provenanceSourceUri = history.getProvenance().getSourceUri();
}
} else if (theEntity instanceof ResourceSearchView) {
// This is the search View
ResourceSearchView myView = (ResourceSearchView) theEntity;
resourceBytes = myView.getResource();
resourceEncoding = myView.getEncoding();
version = myView.getVersion();
ResourceSearchView view = (ResourceSearchView) theEntity;
resourceBytes = view.getResource();
resourceEncoding = view.getEncoding();
version = view.getVersion();
provenanceRequestId = view.getProvenanceRequestId();
provenanceSourceUri = view.getProvenanceSourceUri();
if (theTagList == null)
myTagList = new HashSet<>();
else

@@ -954,6 +971,23 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
retVal = populateResourceMetadataRi(resourceType, theEntity, myTagList, theForHistoryOperation, res, version);
}

// 6. Handle source (provenance)
if (isNotBlank(provenanceRequestId) || isNotBlank(provenanceSourceUri)) {
String sourceString = defaultString(provenanceSourceUri)
+ (isNotBlank(provenanceRequestId) ? "#" : "")
+ defaultString(provenanceRequestId);

if (myContext.getVersion().getVersion().equals(FhirVersionEnum.DSTU3)) {
IBaseExtension<?, ?> sourceExtension = ((IBaseHasExtensions) retVal.getMeta()).addExtension();
sourceExtension.setUrl(JpaConstants.EXT_META_SOURCE);
IPrimitiveType<String> value = (IPrimitiveType<String>) myContext.getElementDefinition("uri").newInstance();
value.setValue(sourceString);
sourceExtension.setValue(value);
} else if (myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R4)) {
MetaUtil.setSource(myContext, retVal.getMeta(), sourceString);
}
}

return retVal;
}

@@ -1097,6 +1131,40 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,

ourLog.debug("Saving history entry {}", historyEntry.getIdDt());
myResourceHistoryTableDao.save(historyEntry);

// Save resource source
String source = null;
String requestId = theRequest != null ? theRequest.getRequestId() : null;
if (theResource != null) {
if (myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R4)) {
IBaseMetaType meta = theResource.getMeta();
source = MetaUtil.getSource(myContext, meta);
}
if (myContext.getVersion().getVersion().equals(FhirVersionEnum.DSTU3)) {
source = ((IBaseHasExtensions) theResource.getMeta())
.getExtension()
.stream()
.filter(t -> JpaConstants.EXT_META_SOURCE.equals(t.getUrl()))
.filter(t -> t.getValue() instanceof IPrimitiveType)
.map(t -> ((IPrimitiveType) t.getValue()).getValueAsString())
.findFirst()
.orElse(null);
}
}
boolean haveSource = isNotBlank(source) && myConfig.getStoreMetaSourceInformation().isStoreSourceUri();
boolean haveRequestId = isNotBlank(requestId) && myConfig.getStoreMetaSourceInformation().isStoreRequestId();
if (haveSource || haveRequestId) {
ResourceHistoryProvenanceEntity provenance = new ResourceHistoryProvenanceEntity();
provenance.setResourceHistoryTable(historyEntry);
provenance.setResourceTable(theEntity);
if (haveRequestId) {
provenance.setRequestId(left(requestId, Constants.REQUEST_ID_LENGTH));
}
if (haveSource) {
provenance.setSourceUri(source);
}
myEntityManager.persist(provenance);
}
}

/*

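As a side note, the stored provenance is later rendered back into `Resource.meta.source` as `sourceUri#requestId`. A minimal standalone sketch of that composition, mirroring the `defaultString(...)` / `"#"` logic above (the helper name and parameters are illustrative, not from the commit):

```java
// Sketch: how the source string is assembled from the two provenance fields.
static String composeSourceString(String provenanceSourceUri, String provenanceRequestId) {
    String uri = provenanceSourceUri == null ? "" : provenanceSourceUri;
    String requestId = provenanceRequestId == null ? "" : provenanceRequestId;
    String separator = requestId.isEmpty() ? "" : "#";
    return uri + separator + requestId; // e.g. "urn:source:0#a_request_id"
}
```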
@@ -220,6 +220,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
StopWatch w = new StopWatch();

T resourceToDelete = toResource(myResourceType, entity, null, false);
theDeleteConflicts.setResourceIdMarkedForDeletion(theId);

// Notify IServerOperationInterceptors about pre-action call
HookParams hook = new HookParams()

@@ -268,6 +269,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Override
public DaoMethodOutcome delete(IIdType theId, RequestDetails theRequestDetails) {
DeleteConflictList deleteConflicts = new DeleteConflictList();
if (theId != null && isNotBlank(theId.getValue())) {
deleteConflicts.setResourceIdMarkedForDeletion(theId);
}

StopWatch w = new StopWatch();

DaoMethodOutcome retVal = delete(theId, deleteConflicts, theRequestDetails);

@@ -671,7 +676,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
doMetaAdd(theMetaAdd, latestVersion);

// Also update history entry
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersion(entity.getId(), entity.getVersion());
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(entity.getId(), entity.getVersion());
doMetaAdd(theMetaAdd, history);
}

@@ -703,7 +708,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
doMetaDelete(theMetaDel, latestVersion);

// Also update history entry
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersion(entity.getId(), entity.getVersion());
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(entity.getId(), entity.getVersion());
doMetaDelete(theMetaDel, history);
}

@@ -151,6 +151,7 @@ public class DaoConfig {
*/
private boolean myPreExpandValueSetsExperimental = true; // FIXME: DM 2019-08-19 - Return to false;
private boolean myFilterParameterEnabled = false;
private StoreMetaSourceInformation myStoreMetaSourceInformation = StoreMetaSourceInformation.SOURCE_URI_AND_REQUEST_ID;
/**
 * EXPERIMENTAL - Do not use in production! Do not change default of {@code 0}!
 */

@@ -986,6 +987,7 @@ public class DaoConfig {
 * and other FHIR features may not behave as expected when referential integrity is not
 * preserved. Use this feature with caution.
 * </p>
 *
 * @see CascadingDeleteInterceptor
 */
public boolean isEnforceReferentialIntegrityOnDelete() {

@@ -1000,6 +1002,7 @@ public class DaoConfig {
 * and other FHIR features may not behave as expected when referential integrity is not
 * preserved. Use this feature with caution.
 * </p>
 *
 * @see CascadingDeleteInterceptor
 */
public void setEnforceReferentialIntegrityOnDelete(boolean theEnforceReferentialIntegrityOnDelete) {

@@ -1100,16 +1103,16 @@ public class DaoConfig {
 * The expunge batch size (default 800) determines the number of records deleted within a single transaction by the
 * expunge operation.
 */
public void setExpungeBatchSize(int theExpungeBatchSize) {
myExpungeBatchSize = theExpungeBatchSize;
public int getExpungeBatchSize() {
return myExpungeBatchSize;
}

/**
 * The expunge batch size (default 800) determines the number of records deleted within a single transaction by the
 * expunge operation.
 */
public int getExpungeBatchSize() {
return myExpungeBatchSize;
public void setExpungeBatchSize(int theExpungeBatchSize) {
myExpungeBatchSize = theExpungeBatchSize;
}

/**

@@ -1668,6 +1671,54 @@ public class DaoConfig {
myFilterParameterEnabled = theFilterParameterEnabled;
}

/**
 * If enabled, resource source information (<code>Resource.meta.source</code>) will be persisted along with
 * each resource. This adds extra table and index space so it should be disabled if it is not being
 * used.
 * <p>
 * Default is {@link StoreMetaSourceInformation#SOURCE_URI_AND_REQUEST_ID}
 * </p>
 */
public StoreMetaSourceInformation getStoreMetaSourceInformation() {
return myStoreMetaSourceInformation;
}

/**
 * If enabled, resource source information (<code>Resource.meta.source</code>) will be persisted along with
 * each resource. This adds extra table and index space so it should be disabled if it is not being
 * used.
 * <p>
 * Default is {@link StoreMetaSourceInformation#SOURCE_URI_AND_REQUEST_ID}
 * </p>
 */
public void setStoreMetaSourceInformation(StoreMetaSourceInformation theStoreMetaSourceInformation) {
Validate.notNull(theStoreMetaSourceInformation, "theStoreMetaSourceInformation must not be null");
myStoreMetaSourceInformation = theStoreMetaSourceInformation;
}

public enum StoreMetaSourceInformation {
NONE(false, false),
SOURCE_URI(true, false),
REQUEST_ID(false, true),
SOURCE_URI_AND_REQUEST_ID(true, true);

private final boolean myStoreSourceUri;
private final boolean myStoreRequestId;

StoreMetaSourceInformation(boolean theStoreSourceUri, boolean theStoreRequestId) {
myStoreSourceUri = theStoreSourceUri;
myStoreRequestId = theStoreRequestId;
}

public boolean isStoreSourceUri() {
return myStoreSourceUri;
}

public boolean isStoreRequestId() {
return myStoreRequestId;
}
}

/**
 * EXPERIMENTAL - Do not use in production!
 * <p>

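A minimal configuration sketch for the new setting (the standalone DaoConfig instance here is illustrative; in a real server the bean would come from the application context):

```java
// Sketch: choosing what provenance to persist. SOURCE_URI_AND_REQUEST_ID is the default;
// NONE disables storage and makes _source searches fail with InvalidRequestException.
DaoConfig daoConfig = new DaoConfig();
daoConfig.setStoreMetaSourceInformation(DaoConfig.StoreMetaSourceInformation.SOURCE_URI);
boolean storesUri = daoConfig.getStoreMetaSourceInformation().isStoreSourceUri();       // true
boolean storesRequestId = daoConfig.getStoreMetaSourceInformation().isStoreRequestId(); // false
```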
@@ -823,6 +823,48 @@ public class SearchBuilder implements ISearchBuilder {
return null;
}

private Predicate addPredicateSource(List<? extends IQueryParameterType> theList, SearchFilterParser.CompareOperation theOperation, RequestDetails theRequest) {
if (myDaoConfig.getStoreMetaSourceInformation() == DaoConfig.StoreMetaSourceInformation.NONE) {
String msg = myContext.getLocalizer().getMessage(SearchBuilder.class, "sourceParamDisabled");
throw new InvalidRequestException(msg);
}

Join<ResourceTable, ResourceHistoryProvenanceEntity> join = myResourceTableRoot.join("myProvenance", JoinType.LEFT);

List<Predicate> codePredicates = new ArrayList<>();

for (IQueryParameterType nextParameter : theList) {
String nextParamValue = nextParameter.getValueAsQueryToken(myContext);
int lastHashValueIndex = nextParamValue.lastIndexOf('#');
String sourceUri;
String requestId;
if (lastHashValueIndex == -1) {
sourceUri = nextParamValue;
requestId = null;
} else {
sourceUri = nextParamValue.substring(0, lastHashValueIndex);
requestId = nextParamValue.substring(lastHashValueIndex + 1);
}
requestId = left(requestId, Constants.REQUEST_ID_LENGTH);

Predicate sourceUriPredicate = myBuilder.equal(join.get("mySourceUri"), sourceUri);
Predicate requestIdPredicate = myBuilder.equal(join.get("myRequestId"), requestId);
if (isNotBlank(sourceUri) && isNotBlank(requestId)) {
codePredicates.add(myBuilder.and(sourceUriPredicate, requestIdPredicate));
} else if (isNotBlank(sourceUri)) {
codePredicates.add(sourceUriPredicate);
} else if (isNotBlank(requestId)) {
codePredicates.add(requestIdPredicate);
}
}

Predicate retVal = myBuilder.or(toArray(codePredicates));
myPredicates.add(retVal);
return retVal;
}

private Predicate addPredicateString(String theResourceName,
String theParamName,
List<? extends IQueryParameterType> theList) {

@@ -2680,6 +2722,14 @@ public class SearchBuilder implements ISearchBuilder {
} else {
throw new InvalidRequestException("Unexpected search parameter type encountered, expected string type for language search");
}
} else if (searchParam.getName().equals(Constants.PARAM_SOURCE)) {
if (searchParam.getParamType() == RestSearchParameterTypeEnum.TOKEN) {
TokenParam param = new TokenParam();
param.setValueAsQueryToken(null, null, null, theFilter.getValue());
return addPredicateSource(Collections.singletonList(param), theFilter.getOperation(), theRequest);
} else {
throw new InvalidRequestException("Unexpected search parameter type encountered, expected token type for _id search");
}
}
// else if ((searchParam.getName().equals(Constants.PARAM_TAG)) ||
// (searchParam.equals(Constants.PARAM_SECURITY))) {

@@ -2798,6 +2848,12 @@ public class SearchBuilder implements ISearchBuilder {

addPredicateTag(theAndOrParams, theParamName);

} else if (theParamName.equals(Constants.PARAM_SOURCE)) {

for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
addPredicateSource(nextAnd, SearchFilterParser.CompareOperation.eq, theRequest);
}

} else {

RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName);

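For reference, the `_source` token is split on its last `#` into a source URI and a request ID. A small standalone sketch of that parsing, extracted from the predicate logic above (the helper name is illustrative):

```java
// Sketch: splitting a _source token such as "urn:source:0#a_request_id".
// "#abc" means request-ID only; "urn:source:0" means source-URI only.
static String[] splitSourceToken(String token) {
    int idx = token.lastIndexOf('#');
    if (idx == -1) {
        return new String[]{token, null};
    }
    return new String[]{token.substring(0, idx), token.substring(idx + 1)};
}
```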
@@ -66,8 +66,8 @@ public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryT
@Param("type") String theType
);

@Query("SELECT t FROM ResourceHistoryTable t WHERE t.myResourceId = :id AND t.myResourceVersion = :version")
ResourceHistoryTable findForIdAndVersion(@Param("id") long theId, @Param("version") long theVersion);
@Query("SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE t.myResourceId = :id AND t.myResourceVersion = :version")
ResourceHistoryTable findForIdAndVersionAndFetchProvenance(@Param("id") long theId, @Param("version") long theVersion);

@Query("SELECT t.myId FROM ResourceHistoryTable t WHERE t.myResourceId = :resId AND t.myResourceVersion != :dontWantVersion")
Slice<Long> findForResourceId(Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion);

@@ -0,0 +1,38 @@
package ca.uhn.fhir.jpa.dao.data;

import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import java.util.Date;
import java.util.List;
import java.util.Map;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2019 University Health Network
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

public interface IResourceProvenanceDao extends JpaRepository<ResourceHistoryProvenanceEntity, Long> {

}

@@ -120,6 +120,7 @@ public class ExpungeEverythingService {
counter.addAndGet(expungeEverythingByType(ResourceHistoryTag.class));
counter.addAndGet(expungeEverythingByType(ResourceTag.class));
counter.addAndGet(expungeEverythingByType(TagDefinition.class));
counter.addAndGet(expungeEverythingByType(ResourceHistoryProvenanceEntity.class));
counter.addAndGet(expungeEverythingByType(ResourceHistoryTable.class));
counter.addAndGet(expungeEverythingByType(ResourceTable.class));
myTxTemplate.execute(t -> {

@@ -87,6 +87,8 @@ class ResourceExpungeService implements IResourceExpungeService {
private IInterceptorBroadcaster myInterceptorBroadcaster;
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private IResourceProvenanceDao myResourceHistoryProvenanceTableDao;

@Override
@Transactional

@@ -94,7 +96,7 @@ class ResourceExpungeService implements IResourceExpungeService {
Pageable page = PageRequest.of(0, theRemainingCount);
if (theResourceId != null) {
if (theVersion != null) {
return toSlice(myResourceHistoryTableDao.findForIdAndVersion(theResourceId, theVersion));
return toSlice(myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theResourceId, theVersion));
} else {
return myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResourceId(page, theResourceId);
}

@@ -146,6 +148,10 @@ class ResourceExpungeService implements IResourceExpungeService {

callHooks(theRequestDetails, theRemainingCount, version, id);

if (version.getProvenance() != null) {
myResourceHistoryProvenanceTableDao.delete(version.getProvenance());
}

myResourceHistoryTagDao.deleteAll(version.getTags());
myResourceHistoryTableDao.delete(version);

@@ -194,7 +200,7 @@ class ResourceExpungeService implements IResourceExpungeService {
private void expungeCurrentVersionOfResource(RequestDetails theRequestDetails, Long theResourceId, AtomicInteger theRemainingCount) {
ResourceTable resource = myResourceTableDao.findById(theResourceId).orElseThrow(IllegalStateException::new);

ResourceHistoryTable currentVersion = myResourceHistoryTableDao.findForIdAndVersion(resource.getId(), resource.getVersion());
ResourceHistoryTable currentVersion = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(resource.getId(), resource.getVersion());
if (currentVersion != null) {
expungeHistoricalVersion(theRequestDetails, currentVersion.getId(), theRemainingCount);
}

@@ -21,14 +21,43 @@ package ca.uhn.fhir.jpa.delete;
*/

import ca.uhn.fhir.jpa.util.DeleteConflict;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IIdType;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.*;
import java.util.function.Predicate;

public class DeleteConflictList implements Iterable<DeleteConflict> {
private final List<DeleteConflict> myList = new ArrayList<>();
private final Set<String> myResourceIdsMarkedForDeletion;

/**
 * Constructor
 */
public DeleteConflictList() {
myResourceIdsMarkedForDeletion = new HashSet<>();
}

/**
 * Constructor that shares (i.e. uses the same list, as opposed to cloning it)
 * of {@link #isResourceIdMarkedForDeletion(IIdType) resources marked for deletion}
 */
public DeleteConflictList(DeleteConflictList theParentList) {
myResourceIdsMarkedForDeletion = theParentList.myResourceIdsMarkedForDeletion;
}

public boolean isResourceIdMarkedForDeletion(IIdType theIdType) {
Validate.notNull(theIdType);
Validate.notBlank(theIdType.toUnqualifiedVersionless().getValue());
return myResourceIdsMarkedForDeletion.contains(theIdType.toUnqualifiedVersionless().getValue());
}

public void setResourceIdMarkedForDeletion(IIdType theIdType) {
Validate.notNull(theIdType);
Validate.notBlank(theIdType.toUnqualifiedVersionless().getValue());
myResourceIdsMarkedForDeletion.add(theIdType.toUnqualifiedVersionless().getValue());
}

public void add(DeleteConflict theDeleteConflict) {
myList.add(theDeleteConflict);

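A brief usage sketch of the sharing constructor (the Patient ID is illustrative):

```java
// Sketch: child lists share the parent's "marked for deletion" set, so a resource
// already scheduled for deletion is not reported again as a conflict while drilling
// into nested conflict resolution.
DeleteConflictList parent = new DeleteConflictList();
parent.setResourceIdMarkedForDeletion(new IdDt("Patient/123"));

DeleteConflictList child = new DeleteConflictList(parent);
boolean marked = child.isResourceIdMarkedForDeletion(new IdDt("Patient/123")); // true
```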
@@ -42,11 +42,8 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

@Service
public class DeleteConflictService {

@@ -67,7 +64,11 @@ public class DeleteConflictService {
protected IInterceptorBroadcaster myInterceptorBroadcaster;

public int validateOkToDelete(DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, RequestDetails theRequest) {
DeleteConflictList newConflicts = new DeleteConflictList();

// We want the list of resources that are marked to be the same list even as we
// drill into conflict resolution stacks.. this allows us to not get caught by
// circular references
DeleteConflictList newConflicts = new DeleteConflictList(theDeleteConflicts);

// In most cases, there will be no hooks, and so we only need to check if there is at least FIRST_QUERY_RESULT_COUNT conflict and populate that.
// Only in the case where there is a hook do we need to go back and collect larger batches of conflicts for processing.

@@ -104,6 +105,10 @@ public class DeleteConflictService {

addConflictsToList(theDeleteConflicts, theEntity, theResultList);

if (theDeleteConflicts.isEmpty()) {
return new DeleteConflictOutcome();
}

// Notify Interceptors about pre-action call
HookParams hooks = new HookParams()
.add(DeleteConflictList.class, theDeleteConflicts)

@@ -117,6 +122,12 @@ public class DeleteConflictService {
IdDt targetId = theEntity.getIdDt();
IdDt sourceId = link.getSourceResource().getIdDt();
String sourcePath = link.getSourcePath();
if (theDeleteConflicts.isResourceIdMarkedForDeletion(sourceId)) {
if (theDeleteConflicts.isResourceIdMarkedForDeletion(targetId)) {
continue;
}
}

theDeleteConflicts.add(new DeleteConflict(sourceId, sourcePath, targetId));
}
}

@@ -24,6 +24,7 @@ import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.rest.api.Constants;

@@ -34,25 +35,26 @@ import javax.persistence.*;
import java.io.Serializable;
import java.util.Date;

//@formatter:off
@Entity
@Immutable
@Subselect("SELECT h.pid as pid " +
", h.res_id as res_id " +
", h.res_type as res_type " +
", h.res_version as res_version " + // FHIR version
", h.res_ver as res_ver " + // resource version
", h.has_tags as has_tags " +
", h.res_deleted_at as res_deleted_at " +
", h.res_published as res_published " +
", h.res_updated as res_updated " +
", h.res_text as res_text " +
", h.res_encoding as res_encoding " +
", f.forced_id as FORCED_PID " +
@Subselect("SELECT h.pid as pid, " +
" h.res_id as res_id, " +
" h.res_type as res_type, " +
" h.res_version as res_version, " + // FHIR version
" h.res_ver as res_ver, " + // resource version
" h.has_tags as has_tags, " +
" h.res_deleted_at as res_deleted_at, " +
" h.res_published as res_published, " +
" h.res_updated as res_updated, " +
" h.res_text as res_text, " +
" h.res_encoding as res_encoding, " +
" p.SOURCE_URI as PROV_SOURCE_URI," +
" p.REQUEST_ID as PROV_REQUEST_ID," +
" f.forced_id as FORCED_PID " +
"FROM HFJ_RES_VER h "
+ " LEFT OUTER JOIN HFJ_FORCED_ID f ON f.resource_pid = h.res_id "
+ " LEFT OUTER JOIN HFJ_RES_VER_PROV p ON p.res_ver_pid = h.pid "
+ " INNER JOIN HFJ_RESOURCE r ON r.res_id = h.res_id and r.res_ver = h.res_ver")
// @formatter:on
public class ResourceSearchView implements IBaseResourceEntity, Serializable {

private static final long serialVersionUID = 1L;

@@ -73,36 +75,41 @@ public class ResourceSearchView implements IBaseResourceEntity, Serializable {

@Column(name = "RES_VER")
private Long myResourceVersion;

@Column(name = "PROV_REQUEST_ID", length = Constants.REQUEST_ID_LENGTH)
private String myProvenanceRequestId;
@Column(name = "PROV_SOURCE_URI", length = ResourceHistoryProvenanceEntity.SOURCE_URI_LENGTH)
private String myProvenanceSourceUri;
@Column(name = "HAS_TAGS")
private boolean myHasTags;

@Column(name = "RES_DELETED_AT")
@Temporal(TemporalType.TIMESTAMP)
private Date myDeleted;

@Temporal(TemporalType.TIMESTAMP)
@Column(name = "RES_PUBLISHED")
private Date myPublished;

@Temporal(TemporalType.TIMESTAMP)
@Column(name = "RES_UPDATED")
private Date myUpdated;

@Column(name = "RES_TEXT")
@Lob()
private byte[] myResource;

@Column(name = "RES_ENCODING")
@Enumerated(EnumType.STRING)
private ResourceEncodingEnum myEncoding;

@Column(name = "FORCED_PID", length= ForcedId.MAX_FORCED_ID_LENGTH)
@Column(name = "FORCED_PID", length = ForcedId.MAX_FORCED_ID_LENGTH)
private String myForcedPid;

public ResourceSearchView() {
}

public String getProvenanceRequestId() {
return myProvenanceRequestId;
}

public String getProvenanceSourceUri() {
return myProvenanceSourceUri;
}

@Override
public Date getDeleted() {
return myDeleted;

@@ -58,8 +58,8 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
 * </p>
 * <p>
 * When using this interceptor, client requests must include the parameter
 * <code>_cascade=true</code> on the DELETE URL in order to activate
 * cascading delete, or include the request header <code>X-Cascade-Delete: true</code>
 * <code>_cascade=delete</code> on the DELETE URL in order to activate
 * cascading delete, or include the request header <code>X-Cascade-Delete: delete</code>
 * </p>
 */
@Interceptor

@@ -113,7 +113,7 @@ public class CascadingDeleteInterceptor {

// Actually perform the delete
ourLog.info("Have delete conflict {} - Cascading delete", next);
dao.delete(nextSource, theRequest);
dao.delete(nextSource, theConflictList, theRequest);

cascadedDeletes.add(nextSourceId);

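A small client-side sketch of triggering a cascading delete with the updated parameter value; the server base URL and resource ID are illustrative, and the Apache HttpClient wiring is an assumption based on the test imports elsewhere in this commit:

```java
// Sketch: requesting a cascading delete with the new "_cascade=delete" value.
// Either the URL parameter or the "X-Cascade-Delete: delete" header is sufficient.
CloseableHttpClient client = HttpClients.createDefault();
HttpDelete delete = new HttpDelete("http://localhost:8000/fhir/Patient/123?_cascade=delete");
delete.addHeader("X-Cascade-Delete", "delete");
try (CloseableHttpResponse response = client.execute(delete)) {
    // a 2xx status indicates the delete (including any cascaded deletes) succeeded
}
```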
@@ -159,7 +159,7 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe
.stream()
.map(CircularQueueCaptureQueriesListener::formatQueryAsSql)
.collect(Collectors.toList());
ourLog.info("Select Queries:\n{}", String.join("\n", queries));
ourLog.info("Update Queries:\n{}", String.join("\n", queries));
}

/**

@@ -23,6 +23,7 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.sql.DataSource;
import java.sql.Connection;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

import static org.junit.Assert.fail;

@@ -110,8 +111,8 @@ public class TestR4Config extends BaseJavaConfigR4 {
SLF4JLogLevel level = SLF4JLogLevel.INFO;
DataSource dataSource = ProxyDataSourceBuilder
.create(retVal)
.logQueryBySlf4j(level, "SQL")
// .logSlowQueryBySlf4j(10, TimeUnit.SECONDS)
// .logQueryBySlf4j(level, "SQL")
.logSlowQueryBySlf4j(10, TimeUnit.SECONDS)
// .countQuery(new ThreadQueryCountHolder())
.beforeQuery(new BlockLargeNumbersOfParamsListener())
.afterQuery(captureQueriesListener())

@@ -0,0 +1,234 @@
package ca.uhn.fhir.jpa.dao.dstu3;

import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.TestUtil;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.TokenAndListParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import org.apache.commons.text.RandomStringGenerator;
import org.hl7.fhir.dstu3.model.Patient;
import org.hl7.fhir.dstu3.model.StringType;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;

import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.matchesPattern;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.when;

@SuppressWarnings({"Duplicates"})
public class FhirResourceDaoDstu3SourceTest extends BaseJpaDstu3Test {

@After
public final void after() {
when(mySrd.getRequestId()).thenReturn(null);
myDaoConfig.setStoreMetaSourceInformation(new DaoConfig().getStoreMetaSourceInformation());
}

@Before
public void before() {
myDaoConfig.setStoreMetaSourceInformation(DaoConfig.StoreMetaSourceInformation.SOURCE_URI_AND_REQUEST_ID);
}

@Test
public void testSourceStoreAndSearch() {
String requestId = "a_request_id";

when(mySrd.getRequestId()).thenReturn(requestId);
Patient pt0 = new Patient();
pt0.getMeta().addExtension(JpaConstants.EXT_META_SOURCE, new StringType("urn:source:0"));
pt0.setActive(true);
IIdType pt0id = myPatientDao.create(pt0, mySrd).getId().toUnqualifiedVersionless();

Patient pt1 = new Patient();
pt1.getMeta().addExtension(JpaConstants.EXT_META_SOURCE, new StringType("urn:source:1"));
pt1.setActive(true);
IIdType pt1id = myPatientDao.create(pt1, mySrd).getId().toUnqualifiedVersionless();

// Search by source URI
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(Constants.PARAM_SOURCE, new TokenParam("urn:source:0"));
IBundleProvider result = myPatientDao.search(params);
assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder(pt0id.getValue()));
pt0 = (Patient) result.getResources(0, 1).get(0);
assertEquals("urn:source:0#a_request_id", pt0.getMeta().getExtensionString(JpaConstants.EXT_META_SOURCE));

// Search by request ID
params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(Constants.PARAM_SOURCE, new TokenParam("#a_request_id"));
result = myPatientDao.search(params);
assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder(pt0id.getValue(), pt1id.getValue()));

// Search by source URI and request ID
params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(Constants.PARAM_SOURCE, new TokenParam("urn:source:0#a_request_id"));
result = myPatientDao.search(params);
assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder(pt0id.getValue()));

}

@Test
public void testSearchWithOr() {
String requestId = "a_request_id";

when(mySrd.getRequestId()).thenReturn(requestId);
Patient pt0 = new Patient();
pt0.getMeta().addExtension(JpaConstants.EXT_META_SOURCE, new StringType("urn:source:0"));
pt0.setActive(true);
IIdType pt0id = myPatientDao.create(pt0, mySrd).getId().toUnqualifiedVersionless();

Patient pt1 = new Patient();
pt1.getMeta().addExtension(JpaConstants.EXT_META_SOURCE, new StringType("urn:source:1"));
pt1.setActive(true);
IIdType pt1id = myPatientDao.create(pt1, mySrd).getId().toUnqualifiedVersionless();

Patient pt2 = new Patient();
pt2.getMeta().addExtension(JpaConstants.EXT_META_SOURCE, new StringType("urn:source:2"));
pt2.setActive(true);
myPatientDao.create(pt2, mySrd).getId().toUnqualifiedVersionless();

// Search
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(Constants.PARAM_SOURCE, new TokenOrListParam()
.addOr(new TokenParam("urn:source:0"))
.addOr(new TokenParam("urn:source:1")));
IBundleProvider result = myPatientDao.search(params);
assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder(pt0id.getValue(), pt1id.getValue()));

}

@Test
public void testSearchWithAnd() {
String requestId = "a_request_id";

when(mySrd.getRequestId()).thenReturn(requestId);
Patient pt0 = new Patient();
pt0.getMeta().addExtension(JpaConstants.EXT_META_SOURCE, new StringType("urn:source:0"));
pt0.setActive(true);
IIdType pt0id = myPatientDao.create(pt0, mySrd).getId().toUnqualifiedVersionless();

Patient pt1 = new Patient();
pt1.getMeta().addExtension(JpaConstants.EXT_META_SOURCE, new StringType("urn:source:1"));
pt1.setActive(true);
IIdType pt1id = myPatientDao.create(pt1, mySrd).getId().toUnqualifiedVersionless();

Patient pt2 = new Patient();
pt2.getMeta().addExtension(JpaConstants.EXT_META_SOURCE, new StringType("urn:source:2"));
pt2.setActive(true);
myPatientDao.create(pt2, mySrd).getId().toUnqualifiedVersionless();

// Search
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(Constants.PARAM_SOURCE, new TokenAndListParam()
.addAnd(new TokenParam("urn:source:0"), new TokenParam("@a_request_id")));
IBundleProvider result = myPatientDao.search(params);
assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder(pt0id.getValue()));

}

@Test
public void testSearchLongRequestId() {
String requestId = new RandomStringGenerator.Builder().build().generate(5000);
when(mySrd.getRequestId()).thenReturn(requestId);

Patient pt0 = new Patient();
pt0.getMeta().addExtension(JpaConstants.EXT_META_SOURCE, new StringType("urn:source:0"));
pt0.setActive(true);
IIdType pt0id = myPatientDao.create(pt0, mySrd).getId().toUnqualifiedVersionless();

// Search
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(Constants.PARAM_SOURCE, new TokenAndListParam()
.addAnd(new TokenParam("urn:source:0"), new TokenParam("#" + requestId)));
IBundleProvider result = myPatientDao.search(params);
assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder(pt0id.getValue()));

}

@Test
public void testSourceNotPreservedAcrossUpdate() {

Patient pt0 = new Patient();
pt0.getMeta().addExtension(JpaConstants.EXT_META_SOURCE, new StringType("urn:source:0"));
pt0.setActive(true);
IIdType pt0id = myPatientDao.create(pt0, mySrd).getId().toUnqualifiedVersionless();

pt0 = myPatientDao.read(pt0id);
assertEquals("urn:source:0", pt0.getMeta().getExtensionString(JpaConstants.EXT_META_SOURCE));

pt0.getMeta().getExtension().clear();
pt0.setActive(false);
myPatientDao.update(pt0);

pt0 = myPatientDao.read(pt0id.withVersion("2"));
assertEquals(null, pt0.getMeta().getExtensionString(JpaConstants.EXT_META_SOURCE));

}

@Test
public void testSourceDisabled() {
myDaoConfig.setStoreMetaSourceInformation(DaoConfig.StoreMetaSourceInformation.NONE);
when(mySrd.getRequestId()).thenReturn("0000000000000000");

Patient pt0 = new Patient();
pt0.getMeta().addExtension(JpaConstants.EXT_META_SOURCE, new StringType("urn:source:0"));
pt0.setActive(true);
IIdType pt0id = myPatientDao.create(pt0, mySrd).getId().toUnqualifiedVersionless();

pt0 = myPatientDao.read(pt0id);
assertEquals(null, pt0.getMeta().getExtensionString(JpaConstants.EXT_META_SOURCE));

pt0.getMeta().addExtension(JpaConstants.EXT_META_SOURCE, new StringType("urn:source:1"));
pt0.setActive(false);
myPatientDao.update(pt0);

pt0 = myPatientDao.read(pt0id.withVersion("2"));
assertEquals(null, pt0.getMeta().getExtensionString(JpaConstants.EXT_META_SOURCE));

// Search without source param
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
IBundleProvider result = myPatientDao.search(params);
assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder(pt0id.getValue()));

// Search with source param
params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(Constants.PARAM_SOURCE, new TokenAndListParam()
.addAnd(new TokenParam("urn:source:0"), new TokenParam("@a_request_id")));
try {
myPatientDao.search(params);
} catch (InvalidRequestException e) {
assertEquals(e.getMessage(), "The _source parameter is disabled on this server");
}
}

@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}

public static void assertConflictException(String theResourceType, ResourceVersionConflictException e) {
assertThat(e.getMessage(), matchesPattern(
"Unable to delete [a-zA-Z]+/[0-9]+ because at least one resource has a reference to this resource. First reference found was resource " + theResourceType + "/[0-9]+ in path [a-zA-Z]+.[a-zA-Z]+"));

}

}

@@ -2,14 +2,9 @@ package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.*;
import org.junit.AfterClass;

@@ -19,7 +14,6 @@ import org.slf4j.LoggerFactory;

import java.io.IOException;

import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.*;

public class FhirResourceDaoR4DeleteTest extends BaseJpaR4Test {

@@ -42,11 +36,11 @@ public class FhirResourceDaoR4DeleteTest extends BaseJpaR4Test {

// Current version should be marked as deleted
runInTransaction(()->{
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 1);
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1);
assertNull(resourceTable.getDeleted());
});
runInTransaction(()->{
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 2);
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 2);
assertNotNull(resourceTable.getDeleted());
});

@@ -169,7 +163,7 @@ public class FhirResourceDaoR4DeleteTest extends BaseJpaR4Test {
// Mark the current history version as not-deleted even though the actual resource
// table entry is marked deleted
runInTransaction(()->{
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 2);
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 2);
resourceTable.setDeleted(null);
myResourceHistoryTableDao.save(resourceTable);
});

@@ -0,0 +1,144 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.TestUtil;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.*;
import org.junit.*;
import org.springframework.test.context.TestPropertySource;

import java.util.*;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.reset;

@TestPropertySource(properties = {
	"scheduling_disabled=true"
})
public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4QueryCountTest.class);

	@After
	public void afterResetDao() {
		myDaoConfig.setResourceMetaCountHardLimit(new DaoConfig().getResourceMetaCountHardLimit());
		myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
	}

	@Before
	public void before() {
		myInterceptorRegistry.registerInterceptor(myInterceptor);
	}

	@Test
	public void testUpdateWithNoChanges() {
		IIdType id = runInTransaction(() -> {
			Patient p = new Patient();
			p.addIdentifier().setSystem("urn:system").setValue("2");
			return myPatientDao.create(p).getId().toUnqualified();
		});

		myCaptureQueriesListener.clear();
		runInTransaction(() -> {
			Patient p = new Patient();
			p.setId(id.getIdPart());
			p.addIdentifier().setSystem("urn:system").setValue("2");
			myPatientDao.update(p).getResource();
		});
		myCaptureQueriesListener.logSelectQueriesForCurrentThread();
		assertEquals(5, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
		myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
		assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
		assertThat(myCaptureQueriesListener.getInsertQueriesForCurrentThread(), empty());
		assertThat(myCaptureQueriesListener.getDeleteQueriesForCurrentThread(), empty());
	}

	@Test
	public void testUpdateWithChanges() {
		IIdType id = runInTransaction(() -> {
			Patient p = new Patient();
			p.addIdentifier().setSystem("urn:system").setValue("2");
			return myPatientDao.create(p).getId().toUnqualified();
		});

		myCaptureQueriesListener.clear();
		runInTransaction(() -> {
			Patient p = new Patient();
			p.setId(id.getIdPart());
			p.addIdentifier().setSystem("urn:system").setValue("3");
			myPatientDao.update(p).getResource();
		});
		myCaptureQueriesListener.logSelectQueriesForCurrentThread();
		assertEquals(6, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
		myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
		assertEquals(2, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
		myCaptureQueriesListener.logInsertQueriesForCurrentThread();
		assertEquals(1, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
		myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
		assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
	}

	@Test
	public void testRead() {
		IIdType id = runInTransaction(() -> {
			Patient p = new Patient();
			p.addIdentifier().setSystem("urn:system").setValue("2");
			return myPatientDao.create(p).getId().toUnqualified();
		});

		myCaptureQueriesListener.clear();
		runInTransaction(() -> {
			myPatientDao.read(id.toVersionless());
		});
		myCaptureQueriesListener.logSelectQueriesForCurrentThread();
		assertEquals(2, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
		myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
		assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
		myCaptureQueriesListener.logInsertQueriesForCurrentThread();
		assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
		myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
		assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
	}

	@Test
	public void testVRead() {
		IIdType id = runInTransaction(() -> {
			Patient p = new Patient();
			p.addIdentifier().setSystem("urn:system").setValue("2");
			return myPatientDao.create(p).getId().toUnqualified();
		});

		myCaptureQueriesListener.clear();
		runInTransaction(() -> {
			myPatientDao.read(id.withVersion("1"));
		});
		myCaptureQueriesListener.logSelectQueriesForCurrentThread();
		assertEquals(2, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
		myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
		assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
		myCaptureQueriesListener.logInsertQueriesForCurrentThread();
		assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
		myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
		assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
	}

	@AfterClass
	public static void afterClassClearContext() {
		TestUtil.clearAllStaticFieldsForUnitTest();
	}

}
@@ -1298,6 +1298,34 @@ public class FhirResourceDaoR4SearchCustomSearchParamTest extends BaseJpaR4Test
	}

	@Test
	public void testCustomCodeableConcept() {
		SearchParameter fooSp = new SearchParameter();
		fooSp.addBase("ChargeItem");
		fooSp.setName("Product");
		fooSp.setCode("product");
		fooSp.setType(org.hl7.fhir.r4.model.Enumerations.SearchParamType.TOKEN);
		fooSp.setTitle("Product within a ChargeItem");
		fooSp.setExpression("ChargeItem.product.as(CodeableConcept)");
		fooSp.setStatus(Enumerations.PublicationStatus.ACTIVE);
		mySearchParameterDao.create(fooSp, mySrd);

		mySearchParamRegistry.forceRefresh();

		ChargeItem ci = new ChargeItem();
		ci.setProduct(new CodeableConcept());
		ci.getProductCodeableConcept().addCoding().setCode("1");
		myChargeItemDao.create(ci);

		SearchParameterMap map = new SearchParameterMap();
		map.setLoadSynchronous(true);
		map.add("product", new TokenParam(null, "1"));
		IBundleProvider results = myChargeItemDao.search(map);
		assertEquals(1, results.size().intValue());

	}

	@AfterClass
	public static void afterClassClearContext() {
		TestUtil.clearAllStaticFieldsForUnitTest();
@@ -0,0 +1,234 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.TestUtil;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.TokenAndListParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import org.apache.commons.text.RandomStringGenerator;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Patient;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;

import java.util.UUID;

import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.matchesPattern;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.when;

@SuppressWarnings({"Duplicates"})
public class FhirResourceDaoR4SourceTest extends BaseJpaR4Test {

	@After
	public final void after() {
		when(mySrd.getRequestId()).thenReturn(null);
		myDaoConfig.setStoreMetaSourceInformation(new DaoConfig().getStoreMetaSourceInformation());
	}

	@Before
	public void before() {
		myDaoConfig.setStoreMetaSourceInformation(DaoConfig.StoreMetaSourceInformation.SOURCE_URI_AND_REQUEST_ID);
	}

	@Test
	public void testSourceStoreAndSearch() {
		String requestId = "a_request_id";

		when(mySrd.getRequestId()).thenReturn(requestId);
		Patient pt0 = new Patient();
		pt0.getMeta().setSource("urn:source:0");
		pt0.setActive(true);
		IIdType pt0id = myPatientDao.create(pt0, mySrd).getId().toUnqualifiedVersionless();

		Patient pt1 = new Patient();
		pt1.getMeta().setSource("urn:source:1");
		pt1.setActive(true);
		IIdType pt1id = myPatientDao.create(pt1, mySrd).getId().toUnqualifiedVersionless();

		// Search by source URI
		SearchParameterMap params = new SearchParameterMap();
		params.setLoadSynchronous(true);
		params.add(Constants.PARAM_SOURCE, new TokenParam("urn:source:0"));
		IBundleProvider result = myPatientDao.search(params);
		assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder(pt0id.getValue()));
		pt0 = (Patient) result.getResources(0, 1).get(0);
		assertEquals("urn:source:0#a_request_id", pt0.getMeta().getSource());

		// Search by request ID
		params = new SearchParameterMap();
		params.setLoadSynchronous(true);
		params.add(Constants.PARAM_SOURCE, new TokenParam("#a_request_id"));
		result = myPatientDao.search(params);
		assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder(pt0id.getValue(), pt1id.getValue()));

		// Search by source URI and request ID
		params = new SearchParameterMap();
		params.setLoadSynchronous(true);
		params.add(Constants.PARAM_SOURCE, new TokenParam("urn:source:0#a_request_id"));
		result = myPatientDao.search(params);
		assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder(pt0id.getValue()));

	}

	@Test
	public void testSearchWithOr() {
		String requestId = "a_request_id";

		when(mySrd.getRequestId()).thenReturn(requestId);
		Patient pt0 = new Patient();
		pt0.getMeta().setSource("urn:source:0");
		pt0.setActive(true);
		IIdType pt0id = myPatientDao.create(pt0, mySrd).getId().toUnqualifiedVersionless();

		Patient pt1 = new Patient();
		pt1.getMeta().setSource("urn:source:1");
		pt1.setActive(true);
		IIdType pt1id = myPatientDao.create(pt1, mySrd).getId().toUnqualifiedVersionless();

		Patient pt2 = new Patient();
		pt2.getMeta().setSource("urn:source:2");
		pt2.setActive(true);
		myPatientDao.create(pt2, mySrd).getId().toUnqualifiedVersionless();

		// Search
		SearchParameterMap params = new SearchParameterMap();
		params.setLoadSynchronous(true);
		params.add(Constants.PARAM_SOURCE, new TokenOrListParam()
			.addOr(new TokenParam("urn:source:0"))
			.addOr(new TokenParam("urn:source:1")));
		IBundleProvider result = myPatientDao.search(params);
		assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder(pt0id.getValue(), pt1id.getValue()));

	}

	@Test
	public void testSearchWithAnd() {
		String requestId = "a_request_id";

		when(mySrd.getRequestId()).thenReturn(requestId);
		Patient pt0 = new Patient();
		pt0.getMeta().setSource("urn:source:0");
		pt0.setActive(true);
		IIdType pt0id = myPatientDao.create(pt0, mySrd).getId().toUnqualifiedVersionless();

		Patient pt1 = new Patient();
		pt1.getMeta().setSource("urn:source:1");
		pt1.setActive(true);
		IIdType pt1id = myPatientDao.create(pt1, mySrd).getId().toUnqualifiedVersionless();

		Patient pt2 = new Patient();
		pt2.getMeta().setSource("urn:source:2");
		pt2.setActive(true);
		myPatientDao.create(pt2, mySrd).getId().toUnqualifiedVersionless();

		// Search
		SearchParameterMap params = new SearchParameterMap();
		params.setLoadSynchronous(true);
		params.add(Constants.PARAM_SOURCE, new TokenAndListParam()
			.addAnd(new TokenParam("urn:source:0"), new TokenParam("@a_request_id")));
		IBundleProvider result = myPatientDao.search(params);
		assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder(pt0id.getValue()));

	}

	@Test
	public void testSearchLongRequestId() {
		String requestId = new RandomStringGenerator.Builder().build().generate(5000);
		when(mySrd.getRequestId()).thenReturn(requestId);

		Patient pt0 = new Patient();
		pt0.getMeta().setSource("urn:source:0");
		pt0.setActive(true);
		IIdType pt0id = myPatientDao.create(pt0, mySrd).getId().toUnqualifiedVersionless();

		// Search
		SearchParameterMap params = new SearchParameterMap();
		params.setLoadSynchronous(true);
		params.add(Constants.PARAM_SOURCE, new TokenAndListParam()
			.addAnd(new TokenParam("urn:source:0"), new TokenParam("#" + requestId)));
		IBundleProvider result = myPatientDao.search(params);
		assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder(pt0id.getValue()));

	}

	@Test
	public void testSourceNotPreservedAcrossUpdate() {

		Patient pt0 = new Patient();
		pt0.getMeta().setSource("urn:source:0");
		pt0.setActive(true);
		IIdType pt0id = myPatientDao.create(pt0, mySrd).getId().toUnqualifiedVersionless();

		pt0 = myPatientDao.read(pt0id);
		assertEquals("urn:source:0", pt0.getMeta().getSource());

		pt0.getMeta().setSource(null);
		pt0.setActive(false);
		myPatientDao.update(pt0);

		pt0 = myPatientDao.read(pt0id.withVersion("2"));
		assertEquals(null, pt0.getMeta().getSource());

	}

	@Test
	public void testSourceDisabled() {
		myDaoConfig.setStoreMetaSourceInformation(DaoConfig.StoreMetaSourceInformation.NONE);
		when(mySrd.getRequestId()).thenReturn("0000000000000000");

		Patient pt0 = new Patient();
		pt0.getMeta().setSource("urn:source:0");
		pt0.setActive(true);
		IIdType pt0id = myPatientDao.create(pt0, mySrd).getId().toUnqualifiedVersionless();

		pt0 = myPatientDao.read(pt0id);
		assertEquals(null, pt0.getMeta().getSource());

		pt0.getMeta().setSource("urn:source:1");
		pt0.setActive(false);
		myPatientDao.update(pt0);

		pt0 = myPatientDao.read(pt0id.withVersion("2"));
		assertEquals(null, pt0.getMeta().getSource());

		// Search without source param
		SearchParameterMap params = new SearchParameterMap();
		params.setLoadSynchronous(true);
		IBundleProvider result = myPatientDao.search(params);
		assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder(pt0id.getValue()));

		// Search with source param
		params = new SearchParameterMap();
		params.setLoadSynchronous(true);
		params.add(Constants.PARAM_SOURCE, new TokenAndListParam()
			.addAnd(new TokenParam("urn:source:0"), new TokenParam("@a_request_id")));
		try {
			myPatientDao.search(params);
		} catch (InvalidRequestException e) {
			assertEquals(e.getMessage(), "The _source parameter is disabled on this server");
		}
	}

	@AfterClass
	public static void afterClassClearContext() {
		TestUtil.clearAllStaticFieldsForUnitTest();
	}

	public static void assertConflictException(String theResourceType, ResourceVersionConflictException e) {
		assertThat(e.getMessage(), matchesPattern(
			"Unable to delete [a-zA-Z]+/[0-9]+ because at least one resource has a reference to this resource. First reference found was resource " + theResourceType + "/[0-9]+ in path [a-zA-Z]+.[a-zA-Z]+"));

	}

}
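For context, a hedged sketch of how a client might exercise the new _source search parameter over REST once this commit is in place. This is not part of the commit; the server base URL is an assumption, and only the parameter name (Constants.PARAM_SOURCE) comes from the code above.

// Sketch only: search Patient resources by Resource.meta.source from a generic client
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.gclient.TokenClientParam;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Patient;

FhirContext ctx = FhirContext.forR4();
IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8000/fhir"); // assumed base URL
Bundle results = client.search()
	.forResource(Patient.class)
	.where(new TokenClientParam(Constants.PARAM_SOURCE).exactly().code("urn:source:0"))
	.returnBundle(Bundle.class)
	.execute();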
@@ -187,7 +187,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
		table.setDeleted(new Date());
		table = myResourceTableDao.saveAndFlush(table);
		ResourceHistoryTable newHistory = table.toHistory();
		ResourceHistoryTable currentHistory = myResourceHistoryTableDao.findForIdAndVersion(table.getId(), 1L);
		ResourceHistoryTable currentHistory = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(table.getId(), 1L);
		newHistory.setEncoding(currentHistory.getEncoding());
		newHistory.setResource(currentHistory.getResource());
		myResourceHistoryTableDao.save(newHistory);

@@ -2724,7 +2724,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
		tx.execute(new TransactionCallbackWithoutResult() {
			@Override
			protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
				ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 1L);
				ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1L);
				String newContent = myFhirCtx.newJsonParser().encodeResourceToString(p);
				newContent = newContent.replace("male", "foo");
				table.setResource(newContent.getBytes(Charsets.UTF_8));
@@ -108,20 +108,12 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {
			return resourceTable.getUpdated().getValueAsString();
		});

		myCaptureQueriesListener.clear();
		runInTransaction(() -> {
			Patient p = new Patient();
			p.setId(id.getIdPart());
			p.addIdentifier().setSystem("urn:system").setValue("2");
			myPatientDao.update(p);
			myPatientDao.update(p).getResource();
		});
		myCaptureQueriesListener.logSelectQueriesForCurrentThread();
		// TODO: it'd be nice if this was lower
		assertEquals(6, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
		myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
		assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
		assertThat(myCaptureQueriesListener.getInsertQueriesForCurrentThread(), empty());
		assertThat(myCaptureQueriesListener.getDeleteQueriesForCurrentThread(), empty());

		runInTransaction(() -> {
			List<ResourceTable> allResources = myResourceTableDao.findAll();

@@ -157,7 +149,9 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {

		// Do a read
		{
			myCaptureQueriesListener.clear();
			Patient patient = myPatientDao.read(id, mySrd);
			myCaptureQueriesListener.logAllQueriesForCurrentThread();
			List<CanonicalType> tl = patient.getMeta().getProfile();
			assertEquals(1, tl.size());
			assertEquals("http://foo/bar", tl.get(0).getValue());
@@ -1,22 +1,23 @@
package ca.uhn.fhir.jpa.dao.r4;

import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertThat;

import java.util.List;

import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.StringOrListParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.util.TestUtil;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.junit.AfterClass;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;

import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.util.TestUtil;
import java.util.List;

import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertThat;

public class FhirSearchDaoR4Test extends BaseJpaR4Test {
@@ -14,13 +14,9 @@ import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.*;
import ca.uhn.fhir.util.TestUtil;
import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.ValidationResult;
import org.apache.commons.io.IOUtils;
import org.hamcrest.Matchers;
import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.Bundle.*;

@@ -520,7 +516,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
		TransactionTemplate template = new TransactionTemplate(myTxManager);
		template.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
		template.execute((TransactionCallback<ResourceTable>) t -> {
			ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), id.getVersionIdPartAsLong());
			ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), id.getVersionIdPartAsLong());
			resourceHistoryTable.setEncoding(ResourceEncodingEnum.JSON);
			try {
				resourceHistoryTable.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));

@@ -569,7 +565,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
		assertEquals(1, myPatientDao.search(searchParamMap).size().intValue());

		runInTransaction(()->{
			ResourceHistoryTable historyEntry = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 3);
			ResourceHistoryTable historyEntry = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 3);
			assertNotNull(historyEntry);
			myResourceHistoryTableDao.delete(historyEntry);
		});
@@ -64,7 +64,6 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {
	protected static String ourServerBase;
	protected static SearchParamRegistryR4 ourSearchParamRegistry;
	private static DatabaseBackedPagingProvider ourPagingProvider;
	protected static ISearchDao mySearchEntityDao;
	protected static ISearchCoordinatorSvc mySearchCoordinatorSvc;
	private static GenericWebApplicationContext ourWebApplicationContext;
	private static SubscriptionMatcherInterceptor ourSubscriptionMatcherInterceptor;

@@ -165,7 +164,6 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {
		WebApplicationContext wac = WebApplicationContextUtils.getWebApplicationContext(subsServletHolder.getServlet().getServletConfig().getServletContext());
		myValidationSupport = wac.getBean(JpaValidationSupportChainR4.class);
		mySearchCoordinatorSvc = wac.getBean(ISearchCoordinatorSvc.class);
		mySearchEntityDao = wac.getBean(ISearchDao.class);
		ourSearchParamRegistry = wac.getBean(SearchParamRegistryR4.class);
		ourSubscriptionMatcherInterceptor = wac.getBean(SubscriptionMatcherInterceptor.class);
		ourSubscriptionMatcherInterceptor.start();
@@ -136,6 +136,48 @@ public class CascadingDeleteInterceptorR4Test extends BaseResourceProviderR4Test
		}
	}

	@Test
	public void testDeleteCascadingWithCircularReference() throws IOException {

		Organization o0 = new Organization();
		o0.setName("O0");
		IIdType o0id = myOrganizationDao.create(o0).getId().toUnqualifiedVersionless();

		Organization o1 = new Organization();
		o1.setName("O1");
		o1.getPartOf().setReference(o0id.getValue());
		IIdType o1id = myOrganizationDao.create(o1).getId().toUnqualifiedVersionless();

		o0.getPartOf().setReference(o1id.getValue());
		myOrganizationDao.update(o0);

		ourRestServer.getInterceptorService().registerInterceptor(myDeleteInterceptor);

		HttpDelete delete = new HttpDelete(ourServerBase + "/Organization/" + o0id.getIdPart() + "?" + Constants.PARAMETER_CASCADE_DELETE + "=" + Constants.CASCADE_DELETE + "&_pretty=true");
		delete.addHeader(Constants.HEADER_ACCEPT, Constants.CT_FHIR_JSON_NEW);
		try (CloseableHttpResponse response = ourHttpClient.execute(delete)) {
			assertEquals(200, response.getStatusLine().getStatusCode());
			String deleteResponse = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8);
			ourLog.info("Response: {}", deleteResponse);
			assertThat(deleteResponse, containsString("Cascaded delete to "));
		}

		try {
			ourLog.info("Reading {}", o0id);
			ourClient.read().resource(Organization.class).withId(o0id).execute();
			fail();
		} catch (ResourceGoneException e) {
			// good
		}
		try {
			ourLog.info("Reading {}", o1id);
			ourClient.read().resource(Organization.class).withId(o1id).execute();
			fail();
		} catch (ResourceGoneException e) {
			// good
		}
	}

	@Test
	public void testDeleteCascadingByHeader() throws IOException {
		createResources();
@@ -75,6 +75,7 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
		Patient p = new Patient();
		p.setId("PT-ONEVERSION");
		p.getMeta().addTag().setSystem("http://foo").setCode("bar");
		p.getMeta().setSource("http://foo_source");
		p.setActive(true);
		p.addIdentifier().setSystem("foo").setValue("bar");
		p.addName().setFamily("FAM");
@@ -19,7 +19,6 @@ import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.client.api.IHttpResponse;
import ca.uhn.fhir.rest.client.interceptor.CapturingInterceptor;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.gclient.StringClientParam;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;

@@ -3017,7 +3016,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
		new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
			@Override
			protected void doInTransactionWithoutResult(TransactionStatus status) {
				ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(id1.getIdPartAsLong(), 1);
				ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id1.getIdPartAsLong(), 1);
				myResourceHistoryTableDao.delete(version);
			}
		});

@@ -3038,15 +3037,18 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
		p2.setActive(false);
		IIdType id2 = ourClient.create().resource(p2).execute().getId();

		myCaptureQueriesListener.clear();
		new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
			@Override
			protected void doInTransactionWithoutResult(TransactionStatus status) {
				ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(id1.getIdPartAsLong(), 1);
				ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id1.getIdPartAsLong(), 1);
				myResourceHistoryTableDao.delete(version);
			}
		});
		myCaptureQueriesListener.logAllQueriesForCurrentThread();

		Bundle bundle = ourClient.search().forResource("Patient").returnBundle(Bundle.class).execute();
		ourLog.info("Result: {}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(bundle));
		assertEquals(2, bundle.getTotal());
		assertEquals(1, bundle.getEntry().size());
		assertEquals(id2.getIdPart(), bundle.getEntry().get(0).getResource().getIdElement().getIdPart());
@@ -28,6 +28,7 @@ import ca.uhn.fhir.jpa.migrate.taskdef.BaseTableColumnTypeTask;
import ca.uhn.fhir.jpa.migrate.taskdef.CalculateHashesTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.model.entity.*;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.util.VersionEnum;

import java.util.Arrays;

@@ -55,6 +56,29 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
		init350();
		init360();
		init400();
		init410();
	}

	protected void init410() {
		Builder version = forVersion(VersionEnum.V4_1_0);

		version.startSectionWithMessage("Processing table: HFJ_RES_VER_PROV");
		Builder.BuilderAddTableByColumns resVerProv = version.addTableByColumns("HFJ_RES_VER_PROV", "RES_VER_PID");
		resVerProv.addColumn("RES_VER_PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
		resVerProv
			.addForeignKey("FK_RESVERPROV_RESVER_PID")
			.toColumn("RES_VER_PID")
			.references("HFJ_RES_VER", "PID");
		resVerProv.addColumn("RES_PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
		resVerProv
			.addForeignKey("FK_RESVERPROV_RES_PID")
			.toColumn("RES_PID")
			.references("HFJ_RESOURCE", "RES_ID");
		resVerProv.addColumn("SOURCE_URI").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, ResourceHistoryProvenanceEntity.SOURCE_URI_LENGTH);
		resVerProv.addColumn("REQUEST_ID").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, Constants.REQUEST_ID_LENGTH);
		resVerProv.addIndex("IDX_RESVERPROV_SOURCEURI").unique(false).withColumns("SOURCE_URI");
		resVerProv.addIndex("IDX_RESVERPROV_REQUESTID").unique(false).withColumns("REQUEST_ID");

	}

	protected void init400() {
@@ -0,0 +1,63 @@
package ca.uhn.fhir.jpa.model.entity;

import ca.uhn.fhir.rest.api.Constants;

import javax.persistence.*;

@Table(name = "HFJ_RES_VER_PROV", indexes = {
	@Index(name = "IDX_RESVERPROV_SOURCEURI", columnList = "SOURCE_URI"),
	@Index(name = "IDX_RESVERPROV_REQUESTID", columnList = "REQUEST_ID")
})
@Entity
public class ResourceHistoryProvenanceEntity {

	public static final int SOURCE_URI_LENGTH = 100;

	@Id
	@Column(name = "RES_VER_PID")
	private Long myId;
	@OneToOne(fetch = FetchType.LAZY)
	@JoinColumn(name = "RES_VER_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_RESVERPROV_RESVER_PID"), nullable = false, insertable = false, updatable = false)
	@MapsId
	private ResourceHistoryTable myResourceHistoryTable;
	@ManyToOne(fetch = FetchType.LAZY)
	@JoinColumn(name = "RES_PID", referencedColumnName = "RES_ID", foreignKey = @ForeignKey(name = "FK_RESVERPROV_RES_PID"), nullable = false)
	private ResourceTable myResourceTable;
	@Column(name = "SOURCE_URI", length = SOURCE_URI_LENGTH, nullable = true)
	private String mySourceUri;
	@Column(name = "REQUEST_ID", length = Constants.REQUEST_ID_LENGTH, nullable = true)
	private String myRequestId;

	public ResourceTable getResourceTable() {
		return myResourceTable;
	}

	public void setResourceTable(ResourceTable theResourceTable) {
		myResourceTable = theResourceTable;
	}

	public ResourceHistoryTable getResourceHistoryTable() {
		return myResourceHistoryTable;
	}

	public void setResourceHistoryTable(ResourceHistoryTable theResourceHistoryTable) {
		myResourceHistoryTable = theResourceHistoryTable;
	}

	public String getSourceUri() {
		return mySourceUri;
	}

	public void setSourceUri(String theSourceUri) {
		mySourceUri = theSourceUri;
	}

	public String getRequestId() {
		return myRequestId;
	}

	public void setRequestId(String theRequestId) {
		myRequestId = theRequestId;
	}

}
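As an illustration of how the two provenance columns relate to the value a client sees, a hedged helper sketch follows. It is not part of this commit; it simply mirrors the "sourceUri#requestId" form asserted in FhirResourceDaoR4SourceTest earlier in this diff, and the method name is an assumption.

// Sketch: combine the stored provenance columns into the Resource.meta.source form used by the JPA server
public static String toMetaSourceValue(ResourceHistoryProvenanceEntity theProvenance) {
	String source = theProvenance.getSourceUri() != null ? theProvenance.getSourceUri() : "";
	if (theProvenance.getRequestId() != null) {
		// the request ID is appended as a fragment, e.g. "urn:source:0#a_request_id"
		source = source + "#" + theProvenance.getRequestId();
	}
	return source;
}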
@@ -20,8 +20,6 @@ package ca.uhn.fhir.jpa.model.entity;
 * #L%
 */

import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import org.hibernate.annotations.OptimisticLock;

import javax.persistence.*;

@@ -29,7 +27,6 @@ import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;

//@formatter:off
@Entity
@Table(name = "HFJ_RES_VER", uniqueConstraints = {
	@UniqueConstraint(name = ResourceHistoryTable.IDX_RESVER_ID_VER, columnNames = {"RES_ID", "RES_VER"})

@@ -38,16 +35,14 @@ import java.util.Collection;
	@Index(name = "IDX_RESVER_ID_DATE", columnList = "RES_ID,RES_UPDATED"),
	@Index(name = "IDX_RESVER_DATE", columnList = "RES_UPDATED")
})
//@formatter:on
public class ResourceHistoryTable extends BaseHasResource implements Serializable {

	private static final long serialVersionUID = 1L;
	public static final String IDX_RESVER_ID_VER = "IDX_RESVER_ID_VER";
	/**
	 * @see ResourceEncodingEnum
	 */
	public static final int ENCODING_COL_LENGTH = 5;

	private static final long serialVersionUID = 1L;
	@Id
	@SequenceGenerator(name = "SEQ_RESOURCE_HISTORY_ID", sequenceName = "SEQ_RESOURCE_HISTORY_ID")
	@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_RESOURCE_HISTORY_ID")

@@ -76,10 +71,21 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
	@OptimisticLock(excluded = true)
	private ResourceEncodingEnum myEncoding;

	@OneToOne(mappedBy = "myResourceHistoryTable", cascade = {CascadeType.REMOVE})
	private ResourceHistoryProvenanceEntity myProvenance;

	public ResourceHistoryTable() {
		super();
	}

	public ResourceHistoryProvenanceEntity getProvenance() {
		return myProvenance;
	}

	public void setProvenance(ResourceHistoryProvenanceEntity theProvenance) {
		myProvenance = theProvenance;
	}

	public void addTag(ResourceHistoryTag theTag) {
		for (ResourceHistoryTag next : getTags()) {
			if (next.equals(theTag)) {
@@ -214,6 +214,10 @@ public class ResourceTable extends BaseHasResource implements Serializable {
	@Version
	@Column(name = "RES_VER")
	private long myVersion;
	@OneToMany(mappedBy = "myResourceTable", fetch = FetchType.LAZY)
	private Collection<ResourceHistoryProvenanceEntity> myProvenance;
	@Transient
	private transient ResourceHistoryTable myCurrentVersionEntity;

	public Collection<ResourceLink> getResourceLinksAsTarget() {
		if (myResourceLinksAsTarget == null) {

@@ -610,4 +614,19 @@ public class ResourceTable extends BaseHasResource implements Serializable {
		}
	}

	/**
	 * This is a convenience to avoid loading the version a second time within a single transaction. It is
	 * not persisted.
	 */
	public void setCurrentVersionEntity(ResourceHistoryTable theCurrentVersionEntity) {
		myCurrentVersionEntity = theCurrentVersionEntity;
	}

	/**
	 * This is a convenience to avoid loading the version a second time within a single transaction. It is
	 * not persisted.
	 */
	public ResourceHistoryTable getCurrentVersionEntity() {
		return myCurrentVersionEntity;
	}
}
@@ -32,5 +32,5 @@ public enum TagTypeEnum {
	PROFILE,

	SECURITY_LABEL

}
@@ -228,4 +228,15 @@ public class JpaConstants {
	 */
	public static final String EXT_EXTERNALIZED_BINARY_ID = "http://hapifhir.io/fhir/StructureDefinition/externalized-binary-id";

	/**
	 * <p>
	 * This extension represents the equivalent of the
	 * <code>Resource.meta.source</code> field within R4+ resources, and is for
	 * use in DSTU3 resources. It should contain a value of type <code>uri</code>
	 * and will be located on the Resource.meta
	 * </p>
	 */
	public static final String EXT_META_SOURCE = "http://hapifhir.io/fhir/StructureDefinition/resource-meta-source";

}
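A hedged sketch of how the DSTU3 extension declared above might be populated on a resource. Only the extension URL comes from this commit; the rest uses the standard DSTU3 model API and the example value is arbitrary.

// Sketch: attach the meta-source extension to a DSTU3 resource
org.hl7.fhir.dstu3.model.Patient patient = new org.hl7.fhir.dstu3.model.Patient();
patient.getMeta()
	.addExtension()
	.setUrl(JpaConstants.EXT_META_SOURCE)
	.setValue(new org.hl7.fhir.dstu3.model.UriType("urn:source:0")); // example value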
@@ -1,6 +1,5 @@
package ca.uhn.fhir.jpa.model.entity;

import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.util.TestUtil;
import org.junit.AfterClass;
import org.junit.Test;
@@ -58,7 +58,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nonnull;
import javax.servlet.Servlet;
import javax.servlet.ServletException;
import javax.servlet.UnavailableException;
import javax.servlet.http.HttpServlet;

@@ -1124,7 +1123,7 @@ public class RestfulServer extends HttpServlet implements IRestfulServer<Servlet
		if (isBlank(requestId)) {
			requestId = Long.toHexString(RANDOM.nextLong());
			requestId = leftPad(requestId, 16, '0');
			requestId = leftPad(requestId, Constants.REQUEST_ID_LENGTH, '0');
		}

		return requestId;
@@ -69,6 +69,10 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
 * otherwise</td>
 * </tr>
 * <tr>
 * <td>${requestId}</td>
 * <td>The request ID assigned to this request (either automatically, or via the <code>X-Request-ID</code> header in the request)</td>
 * </tr>
 * <tr>
 * <td>${operationType}</td>
 * <td>A code indicating the operation type for this request, e.g. "read", "history-instance",
 * "extended-operation-instance", etc.)</td>

@@ -323,6 +327,8 @@ public class LoggingInterceptor {
				long time = System.currentTimeMillis() - startTime.getTime();
				return Long.toString(time);
			}
		} else if ("requestId".equals(theKey)) {
			return myRequestDetails.getRequestId();
		}

		return "!VAL!";
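A short usage sketch of the new ${requestId} substitution variable, mirroring the LoggingInterceptorDstu2Test added later in this diff. The server variable name here is an assumption; the message-format variables are the ones documented in the hunk above.

// Sketch: log the request ID and operation type for every incoming request
LoggingInterceptor loggingInterceptor = new LoggingInterceptor();
loggingInterceptor.setMessageFormat("${requestId} ${operationType}");
restfulServer.registerInterceptor(loggingInterceptor); // restfulServer is an existing RestfulServer instance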
@@ -216,6 +216,11 @@
			<version>${project.version}</version>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.awaitility</groupId>
			<artifactId>awaitility</artifactId>
			<scope>test</scope>
		</dependency>
	</dependencies>

	<build>
@@ -98,6 +98,7 @@ public abstract class BaseResource extends BaseElement implements IResource {
		return myContained;
	}

	@Override
	public IdDt getId() {
		if (myId == null) {
			myId = new IdDt();

@@ -121,7 +122,7 @@ public abstract class BaseResource extends BaseElement implements IResource {
	@Override
	public IBaseMetaType getMeta() {
		return new IBaseMetaType() {

			private static final long serialVersionUID = 1L;

			@Override

@@ -132,12 +133,12 @@ public abstract class BaseResource extends BaseElement implements IResource {
					newTagList.addAll(existingTagList);
				}
				ResourceMetadataKeyEnum.PROFILES.put(BaseResource.this, newTagList);

				IdDt tag = new IdDt(theProfile);
				newTagList.add(tag);
				return this;
			}

			@Override
			public IBaseCoding addSecurity() {
				List<BaseCodingDt> tagList = ResourceMetadataKeyEnum.SECURITY_LABELS.get(BaseResource.this);

@@ -149,7 +150,7 @@ public abstract class BaseResource extends BaseElement implements IResource {
				tagList.add(tag);
				return tag;
			}

			@Override
			public IBaseCoding addTag() {
				TagList tagList = ResourceMetadataKeyEnum.TAG_LIST.get(BaseResource.this);

@@ -161,17 +162,17 @@ public abstract class BaseResource extends BaseElement implements IResource {
				tagList.add(tag);
				return tag;
			}

			@Override
			public List<String> getFormatCommentsPost() {
				return Collections.emptyList();
			}

			@Override
			public List<String> getFormatCommentsPre() {
				return Collections.emptyList();
			}

			@Override
			public Date getLastUpdated() {
				InstantDt lu = ResourceMetadataKeyEnum.UPDATED.get(BaseResource.this);

@@ -180,7 +181,7 @@ public abstract class BaseResource extends BaseElement implements IResource {
				}
				return null;
			}

			@Override
			public List<? extends IPrimitiveType<String>> getProfile() {
				ArrayList<IPrimitiveType<String>> retVal = new ArrayList<IPrimitiveType<String>>();

@@ -193,7 +194,7 @@ public abstract class BaseResource extends BaseElement implements IResource {
				}
				return Collections.unmodifiableList(retVal);
			}

			@Override
			public List<? extends IBaseCoding> getSecurity() {
				ArrayList<CodingDt> retVal = new ArrayList<CodingDt>();

@@ -206,7 +207,7 @@ public abstract class BaseResource extends BaseElement implements IResource {
				}
				return Collections.unmodifiableList(retVal);
			}

			@Override
			public IBaseCoding getSecurity(String theSystem, String theCode) {
				for (IBaseCoding next : getSecurity()) {

@@ -216,7 +217,7 @@ public abstract class BaseResource extends BaseElement implements IResource {
				}
				return null;
			}

			@Override
			public List<? extends IBaseCoding> getTag() {
				ArrayList<IBaseCoding> retVal = new ArrayList<IBaseCoding>();

@@ -229,7 +230,7 @@ public abstract class BaseResource extends BaseElement implements IResource {
				}
				return Collections.unmodifiableList(retVal);
			}

			@Override
			public IBaseCoding getTag(String theSystem, String theCode) {
				for (IBaseCoding next : getTag()) {

@@ -239,22 +240,22 @@ public abstract class BaseResource extends BaseElement implements IResource {
				}
				return null;
			}

			@Override
			public String getVersionId() {
				return getId().getVersionIdPart();
			}

			@Override
			public boolean hasFormatComment() {
				return false;
			}

			@Override
			public boolean isEmpty() {
				return getResourceMetadata().isEmpty();
			}

			@Override
			public IBaseMetaType setLastUpdated(Date theHeaderDateValue) {
				if (theHeaderDateValue == null) {

@@ -264,7 +265,7 @@ public abstract class BaseResource extends BaseElement implements IResource {
				}
				return this;
			}

			@Override
			public IBaseMetaType setVersionId(String theVersionId) {
				setId(getId().withVersion(theVersionId));
@@ -802,7 +802,6 @@ public class XmlParserDstu2Test {

		//@formatter:off
		assertThat(enc, stringContainsInOrder("<Patient xmlns=\"http://hl7.org/fhir\">",
			"<meta>",
			"<meta>",
			"<profile value=\"http://foo/Profile1\"/>",
			"<profile value=\"http://foo/Profile2\"/>",

@@ -817,7 +816,6 @@ public class XmlParserDstu2Test {
			"<display value=\"label2\"/>",
			"</tag>",
			"</meta>",
			"</meta>",
			"<name>",
			"<family value=\"FAMILY\"/>",
			"</name>",

@@ -856,7 +854,6 @@ public class XmlParserDstu2Test {

		//@formatter:off
		assertThat(enc, stringContainsInOrder("<Patient xmlns=\"http://hl7.org/fhir\">",
			"<meta>",
			"<meta>",
			"<tag>",
			"<system value=\"scheme1\"/>",

@@ -869,7 +866,6 @@ public class XmlParserDstu2Test {
			"<display value=\"label2\"/>",
			"</tag>",
			"</meta>",
			"</meta>",
			"<name>",
			"<family value=\"FAMILY\"/>",
			"</name>",
@@ -152,6 +152,26 @@ public class LoggingInterceptorDstu2Test {
		assertEquals("read - - Patient/1 - ", captor.getValue());
	}

	@Test
	public void testRequestId() throws Exception {

		LoggingInterceptor interceptor = new LoggingInterceptor();
		interceptor.setMessageFormat("${requestId}");
		servlet.getInterceptorService().registerInterceptor(interceptor);

		Logger logger = mock(Logger.class);
		interceptor.setLogger(logger);

		HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1");

		HttpResponse status = ourClient.execute(httpGet);
		IOUtils.closeQuietly(status.getEntity().getContent());

		ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
		verify(logger, timeout(1000).times(1)).info(captor.capture());
		assertEquals(Constants.REQUEST_ID_LENGTH, captor.getValue().length());
	}

	@Test
	public void testRequestProcessingTime() throws Exception {
@@ -200,7 +200,6 @@ public class InterceptorDstu3Test {
		when(myInterceptor1.outgoingResponse(nullable(RequestDetails.class), nullable(ResponseDetails.class), nullable(HttpServletRequest.class), nullable(HttpServletResponse.class))).thenReturn(true);
		when(myInterceptor1.outgoingResponse(nullable(RequestDetails.class), nullable(IBaseResource.class), nullable(HttpServletRequest.class), nullable(HttpServletResponse.class))).thenReturn(true);
		when(myInterceptor1.outgoingResponse(nullable(RequestDetails.class), nullable(HttpServletRequest.class), nullable(HttpServletResponse.class))).thenReturn(true);
		doThrow(new NullPointerException("FOO")).when(myInterceptor1).processingCompletedNormally(any());

		String input = createInput();
@@ -403,6 +403,21 @@ public class ResponseHighlightingInterceptorTest {

	}

	@Test
	public void testHighlightGraphQLResponseNonHighlighted() throws Exception {
		HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/A/$graphql?query=" + UrlUtil.escapeUrlParam("{name}"));
		httpGet.addHeader("Accept", "application/jon");
		CloseableHttpResponse status = ourClient.execute(httpGet);
		String responseContent = IOUtils.toString(status.getEntity().getContent(), Charsets.UTF_8);
		status.close();

		ourLog.info("Resp: {}", responseContent);
		assertEquals(200, status.getStatusLine().getStatusCode());

		assertThat(responseContent, stringContainsInOrder("{\"foo\":\"bar\"}"));

	}

	@Test
	public void testHighlightException() throws Exception {
		ResponseHighlighterInterceptor ic = ourInterceptor;
@@ -46,11 +46,6 @@
		<groupId>junit</groupId>
		<artifactId>junit</artifactId>
	</dependency>

	<dependency>
		<groupId>org.awaitility</groupId>
		<artifactId>awaitility</artifactId>
	</dependency>
	</dependencies>

	<build>
pom.xml
@@ -594,7 +594,7 @@
	<jetty_version>9.4.14.v20181114</jetty_version>
	<jsr305_version>3.0.2</jsr305_version>
	<!--<hibernate_version>5.2.10.Final</hibernate_version>-->
	<hibernate_version>5.4.2.Final</hibernate_version>
	<hibernate_version>5.4.4.Final</hibernate_version>
	<!-- Update lucene version when you update hibernate-search version -->
	<hibernate_search_version>5.11.1.Final</hibernate_search_version>
	<lucene_version>5.5.5</lucene_version>

@@ -1062,21 +1062,7 @@
	<dependency>
		<groupId>org.awaitility</groupId>
		<artifactId>awaitility</artifactId>
		<version>3.1.6</version>
		<exclusions>
			<exclusion>
				<groupId>org.hamcrest</groupId>
				<artifactId>java-hamcrest</artifactId>
			</exclusion>
			<exclusion>
				<groupId>org.hamcrest</groupId>
				<artifactId>hamcrest-core</artifactId>
			</exclusion>
			<exclusion>
				<groupId>org.hamcrest</groupId>
				<artifactId>hamcrest-library</artifactId>
			</exclusion>
		</exclusions>
		<version>4.0.0-rc1</version>
	</dependency>
	<dependency>
		<groupId>org.codehaus.plexus</groupId>

@@ -1281,7 +1267,7 @@
	<dependency>
		<groupId>org.mockito</groupId>
		<artifactId>mockito-core</artifactId>
		<version>2.28.2</version>
		<version>3.0.0</version>
	</dependency>
	<dependency>
		<groupId>org.postgresql</groupId>
@@ -7,6 +7,15 @@
	</properties>
	<body>
		<release version="4.1.0" date="TBD" description="Igloo">
			<action type="add">
				The version of a few dependencies have been bumped to the
				latest versions (dependent HAPI modules listed in brackets):
				<![CDATA[
				<ul>
					<li>Hibernate Core (Core): 5.4.2.Final -> 5.4.4.Final</li>
				</ul>
				]]>
			</action>
			<!--
			<action type="add" issue="1321">
				Support has been added for RDF encoding and parsing in the

@@ -14,7 +23,15 @@
				format. Thanks to Raul Estrada for the pull request!
			</action>
			-->
			<action type="add" issye="1357">
			<action type="add">
				<![CDATA[
				<b>New Feature</b>:
				The JPA server now saves and supports searching on <code>Resource.meta.source</code>. The server automatically
				appends the Request ID as a hash value on the URI as well in order to provide request level tracking. Searches
				can use either the source URI, the request ID, or both.
				]]>
			</action>
			<action type="add" issue="1357">
				The email Subscription deliverer now respects the payload property of the subscription
				when deciding how to encode the resource being sent. Thanks to Sean McIlvenna for the
				pull request!

@@ -44,6 +61,18 @@
				SubscriptionDstu2Config incorrectly pointed to a DSTU3 configuration file. This
				has been corrected.
			</action>
			<action type="fix">
				When using the VersionedApiConverterInterceptor, GraphQL responses failed with an HTTP
				500 error.
			</action>
			<action type="fix">
				Cascading deletes now correctly handle circular references. Previously this failed with
				an HTTP 500 error.
			</action>
			<action type="fix">
				The informational message returned in an OperationOutcome when a delete failed due to cascades not being enabled
				contained an incorrect example. This has been corrected.
			</action>
		</release>
		<release version="4.0.0" date="2019-08-14" description="Igloo">
			<action type="add">

@@ -531,7 +560,7 @@
				not any values contained within. This has been corrected.
			</action>
			<action type="add">
				The JPA terminology service can now detect when Hibvernate Search (Lucene)
				The JPA terminology service can now detect when Hibernate Search (Lucene)
				is not enabled, and will perform simple ValueSet expansions without relying
				on Hibenrate Search in such cases.
			</action>