Merge branch 'master' into custom-elastic-index
commit 6a2a5d5097
@@ -69,7 +69,7 @@ public class PidToIBaseResourceProcessor implements ItemProcessor<List<ResourceP
         List<IBaseResource> outgoing = new ArrayList<>();
         sb.loadResourcesByPid(theResourcePersistentId, Collections.emptyList(), outgoing, false, null);

-        ourLog.trace("Loaded resources: {}", outgoing.stream().map(t->t.getIdElement().getValue()).collect(Collectors.joining(", ")));
+        ourLog.trace("Loaded resources: {}", outgoing.stream().filter(t -> t != null).map(t -> t.getIdElement().getValue()).collect(Collectors.joining(", ")));

         return outgoing;

@@ -618,49 +618,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
         skipUpdatingTags |= myConfig.getTagStorageMode() == DaoConfig.TagStorageModeEnum.INLINE;

         if (!skipUpdatingTags) {
-            Set<ResourceTag> allDefs = new HashSet<>();
-            Set<ResourceTag> allTagsOld = getAllTagDefinitions(theEntity);
-
-            if (theResource instanceof IResource) {
-                extractTagsHapi(theTransactionDetails, (IResource) theResource, theEntity, allDefs);
-            } else {
-                extractTagsRi(theTransactionDetails, (IAnyResource) theResource, theEntity, allDefs);
-            }
-
-            RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
-            if (def.isStandardType() == false) {
-                String profile = def.getResourceProfile("");
-                if (isNotBlank(profile)) {
-                    TagDefinition profileDef = getTagOrNull(theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null);
-
-                    ResourceTag tag = theEntity.addTag(profileDef);
-                    allDefs.add(tag);
-                    theEntity.setHasTags(true);
-                }
-            }
-
-            Set<ResourceTag> allTagsNew = getAllTagDefinitions(theEntity);
-            Set<TagDefinition> allDefsPresent = new HashSet<>();
-            allTagsNew.forEach(tag -> {
-
-                // Don't keep duplicate tags
-                if (!allDefsPresent.add(tag.getTag())) {
-                    theEntity.getTags().remove(tag);
-                }
-
-                // Drop any tags that have been removed
-                if (!allDefs.contains(tag)) {
-                    if (shouldDroppedTagBeRemovedOnUpdate(theRequest, tag)) {
-                        theEntity.getTags().remove(tag);
-                    }
-                }
-
-            });
-
-            if (!allTagsOld.equals(allTagsNew)) {
-                changed = true;
-            }
-            theEntity.setHasTags(!allTagsNew.isEmpty());
+            changed |= updateTags(theTransactionDetails, theRequest, theResource, theEntity);
         }

         } else {
@@ -669,7 +627,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
             encoding = ResourceEncodingEnum.DEL;
         }

-        if (thePerformIndexing && changed == false) {
+        if (thePerformIndexing && !changed) {
             if (theEntity.getId() == null) {
                 changed = true;
             } else if (myConfig.isMassIngestionMode()) {
@@ -701,6 +659,50 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
         return retVal;
     }

+    private boolean updateTags(TransactionDetails theTransactionDetails, RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity) {
+        Set<ResourceTag> allDefs = new HashSet<>();
+        Set<ResourceTag> allTagsOld = getAllTagDefinitions(theEntity);
+
+        if (theResource instanceof IResource) {
+            extractTagsHapi(theTransactionDetails, (IResource) theResource, theEntity, allDefs);
+        } else {
+            extractTagsRi(theTransactionDetails, (IAnyResource) theResource, theEntity, allDefs);
+        }
+
+        RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
+        if (def.isStandardType() == false) {
+            String profile = def.getResourceProfile("");
+            if (isNotBlank(profile)) {
+                TagDefinition profileDef = getTagOrNull(theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null);
+
+                ResourceTag tag = theEntity.addTag(profileDef);
+                allDefs.add(tag);
+                theEntity.setHasTags(true);
+            }
+        }
+
+        Set<ResourceTag> allTagsNew = getAllTagDefinitions(theEntity);
+        Set<TagDefinition> allDefsPresent = new HashSet<>();
+        allTagsNew.forEach(tag -> {
+
+            // Don't keep duplicate tags
+            if (!allDefsPresent.add(tag.getTag())) {
+                theEntity.getTags().remove(tag);
+            }
+
+            // Drop any tags that have been removed
+            if (!allDefs.contains(tag)) {
+                if (shouldDroppedTagBeRemovedOnUpdate(theRequest, tag)) {
+                    theEntity.getTags().remove(tag);
+                }
+            }
+
+        });
+
+        theEntity.setHasTags(!allTagsNew.isEmpty());
+        return !allTagsOld.equals(allTagsNew);
+    }
+
     @SuppressWarnings("unchecked")
     private <R extends IBaseResource> R populateResourceMetadataHapi(Class<R> theResourceType, IBaseResourceEntity theEntity, @Nullable Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IResource res, Long theVersion) {
         R retVal = (R) res;
@@ -1288,52 +1290,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
             postUpdate(entity, (T) theResource);
         }

-        /*
-         * Create history entry
-         */
         if (theCreateNewHistoryEntry) {
-            boolean versionedTags = getConfig().getTagStorageMode() == DaoConfig.TagStorageModeEnum.VERSIONED;
-            final ResourceHistoryTable historyEntry = entity.toHistory(versionedTags);
-            historyEntry.setEncoding(changed.getEncoding());
-            historyEntry.setResource(changed.getResource());
-
-            ourLog.debug("Saving history entry {}", historyEntry.getIdDt());
-            myResourceHistoryTableDao.save(historyEntry);
-
-            // Save resource source
-            String source = null;
-            String requestId = theRequest != null ? theRequest.getRequestId() : null;
-            if (theResource != null) {
-                if (myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R4)) {
-                    IBaseMetaType meta = theResource.getMeta();
-                    source = MetaUtil.getSource(myContext, meta);
-                }
-                if (myContext.getVersion().getVersion().equals(FhirVersionEnum.DSTU3)) {
-                    source = ((IBaseHasExtensions) theResource.getMeta())
-                        .getExtension()
-                        .stream()
-                        .filter(t -> HapiExtensions.EXT_META_SOURCE.equals(t.getUrl()))
-                        .filter(t -> t.getValue() instanceof IPrimitiveType)
-                        .map(t -> ((IPrimitiveType<?>) t.getValue()).getValueAsString())
-                        .findFirst()
-                        .orElse(null);
-                }
-            }
-            boolean haveSource = isNotBlank(source) && myConfig.getStoreMetaSourceInformation().isStoreSourceUri();
-            boolean haveRequestId = isNotBlank(requestId) && myConfig.getStoreMetaSourceInformation().isStoreRequestId();
-            if (haveSource || haveRequestId) {
-                ResourceHistoryProvenanceEntity provenance = new ResourceHistoryProvenanceEntity();
-                provenance.setResourceHistoryTable(historyEntry);
-                provenance.setResourceTable(entity);
-                provenance.setPartitionId(entity.getPartitionId());
-                if (haveRequestId) {
-                    provenance.setRequestId(left(requestId, Constants.REQUEST_ID_LENGTH));
-                }
-                if (haveSource) {
-                    provenance.setSourceUri(source);
-                }
-                myEntityManager.persist(provenance);
-            }
+            createHistoryEntry(theRequest, theResource, entity, changed);
         }

         /*
@@ -1415,6 +1373,51 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
         return entity;
     }

+    private void createHistoryEntry(RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity, EncodedResource theChanged) {
+        boolean versionedTags = getConfig().getTagStorageMode() == DaoConfig.TagStorageModeEnum.VERSIONED;
+        final ResourceHistoryTable historyEntry = theEntity.toHistory(versionedTags);
+        historyEntry.setEncoding(theChanged.getEncoding());
+        historyEntry.setResource(theChanged.getResource());
+
+        ourLog.debug("Saving history entry {}", historyEntry.getIdDt());
+        myResourceHistoryTableDao.save(historyEntry);
+
+        // Save resource source
+        String source = null;
+        String requestId = theRequest != null ? theRequest.getRequestId() : null;
+        if (theResource != null) {
+            if (myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R4)) {
+                IBaseMetaType meta = theResource.getMeta();
+                source = MetaUtil.getSource(myContext, meta);
+            }
+            if (myContext.getVersion().getVersion().equals(FhirVersionEnum.DSTU3)) {
+                source = ((IBaseHasExtensions) theResource.getMeta())
+                    .getExtension()
+                    .stream()
+                    .filter(t -> HapiExtensions.EXT_META_SOURCE.equals(t.getUrl()))
+                    .filter(t -> t.getValue() instanceof IPrimitiveType)
+                    .map(t -> ((IPrimitiveType<?>) t.getValue()).getValueAsString())
+                    .findFirst()
+                    .orElse(null);
+            }
+        }
+        boolean haveSource = isNotBlank(source) && myConfig.getStoreMetaSourceInformation().isStoreSourceUri();
+        boolean haveRequestId = isNotBlank(requestId) && myConfig.getStoreMetaSourceInformation().isStoreRequestId();
+        if (haveSource || haveRequestId) {
+            ResourceHistoryProvenanceEntity provenance = new ResourceHistoryProvenanceEntity();
+            provenance.setResourceHistoryTable(historyEntry);
+            provenance.setResourceTable(theEntity);
+            provenance.setPartitionId(theEntity.getPartitionId());
+            if (haveRequestId) {
+                provenance.setRequestId(left(requestId, Constants.REQUEST_ID_LENGTH));
+            }
+            if (haveSource) {
+                provenance.setSourceUri(source);
+            }
+            myEntityManager.persist(provenance);
+        }
+    }
+
     private void validateIncomingResourceTypeMatchesExisting(IBaseResource theResource, ResourceTable entity) {
         String resourceType = myContext.getResourceType(theResource);
         if (!resourceType.equals(entity.getResourceType())) {
@@ -51,7 +51,6 @@ import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
 import ca.uhn.fhir.jpa.patch.FhirPatch;
 import ca.uhn.fhir.jpa.patch.JsonPatchUtils;
 import ca.uhn.fhir.jpa.patch.XmlPatchUtils;
-import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
 import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
 import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum;
 import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
@@ -1407,7 +1406,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
             }
         }

-        translateSearchParams(theParams);
+        translateListSearchParams(theParams);

         notifySearchInterceptors(theParams, theRequest);

@@ -1432,7 +1431,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
         return retVal;
     }

-    private void translateSearchParams(SearchParameterMap theParams) {
+    private void translateListSearchParams(SearchParameterMap theParams) {
         Iterator<String> keyIterator = theParams.keySet().iterator();

         // Translate _list=42 to _has=List:item:_id=42