Remove duplicate resource body creation (Merge branch 'optimize_jpa')

James Agnew 2018-01-21 18:16:11 +08:00
commit 4fd3e20d06
33 changed files with 769 additions and 586 deletions

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.dao;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -913,104 +913,123 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
/**
* Returns true if the resource has changed (either the contents or the tags)
*/
protected boolean populateResourceIntoEntity(IBaseResource theResource, ResourceTable theEntity, boolean theUpdateHash) {
theEntity.setResourceType(toResourceName(theResource));
protected EncodedResource populateResourceIntoEntity(IBaseResource theResource, ResourceTable theEntity, boolean theUpdateHash) {
if (theEntity.getResourceType() == null) {
theEntity.setResourceType(toResourceName(theResource));
}
List<BaseResourceReferenceDt> refs = myContext.newTerser().getAllPopulatedChildElementsOfType(theResource, BaseResourceReferenceDt.class);
for (BaseResourceReferenceDt nextRef : refs) {
if (nextRef.getReference().isEmpty() == false) {
if (nextRef.getReference().hasVersionIdPart()) {
nextRef.setReference(nextRef.getReference().toUnqualifiedVersionless());
if (theResource != null) {
List<BaseResourceReferenceDt> refs = myContext.newTerser().getAllPopulatedChildElementsOfType(theResource, BaseResourceReferenceDt.class);
for (BaseResourceReferenceDt nextRef : refs) {
if (nextRef.getReference().isEmpty() == false) {
if (nextRef.getReference().hasVersionIdPart()) {
nextRef.setReference(nextRef.getReference().toUnqualifiedVersionless());
}
}
}
}
ResourceEncodingEnum encoding = myConfig.getResourceEncoding();
IParser parser = encoding.newParser(myContext);
parser.setDontEncodeElements(EXCLUDE_ELEMENTS_IN_ENCODED);
String encoded = parser.encodeResourceToString(theResource);
theEntity.setEncoding(encoding);
theEntity.setFhirVersion(myContext.getVersion().getVersion());
byte[] bytes;
switch (encoding) {
case JSON:
bytes = encoded.getBytes(Charsets.UTF_8);
break;
default:
case JSONC:
bytes = GZipUtil.compress(encoded);
break;
}
ourLog.debug("Encoded {} chars of resource body as {} bytes", encoded.length(), bytes.length);
ResourceEncodingEnum encoding;
boolean changed = false;
if (theUpdateHash) {
HashFunction sha256 = Hashing.sha256();
String hashSha256 = sha256.hashBytes(bytes).toString();
if (hashSha256.equals(theEntity.getHashSha256()) == false) {
if (theEntity.getDeleted() == null) {
encoding = myConfig.getResourceEncoding();
IParser parser = encoding.newParser(myContext);
parser.setDontEncodeElements(EXCLUDE_ELEMENTS_IN_ENCODED);
String encoded = parser.encodeResourceToString(theResource);
theEntity.setFhirVersion(myContext.getVersion().getVersion());
switch (encoding) {
case JSON:
bytes = encoded.getBytes(Charsets.UTF_8);
break;
default:
case JSONC:
bytes = GZipUtil.compress(encoded);
break;
}
ourLog.debug("Encoded {} chars of resource body as {} bytes", encoded.length(), bytes.length);
if (theUpdateHash) {
HashFunction sha256 = Hashing.sha256();
String hashSha256 = sha256.hashBytes(bytes).toString();
if (hashSha256.equals(theEntity.getHashSha256()) == false) {
changed = true;
}
theEntity.setHashSha256(hashSha256);
}
Set<TagDefinition> allDefs = new HashSet<>();
theEntity.setHasTags(false);
Set<TagDefinition> allTagsOld = getAllTagDefinitions(theEntity);
if (theResource instanceof IResource) {
extractTagsHapi((IResource) theResource, theEntity, allDefs);
} else {
extractTagsRi((IAnyResource) theResource, theEntity, allDefs);
}
RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
if (def.isStandardType() == false) {
String profile = def.getResourceProfile("");
if (isNotBlank(profile)) {
TagDefinition tag = getTagOrNull(TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null);
if (tag != null) {
allDefs.add(tag);
theEntity.addTag(tag);
theEntity.setHasTags(true);
}
}
}
ArrayList<ResourceTag> existingTags = new ArrayList<>();
if (theEntity.isHasTags()) {
existingTags.addAll(theEntity.getTags());
}
for (ResourceTag next : existingTags) {
TagDefinition nextDef = next.getTag();
if (!allDefs.contains(nextDef)) {
if (shouldDroppedTagBeRemovedOnUpdate(theEntity, next)) {
theEntity.getTags().remove(next);
}
}
}
Set<TagDefinition> allTagsNew = getAllTagDefinitions(theEntity);
if (!allTagsOld.equals(allTagsNew)) {
changed = true;
}
theEntity.setHashSha256(hashSha256);
} else {
theEntity.setHashSha256(null);
bytes = null;
encoding = ResourceEncodingEnum.DEL;
}
if (changed == false) {
if (theEntity.getResource() == null) {
if (theEntity.getId() == null) {
changed = true;
} else {
changed = !Arrays.equals(theEntity.getResource(), bytes);
}
}
theEntity.setResource(bytes);
Set<TagDefinition> allDefs = new HashSet<>();
theEntity.setHasTags(false);
Set<TagDefinition> allTagsOld = getAllTagDefinitions(theEntity);
if (theResource instanceof IResource) {
extractTagsHapi((IResource) theResource, theEntity, allDefs);
} else {
extractTagsRi((IAnyResource) theResource, theEntity, allDefs);
}
RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
if (def.isStandardType() == false) {
String profile = def.getResourceProfile("");
if (isNotBlank(profile)) {
TagDefinition tag = getTagOrNull(TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null);
if (tag != null) {
allDefs.add(tag);
theEntity.addTag(tag);
theEntity.setHasTags(true);
ResourceHistoryTable currentHistoryVersion = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
if (currentHistoryVersion == null || currentHistoryVersion.getResource() == null) {
changed = true;
} else {
changed = !Arrays.equals(currentHistoryVersion.getResource(), bytes);
}
}
}
ArrayList<ResourceTag> existingTags = new ArrayList<>();
if (theEntity.isHasTags()) {
existingTags.addAll(theEntity.getTags());
}
for (ResourceTag next : existingTags) {
TagDefinition nextDef = next.getTag();
if (!allDefs.contains(nextDef)) {
if (shouldDroppedTagBeRemovedOnUpdate(theEntity, next)) {
theEntity.getTags().remove(next);
}
}
}
EncodedResource retVal = new EncodedResource();
retVal.setEncoding(encoding);
retVal.setResource(bytes);
retVal.setChanged(changed);
Set<TagDefinition> allTagsNew = getAllTagDefinitions(theEntity);
if (!allTagsOld.equals(allTagsNew)) {
changed = true;
}
return changed;
return retVal;
}
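
For orientation: the rewritten method above hashes the freshly encoded body with SHA-256 and compares the digest to the one already stored on the entity; only a differing digest (or a tag change, or a difference against the current history row's bytes) marks the update as a real change. A minimal standalone sketch of that digest comparison, using the same Guava Hashing call the diff uses (the class and field names here are illustrative only, not the actual entity):

import com.google.common.base.Charsets;
import com.google.common.hash.Hashing;

class ChangeDetectionSketch {

    // Digest persisted from the previous version, e.g. the SHA-256 hash column on the entity
    private String previousHashSha256;

    /** Returns true when the newly encoded body differs from the previously stored one. */
    boolean bodyChanged(String encodedResource) {
        byte[] bytes = encodedResource.getBytes(Charsets.UTF_8);
        String hashSha256 = Hashing.sha256().hashBytes(bytes).toString();
        boolean changed = !hashSha256.equals(previousHashSha256);
        previousHashSha256 = hashSha256;
        return changed;
    }
}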
@SuppressWarnings("unchecked")
@ -1183,6 +1202,17 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
throw new NotImplementedException("");
}
private <T> Collection<T> removeCommon(Collection<T> theInput, Collection<T> theToRemove) {
assert theInput != theToRemove;
if (theInput.isEmpty()) {
return theInput;
}
ArrayList<T> retVal = new ArrayList<>(theInput);
retVal.removeAll(theToRemove);
return retVal;
}
private void setUpdatedTime(Collection<? extends BaseResourceIndexedSearchParam> theParams, Date theUpdateTime) {
for (BaseResourceIndexedSearchParam nextSearchParam : theParams) {
nextSearchParam.setUpdated(theUpdateTime);
@ -1219,17 +1249,34 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
@SuppressWarnings("unchecked")
@Override
public <R extends IBaseResource> R toResource(Class<R> theResourceType, BaseHasResource theEntity, boolean theForHistoryOperation) {
ResourceHistoryTable history;
if (theEntity instanceof ResourceHistoryTable) {
history = (ResourceHistoryTable) theEntity;
} else {
history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
}
if (history == null) {
return null;
}
byte[] resourceBytes = history.getResource();
ResourceEncodingEnum resourceEncoding = history.getEncoding();
String resourceText = null;
switch (theEntity.getEncoding()) {
switch (resourceEncoding) {
case JSON:
try {
resourceText = new String(theEntity.getResource(), "UTF-8");
resourceText = new String(resourceBytes, "UTF-8");
} catch (UnsupportedEncodingException e) {
throw new Error("Should not happen", e);
}
break;
case JSONC:
resourceText = GZipUtil.decompress(theEntity.getResource());
resourceText = GZipUtil.decompress(resourceBytes);
break;
case DEL:
break;
}
@ -1253,27 +1300,34 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
}
}
IParser parser = theEntity.getEncoding().newParser(getContext(theEntity.getFhirVersion()));
parser.setParserErrorHandler(new LenientErrorHandler(false).setErrorOnInvalidValue(false));
R retVal;
try {
retVal = parser.parseResource(resourceType, resourceText);
} catch (Exception e) {
StringBuilder b = new StringBuilder();
b.append("Failed to parse database resource[");
b.append(resourceType);
b.append("/");
b.append(theEntity.getIdDt().getIdPart());
b.append(" (pid ");
b.append(theEntity.getId());
b.append(", version ");
b.append(theEntity.getFhirVersion().name());
b.append("): ");
b.append(e.getMessage());
String msg = b.toString();
ourLog.error(msg, e);
throw new DataFormatException(msg, e);
if (resourceEncoding != ResourceEncodingEnum.DEL) {
IParser parser = resourceEncoding.newParser(getContext(theEntity.getFhirVersion()));
parser.setParserErrorHandler(new LenientErrorHandler(false).setErrorOnInvalidValue(false));
try {
retVal = parser.parseResource(resourceType, resourceText);
} catch (Exception e) {
StringBuilder b = new StringBuilder();
b.append("Failed to parse database resource[");
b.append(resourceType);
b.append("/");
b.append(theEntity.getIdDt().getIdPart());
b.append(" (pid ");
b.append(theEntity.getId());
b.append(", version ");
b.append(theEntity.getFhirVersion().name());
b.append("): ");
b.append(e.getMessage());
String msg = b.toString();
ourLog.error(msg, e);
throw new DataFormatException(msg, e);
}
} else {
retVal = (R) myContext.getResourceDefinition(theEntity.getResourceType()).newInstance();
}
if (retVal instanceof IResource) {
@ -1283,6 +1337,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
IAnyResource res = (IAnyResource) retVal;
retVal = populateResourceMetadataRi(resourceType, theEntity, theForHistoryOperation, res);
}
return retVal;
}
@ -1290,11 +1346,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
return myContext.getResourceDefinition(theResourceType).getName();
}
protected String toResourceName(IBaseResource theResource) {
String toResourceName(IBaseResource theResource) {
return myContext.getResourceDefinition(theResource).getName();
}
protected Long translateForcedIdToPid(String theResourceName, String theResourceId) {
Long translateForcedIdToPid(String theResourceName, String theResourceId) {
return translateForcedIdToPids(new IdDt(theResourceName, theResourceId), myForcedIdDao).get(0);
}
@ -1302,7 +1358,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
return translateForcedIdToPids(theId, myForcedIdDao);
}
protected String translatePidIdToForcedId(String theResourceType, Long theId) {
private String translatePidIdToForcedId(String theResourceType, Long theId) {
ForcedId forcedId = myForcedIdDao.findByResourcePid(theId);
if (forcedId != null) {
return forcedId.getResourceType() + '/' + forcedId.getForcedId();
@ -1385,7 +1441,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
Set<ResourceLink> links = null;
Set<String> populatedResourceLinkParameters = Collections.emptySet();
boolean changed;
EncodedResource changed;
if (theDeletedTimestampOrNull != null) {
stringParams = Collections.emptySet();
@ -1403,7 +1459,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
theEntity.setNarrativeTextParsedIntoWords(null);
theEntity.setContentTextParsedIntoWords(null);
theEntity.setHashSha256(null);
changed = true;
changed = populateResourceIntoEntity(theResource, theEntity, true);
} else {
@ -1555,7 +1611,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
}
if (!changed && !theForceUpdate && myConfig.isSuppressUpdatesWithNoChange()) {
if (!changed.isChanged() && !theForceUpdate && myConfig.isSuppressUpdatesWithNoChange()) {
ourLog.info("Resource {} has not changed", theEntity.getIdDt().toUnqualified().getValue());
if (theResource != null) {
populateResourceIdFromEntity(theEntity, theResource);
@ -1586,6 +1642,22 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
postUpdate(theEntity, (T) theResource);
}
/*
* Create history entry
*/
if (theCreateNewHistoryEntry) {
final ResourceHistoryTable historyEntry = theEntity.toHistory();
// if (theEntity.getVersion() > 1) {
// existing = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
// ourLog.warn("Reusing existing history entry entity {}", theEntity.getIdDt().getValue());
// }
historyEntry.setEncoding(changed.getEncoding());
historyEntry.setResource(changed.getResource());
ourLog.info("Saving history entry {}", historyEntry.getIdDt());
myResourceHistoryTableDao.save(historyEntry);
}
/*
* Update the "search param present" table which is used for the
* ?foo:missing=true queries
@ -1612,22 +1684,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
mySearchParamPresenceSvc.updatePresence(theEntity, presentSearchParams);
}
/*
* Create history entry
*/
if (theCreateNewHistoryEntry) {
ResourceHistoryTable existing = null;
// if (theEntity.getVersion() > 1) {
// existing = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
// ourLog.warn("Reusing existing history entry entity {}", theEntity.getIdDt().getValue());
// }
final ResourceHistoryTable historyEntry = theEntity.toHistory(existing);
ourLog.info("Saving history entry {}", historyEntry.getIdDt());
myResourceHistoryTableDao.save(historyEntry);
}
/*
* Indexing
*/
@ -1728,19 +1784,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
return theEntity;
}
private <T> Collection<T> removeCommon(Collection<T> theInput, Collection<T> theToRemove) {
assert theInput != theToRemove;
if (theInput.isEmpty()) {
return theInput;
}
ArrayList<T> retVal = new ArrayList<>(theInput);
retVal.removeAll(theToRemove);
return retVal;
}
protected ResourceTable updateEntity(IBaseResource theResource, ResourceTable entity, Date theDeletedTimestampOrNull, Date theUpdateTime) {
return updateEntity(theResource, entity, theDeletedTimestampOrNull, true, true, theUpdateTime, false, true);
}
@ -1883,19 +1926,19 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
* parameters across a set of search parameters. An example of why
* this is needed:
* <p>
* Let's say we have a unique index on (Patient:gender AND Patient:name).
* Then we pass in <code>SMITH, John</code> with a gender of <code>male</code>.
* Let's say we have a unique index on (Patient:gender AND Patient:name).
* Then we pass in <code>SMITH, John</code> with a gender of <code>male</code>.
* </p>
* <p>
* In this case, because the name parameter matches both first and last name,
* we now need two unique indexes:
* <ul>
* <li>Patient?gender=male&amp;name=SMITH</li>
* <li>Patient?gender=male&amp;name=JOHN</li>
* </ul>
* In this case, because the name parameter matches both first and last name,
* we now need two unique indexes:
* <ul>
* <li>Patient?gender=male&amp;name=SMITH</li>
* <li>Patient?gender=male&amp;name=JOHN</li>
* </ul>
* </p>
* <p>
* So this recursive algorithm calculates those
* So this recursive algorithm calculates those
* </p>
*
* @param theResourceType E.g. <code>Patient
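
The javadoc above describes a cross product over the candidate values of each search parameter: the recursion walks one parameter at a time and emits one canonical query string per combination. A rough, self-contained sketch of that expansion (illustrative names, not the actual HAPI implementation):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

class UniqueIndexExpansionSketch {

    /** Expands e.g. [[gender=male], [name=SMITH, name=JOHN]] into every combination. */
    static List<String> expand(String resourceType, List<List<String>> partsChoices) {
        List<String> queries = new ArrayList<>();
        expand(resourceType + "?", partsChoices, 0, queries);
        return queries;
    }

    private static void expand(String prefix, List<List<String>> partsChoices, int index, List<String> out) {
        if (index == partsChoices.size()) {
            out.add(prefix);
            return;
        }
        for (String part : partsChoices.get(index)) {
            String separator = (index == 0) ? "" : "&";
            expand(prefix + separator + part, partsChoices, index + 1, out);
        }
    }

    public static void main(String[] args) {
        List<List<String>> parts = Arrays.asList(
                Arrays.asList("gender=male"),
                Arrays.asList("name=SMITH", "name=JOHN"));
        // Prints Patient?gender=male&name=SMITH and Patient?gender=male&name=JOHN
        expand("Patient", parts).forEach(System.out::println);
    }
}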
@ -1921,8 +1964,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
Collections.sort(thePartsChoices, new Comparator<List<String>>() {
@Override
public int compare(List<String> o1, List<String> o2) {
String str1=null;
String str2=null;
String str1 = null;
String str2 = null;
if (o1.size() > 0) {
str1 = o1.get(0);
}
@ -2007,10 +2050,10 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
/*
* The following block of code is used to strip out diacritical marks from latin script
* and also convert to upper case. E.g. "jāmes" becomes "JAMES".
*
*
* See http://www.unicode.org/charts/PDF/U0300.pdf for the logic
* behind stripping 0300-036F
*
*
* See #454 for an issue where we were completely stripping non latin characters
* See #832 for an issue where we normalize korean characters, which are decomposed
*/

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.dao;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -51,6 +51,7 @@ import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.ObjectUtil;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.util.ResourceReferenceInfo;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.*;
import org.springframework.beans.factory.annotation.Autowired;
@ -75,6 +76,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends BaseHapiFhirDao<T> implements IFhirResourceDao<T> {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirResourceDao.class);
private static boolean ourDisableIncrementOnUpdateForUnitTest = false;
@Autowired
protected PlatformTransactionManager myPlatformTransactionManager;
@Autowired
@ -84,7 +86,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Autowired()
protected ISearchResultDao mySearchResultDao;
@Autowired
private DaoConfig myDaoConfig;
protected DaoConfig myDaoConfig;
@Autowired
private IResourceHistoryTableDao myResourceHistoryTableDao;
@Autowired
@ -92,7 +94,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
private String myResourceName;
private Class<T> myResourceType;
private String mySecondaryPrimaryKeyParamName;
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
@ -192,6 +193,25 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
throw new ResourceVersionConflictException("Trying to delete " + theId + " but this is not the current version");
}
// Don't delete again if it's already deleted
if (entity.getDeleted() != null) {
DaoMethodOutcome outcome = new DaoMethodOutcome();
outcome.setEntity(entity);
IIdType id = getContext().getVersion().newIdType();
id.setValue(entity.getIdDt().getValue());
outcome.setId(id);
IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(getContext());
String message = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "successfulDeletes", 1, 0);
String severity = "information";
String code = "informational";
OperationOutcomeUtil.addIssue(getContext(), oo, severity, message, null, code);
outcome.setOperationOutcome(oo);
return outcome;
}
StopWatch w = new StopWatch();
T resourceToDelete = toResource(myResourceType, entity, false);
@ -206,7 +226,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
}
validateOkToDelete(theDeleteConflicts, entity);
validateOkToDelete(theDeleteConflicts, entity, false);
preDelete(resourceToDelete, entity);
@ -288,7 +308,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
}
validateOkToDelete(deleteConflicts, entity);
validateOkToDelete(deleteConflicts, entity, false);
// Notify interceptors
IdDt idToDelete = entity.getIdDt();
@ -300,7 +320,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
// Perform delete
Date updateTime = new Date();
updateEntity(null, entity, updateTime, updateTime);
resourceToDelete.setId(entity.getIdDt());
resourceToDelete.setId(entity.getIdDt());
// Notify JPA interceptors
if (theRequestDetails != null) {
@ -1223,12 +1243,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
// Perform update
ResourceTable savedEntity = updateEntity(theResource, entity, null, thePerformIndexing, thePerformIndexing, new Date(), theForceUpdateVersion, thePerformIndexing);
/*
/*
* If we aren't indexing (meaning we're probably executing a sub-operation within a transaction),
* we'll manually increase the version. This is important because we want the updated version number
* to be reflected in the resource shared with interceptors
*/
if (!thePerformIndexing && !savedEntity.isUnchangedInCurrentOperation()) {
if (!thePerformIndexing && !savedEntity.isUnchangedInCurrentOperation() && !ourDisableIncrementOnUpdateForUnitTest) {
if (resourceId.hasVersionIdPart() == false) {
resourceId = resourceId.withVersion(Long.toString(savedEntity.getVersion()));
}
@ -1260,7 +1280,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
return outcome;
}
@Override
public DaoMethodOutcome update(T theResource, String theMatchUrl, boolean thePerformIndexing, RequestDetails theRequestDetails) {
return update(theResource, theMatchUrl, thePerformIndexing, false, theRequestDetails);
@ -1304,7 +1323,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
}
protected void validateOkToDelete(List<DeleteConflict> theDeleteConflicts, ResourceTable theEntity) {
protected void validateOkToDelete(List<DeleteConflict> theDeleteConflicts, ResourceTable theEntity, boolean theForValidate) {
TypedQuery<ResourceLink> query = myEntityManager.createQuery("SELECT l FROM ResourceLink l WHERE l.myTargetResourcePid = :target_pid", ResourceLink.class);
query.setParameter("target_pid", theEntity.getId());
query.setMaxResults(1);
@ -1313,7 +1332,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
return;
}
if (myDaoConfig.isEnforceReferentialIntegrityOnDelete() == false) {
if (myDaoConfig.isEnforceReferentialIntegrityOnDelete() == false && !theForValidate) {
ourLog.info("Deleting {} resource dependencies which can no longer be satisfied", resultList.size());
myResourceLinkDao.delete(resultList);
return;
@ -1337,4 +1356,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
}
@VisibleForTesting
public static void setDisableIncrementOnUpdateForUnitTest(boolean theDisableIncrementOnUpdateForUnitTest) {
ourDisableIncrementOnUpdateForUnitTest = theDisableIncrementOnUpdateForUnitTest;
}
}

View File

@ -0,0 +1,35 @@
package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;
class EncodedResource {
private boolean myChanged;
private byte[] myResource;
private ResourceEncodingEnum myEncoding;
public ResourceEncodingEnum getEncoding() {
return myEncoding;
}
public void setEncoding(ResourceEncodingEnum theEncoding) {
myEncoding = theEncoding;
}
public byte[] getResource() {
return myResource;
}
public void setResource(byte[] theResource) {
myResource = theResource;
}
public boolean isChanged() {
return myChanged;
}
public void setChanged(boolean theChanged) {
myChanged = theChanged;
}
}
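
EncodedResource is the value object that populateResourceIntoEntity now returns in place of a boolean: updateEntity copies the encoding and body bytes onto the new history row and uses the changed flag to decide whether a new version is warranted at all. A condensed sketch of that hand-off, stitched together from the hunks above rather than standalone code:

// Condensed from the updateEntity() hunks above:
EncodedResource changed = populateResourceIntoEntity(theResource, theEntity, true);

if (!changed.isChanged() && !theForceUpdate && myConfig.isSuppressUpdatesWithNoChange()) {
    ourLog.info("Resource {} has not changed", theEntity.getIdDt().toUnqualified().getValue());
} else if (theCreateNewHistoryEntry) {
    final ResourceHistoryTable historyEntry = theEntity.toHistory();
    historyEntry.setEncoding(changed.getEncoding()); // JSON, JSONC, or DEL for a delete
    historyEntry.setResource(changed.getResource()); // the body bytes now live only on the history row
    myResourceHistoryTableDao.save(historyEntry);
}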

View File

@ -80,7 +80,9 @@ public class FhirResourceDaoDstu2<T extends IResource> extends BaseHapiFhirResou
// Validate that there are no resources pointing to the candidate that
// would prevent deletion
List<DeleteConflict> deleteConflicts = new ArrayList<DeleteConflict>();
validateOkToDelete(deleteConflicts, entity);
if (myDaoConfig.isEnforceReferentialIntegrityOnDelete()) {
validateOkToDelete(deleteConflicts, entity, true);
}
validateDeleteConflictsEmptyOrThrowException(deleteConflicts);
OperationOutcome oo = new OperationOutcome();

View File

@ -1587,6 +1587,10 @@ public class SearchBuilder implements ISearchBuilder {
for (ResourceTable next : resultList) {
Class<? extends IBaseResource> resourceType = context.getResourceDefinition(next.getResourceType()).getImplementingClass();
IBaseResource resource = theDao.toResource(resourceType, next, theForHistoryOperation);
if (resource == null) {
ourLog.warn("Unable to find resource {}/{}/_history/{} in database", next.getResourceType(), next.getIdDt().getIdPart(), next.getVersion());
continue;
}
Integer index = position.get(next.getId());
if (index == null) {
ourLog.warn("Got back unexpected resource PID {}", next.getId());

View File

@ -85,7 +85,9 @@ public class FhirResourceDaoDstu3<T extends IAnyResource> extends BaseHapiFhirRe
// Validate that there are no resources pointing to the candidate that
// would prevent deletion
List<DeleteConflict> deleteConflicts = new ArrayList<DeleteConflict>();
validateOkToDelete(deleteConflicts, entity);
if (myDaoConfig.isEnforceReferentialIntegrityOnDelete()) {
validateOkToDelete(deleteConflicts, entity, true);
}
validateDeleteConflictsEmptyOrThrowException(deleteConflicts);
OperationOutcome oo = new OperationOutcome();

View File

@ -85,7 +85,9 @@ public class FhirResourceDaoR4<T extends IAnyResource> extends BaseHapiFhirResou
// Validate that there are no resources pointing to the candidate that
// would prevent deletion
List<DeleteConflict> deleteConflicts = new ArrayList<DeleteConflict>();
validateOkToDelete(deleteConflicts, entity);
if (myDaoConfig.isEnforceReferentialIntegrityOnDelete()) {
validateOkToDelete(deleteConflicts, entity, true);
}
validateDeleteConflictsEmptyOrThrowException(deleteConflicts);
OperationOutcome oo = new OperationOutcome();

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -36,11 +36,6 @@ public abstract class BaseHasResource {
@Temporal(TemporalType.TIMESTAMP)
private Date myDeleted;
@Column(name = "RES_ENCODING", nullable = false, length = 5)
@Enumerated(EnumType.STRING)
@OptimisticLock(excluded = true)
private ResourceEncodingEnum myEncoding;
@Column(name = "RES_VERSION", nullable = true, length = 7)
@Enumerated(EnumType.STRING)
@OptimisticLock(excluded = true)
@ -60,11 +55,6 @@ public abstract class BaseHasResource {
@OptimisticLock(excluded = true)
private Date myPublished;
@Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = false)
@Lob()
@OptimisticLock(excluded = true)
private byte[] myResource;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "RES_UPDATED", nullable = false)
@OptimisticLock(excluded = true)
@ -80,13 +70,6 @@ public abstract class BaseHasResource {
myDeleted = theDate;
}
public ResourceEncodingEnum getEncoding() {
return myEncoding;
}
public void setEncoding(ResourceEncodingEnum theEncoding) {
myEncoding = theEncoding;
}
public FhirVersionEnum getFhirVersion() {
return myFhirVersion;
@ -116,16 +99,8 @@ public abstract class BaseHasResource {
}
}
public void setPublished(InstantDt thePublished) {
myPublished = thePublished.getValue();
}
public byte[] getResource() {
return myResource;
}
public void setResource(byte[] theResource) {
myResource = theResource;
public void setPublished(Date thePublished) {
myPublished = thePublished;
}
public abstract String getResourceType();
@ -136,8 +111,8 @@ public abstract class BaseHasResource {
return new InstantDt(myUpdated);
}
public void setUpdated(InstantDt theUpdated) {
myUpdated = theUpdated.getValue();
public void setUpdated(Date theUpdated) {
myUpdated = theUpdated;
}
public Date getUpdatedDate() {
@ -154,12 +129,12 @@ public abstract class BaseHasResource {
myHasTags = theHasTags;
}
public void setPublished(Date thePublished) {
myPublished = thePublished;
public void setPublished(InstantDt thePublished) {
myPublished = thePublished.getValue();
}
public void setUpdated(Date theUpdated) {
myUpdated = theUpdated;
public void setUpdated(InstantDt theUpdated) {
myUpdated = theUpdated.getValue();
}
}

View File

@ -25,11 +25,22 @@ import ca.uhn.fhir.parser.IParser;
public enum ResourceEncodingEnum {
/*
* NB: Constants in this enum must be 5 chars long or less!!!
*
* See ResourceHistoryTable RES_ENCODING column
*/
/** Json */
JSON,
/** Json Compressed */
JSONC;
JSONC,
/**
* Resource was deleted - No contents expected
*/
DEL;
public IParser newParser(FhirContext theContext) {
return theContext.newJsonParser();
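
For reference, JSONC is the JSON body stored gzip-compressed (the GZipUtil.compress/decompress calls in the DAO above), while DEL marks a deleted version for which no body is stored at all. A small round-trip sketch of the compressed form using plain java.util.zip, on the assumption that GZipUtil is a thin wrapper over the same streams:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

class GzipBodySketch {

    static byte[] compress(String encodedResource) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (GZIPOutputStream gzip = new GZIPOutputStream(bytes)) {
            gzip.write(encodedResource.getBytes(StandardCharsets.UTF_8));
        }
        return bytes.toByteArray();
    }

    static String decompress(byte[] stored) throws IOException {
        try (GZIPInputStream gzip = new GZIPInputStream(new ByteArrayInputStream(stored))) {
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            byte[] buffer = new byte[4096];
            int read;
            while ((read = gzip.read(buffer)) != -1) {
                out.write(buffer, 0, read);
            }
            return new String(out.toByteArray(), StandardCharsets.UTF_8);
        }
    }

    public static void main(String[] args) throws IOException {
        String json = "{\"resourceType\":\"Patient\",\"active\":true}";
        System.out.println(decompress(compress(json)).equals(json)); // true
    }
}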

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -20,23 +20,23 @@ package ca.uhn.fhir.jpa.entity;
* #L%
*/
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import org.hibernate.annotations.OptimisticLock;
import javax.persistence.*;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import javax.persistence.*;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
//@formatter:off
@Entity
@Table(name = "HFJ_RES_VER", uniqueConstraints = {
@UniqueConstraint(name="IDX_RESVER_ID_VER", columnNames = { "RES_ID", "RES_VER" })
}, indexes= {
@Index(name="IDX_RESVER_TYPE_DATE", columnList="RES_TYPE,RES_UPDATED"),
@Index(name="IDX_RESVER_ID_DATE", columnList="RES_ID,RES_UPDATED"),
@Index(name="IDX_RESVER_DATE", columnList="RES_UPDATED")
@UniqueConstraint(name = "IDX_RESVER_ID_VER", columnNames = {"RES_ID", "RES_VER"})
}, indexes = {
@Index(name = "IDX_RESVER_TYPE_DATE", columnList = "RES_TYPE,RES_UPDATED"),
@Index(name = "IDX_RESVER_ID_DATE", columnList = "RES_ID,RES_UPDATED"),
@Index(name = "IDX_RESVER_DATE", columnList = "RES_UPDATED")
})
//@formatter:on
public class ResourceHistoryTable extends BaseHasResource implements Serializable {
@ -61,11 +61,20 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
@OneToMany(mappedBy = "myResourceHistory", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
private Collection<ResourceHistoryTag> myTags;
@Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = true)
@Lob()
@OptimisticLock(excluded = true)
private byte[] myResource;
@Column(name = "RES_ENCODING", nullable = false, length = 5)
@Enumerated(EnumType.STRING)
@OptimisticLock(excluded = true)
private ResourceEncodingEnum myEncoding;
public ResourceHistoryTable() {
super();
}
public void addTag(ResourceHistoryTag theTag) {
for (ResourceHistoryTag next : getTags()) {
if (next.getTag().equals(theTag)) {
@ -93,11 +102,23 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
return historyTag;
}
public ResourceEncodingEnum getEncoding() {
return myEncoding;
}
public void setEncoding(ResourceEncodingEnum theEncoding) {
myEncoding = theEncoding;
}
@Override
public Long getId() {
return myId;
}
public void setId(Long theId) {
myId = theId;
}
@Override
public IdDt getIdDt() {
if (getForcedId() == null) {
@ -108,15 +129,31 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
}
}
public byte[] getResource() {
return myResource;
}
public void setResource(byte[] theResource) {
myResource = theResource;
}
public Long getResourceId() {
return myResourceId;
}
public void setResourceId(Long theResourceId) {
myResourceId = theResourceId;
}
@Override
public String getResourceType() {
return myResourceType;
}
public void setResourceType(String theResourceType) {
myResourceType = theResourceType;
}
@Override
public Collection<ResourceHistoryTag> getTags() {
if (myTags == null) {
@ -130,6 +167,10 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
return myResourceVersion;
}
public void setVersion(long theVersion) {
myResourceVersion = theVersion;
}
public boolean hasTag(String theTerm, String theScheme) {
for (ResourceHistoryTag next : getTags()) {
if (next.getTag().getSystem().equals(theScheme) && next.getTag().getCode().equals(theTerm)) {
@ -139,20 +180,4 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
return false;
}
public void setId(Long theId) {
myId = theId;
}
public void setResourceId(Long theResourceId) {
myResourceId = theResourceId;
}
public void setResourceType(String theResourceType) {
myResourceType = theResourceType;
}
public void setVersion(long theVersion) {
myResourceVersion = theVersion;
}
}

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -24,6 +24,7 @@ import ca.uhn.fhir.jpa.search.IndexNonDeletedInterceptor;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.annotations.OptimisticLock;
@ -39,7 +40,6 @@ import java.util.Set;
import static org.apache.commons.lang3.StringUtils.defaultString;
//@formatter:off
@Indexed(interceptor = IndexNonDeletedInterceptor.class)
@Entity
@Table(name = "HFJ_RESOURCE", uniqueConstraints = {}, indexes = {
@ -49,13 +49,18 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
@Index(name = "IDX_RES_TYPE", columnList = "RES_TYPE"),
@Index(name = "IDX_INDEXSTATUS", columnList = "SP_INDEX_STATUS")
})
//@formatter:on
public class ResourceTable extends BaseHasResource implements Serializable {
static final int RESTYPE_LEN = 30;
private static final int MAX_LANGUAGE_LENGTH = 20;
private static final int MAX_PROFILE_LENGTH = 200;
private static final long serialVersionUID = 1L;
// @Transient
// private transient byte[] myResource;
//
// @Transient
// private transient ResourceEncodingEnum myEncoding;
/**
* Holds the narrative text only - Used for Fulltext searching but not directly stored in the DB
*/
@ -214,6 +219,15 @@ public class ResourceTable extends BaseHasResource implements Serializable {
return tag;
}
// public ResourceEncodingEnum getEncoding() {
// Validate.notNull(myEncoding, "myEncoding is null");
// return myEncoding;
// }
//
// public void setEncoding(ResourceEncodingEnum theEncoding) {
// myEncoding = theEncoding;
// }
public String getHashSha256() {
return myHashSha256;
}
@ -387,6 +401,15 @@ public class ResourceTable extends BaseHasResource implements Serializable {
myProfile = theProfile;
}
// public byte[] getResource() {
// Validate.notNull(myEncoding, "myEncoding is null");
// return myResource;
// }
//
// public void setResource(byte[] theResource) {
// myResource = theResource;
// }
public Collection<ResourceLink> getResourceLinks() {
if (myResourceLinks == null) {
myResourceLinks = new ArrayList<>();
@ -527,8 +550,8 @@ public class ResourceTable extends BaseHasResource implements Serializable {
myNarrativeText = theNarrativeText;
}
public ResourceHistoryTable toHistory(ResourceHistoryTable theResourceHistoryTable) {
ResourceHistoryTable retVal = theResourceHistoryTable != null ? theResourceHistoryTable : new ResourceHistoryTable();
public ResourceHistoryTable toHistory() {
ResourceHistoryTable retVal = new ResourceHistoryTable();
retVal.setResourceId(myId);
retVal.setResourceType(myResourceType);
@ -536,9 +559,9 @@ public class ResourceTable extends BaseHasResource implements Serializable {
retVal.setPublished(getPublished());
retVal.setUpdated(getUpdated());
retVal.setEncoding(getEncoding());
// retVal.setEncoding(getEncoding());
retVal.setFhirVersion(getFhirVersion());
retVal.setResource(getResource());
// retVal.setResource(getResource());
retVal.setDeleted(getDeleted());
retVal.setForcedId(getForcedId());

View File

@ -22,6 +22,7 @@ import org.hl7.fhir.dstu3.model.Resource;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.mockito.Mockito;
@ -57,6 +58,11 @@ public abstract class BaseJpaTest {
when(mySrd.getUserData()).thenReturn(new HashMap<>());
}
@After
public final void afterPerformCleanup() {
BaseHapiFhirResourceDao.setDisableIncrementOnUpdateForUnitTest(false);
}
protected abstract FhirContext getContext();
/**

View File

@ -8,10 +8,13 @@ import java.util.List;
import javax.servlet.http.HttpServletRequest;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
@ -24,6 +27,7 @@ import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.util.TestUtil;
import org.springframework.transaction.support.TransactionTemplate;
public class FhirResourceDaoDstu2SearchFtTest extends BaseJpaDstu2Test {
@ -392,13 +396,14 @@ public class FhirResourceDaoDstu2SearchFtTest extends BaseJpaDstu2Test {
*/
@Test
public void testSearchDontReindexForUpdateWithIndexDisabled() {
BaseHapiFhirResourceDao.setDisableIncrementOnUpdateForUnitTest(true);
Patient patient;
SearchParameterMap map;
patient = new Patient();
patient.getText().setDiv("<div>DIVAAA</div>");
patient.addName().addGiven("NAMEAAA");
IIdType pId1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
final IIdType pId1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
map = new SearchParameterMap();
map.add(Constants.PARAM_CONTENT, new StringParam("NAMEAAA"));

View File

@ -45,41 +45,13 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2Test.class);
@Before
public void beforeDisableResultReuse() {
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
}
@After
public final void after() {
myDaoConfig.setAllowExternalReferences(new DaoConfig().isAllowExternalReferences());
myDaoConfig.setTreatReferencesAsLogical(new DaoConfig().getTreatReferencesAsLogical());
myDaoConfig.setEnforceReferentialIntegrityOnDelete(new DaoConfig().isEnforceReferentialIntegrityOnDelete());
}
/**
* See #773
*/
@Test
public void testDeleteResourceWithOutboundDeletedResources() {
myDaoConfig.setEnforceReferentialIntegrityOnDelete(false);
Organization org = new Organization();
org.setId("ORG");
org.setName("ORG");
myOrganizationDao.update(org);
Patient pat = new Patient();
pat.setId("PAT");
pat.setActive(true);
pat.setManagingOrganization(new ResourceReferenceDt("Organization/ORG"));
myPatientDao.update(pat);
myOrganizationDao.delete(new IdDt("Organization/ORG"));
myPatientDao.delete(new IdDt("Patient/PAT"));
}
private void assertGone(IIdType theId) {
try {
assertNotGone(theId);
@ -102,6 +74,11 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
}
}
@Before
public void beforeDisableResultReuse() {
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
}
private List<String> extractNames(IBundleProvider theSearch) {
ArrayList<String> retVal = new ArrayList<String>();
for (IBaseResource next : theSearch.getResources(0, theSearch.size())) {
@ -865,6 +842,29 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
}
/**
* See #773
*/
@Test
public void testDeleteResourceWithOutboundDeletedResources() {
myDaoConfig.setEnforceReferentialIntegrityOnDelete(false);
Organization org = new Organization();
org.setId("ORG");
org.setName("ORG");
myOrganizationDao.update(org);
Patient pat = new Patient();
pat.setId("PAT");
pat.setActive(true);
pat.setManagingOrganization(new ResourceReferenceDt("Organization/ORG"));
myPatientDao.update(pat);
myOrganizationDao.delete(new IdDt("Organization/ORG"));
myPatientDao.delete(new IdDt("Patient/PAT"));
}
@Test
public void testDeleteThenUndelete() {
Patient patient = new Patient();

View File

@ -134,78 +134,6 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest {
}
@Test
public void testReindexing() {
Patient p = new Patient();
p.addName().addFamily("family");
final IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
ValueSet vs = new ValueSet();
vs.setUrl("http://foo");
myValueSetDao.create(vs, mySrd);
ResourceTable entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
@Override
public ResourceTable doInTransaction(TransactionStatus theStatus) {
return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
}
});
assertEquals(Long.valueOf(1), entity.getIndexStatus());
mySystemDao.markAllResourcesForReindexing();
entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
@Override
public ResourceTable doInTransaction(TransactionStatus theStatus) {
return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
}
});
assertEquals(null, entity.getIndexStatus());
mySystemDao.performReindexingPass(null);
entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
@Override
public ResourceTable doInTransaction(TransactionStatus theStatus) {
return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
}
});
assertEquals(Long.valueOf(1), entity.getIndexStatus());
// Just make sure this doesn't cause a choke
mySystemDao.performReindexingPass(100000);
// Try making the resource unparseable
TransactionTemplate template = new TransactionTemplate(myTxManager);
template.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRES_NEW);
template.execute(new TransactionCallback<ResourceTable>() {
@Override
public ResourceTable doInTransaction(TransactionStatus theStatus) {
ResourceTable table = myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
table.setEncoding(ResourceEncodingEnum.JSON);
table.setIndexStatus(null);
try {
table.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new Error(e);
}
myEntityManager.merge(table);
return null;
}
});
mySystemDao.performReindexingPass(null);
entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
@Override
public ResourceTable doInTransaction(TransactionStatus theStatus) {
return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
}
});
assertEquals(Long.valueOf(2), entity.getIndexStatus());
}
@Test
public void testSystemMetaOperation() {

View File

@ -8,6 +8,8 @@ import java.util.List;
import javax.servlet.http.HttpServletRequest;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.Observation.ObservationStatus;
import org.hl7.fhir.instance.model.api.IIdType;
@ -19,6 +21,9 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.TestUtil;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;
public class FhirResourceDaoDstu3SearchFtTest extends BaseJpaDstu3Test {
@ -473,13 +478,14 @@ public class FhirResourceDaoDstu3SearchFtTest extends BaseJpaDstu3Test {
*/
@Test
public void testSearchDontReindexForUpdateWithIndexDisabled() {
BaseHapiFhirResourceDao.setDisableIncrementOnUpdateForUnitTest(true);
Patient patient;
SearchParameterMap map;
patient = new Patient();
patient.getText().setDivAsString("<div>DIVAAA</div>");
patient.addName().addGiven("NAMEAAA");
IIdType pId1 = myPatientDao.create(patient, mockSrd()).getId().toUnqualifiedVersionless();
final IIdType pId1 = myPatientDao.create(patient, mockSrd()).getId().toUnqualifiedVersionless();
map = new SearchParameterMap();
map.add(Constants.PARAM_CONTENT, new StringParam("NAMEAAA"));

View File

@ -2508,36 +2508,6 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test {
}
/**
* Can we handle content that was previously saved containing vocabulary that
* is no longer valid
*/
@Test
public void testResourceInDatabaseContainsInvalidVocabulary() {
final Patient p = new Patient();
p.setGender(AdministrativeGender.MALE);
final IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();
TransactionTemplate tx = new TransactionTemplate(myTxManager);
tx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
tx.execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
ResourceTable table = myResourceTableDao.findOne(id.getIdPartAsLong());
String newContent = myFhirCtx.newJsonParser().encodeResourceToString(p);
newContent = newContent.replace("male", "foo");
table.setResource(newContent.getBytes(Charsets.UTF_8));
table.setEncoding(ResourceEncodingEnum.JSON);
myResourceTableDao.save(table);
}
});
Patient read = myPatientDao.read(id);
String string = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(read);
ourLog.info(string);
assertThat(string, containsString("value=\"foo\""));
}
@Test
public void testResourceInstanceMetaOperation() {

View File

@ -3,8 +3,6 @@ package ca.uhn.fhir.jpa.dao.dstu3;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
@ -26,15 +24,12 @@ import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.*;
import org.mockito.ArgumentCaptor;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
@ -444,79 +439,6 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
}
}
@Test
public void testReindexing() {
Patient p = new Patient();
p.addName().setFamily("family");
final IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
ValueSet vs = new ValueSet();
vs.setUrl("http://foo");
myValueSetDao.create(vs, mySrd);
ResourceTable entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
@Override
public ResourceTable doInTransaction(TransactionStatus theStatus) {
return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
}
});
assertEquals(Long.valueOf(1), entity.getIndexStatus());
mySystemDao.markAllResourcesForReindexing();
entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
@Override
public ResourceTable doInTransaction(TransactionStatus theStatus) {
return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
}
});
assertEquals(null, entity.getIndexStatus());
mySystemDao.performReindexingPass(null);
entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
@Override
public ResourceTable doInTransaction(TransactionStatus theStatus) {
return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
}
});
assertEquals(Long.valueOf(1), entity.getIndexStatus());
// Just make sure this doesn't cause a choke
mySystemDao.performReindexingPass(100000);
// Try making the resource unparseable
TransactionTemplate template = new TransactionTemplate(myTxManager);
template.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
template.execute(new TransactionCallback<ResourceTable>() {
@Override
public ResourceTable doInTransaction(TransactionStatus theStatus) {
ResourceTable table = myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
table.setEncoding(ResourceEncodingEnum.JSON);
table.setIndexStatus(null);
try {
table.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new Error(e);
}
myEntityManager.merge(table);
return null;
}
});
mySystemDao.performReindexingPass(null);
entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
@Override
public ResourceTable doInTransaction(TransactionStatus theStatus) {
return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
}
});
assertEquals(Long.valueOf(2), entity.getIndexStatus());
}
@Test
public void testSystemMetaOperation() {

View File

@ -180,6 +180,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
@Autowired
protected IResourceTableDao myResourceTableDao;
@Autowired
protected IResourceHistoryTableDao myResourceHistoryTableDao;
@Autowired
protected IResourceTagDao myResourceTagDao;
@Autowired
protected ISearchCoordinatorSvc mySearchCoordinatorSvc;
@ -234,6 +236,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
@After()
public void afterCleanupDao() {
myDaoConfig.setExpireSearchResults(new DaoConfig().isExpireSearchResults());
myDaoConfig.setEnforceReferentialIntegrityOnDelete(new DaoConfig().isEnforceReferentialIntegrityOnDelete());
myDaoConfig.setExpireSearchResultsAfterMillis(new DaoConfig().getExpireSearchResultsAfterMillis());
myDaoConfig.setReuseCachedSearchResultsForMillis(new DaoConfig().getReuseCachedSearchResultsForMillis());
myDaoConfig.setSuppressUpdatesWithNoChange(new DaoConfig().isSuppressUpdatesWithNoChange());

View File

@ -74,7 +74,7 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
}
@Test
public void testTransactionCreateWithUuidResourceStrategy() throws Exception {
public void testTransactionCreateWithUuidResourceStrategy() {
myDaoConfig.setResourceServerIdStrategy(DaoConfig.IdStrategyEnum.UUID);
Organization org = new Organization();

View File

@ -8,6 +8,8 @@ import java.util.List;
import javax.servlet.http.HttpServletRequest;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.Observation.ObservationStatus;
import org.hl7.fhir.instance.model.api.IIdType;
@ -19,6 +21,9 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.TestUtil;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;
public class FhirResourceDaoR4SearchFtTest extends BaseJpaR4Test {
@ -488,13 +493,14 @@ public class FhirResourceDaoR4SearchFtTest extends BaseJpaR4Test {
*/
@Test
public void testSearchDontReindexForUpdateWithIndexDisabled() {
BaseHapiFhirResourceDao.setDisableIncrementOnUpdateForUnitTest(true);
Patient patient;
SearchParameterMap map;
patient = new Patient();
patient.getText().setDivAsString("<div>DIVAAA</div>");
patient.addName().addGiven("NAMEAAA");
IIdType pId1 = myPatientDao.create(patient, mockSrd()).getId().toUnqualifiedVersionless();
final IIdType pId1 = myPatientDao.create(patient, mockSrd()).getId().toUnqualifiedVersionless();
map = new SearchParameterMap();
map.add(Constants.PARAM_CONTENT, new StringParam("NAMEAAA"));

View File

@ -1108,9 +1108,52 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
}
@Test
public void testDeleteTwicePerformsNoOp() {
Patient patient = new Patient();
patient.setActive(true);
IIdType id = myPatientDao.create(patient, mySrd).getId();
assertNotNull(id.getIdPartAsLong());
assertEquals("1", id.getVersionIdPart());
IIdType id2 = myPatientDao.delete(id.toUnqualifiedVersionless()).getId();
assertEquals(id.getIdPart(), id2.getIdPart());
assertEquals("2", id2.getVersionIdPart());
IIdType id3 = myPatientDao.delete(id.toUnqualifiedVersionless()).getId();
assertEquals(id.getIdPart(), id3.getIdPart());
assertEquals("2", id3.getVersionIdPart());
IIdType id4 = myPatientDao.delete(id.toUnqualifiedVersionless()).getId();
assertEquals(id.getIdPart(), id4.getIdPart());
assertEquals("2", id4.getVersionIdPart());
patient = new Patient();
patient.setId(id.getIdPart());
patient.setActive(false);
IIdType id5 = myPatientDao.update(patient).getId();
assertEquals(id.getIdPart(), id5.getIdPart());
assertEquals("3", id5.getVersionIdPart());
patient = myPatientDao.read(id.withVersion("1"));
assertEquals(true, patient.getActive());
try {
myPatientDao.read(id.withVersion("2"));
fail();
} catch (ResourceGoneException e) {
// good
}
patient = myPatientDao.read(id.withVersion("3"));
assertEquals(false, patient.getActive());
}
@Test
public void testDeleteResource() {
int initialHistory = myPatientDao.history((Date) null, null, mySrd).size();
int initialHistory = myPatientDao.history(null, null, mySrd).size();
IIdType id1;
IIdType id2;
@ -1132,7 +1175,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
patient.addIdentifier().setSystem("ZZZZZZZ").setValue("ZZZZZZZZZ");
id2b = myPatientDao.update(patient, mySrd).getId();
}
ourLog.info("ID1:{} ID2:{} ID2b:{}", new Object[] { id1, id2, id2b });
ourLog.info("ID1:{} ID2:{} ID2b:{}", id1, id2, id2b);
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
@ -1153,7 +1196,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
// good
}
IBundleProvider history = myPatientDao.history((Date) null, null, mySrd);
IBundleProvider history = myPatientDao.history(null, null, mySrd);
assertEquals(4 + initialHistory, history.size().intValue());
List<IBaseResource> resources = history.getResources(0, 4);
assertNotNull(ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) resources.get(0)));
@ -1525,7 +1568,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
}
// By type
history = myPatientDao.history((Date) null, null, mySrd);
history = myPatientDao.history(null, null, mySrd);
assertEquals(fullSize + 1, history.size().intValue());
for (int i = 0; i < fullSize; i++) {
String expected = id.withVersion(Integer.toString(fullSize + 1 - i)).getValue();
@ -1592,7 +1635,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
}
// By type
history = myPatientDao.history((Date) null, null, mySrd);
history = myPatientDao.history(null, null, mySrd);
assertEquals(fullSize + 1, history.size().intValue());
for (int i = 0; i < fullSize; i++) {
String expected = id.withVersion(Integer.toString(fullSize + 1 - i)).getValue();
@ -1643,13 +1686,13 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
}
@Test
public void testHistoryReflectsMetaOperations() throws Exception {
public void testHistoryReflectsMetaOperations() {
Patient inPatient = new Patient();
inPatient.addName().setFamily("version1");
inPatient.getMeta().addProfile("http://example.com/1");
IIdType id = myPatientDao.create(inPatient, mySrd).getId().toUnqualifiedVersionless();
IBundleProvider history = myPatientDao.history((Date) null, null, mySrd);
IBundleProvider history = myPatientDao.history(null, null, mySrd);
assertEquals(1, history.size().intValue());
Patient outPatient = (Patient) history.getResources(0, 1).get(0);
assertEquals("version1", inPatient.getName().get(0).getFamily());
@ -1663,7 +1706,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
inPatient.getMeta().addProfile("http://example.com/2");
myPatientDao.metaAddOperation(id, inPatient.getMeta(), mySrd);
history = myPatientDao.history((Date) null, null, mySrd);
history = myPatientDao.history(null, null, mySrd);
assertEquals(1, history.size().intValue());
outPatient = (Patient) history.getResources(0, 1).get(0);
assertEquals("version1", inPatient.getName().get(0).getFamily());
@ -1679,7 +1722,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
inPatient.getName().get(0).setFamily("version2");
myPatientDao.update(inPatient, mySrd);
history = myPatientDao.history((Date) null, null, mySrd);
history = myPatientDao.history(null, null, mySrd);
assertEquals(2, history.size().intValue());
outPatient = (Patient) history.getResources(0, 2).get(0);
assertEquals("version2", outPatient.getName().get(0).getFamily());
@ -1694,7 +1737,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
}
@Test
public void testHistoryWithDeletedResource() throws Exception {
public void testHistoryWithDeletedResource() {
String methodName = "testHistoryWithDeletedResource";
Patient patient = new Patient();
@ -1707,9 +1750,9 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
IBundleProvider history = myPatientDao.history(id, null, null, mySrd);
List<IBaseResource> entries = history.getResources(0, 3);
ourLog.info(((IAnyResource) entries.get(0)).getIdElement() + " - " + ((IAnyResource) entries.get(0)).getMeta().getLastUpdated());
ourLog.info(((IAnyResource) entries.get(1)).getIdElement() + " - " + ((IAnyResource) entries.get(1)).getMeta().getLastUpdated());
ourLog.info(((IAnyResource) entries.get(2)).getIdElement() + " - " + ((IAnyResource) entries.get(2)).getMeta().getLastUpdated());
ourLog.info(entries.get(0).getIdElement() + " - " + entries.get(0).getMeta().getLastUpdated());
ourLog.info(entries.get(1).getIdElement() + " - " + entries.get(1).getMeta().getLastUpdated());
ourLog.info(entries.get(2).getIdElement() + " - " + entries.get(2).getMeta().getLastUpdated());
assertEquals(3, history.size().intValue());
assertEquals(id.withVersion("3"), entries.get(0).getIdElement());
@ -1773,7 +1816,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
// No since
IBundleProvider history = myPatientDao.history((Date) null, null, mySrd);
IBundleProvider history = myPatientDao.history(null, null, mySrd);
assertEquals(1, history.size().intValue());
Patient outPatient = (Patient) history.getResources(0, 1).get(0);
assertEquals("version1", inPatient.getName().get(0).getFamily());
@ -1797,7 +1840,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
}
@Test
public void testHistoryWithInvalidId() throws Exception {
public void testHistoryWithInvalidId() {
try {
myPatientDao.history(new IdType("Patient/FOOFOOFOO"), null, null, mySrd);
fail();
@ -2175,7 +2218,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
dr01.setSubject(new Reference(patientId01));
IIdType drId01 = myDiagnosticReportDao.create(dr01, mySrd).getId();
ourLog.info("P1[{}] P2[{}] O1[{}] O2[{}] D1[{}]", new Object[] { patientId01, patientId02, obsId01, obsId02, drId01 });
ourLog.info("P1[{}] P2[{}] O1[{}] O2[{}] D1[{}]", patientId01, patientId02, obsId01, obsId02, drId01);
List<Observation> result = toList(myObservationDao.search(new SearchParameterMap(Observation.SP_SUBJECT, new ReferenceParam(patientId01.getIdPart())).setLoadSynchronous(true)));
assertEquals(1, result.size());
@ -2185,7 +2228,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
assertEquals(1, result.size());
assertEquals(obsId02.getIdPart(), result.get(0).getIdElement().getIdPart());
result = toList(myObservationDao.search(new SearchParameterMap(Observation.SP_SUBJECT, new ReferenceParam("999999999999")).setLoadSynchronous(true)));;
result = toList(myObservationDao.search(new SearchParameterMap(Observation.SP_SUBJECT, new ReferenceParam("999999999999")).setLoadSynchronous(true)));
assertEquals(0, result.size());
}
@ -2268,7 +2311,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
long id = outcome.getId().getIdPartAsLong();
TokenParam value = new TokenParam("urn:system", "001testPersistSearchParams");
List<Patient> found = toList(myPatientDao.search(new SearchParameterMap(Patient.SP_IDENTIFIER, value).setLoadSynchronous(true)));;
List<Patient> found = toList(myPatientDao.search(new SearchParameterMap(Patient.SP_IDENTIFIER, value).setLoadSynchronous(true)));
assertEquals(1, found.size());
assertEquals(id, found.get(0).getIdElement().getIdPartAsLong().longValue());
@ -2380,7 +2423,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
}
@Test
public void testReadInvalidVersion() throws Exception {
public void testReadInvalidVersion() {
String methodName = "testReadInvalidVersion";
Patient pat = new Patient();
@ -2565,12 +2608,12 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
tx.execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
ResourceTable table = myResourceTableDao.findOne(id.getIdPartAsLong());
ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 1L);
String newContent = myFhirCtx.newJsonParser().encodeResourceToString(p);
newContent = newContent.replace("male", "foo");
table.setResource(newContent.getBytes(Charsets.UTF_8));
table.setEncoding(ResourceEncodingEnum.JSON);
myResourceTableDao.save(table);
myResourceHistoryTableDao.save(table);
}
});
@ -2592,7 +2635,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
id1 = myPatientDao.create(patient, mySrd).getId();
Meta metaAdd = new Meta();
metaAdd.addTag().setSystem((String) null).setCode("Dog").setDisplay("Puppies");
metaAdd.addTag().setSystem(null).setCode("Dog").setDisplay("Puppies");
metaAdd.addSecurity().setSystem("seclabel:sys:1").setCode("seclabel:code:1").setDisplay("seclabel:dis:1");
metaAdd.addProfile("http://profile/1");
myPatientDao.metaAddOperation(id1, metaAdd, mySrd);
@ -2662,7 +2705,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
{
Meta metaDel = new Meta();
metaDel.addTag().setSystem((String) null).setCode("Dog");
metaDel.addTag().setSystem(null).setCode("Dog");
metaDel.addSecurity().setSystem("seclabel:sys:1").setCode("seclabel:code:1");
metaDel.addProfile("http://profile/1");
myPatientDao.metaDeleteOperation(id1, metaDel, mySrd);
@ -3387,7 +3430,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
* Test for issue #60
*/
@Test
public void testStoreUtf8Characters() throws Exception {
public void testStoreUtf8Characters() {
Organization org = new Organization();
org.setName("測試醫院");
org.addIdentifier().setSystem("urn:system").setValue("testStoreUtf8Characters_01");
@ -3547,7 +3590,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
}
@Test
public void testTimingSearchParams() throws Exception {
public void testTimingSearchParams() {
Date before = new DateTimeType("2011-01-01T10:00:00Z").getValue();
Date middle = new DateTimeType("2011-01-02T10:00:00Z").getValue();
Date after = new DateTimeType("2011-01-03T10:00:00Z").getValue();
@ -3613,7 +3656,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
}
@Test
public void testUpdateRejectsIdWhichPointsToForcedId() throws InterruptedException {
public void testUpdateRejectsIdWhichPointsToForcedId() {
Patient p1 = new Patient();
p1.addIdentifier().setSystem("urn:system").setValue("testUpdateRejectsIdWhichPointsToForcedId01");
p1.addName().setFamily("Tester").addGiven("testUpdateRejectsIdWhichPointsToForcedId01");

View File

@ -270,6 +270,46 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
}
@Test
public void testValidateForDeleteWithReferentialIntegrityDisabled() {
myDaoConfig.setEnforceReferentialIntegrityOnDelete(false);
String methodName = "testValidateForDelete";
Organization org = new Organization();
org.setName(methodName);
IIdType orgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
Patient pat = new Patient();
pat.addName().setFamily(methodName);
pat.getManagingOrganization().setReference(orgId.getValue());
IIdType patId = myPatientDao.create(pat, mySrd).getId().toUnqualifiedVersionless();
myOrganizationDao.validate(null, orgId, null, null, ValidationModeEnum.DELETE, null, mySrd);
myDaoConfig.setEnforceReferentialIntegrityOnDelete(true);
try {
myOrganizationDao.validate(null, orgId, null, null, ValidationModeEnum.DELETE, null, mySrd);
fail();
} catch (ResourceVersionConflictException e) {
// good
}
myDaoConfig.setEnforceReferentialIntegrityOnDelete(false);
myOrganizationDao.read(orgId);
myOrganizationDao.delete(orgId);
try {
myOrganizationDao.read(orgId);
fail();
} catch (ResourceGoneException e) {
// good
}
}
private IBaseResource findResourceByIdInBundle(Bundle vss, String name) {
IBaseResource retVal = null;
for (BundleEntryComponent next : vss.getEntry()) {

View File

@ -3,10 +3,8 @@ package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.primitive.IdDt;
@ -26,6 +24,7 @@ import org.hl7.fhir.r4.model.Observation.ObservationStatus;
import org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity;
import org.junit.*;
import org.mockito.ArgumentCaptor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
@ -443,7 +442,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
public void testReindexing() {
Patient p = new Patient();
p.addName().setFamily("family");
final IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
final IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualified();
ValueSet vs = new ValueSet();
vs.setUrl("http://foo");
@ -487,15 +486,19 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
template.execute(new TransactionCallback<ResourceTable>() {
@Override
public ResourceTable doInTransaction(TransactionStatus theStatus) {
ResourceTable table = myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
table.setEncoding(ResourceEncodingEnum.JSON);
table.setIndexStatus(null);
ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), id.getVersionIdPartAsLong());
resourceHistoryTable.setEncoding(ResourceEncodingEnum.JSON);
try {
table.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
resourceHistoryTable.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new Error(e);
}
myEntityManager.merge(table);
myResourceHistoryTableDao.save(resourceHistoryTable);
ResourceTable table = myResourceTableDao.findOne(id.getIdPartAsLong());
table.setIndexStatus(null);
myResourceTableDao.save(table);
return null;
}
});

View File

@ -1,45 +1,48 @@
package ca.uhn.fhir.jpa.provider;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.*;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.dstu2.BaseJpaDstu2Test;
import ca.uhn.fhir.jpa.rp.dstu2.*;
import ca.uhn.fhir.jpa.rp.dstu2.ObservationResourceProvider;
import ca.uhn.fhir.jpa.rp.dstu2.OrganizationResourceProvider;
import ca.uhn.fhir.jpa.rp.dstu2.PatientResourceProvider;
import ca.uhn.fhir.jpa.testutil.RandomServerPortProvider;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.model.dstu2.valueset.*;
import ca.uhn.fhir.model.dstu2.valueset.AdministrativeGenderEnum;
import ca.uhn.fhir.model.dstu2.valueset.BundleTypeEnum;
import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.interceptor.SimpleRequestHeaderInterceptor;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.util.TestUtil;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
public class SystemProviderTransactionSearchDstu2Test extends BaseJpaDstu2Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SystemProviderTransactionSearchDstu2Test.class);
private static RestfulServer myRestServer;
private static IGenericClient ourClient;
private static FhirContext ourCtx;
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SystemProviderTransactionSearchDstu2Test.class);
private static Server ourServer;
private static String ourServerBase;
private SimpleRequestHeaderInterceptor mySimpleHeaderInterceptor;
@SuppressWarnings("deprecation")
@After
public void after() {
@ -53,7 +56,7 @@ public class SystemProviderTransactionSearchDstu2Test extends BaseJpaDstu2Test {
mySimpleHeaderInterceptor = new SimpleRequestHeaderInterceptor();
ourClient.registerInterceptor(mySimpleHeaderInterceptor);
}
@Before
public void beforeStartServer() throws Exception {
if (myRestServer == null) {
@ -96,11 +99,11 @@ public class SystemProviderTransactionSearchDstu2Test extends BaseJpaDstu2Test {
ourClient = ourCtx.newRestfulGenericClient(ourServerBase);
myRestServer = restServer;
}
myRestServer.setDefaultResponseEncoding(EncodingEnum.XML);
myRestServer.setPagingProvider(myPagingProvider);
}
private List<String> create20Patients() {
List<String> ids = new ArrayList<String>();
@ -108,7 +111,7 @@ public class SystemProviderTransactionSearchDstu2Test extends BaseJpaDstu2Test {
Patient patient = new Patient();
patient.setGender(AdministrativeGenderEnum.MALE);
patient.addIdentifier().setSystem("urn:foo").setValue("A");
patient.addName().addFamily("abcdefghijklmnopqrstuvwxyz".substring(i, i+1));
patient.addName().addFamily("abcdefghijklmnopqrstuvwxyz".substring(i, i + 1));
String id = myPatientDao.create(patient).getId().toUnqualifiedVersionless().getValue();
ids.add(id);
}
@ -116,9 +119,9 @@ public class SystemProviderTransactionSearchDstu2Test extends BaseJpaDstu2Test {
}
@Test
public void testBatchWithGetHardLimitLargeSynchronous() throws Exception {
public void testBatchWithGetHardLimitLargeSynchronous() {
List<String> ids = create20Patients();
Bundle input = new Bundle();
input.setType(BundleTypeEnum.BATCH);
input
@ -126,12 +129,12 @@ public class SystemProviderTransactionSearchDstu2Test extends BaseJpaDstu2Test {
.getRequest()
.setMethod(HTTPVerbEnum.GET)
.setUrl("Patient?_count=5&_sort=name");
myDaoConfig.setMaximumSearchResultCountInTransaction(100);
Bundle output = ourClient.transaction().withBundle(input).execute();
ourLog.info(myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(output));
assertEquals(1, output.getEntry().size());
Bundle respBundle = (Bundle) output.getEntry().get(0).getResource();
assertEquals(5, respBundle.getEntry().size());
@ -139,11 +142,11 @@ public class SystemProviderTransactionSearchDstu2Test extends BaseJpaDstu2Test {
List<String> actualIds = toIds(respBundle);
assertThat(actualIds, contains(ids.subList(0, 5).toArray(new String[0])));
}
@Test
public void testBatchWithGetNormalSearch() throws Exception {
public void testBatchWithGetNormalSearch() {
List<String> ids = create20Patients();
Bundle input = new Bundle();
input.setType(BundleTypeEnum.BATCH);
input
@ -151,16 +154,16 @@ public class SystemProviderTransactionSearchDstu2Test extends BaseJpaDstu2Test {
.getRequest()
.setMethod(HTTPVerbEnum.GET)
.setUrl("Patient?_count=5&_sort=name");
Bundle output = ourClient.transaction().withBundle(input).execute();
ourLog.info(myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(output));
assertEquals(1, output.getEntry().size());
Bundle respBundle = (Bundle) output.getEntry().get(0).getResource();
assertEquals(5, respBundle.getEntry().size());
List<String> actualIds = toIds(respBundle);
assertThat(actualIds, contains(ids.subList(0, 5).toArray(new String[0])));
String nextPageLink = respBundle.getLink("next").getUrl();
output = ourClient.loadPage().byUrl(nextPageLink).andReturnBundle(Bundle.class).execute();
respBundle = output;
@ -173,10 +176,10 @@ public class SystemProviderTransactionSearchDstu2Test extends BaseJpaDstu2Test {
* 30 searches in one batch! Whoa!
*/
@Test
public void testBatchWithManyGets() throws Exception {
public void testBatchWithManyGets() {
List<String> ids = create20Patients();
Bundle input = new Bundle();
input.setType(BundleTypeEnum.BATCH);
for (int i = 0; i < 30; i++) {
@ -186,10 +189,10 @@ public class SystemProviderTransactionSearchDstu2Test extends BaseJpaDstu2Test {
.setMethod(HTTPVerbEnum.GET)
.setUrl("Patient?_count=5&identifier=urn:foo|A,AAAAA" + i);
}
Bundle output = ourClient.transaction().withBundle(input).execute();
ourLog.info(myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(output));
assertEquals(30, output.getEntry().size());
for (int i = 0; i < 30; i++) {
Bundle respBundle = (Bundle) output.getEntry().get(i).getResource();
@ -201,22 +204,22 @@ public class SystemProviderTransactionSearchDstu2Test extends BaseJpaDstu2Test {
}
@Test
public void testTransactionWithGetHardLimitLargeSynchronous() throws Exception {
public void testTransactionWithGetHardLimitLargeSynchronous() {
List<String> ids = create20Patients();
Bundle input = new Bundle();
input.setType(BundleTypeEnum.TRANSACTION);
input
.addEntry()
.getRequest()
.setMethod(HTTPVerbEnum.GET)
.setUrl("Patient?_count=5");
.setUrl("Patient?_count=5&_sort=_id");
myDaoConfig.setMaximumSearchResultCountInTransaction(100);
Bundle output = ourClient.transaction().withBundle(input).execute();
ourLog.info(myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(output));
assertEquals(1, output.getEntry().size());
Bundle respBundle = (Bundle) output.getEntry().get(0).getResource();
assertEquals(5, respBundle.getEntry().size());
@ -224,11 +227,11 @@ public class SystemProviderTransactionSearchDstu2Test extends BaseJpaDstu2Test {
List<String> actualIds = toIds(respBundle);
assertThat(actualIds, contains(ids.subList(0, 5).toArray(new String[0])));
}
@Test
public void testTransactionWithGetNormalSearch() throws Exception {
public void testTransactionWithGetNormalSearch() {
List<String> ids = create20Patients();
Bundle input = new Bundle();
input.setType(BundleTypeEnum.TRANSACTION);
input
@ -236,16 +239,16 @@ public class SystemProviderTransactionSearchDstu2Test extends BaseJpaDstu2Test {
.getRequest()
.setMethod(HTTPVerbEnum.GET)
.setUrl("Patient?_count=5&_sort=name");
Bundle output = ourClient.transaction().withBundle(input).execute();
ourLog.info(myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(output));
assertEquals(1, output.getEntry().size());
Bundle respBundle = (Bundle) output.getEntry().get(0).getResource();
assertEquals(5, respBundle.getEntry().size());
List<String> actualIds = toIds(respBundle);
assertThat(actualIds, contains(ids.subList(0, 5).toArray(new String[0])));
String nextPageLink = respBundle.getLink("next").getUrl();
output = ourClient.loadPage().byUrl(nextPageLink).andReturnBundle(Bundle.class).execute();
respBundle = output;
@ -258,10 +261,10 @@ public class SystemProviderTransactionSearchDstu2Test extends BaseJpaDstu2Test {
* 30 searches in one Transaction! Whoa!
*/
@Test
public void testTransactionWithManyGets() throws Exception {
public void testTransactionWithManyGets() {
List<String> ids = create20Patients();
Bundle input = new Bundle();
input.setType(BundleTypeEnum.TRANSACTION);
for (int i = 0; i < 30; i++) {
@ -271,10 +274,10 @@ public class SystemProviderTransactionSearchDstu2Test extends BaseJpaDstu2Test {
.setMethod(HTTPVerbEnum.GET)
.setUrl("Patient?_count=5&identifier=urn:foo|A,AAAAA" + i);
}
Bundle output = ourClient.transaction().withBundle(input).execute();
ourLog.info(myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(output));
assertEquals(30, output.getEntry().size());
for (int i = 0; i < 30; i++) {
Bundle respBundle = (Bundle) output.getEntry().get(i).getResource();

View File

@ -126,7 +126,7 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {
}
@Test
public void testBatchWithGetHardLimitLargeSynchronous() throws Exception {
public void testBatchWithGetHardLimitLargeSynchronous() {
List<String> ids = create20Patients();
Bundle input = new Bundle();
@ -135,7 +135,7 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {
.addEntry()
.getRequest()
.setMethod(HTTPVerb.GET)
.setUrl("Patient?_count=5");
.setUrl("Patient?_count=5&_sort=_id");
myDaoConfig.setMaximumSearchResultCountInTransaction(100);
@ -151,7 +151,7 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {
}
@Test
public void testBatchWithGetNormalSearch() throws Exception {
public void testBatchWithGetNormalSearch() {
List<String> ids = create20Patients();
Bundle input = new Bundle();
@ -183,7 +183,7 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {
* 30 searches in one batch! Whoa!
*/
@Test
public void testBatchWithManyGets() throws Exception {
public void testBatchWithManyGets() {
List<String> ids = create20Patients();
@ -211,7 +211,7 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {
}
@Test
public void testTransactionWithGetHardLimitLargeSynchronous() throws Exception {
public void testTransactionWithGetHardLimitLargeSynchronous() {
List<String> ids = create20Patients();
Bundle input = new Bundle();
@ -220,7 +220,7 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {
.addEntry()
.getRequest()
.setMethod(HTTPVerb.GET)
.setUrl("Patient?_count=5");
.setUrl("Patient?_count=5&_sort=_id");
myDaoConfig.setMaximumSearchResultCountInTransaction(100);
@ -236,7 +236,7 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {
}
@Test
public void testTransactionWithGetNormalSearch() throws Exception {
public void testTransactionWithGetNormalSearch() {
List<String> ids = create20Patients();
Bundle input = new Bundle();
@ -268,7 +268,7 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {
* 30 searches in one Transaction! Whoa!
*/
@Test
public void testTransactionWithManyGets() throws Exception {
public void testTransactionWithManyGets() {
List<String> ids = create20Patients();

View File

@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.jpa.config.TestR4Config;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.util.StopWatch;
@ -54,6 +55,8 @@ import org.junit.*;
import org.springframework.test.util.AopTestUtils;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;
import java.io.BufferedReader;
import java.io.IOException;
@ -1642,31 +1645,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
}
// private void delete(String theResourceType, String theParamName, String theParamValue) {
// Bundle resources;
// do {
// IQuery<Bundle> forResource = myClient.search().forResource(theResourceType);
// if (theParamName != null) {
// forResource = forResource.where(new StringClientParam(theParamName).matches().value(theParamValue));
// }
// resources = forResource.execute();
// for (IResource next : resources.toListOfResources()) {
// ourLog.info("Deleting resource: {}", next.getId());
// myClient.delete().resource(next).execute();
// }
// } while (resources.size() > 0);
// }
//
// private void deleteToken(String theResourceType, String theParamName, String theParamSystem, String theParamValue)
// {
// Bundle resources = myClient.search().forResource(theResourceType).where(new
// TokenClientParam(theParamName).exactly().systemAndCode(theParamSystem, theParamValue)).execute();
// for (IResource next : resources.toListOfResources()) {
// ourLog.info("Deleting resource: {}", next.getId());
// myClient.delete().resource(next).execute();
// }
// }
@Test
public void testHasParameter() throws Exception {
IIdType pid0;
@ -1704,6 +1682,31 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertThat(ids, contains(pid0.getValue()));
}
// private void delete(String theResourceType, String theParamName, String theParamValue) {
// Bundle resources;
// do {
// IQuery<Bundle> forResource = myClient.search().forResource(theResourceType);
// if (theParamName != null) {
// forResource = forResource.where(new StringClientParam(theParamName).matches().value(theParamValue));
// }
// resources = forResource.execute();
// for (IResource next : resources.toListOfResources()) {
// ourLog.info("Deleting resource: {}", next.getId());
// myClient.delete().resource(next).execute();
// }
// } while (resources.size() > 0);
// }
//
// private void deleteToken(String theResourceType, String theParamName, String theParamSystem, String theParamValue)
// {
// Bundle resources = myClient.search().forResource(theResourceType).where(new
// TokenClientParam(theParamName).exactly().systemAndCode(theParamSystem, theParamValue)).execute();
// for (IResource next : resources.toListOfResources()) {
// ourLog.info("Deleting resource: {}", next.getId());
// myClient.delete().resource(next).execute();
// }
// }
@Test
public void testHasParameterNoResults() throws Exception {
@ -2347,6 +2350,54 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
}
@Test
public void testRetrieveMissingVersionsDoesntCrashSearch() {
Patient p1 = new Patient();
p1.setActive(true);
final IIdType id1 = myClient.create().resource(p1).execute().getId();
Patient p2 = new Patient();
p2.setActive(false);
IIdType id2 = myClient.create().resource(p2).execute().getId();
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus status) {
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(id1.getIdPartAsLong(), 1);
myResourceHistoryTableDao.delete(version);
}
});
Bundle bundle = myClient.search().forResource("Patient").returnBundle(Bundle.class).execute();
assertEquals(2, bundle.getTotal());
assertEquals(1, bundle.getEntry().size());
assertEquals(id2.getIdPart(), bundle.getEntry().get(0).getResource().getIdElement().getIdPart());
}
@Test
public void testRetrieveMissingVersionsDoesntCrashHistory() {
Patient p1 = new Patient();
p1.setActive(true);
final IIdType id1 = myClient.create().resource(p1).execute().getId();
Patient p2 = new Patient();
p2.setActive(false);
IIdType id2 = myClient.create().resource(p2).execute().getId();
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus status) {
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(id1.getIdPartAsLong(), 1);
myResourceHistoryTableDao.delete(version);
}
});
Bundle bundle = myClient.history().onServer().andReturnBundle(Bundle.class).execute();
assertEquals(1, bundle.getTotal());
assertEquals(1, bundle.getEntry().size());
assertEquals(id2.getIdPart(), bundle.getEntry().get(0).getResource().getIdElement().getIdPart());
}
@Test
public void testSaveAndRetrieveExistingNarrativeJson() {
Patient p1 = new Patient();

View File

@ -33,11 +33,11 @@ import ca.uhn.fhir.util.TestUtil;
public class SystemProviderTransactionSearchR4Test extends BaseJpaR4Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SystemProviderTransactionSearchR4Test.class);
private static RestfulServer myRestServer;
private static IGenericClient ourClient;
private static FhirContext ourCtx;
private static CloseableHttpClient ourHttpClient;
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SystemProviderTransactionSearchR4Test.class);
private static Server ourServer;
private static String ourServerBase;
private SimpleRequestHeaderInterceptor mySimpleHeaderInterceptor;
@ -116,17 +116,19 @@ public class SystemProviderTransactionSearchR4Test extends BaseJpaR4Test {
List<String> ids = new ArrayList<String>();
for (int i = 0; i < 20; i++) {
Patient patient = new Patient();
char letter = "ABCDEFGHIJKLMNOPQRSTUVWXYZ".charAt(i);
patient.setId("" + letter);
patient.setGender(AdministrativeGender.MALE);
patient.addIdentifier().setSystem("urn:foo").setValue("A");
patient.addName().setFamily("abcdefghijklmnopqrstuvwxyz".substring(i, i+1));
String id = myPatientDao.create(patient).getId().toUnqualifiedVersionless().getValue();
String id = myPatientDao.update(patient).getId().toUnqualifiedVersionless().getValue();
ids.add(id);
}
return ids;
}
@Test
public void testBatchWithGetHardLimitLargeSynchronous() throws Exception {
public void testBatchWithGetHardLimitLargeSynchronous() {
List<String> ids = create20Patients();
Bundle input = new Bundle();
@ -135,7 +137,7 @@ public class SystemProviderTransactionSearchR4Test extends BaseJpaR4Test {
.addEntry()
.getRequest()
.setMethod(HTTPVerb.GET)
.setUrl("Patient?_count=5");
.setUrl("Patient?_count=5&_sort=_id");
myDaoConfig.setMaximumSearchResultCountInTransaction(100);
@ -151,7 +153,7 @@ public class SystemProviderTransactionSearchR4Test extends BaseJpaR4Test {
}
@Test
public void testBatchWithGetNormalSearch() throws Exception {
public void testBatchWithGetNormalSearch() {
List<String> ids = create20Patients();
Bundle input = new Bundle();
@ -183,7 +185,7 @@ public class SystemProviderTransactionSearchR4Test extends BaseJpaR4Test {
* 30 searches in one batch! Whoa!
*/
@Test
public void testBatchWithManyGets() throws Exception {
public void testBatchWithManyGets() {
List<String> ids = create20Patients();
@ -211,7 +213,7 @@ public class SystemProviderTransactionSearchR4Test extends BaseJpaR4Test {
}
@Test
public void testTransactionWithGetHardLimitLargeSynchronous() throws Exception {
public void testTransactionWithGetHardLimitLargeSynchronous() {
List<String> ids = create20Patients();
Bundle input = new Bundle();
@ -220,7 +222,7 @@ public class SystemProviderTransactionSearchR4Test extends BaseJpaR4Test {
.addEntry()
.getRequest()
.setMethod(HTTPVerb.GET)
.setUrl("Patient?_count=5");
.setUrl("Patient?_count=5&_sort=_id");
myDaoConfig.setMaximumSearchResultCountInTransaction(100);
@ -236,7 +238,7 @@ public class SystemProviderTransactionSearchR4Test extends BaseJpaR4Test {
}
@Test
public void testTransactionWithGetNormalSearch() throws Exception {
public void testTransactionWithGetNormalSearch() {
List<String> ids = create20Patients();
Bundle input = new Bundle();
@ -268,7 +270,7 @@ public class SystemProviderTransactionSearchR4Test extends BaseJpaR4Test {
* 30 searches in one Transaction! Whoa!
*/
@Test
public void testTransactionWithManyGets() throws Exception {
public void testTransactionWithManyGets() {
List<String> ids = create20Patients();

View File

@ -27,6 +27,7 @@ import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.*;
import ca.uhn.fhir.util.CollectionUtil;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
@ -308,6 +309,29 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi
}
}
/*
* Remove any null entries in the list - This generally shouldn't happen but can if
* data has been manually purged from the JPA database
*/
boolean hasNull = false;
for (IBaseResource next : resourceList) {
if (next == null) {
hasNull = true;
break;
}
}
if (hasNull) {
for (Iterator<IBaseResource> iter = resourceList.iterator(); iter.hasNext(); ) {
if (iter.next() == null) {
iter.remove();
}
}
}
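// A minimal equivalent sketch, assuming resourceList is a mutable java.util.List
// and a Java 8+ runtime: the detect-then-remove pass above collapses to
//   resourceList.removeIf(java.util.Objects::isNull);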
/*
* Make sure all returned resources have an ID (if not, this is a bug
* in the user server code)
*/
for (IBaseResource next : resourceList) {
if (next.getIdElement() == null || next.getIdElement().isEmpty()) {
if (!(next instanceof BaseOperationOutcome)) {

View File

@ -1,9 +1,11 @@
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>INFO</level>
</filter>
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} [%file:%line] - %msg%n
</pattern>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} [%file:%line] %msg%n</pattern>
</encoder>
</appender>

View File

@ -1026,6 +1026,14 @@
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>com.gemnasium</groupId>
<artifactId>gemnasium-maven-plugin</artifactId>
<version>0.2.0</version>
<configuration>
<projectSlug>github.com/jamesagnew/hapi-fhir</projectSlug>
</configuration>
</plugin>
<plugin>
<groupId>org.basepom.maven</groupId>
<artifactId>duplicate-finder-maven-plugin</artifactId>

View File

@ -86,6 +86,12 @@
DSTU3/R4 structure in the getMeta() version field instead of in the
getIdElement() ID. Thanks to GitHub user @Chrisjobling for reporting!
</action>
<action type="fix">
A bug was fixed in the JPA server: when performing a validate operation with a mode
of DELETE on a server with referential integrity disabled, the validate operation would delete
resource reference indexes as though the delete were actually happening, which negatively
affected searching for the resource being validated.
</action>
<action type="add">
The HAPI FHIR Server framework now has initial support for
multitenancy. At this time the support is limited to the server

View File

@ -19,6 +19,15 @@
and support for draft pre-release versions of FHIR are shown in
<span style="background: #EEB; padding: 3px;">YELLOW</span>.
</p>
<p>
Note also that after the release of the FHIR DSTU2 specification, the FHIR
standard itself stopped using the DSTUx naming scheme, in favour of naming
new releases STUx or simply Rx. Because HAPI FHIR already had draft support
for what was then called DSTU3, we did not update our naming
conventions until R4, in order to avoid breaking existing users' code.
From the perspective of a user of HAPI FHIR, the terms
DSTU3 / STU3 / R3 can be considered interchangeable.
</p>
<table>
<thead>
<tr>