Start work on removing duplicate resource body

This commit is contained in:
parent b18e71d4f5
commit 2f5ce64755

Changed paths:
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa (dao, entity)
hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao
src/site/xdoc
BaseHapiFhirDao.java:

@@ -911,102 +911,117 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 	/**
 	 * Returns true if the resource has changed (either the contents or the tags)
 	 */
-	protected boolean populateResourceIntoEntity(IBaseResource theResource, ResourceTable theEntity, boolean theUpdateHash) {
-		theEntity.setResourceType(toResourceName(theResource));
-
-		List<BaseResourceReferenceDt> refs = myContext.newTerser().getAllPopulatedChildElementsOfType(theResource, BaseResourceReferenceDt.class);
-		for (BaseResourceReferenceDt nextRef : refs) {
-			if (nextRef.getReference().isEmpty() == false) {
-				if (nextRef.getReference().hasVersionIdPart()) {
-					nextRef.setReference(nextRef.getReference().toUnqualifiedVersionless());
-				}
-			}
-		}
-
-		ResourceEncodingEnum encoding = myConfig.getResourceEncoding();
-		IParser parser = encoding.newParser(myContext);
-		parser.setDontEncodeElements(EXCLUDE_ELEMENTS_IN_ENCODED);
-		String encoded = parser.encodeResourceToString(theResource);
-
-		theEntity.setEncoding(encoding);
-		theEntity.setFhirVersion(myContext.getVersion().getVersion());
-
+	protected EncodedResource populateResourceIntoEntity(IBaseResource theResource, ResourceTable theEntity, boolean theUpdateHash) {
+		if (theEntity.getResourceType() == null) {
+			theEntity.setResourceType(toResourceName(theResource));
+		}
+
+		if (theResource != null) {
+			List<BaseResourceReferenceDt> refs = myContext.newTerser().getAllPopulatedChildElementsOfType(theResource, BaseResourceReferenceDt.class);
+			for (BaseResourceReferenceDt nextRef : refs) {
+				if (nextRef.getReference().isEmpty() == false) {
+					if (nextRef.getReference().hasVersionIdPart()) {
+						nextRef.setReference(nextRef.getReference().toUnqualifiedVersionless());
+					}
+				}
+			}
+		}
+
 		byte[] bytes;
-		switch (encoding) {
-		case JSON:
-			bytes = encoded.getBytes(Charsets.UTF_8);
-			break;
-		default:
-		case JSONC:
-			bytes = GZipUtil.compress(encoded);
-			break;
-		}
-
+		ResourceEncodingEnum encoding;
 		boolean changed = false;
 
-		if (theUpdateHash) {
-			HashFunction sha256 = Hashing.sha256();
-			String hashSha256 = sha256.hashBytes(bytes).toString();
-			if (hashSha256.equals(theEntity.getHashSha256()) == false) {
-				changed = true;
-			}
-			theEntity.setHashSha256(hashSha256);
-		}
-
-		if (changed == false) {
-			if (theEntity.getResource() == null) {
-				changed = true;
-			} else {
-				changed = !Arrays.equals(theEntity.getResource(), bytes);
-			}
-		}
-
-		theEntity.setResource(bytes);
-
-		Set<TagDefinition> allDefs = new HashSet<TagDefinition>();
-
-		theEntity.setHasTags(false);
-
-		Set<TagDefinition> allTagsOld = getAllTagDefinitions(theEntity);
-
-		if (theResource instanceof IResource) {
-			extractTagsHapi((IResource) theResource, theEntity, allDefs);
-		} else {
-			extractTagsRi((IAnyResource) theResource, theEntity, allDefs);
-		}
-
-		RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
-		if (def.isStandardType() == false) {
-			String profile = def.getResourceProfile("");
-			if (isNotBlank(profile)) {
-				TagDefinition tag = getTagOrNull(TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null);
-				if (tag != null) {
-					allDefs.add(tag);
-					theEntity.addTag(tag);
-					theEntity.setHasTags(true);
-				}
-			}
-		}
-
-		ArrayList<ResourceTag> existingTags = new ArrayList<ResourceTag>();
-		if (theEntity.isHasTags()) {
-			existingTags.addAll(theEntity.getTags());
-		}
-		for (ResourceTag next : existingTags) {
-			TagDefinition nextDef = next.getTag();
-			if (!allDefs.contains(nextDef)) {
-				if (shouldDroppedTagBeRemovedOnUpdate(theEntity, next)) {
-					theEntity.getTags().remove(next);
-				}
-			}
-		}
-
-		Set<TagDefinition> allTagsNew = getAllTagDefinitions(theEntity);
-		if (!allTagsOld.equals(allTagsNew)) {
-			changed = true;
-		}
-
-		return changed;
+		if (theEntity.getDeleted() == null) {
+
+			encoding = myConfig.getResourceEncoding();
+			IParser parser = encoding.newParser(myContext);
+			parser.setDontEncodeElements(EXCLUDE_ELEMENTS_IN_ENCODED);
+			String encoded = parser.encodeResourceToString(theResource);
+
+			theEntity.setFhirVersion(myContext.getVersion().getVersion());
+
+			switch (encoding) {
+			case JSON:
+				bytes = encoded.getBytes(Charsets.UTF_8);
+				break;
+			default:
+			case JSONC:
+				bytes = GZipUtil.compress(encoded);
+				break;
+			}
+
+			if (theUpdateHash) {
+				HashFunction sha256 = Hashing.sha256();
+				String hashSha256 = sha256.hashBytes(bytes).toString();
+				if (hashSha256.equals(theEntity.getHashSha256()) == false) {
+					changed = true;
+				}
+				theEntity.setHashSha256(hashSha256);
+			}
+
+			Set<TagDefinition> allDefs = new HashSet<>();
+
+			theEntity.setHasTags(false);
+
+			Set<TagDefinition> allTagsOld = getAllTagDefinitions(theEntity);
+
+			if (theResource instanceof IResource) {
+				extractTagsHapi((IResource) theResource, theEntity, allDefs);
+			} else {
+				extractTagsRi((IAnyResource) theResource, theEntity, allDefs);
+			}
+
+			RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
+			if (def.isStandardType() == false) {
+				String profile = def.getResourceProfile("");
+				if (isNotBlank(profile)) {
+					TagDefinition tag = getTagOrNull(TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null);
+					if (tag != null) {
+						allDefs.add(tag);
+						theEntity.addTag(tag);
+						theEntity.setHasTags(true);
+					}
+				}
+			}
+
+			ArrayList<ResourceTag> existingTags = new ArrayList<>();
+			if (theEntity.isHasTags()) {
+				existingTags.addAll(theEntity.getTags());
+			}
+			for (ResourceTag next : existingTags) {
+				TagDefinition nextDef = next.getTag();
+				if (!allDefs.contains(nextDef)) {
+					if (shouldDroppedTagBeRemovedOnUpdate(theEntity, next)) {
+						theEntity.getTags().remove(next);
+					}
+				}
+			}
+
+			Set<TagDefinition> allTagsNew = getAllTagDefinitions(theEntity);
+			if (!allTagsOld.equals(allTagsNew)) {
+				changed = true;
+			}
+		} else {
+			theEntity.setHashSha256(null);
+			bytes = null;
+			encoding = ResourceEncodingEnum.DEL;
+		}
+
+		if (changed == false) {
+			ResourceHistoryTable currentHistoryVersion = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
+			if (currentHistoryVersion == null || currentHistoryVersion.getResource() == null) {
+				changed = true;
+			} else {
+				changed = !Arrays.equals(currentHistoryVersion.getResource(), bytes);
+			}
+		}
+
+		EncodedResource retVal = new EncodedResource();
+		retVal.setEncoding(encoding);
+		retVal.setResource(bytes);
+		retVal.setChanged(changed);
+
+		return retVal;
 	}
 
 	@SuppressWarnings("unchecked")
@@ -1181,6 +1196,18 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 		throw new NotImplementedException("");
 	}
 
+	private <T> Collection<T> removeCommon(Collection<T> theInput, Collection<T> theToRemove) {
+		assert theInput != theToRemove;
+
+		if (theInput.isEmpty()) {
+			return theInput;
+		}
+
+		ArrayList<T> retVal = new ArrayList<>(theInput);
+		retVal.removeAll(theToRemove);
+		return retVal;
+	}
+
 	public void setEntityManager(EntityManager theEntityManager) {
 		myEntityManager = theEntityManager;
 	}
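(Aside, not part of the commit: a small self-contained sketch of what the removeCommon helper added above does. The example class and values are hypothetical; the helper itself is copied from the hunk.)

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;

// Hypothetical illustration only: mirrors the private helper added above, which
// returns theInput minus theToRemove without mutating either collection
// (the non-empty input is copied before removeAll is applied).
class RemoveCommonExample {

   private static <T> Collection<T> removeCommon(Collection<T> theInput, Collection<T> theToRemove) {
      assert theInput != theToRemove;

      if (theInput.isEmpty()) {
         return theInput;
      }

      ArrayList<T> retVal = new ArrayList<>(theInput);
      retVal.removeAll(theToRemove);
      return retVal;
   }

   public static void main(String[] args) {
      // Prints [a, c]: "b" is dropped, the original lists are untouched
      System.out.println(removeCommon(Arrays.asList("a", "b", "c"), Arrays.asList("b")));
   }
}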
@@ -1222,28 +1249,33 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 		return toResource(resourceType, theEntity, theForHistoryOperation);
 	}
 
-	// protected ResourceTable toEntity(IResource theResource) {
-	// ResourceTable retVal = new ResourceTable();
-	//
-	// populateResourceIntoEntity(theResource, retVal, true);
-	//
-	// return retVal;
-	// }
-
 	@SuppressWarnings("unchecked")
 	@Override
 	public <R extends IBaseResource> R toResource(Class<R> theResourceType, BaseHasResource theEntity, boolean theForHistoryOperation) {
+
+		ResourceHistoryTable history;
+		if (theEntity instanceof ResourceHistoryTable) {
+			history = (ResourceHistoryTable) theEntity;
+		} else {
+			history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
+		}
+
+		byte[] resourceBytes = history.getResource();
+		ResourceEncodingEnum resourceEncoding = history.getEncoding();
+
 		String resourceText = null;
-		switch (theEntity.getEncoding()) {
+		switch (resourceEncoding) {
 		case JSON:
 			try {
-				resourceText = new String(theEntity.getResource(), "UTF-8");
+				resourceText = new String(resourceBytes, "UTF-8");
 			} catch (UnsupportedEncodingException e) {
 				throw new Error("Should not happen", e);
 			}
 			break;
 		case JSONC:
-			resourceText = GZipUtil.decompress(theEntity.getResource());
+			resourceText = GZipUtil.decompress(resourceBytes);
+			break;
+		case DEL:
 			break;
 		}
 

@@ -1267,27 +1299,34 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 			}
 		}
 
-		IParser parser = theEntity.getEncoding().newParser(getContext(theEntity.getFhirVersion()));
-		parser.setParserErrorHandler(new LenientErrorHandler(false).setErrorOnInvalidValue(false));
-
 		R retVal;
-		try {
-			retVal = parser.parseResource(resourceType, resourceText);
-		} catch (Exception e) {
-			StringBuilder b = new StringBuilder();
-			b.append("Failed to parse database resource[");
-			b.append(resourceType);
-			b.append("/");
-			b.append(theEntity.getIdDt().getIdPart());
-			b.append(" (pid ");
-			b.append(theEntity.getId());
-			b.append(", version ");
-			b.append(theEntity.getFhirVersion().name());
-			b.append("): ");
-			b.append(e.getMessage());
-			String msg = b.toString();
-			ourLog.error(msg, e);
-			throw new DataFormatException(msg, e);
+		if (resourceEncoding != ResourceEncodingEnum.DEL) {
+			IParser parser = resourceEncoding.newParser(getContext(theEntity.getFhirVersion()));
+			parser.setParserErrorHandler(new LenientErrorHandler(false).setErrorOnInvalidValue(false));
+
+			try {
+				retVal = parser.parseResource(resourceType, resourceText);
+			} catch (Exception e) {
+				StringBuilder b = new StringBuilder();
+				b.append("Failed to parse database resource[");
+				b.append(resourceType);
+				b.append("/");
+				b.append(theEntity.getIdDt().getIdPart());
+				b.append(" (pid ");
+				b.append(theEntity.getId());
+				b.append(", version ");
+				b.append(theEntity.getFhirVersion().name());
+				b.append("): ");
+				b.append(e.getMessage());
+				String msg = b.toString();
+				ourLog.error(msg, e);
+				throw new DataFormatException(msg, e);
+			}
+
+		} else {
+
+			retVal = (R) myContext.getResourceDefinition(theEntity.getResourceType()).newInstance();
+
 		}
 
 		if (retVal instanceof IResource) {

@@ -1297,6 +1336,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 			IAnyResource res = (IAnyResource) retVal;
 			retVal = populateResourceMetadataRi(resourceType, theEntity, theForHistoryOperation, res);
 		}
 
+
+
 		return retVal;
 	}
 

@@ -1304,11 +1345,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 		return myContext.getResourceDefinition(theResourceType).getName();
 	}
 
-	protected String toResourceName(IBaseResource theResource) {
+	String toResourceName(IBaseResource theResource) {
 		return myContext.getResourceDefinition(theResource).getName();
 	}
 
-	protected Long translateForcedIdToPid(String theResourceName, String theResourceId) {
+	Long translateForcedIdToPid(String theResourceName, String theResourceId) {
 		return translateForcedIdToPids(new IdDt(theResourceName, theResourceId), myForcedIdDao).get(0);
 	}
 

@@ -1316,7 +1357,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 		return translateForcedIdToPids(theId, myForcedIdDao);
 	}
 
-	protected String translatePidIdToForcedId(String theResourceType, Long theId) {
+	private String translatePidIdToForcedId(String theResourceType, Long theId) {
 		ForcedId forcedId = myForcedIdDao.findByResourcePid(theId);
 		if (forcedId != null) {
 			return forcedId.getResourceType() + '/' + forcedId.getForcedId();

@@ -1399,7 +1440,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 		Set<ResourceLink> links = null;
 
 		Set<String> populatedResourceLinkParameters = Collections.emptySet();
-		boolean changed;
+		EncodedResource changed;
 		if (theDeletedTimestampOrNull != null) {
 
 			stringParams = Collections.emptySet();

@@ -1417,7 +1458,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 			theEntity.setNarrativeTextParsedIntoWords(null);
 			theEntity.setContentTextParsedIntoWords(null);
 			theEntity.setHashSha256(null);
-			changed = true;
+			changed = populateResourceIntoEntity(theResource, theEntity, true);
 
 		} else {
 

@@ -1551,7 +1592,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 			theEntity.setParamsUriPopulated(uriParams.isEmpty() == false);
 			theEntity.setParamsCoords(coordsParams);
 			theEntity.setParamsCoordsPopulated(coordsParams.isEmpty() == false);
-			// theEntity.setParamsCompositeStringUnique(compositeStringUniques);
 			theEntity.setParamsCompositeStringUniquePresent(compositeStringUniques.isEmpty() == false);
 			theEntity.setResourceLinks(links);
 			theEntity.setHasLinks(links.isEmpty() == false);

@@ -1570,7 +1610,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 
 		}
 
-		if (!changed && !theForceUpdate && myConfig.isSuppressUpdatesWithNoChange()) {
+		if (!changed.isChanged() && !theForceUpdate && myConfig.isSuppressUpdatesWithNoChange()) {
 			ourLog.info("Resource {} has not changed", theEntity.getIdDt().toUnqualified().getValue());
 			if (theResource != null) {
 				populateResourceIdFromEntity(theEntity, theResource);

@@ -1631,7 +1671,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 		 * Create history entry
 		 */
 		if (theCreateNewHistoryEntry) {
-			final ResourceHistoryTable historyEntry = theEntity.toHistory(null);
+			final ResourceHistoryTable historyEntry = theEntity.toHistory();
+			historyEntry.setEncoding(changed.getEncoding());
+			historyEntry.setResource(changed.getResource());
 
 			ourLog.info("Saving history entry {}", historyEntry.getIdDt());
 			myResourceHistoryTableDao.save(historyEntry);

@@ -1736,19 +1778,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 		return theEntity;
 	}
 
-	private <T> Collection<T> removeCommon(Collection<T> theInput, Collection<T> theToRemove) {
-		assert theInput != theToRemove;
-
-		if (theInput.isEmpty()) {
-			return theInput;
-		}
-
-		ArrayList<T> retVal = new ArrayList<>(theInput);
-		retVal.removeAll(theToRemove);
-		return retVal;
-	}
-
-
 	protected ResourceTable updateEntity(IBaseResource theResource, ResourceTable entity, Date theDeletedTimestampOrNull, Date theUpdateTime) {
 		return updateEntity(theResource, entity, theDeletedTimestampOrNull, true, true, theUpdateTime, false, true);
 	}

@@ -1891,19 +1920,19 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 	 * parameters across a set of search parameters. An example of why
 	 * this is needed:
 	 * <p>
 	 * Let's say we have a unique index on (Patient:gender AND Patient:name).
 	 * Then we pass in <code>SMITH, John</code> with a gender of <code>male</code>.
 	 * </p>
 	 * <p>
 	 * In this case, because the name parameter matches both first and last name,
 	 * we now need two unique indexes:
 	 * <ul>
 	 * <li>Patient?gender=male&name=SMITH</li>
 	 * <li>Patient?gender=male&name=JOHN</li>
 	 * </ul>
 	 * </p>
 	 * <p>
 	 * So this recursive algorithm calculates those
 	 * </p>
 	 *
 	 * @param theResourceType E.g. <code>Patient

@@ -1929,8 +1958,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 		Collections.sort(thePartsChoices, new Comparator<List<String>>() {
 			@Override
 			public int compare(List<String> o1, List<String> o2) {
-				String str1=null;
-				String str2=null;
+				String str1 = null;
+				String str2 = null;
 				if (o1.size() > 0) {
 					str1 = o1.get(0);
 				}
EncodedResource.java (new file):

@@ -0,0 +1,35 @@
+package ca.uhn.fhir.jpa.dao;
+
+import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;
+
+class EncodedResource {
+
+	private boolean myChanged;
+	private byte[] myResource;
+	private ResourceEncodingEnum myEncoding;
+
+	public ResourceEncodingEnum getEncoding() {
+		return myEncoding;
+	}
+
+	public void setEncoding(ResourceEncodingEnum theEncoding) {
+		myEncoding = theEncoding;
+	}
+
+	public byte[] getResource() {
+		return myResource;
+	}
+
+	public void setResource(byte[] theResource) {
+		myResource = theResource;
+	}
+
+	public boolean isChanged() {
+		return myChanged;
+	}
+
+	public void setChanged(boolean theChanged) {
+		myChanged = theChanged;
+	}
+
+}
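(Aside, not part of the commit: a minimal hypothetical sketch of how this new value object is meant to be used, based on the calls visible elsewhere in this diff — populateResourceIntoEntity fills it, and updateEntity later copies the bytes and encoding onto the ResourceHistoryTable row. The literal JSON string and example class are illustrative only.)

package ca.uhn.fhir.jpa.dao;

import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;
import com.google.common.base.Charsets;

// Hypothetical usage sketch of the EncodedResource value object added above.
class EncodedResourceExample {

   public static void main(String[] args) {
      // An encoded resource body (placeholder content, not a real stored resource)
      String encoded = "{\"resourceType\":\"Patient\"}";

      // The DAO fills one of these in populateResourceIntoEntity(...)
      EncodedResource retVal = new EncodedResource();
      retVal.setEncoding(ResourceEncodingEnum.JSON);
      retVal.setResource(encoded.getBytes(Charsets.UTF_8));
      retVal.setChanged(true);

      // ...and updateEntity(...) later copies these values onto the history row:
      // historyEntry.setEncoding(retVal.getEncoding());
      // historyEntry.setResource(retVal.getResource());
      System.out.println(retVal.isChanged() + " / " + retVal.getEncoding());
   }
}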
BaseHasResource.java:

@@ -36,11 +36,6 @@ public abstract class BaseHasResource {
 	@Temporal(TemporalType.TIMESTAMP)
 	private Date myDeleted;
 
-	@Column(name = "RES_ENCODING", nullable = false, length = 5)
-	@Enumerated(EnumType.STRING)
-	@OptimisticLock(excluded = true)
-	private ResourceEncodingEnum myEncoding;
-
 	@Column(name = "RES_VERSION", nullable = true, length = 7)
 	@Enumerated(EnumType.STRING)
 	@OptimisticLock(excluded = true)

@@ -60,11 +55,6 @@ public abstract class BaseHasResource {
 	@OptimisticLock(excluded = true)
 	private Date myPublished;
 
-	@Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = false)
-	@Lob()
-	@OptimisticLock(excluded = true)
-	private byte[] myResource;
-
 	@Temporal(TemporalType.TIMESTAMP)
 	@Column(name = "RES_UPDATED", nullable = false)
 	@OptimisticLock(excluded = true)

@@ -80,13 +70,6 @@ public abstract class BaseHasResource {
 		myDeleted = theDate;
 	}
 
-	public ResourceEncodingEnum getEncoding() {
-		return myEncoding;
-	}
-
-	public void setEncoding(ResourceEncodingEnum theEncoding) {
-		myEncoding = theEncoding;
-	}
-
 	public FhirVersionEnum getFhirVersion() {
 		return myFhirVersion;

@@ -116,16 +99,8 @@ public abstract class BaseHasResource {
 		}
 	}
 
-	public void setPublished(InstantDt thePublished) {
-		myPublished = thePublished.getValue();
-	}
-
-	public byte[] getResource() {
-		return myResource;
-	}
-
-	public void setResource(byte[] theResource) {
-		myResource = theResource;
+	public void setPublished(Date thePublished) {
+		myPublished = thePublished;
 	}
 
 	public abstract String getResourceType();

@@ -136,8 +111,8 @@ public abstract class BaseHasResource {
 		return new InstantDt(myUpdated);
 	}
 
-	public void setUpdated(InstantDt theUpdated) {
-		myUpdated = theUpdated.getValue();
+	public void setUpdated(Date theUpdated) {
+		myUpdated = theUpdated;
 	}
 
 	public Date getUpdatedDate() {

@@ -154,12 +129,12 @@ public abstract class BaseHasResource {
 		myHasTags = theHasTags;
 	}
 
-	public void setPublished(Date thePublished) {
-		myPublished = thePublished;
+	public void setPublished(InstantDt thePublished) {
+		myPublished = thePublished.getValue();
 	}
 
-	public void setUpdated(Date theUpdated) {
-		myUpdated = theUpdated;
+	public void setUpdated(InstantDt theUpdated) {
+		myUpdated = theUpdated.getValue();
 	}
 
 }
ResourceEncodingEnum.java:

@@ -25,11 +25,22 @@ import ca.uhn.fhir.parser.IParser;
 
 public enum ResourceEncodingEnum {
 
+	/*
+	 * NB: Constants in this enum must be 5 chars long or less!!!
+	 *
+	 * See ResourceHistoryTable RES_ENCODING column
+	 */
+
 	/** Json */
 	JSON,
 
 	/** Json Compressed */
-	JSONC;
+	JSONC,
+
+	/**
+	 * Resource was deleted - No contents expected
+	 */
+	DEL;
 
 	public IParser newParser(FhirContext theContext) {
 		return theContext.newJsonParser();
ResourceHistoryTable.java:

@@ -20,23 +20,23 @@ package ca.uhn.fhir.jpa.entity;
 * #L%
 */
 
+import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.rest.api.Constants;
+import org.hibernate.annotations.OptimisticLock;
+
+import javax.persistence.*;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Collection;
 
-import javax.persistence.*;
-
-import ca.uhn.fhir.model.primitive.IdDt;
-import ca.uhn.fhir.rest.api.Constants;
-
 //@formatter:off
 @Entity
 @Table(name = "HFJ_RES_VER", uniqueConstraints = {
-	@UniqueConstraint(name="IDX_RESVER_ID_VER", columnNames = { "RES_ID", "RES_VER" })
-}, indexes= {
-	@Index(name="IDX_RESVER_TYPE_DATE", columnList="RES_TYPE,RES_UPDATED"),
-	@Index(name="IDX_RESVER_ID_DATE", columnList="RES_ID,RES_UPDATED"),
-	@Index(name="IDX_RESVER_DATE", columnList="RES_UPDATED")
+	@UniqueConstraint(name = "IDX_RESVER_ID_VER", columnNames = {"RES_ID", "RES_VER"})
+}, indexes = {
+	@Index(name = "IDX_RESVER_TYPE_DATE", columnList = "RES_TYPE,RES_UPDATED"),
+	@Index(name = "IDX_RESVER_ID_DATE", columnList = "RES_ID,RES_UPDATED"),
+	@Index(name = "IDX_RESVER_DATE", columnList = "RES_UPDATED")
 })
 //@formatter:on
 public class ResourceHistoryTable extends BaseHasResource implements Serializable {

@@ -61,11 +61,20 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializable {
 	@OneToMany(mappedBy = "myResourceHistory", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
 	private Collection<ResourceHistoryTag> myTags;
 
+	@Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = true)
+	@Lob()
+	@OptimisticLock(excluded = true)
+	private byte[] myResource;
+
+	@Column(name = "RES_ENCODING", nullable = false, length = 5)
+	@Enumerated(EnumType.STRING)
+	@OptimisticLock(excluded = true)
+	private ResourceEncodingEnum myEncoding;
+
 	public ResourceHistoryTable() {
 		super();
 	}
 
 	public void addTag(ResourceHistoryTag theTag) {
 		for (ResourceHistoryTag next : getTags()) {
 			if (next.getTag().equals(theTag)) {

@@ -93,11 +102,23 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializable {
 		return historyTag;
 	}
 
+	public ResourceEncodingEnum getEncoding() {
+		return myEncoding;
+	}
+
+	public void setEncoding(ResourceEncodingEnum theEncoding) {
+		myEncoding = theEncoding;
+	}
+
 	@Override
 	public Long getId() {
 		return myId;
 	}
 
+	public void setId(Long theId) {
+		myId = theId;
+	}
+
 	@Override
 	public IdDt getIdDt() {
 		if (getForcedId() == null) {

@@ -108,15 +129,31 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializable {
 		}
 	}
 
+	public byte[] getResource() {
+		return myResource;
+	}
+
+	public void setResource(byte[] theResource) {
+		myResource = theResource;
+	}
+
 	public Long getResourceId() {
 		return myResourceId;
 	}
 
+	public void setResourceId(Long theResourceId) {
+		myResourceId = theResourceId;
+	}
+
 	@Override
 	public String getResourceType() {
 		return myResourceType;
 	}
 
+	public void setResourceType(String theResourceType) {
+		myResourceType = theResourceType;
+	}
+
 	@Override
 	public Collection<ResourceHistoryTag> getTags() {
 		if (myTags == null) {

@@ -130,6 +167,10 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializable {
 		return myResourceVersion;
 	}
 
+	public void setVersion(long theVersion) {
+		myResourceVersion = theVersion;
+	}
+
 	public boolean hasTag(String theTerm, String theScheme) {
 		for (ResourceHistoryTag next : getTags()) {
 			if (next.getTag().getSystem().equals(theScheme) && next.getTag().getCode().equals(theTerm)) {

@@ -139,20 +180,4 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializable {
 		return false;
 	}
 
-	public void setId(Long theId) {
-		myId = theId;
-	}
-
-	public void setResourceId(Long theResourceId) {
-		myResourceId = theResourceId;
-	}
-
-	public void setResourceType(String theResourceType) {
-		myResourceType = theResourceType;
-	}
-
-	public void setVersion(long theVersion) {
-		myResourceVersion = theVersion;
-	}
-
 }
ResourceTable.java:

@@ -24,6 +24,7 @@ import ca.uhn.fhir.jpa.search.IndexNonDeletedInterceptor;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
+import org.apache.commons.lang3.Validate;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
 import org.hibernate.annotations.OptimisticLock;

@@ -39,7 +40,6 @@ import java.util.Set;
 
 import static org.apache.commons.lang3.StringUtils.defaultString;
 
-//@formatter:off
 @Indexed(interceptor = IndexNonDeletedInterceptor.class)
 @Entity
 @Table(name = "HFJ_RESOURCE", uniqueConstraints = {}, indexes = {

@@ -49,13 +49,18 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
 	@Index(name = "IDX_RES_TYPE", columnList = "RES_TYPE"),
 	@Index(name = "IDX_INDEXSTATUS", columnList = "SP_INDEX_STATUS")
 })
-//@formatter:on
 public class ResourceTable extends BaseHasResource implements Serializable {
 	static final int RESTYPE_LEN = 30;
 	private static final int MAX_LANGUAGE_LENGTH = 20;
 	private static final int MAX_PROFILE_LENGTH = 200;
 	private static final long serialVersionUID = 1L;
 
+	// @Transient
+	// private transient byte[] myResource;
+	//
+	// @Transient
+	// private transient ResourceEncodingEnum myEncoding;
+
 	/**
 	 * Holds the narrative text only - Used for Fulltext searching but not directly stored in the DB
 	 */

@@ -214,6 +219,15 @@ public class ResourceTable extends BaseHasResource implements Serializable {
 		return tag;
 	}
 
+	// public ResourceEncodingEnum getEncoding() {
+	// Validate.notNull(myEncoding, "myEncoding is null");
+	// return myEncoding;
+	// }
+	//
+	// public void setEncoding(ResourceEncodingEnum theEncoding) {
+	// myEncoding = theEncoding;
+	// }
+
 	public String getHashSha256() {
 		return myHashSha256;
 	}

@@ -387,6 +401,15 @@ public class ResourceTable extends BaseHasResource implements Serializable {
 		myProfile = theProfile;
 	}
 
+	// public byte[] getResource() {
+	// Validate.notNull(myEncoding, "myEncoding is null");
+	// return myResource;
+	// }
+	//
+	// public void setResource(byte[] theResource) {
+	// myResource = theResource;
+	// }
+
 	public Collection<ResourceLink> getResourceLinks() {
 		if (myResourceLinks == null) {
 			myResourceLinks = new ArrayList<>();

@@ -527,8 +550,8 @@ public class ResourceTable extends BaseHasResource implements Serializable {
 		myNarrativeText = theNarrativeText;
 	}
 
-	public ResourceHistoryTable toHistory(ResourceHistoryTable theResourceHistoryTable) {
-		ResourceHistoryTable retVal = theResourceHistoryTable != null ? theResourceHistoryTable : new ResourceHistoryTable();
+	public ResourceHistoryTable toHistory() {
+		ResourceHistoryTable retVal = new ResourceHistoryTable();
 
 		retVal.setResourceId(myId);
 		retVal.setResourceType(myResourceType);

@@ -536,9 +559,9 @@ public class ResourceTable extends BaseHasResource implements Serializable {
 
 		retVal.setPublished(getPublished());
 		retVal.setUpdated(getUpdated());
-		retVal.setEncoding(getEncoding());
+		// retVal.setEncoding(getEncoding());
 		retVal.setFhirVersion(getFhirVersion());
-		retVal.setResource(getResource());
+		// retVal.setResource(getResource());
 		retVal.setDeleted(getDeleted());
 		retVal.setForcedId(getForcedId());
 
FhirSystemDaoDstu2Test.java:

@@ -134,78 +134,6 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest {
 	}
 
-	@Test
-	public void testReindexing() {
-		Patient p = new Patient();
-		p.addName().addFamily("family");
-		final IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
-
-		ValueSet vs = new ValueSet();
-		vs.setUrl("http://foo");
-		myValueSetDao.create(vs, mySrd);
-
-		ResourceTable entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
-			@Override
-			public ResourceTable doInTransaction(TransactionStatus theStatus) {
-				return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
-			}
-		});
-		assertEquals(Long.valueOf(1), entity.getIndexStatus());
-
-		mySystemDao.markAllResourcesForReindexing();
-
-		entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
-			@Override
-			public ResourceTable doInTransaction(TransactionStatus theStatus) {
-				return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
-			}
-		});
-		assertEquals(null, entity.getIndexStatus());
-
-		mySystemDao.performReindexingPass(null);
-
-		entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
-			@Override
-			public ResourceTable doInTransaction(TransactionStatus theStatus) {
-				return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
-			}
-		});
-		assertEquals(Long.valueOf(1), entity.getIndexStatus());
-
-		// Just make sure this doesn't cause a choke
-		mySystemDao.performReindexingPass(100000);
-
-		// Try making the resource unparseable
-
-		TransactionTemplate template = new TransactionTemplate(myTxManager);
-		template.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRES_NEW);
-		template.execute(new TransactionCallback<ResourceTable>() {
-			@Override
-			public ResourceTable doInTransaction(TransactionStatus theStatus) {
-				ResourceTable table = myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
-				table.setEncoding(ResourceEncodingEnum.JSON);
-				table.setIndexStatus(null);
-				try {
-					table.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
-				} catch (UnsupportedEncodingException e) {
-					throw new Error(e);
-				}
-				myEntityManager.merge(table);
-				return null;
-			}
-		});
-
-		mySystemDao.performReindexingPass(null);
-
-		entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
-			@Override
-			public ResourceTable doInTransaction(TransactionStatus theStatus) {
-				return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
-			}
-		});
-		assertEquals(Long.valueOf(2), entity.getIndexStatus());
-
-	}
-
 	@Test
 	public void testSystemMetaOperation() {
 
FhirResourceDaoDstu3Test.java:

@@ -2508,36 +2508,6 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test {
 
 	}
 
-	/**
-	 * Can we handle content that was previously saved containing vocabulary that
-	 * is no longer valid
-	 */
-	@Test
-	public void testResourceInDatabaseContainsInvalidVocabulary() {
-		final Patient p = new Patient();
-		p.setGender(AdministrativeGender.MALE);
-		final IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();
-
-		TransactionTemplate tx = new TransactionTemplate(myTxManager);
-		tx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
-		tx.execute(new TransactionCallbackWithoutResult() {
-			@Override
-			protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
-				ResourceTable table = myResourceTableDao.findOne(id.getIdPartAsLong());
-				String newContent = myFhirCtx.newJsonParser().encodeResourceToString(p);
-				newContent = newContent.replace("male", "foo");
-				table.setResource(newContent.getBytes(Charsets.UTF_8));
-				table.setEncoding(ResourceEncodingEnum.JSON);
-				myResourceTableDao.save(table);
-			}
-		});
-
-		Patient read = myPatientDao.read(id);
-		String string = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(read);
-		ourLog.info(string);
-		assertThat(string, containsString("value=\"foo\""));
-	}
-
 	@Test
 	public void testResourceInstanceMetaOperation() {
 
FhirSystemDaoDstu3Test.java:

@@ -3,8 +3,6 @@ package ca.uhn.fhir.jpa.dao.dstu3;
 import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.jpa.dao.SearchParameterMap;
-import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;
-import ca.uhn.fhir.jpa.entity.ResourceTable;
 import ca.uhn.fhir.jpa.entity.ResourceTag;
 import ca.uhn.fhir.jpa.entity.TagTypeEnum;
 import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;

@@ -26,15 +24,12 @@ import org.hl7.fhir.instance.model.api.IAnyResource;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.junit.*;
 import org.mockito.ArgumentCaptor;
-import org.springframework.transaction.TransactionDefinition;
 import org.springframework.transaction.TransactionStatus;
-import org.springframework.transaction.support.TransactionCallback;
 import org.springframework.transaction.support.TransactionCallbackWithoutResult;
 import org.springframework.transaction.support.TransactionTemplate;
 
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.UnsupportedEncodingException;
 import java.math.BigDecimal;
 import java.nio.charset.StandardCharsets;
 import java.util.HashSet;

@@ -444,79 +439,6 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
 		}
 	}
 
-	@Test
-	public void testReindexing() {
-		Patient p = new Patient();
-		p.addName().setFamily("family");
-		final IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
-
-		ValueSet vs = new ValueSet();
-		vs.setUrl("http://foo");
-		myValueSetDao.create(vs, mySrd);
-
-		ResourceTable entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
-			@Override
-			public ResourceTable doInTransaction(TransactionStatus theStatus) {
-				return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
-			}
-		});
-		assertEquals(Long.valueOf(1), entity.getIndexStatus());
-
-		mySystemDao.markAllResourcesForReindexing();
-
-		entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
-			@Override
-			public ResourceTable doInTransaction(TransactionStatus theStatus) {
-				return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
-			}
-		});
-		assertEquals(null, entity.getIndexStatus());
-
-		mySystemDao.performReindexingPass(null);
-
-		entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
-			@Override
-			public ResourceTable doInTransaction(TransactionStatus theStatus) {
-				return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
-			}
-		});
-		assertEquals(Long.valueOf(1), entity.getIndexStatus());
-
-		// Just make sure this doesn't cause a choke
-		mySystemDao.performReindexingPass(100000);
-
-		// Try making the resource unparseable
-
-		TransactionTemplate template = new TransactionTemplate(myTxManager);
-		template.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
-		template.execute(new TransactionCallback<ResourceTable>() {
-			@Override
-			public ResourceTable doInTransaction(TransactionStatus theStatus) {
-				ResourceTable table = myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
-				table.setEncoding(ResourceEncodingEnum.JSON);
-				table.setIndexStatus(null);
-				try {
-					table.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
-				} catch (UnsupportedEncodingException e) {
-					throw new Error(e);
-				}
-				myEntityManager.merge(table);
-				return null;
-			}
-		});
-
-		mySystemDao.performReindexingPass(null);
-
-		entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
-			@Override
-			public ResourceTable doInTransaction(TransactionStatus theStatus) {
-				return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
-			}
-		});
-		assertEquals(Long.valueOf(2), entity.getIndexStatus());
-
-	}
-
 	@Test
 	public void testSystemMetaOperation() {
 
BaseJpaR4Test.java:

@@ -178,6 +178,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
 	@Autowired
 	protected IResourceTableDao myResourceTableDao;
 	@Autowired
+	protected IResourceHistoryTableDao myResourceHistoryTableDao;
+	@Autowired
 	protected IResourceTagDao myResourceTagDao;
 	@Autowired
 	protected ISearchCoordinatorSvc mySearchCoordinatorSvc;
FhirResourceDaoR4CreateTest.java:

@@ -25,7 +25,7 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
 	}
 
 	@Test
-	public void testCreateWithUuidResourceStrategy() throws Exception {
+	public void testCreateWithUuidResourceStrategy() {
 		myDaoConfig.setResourceServerIdStrategy(DaoConfig.IdStrategyEnum.UUID);
 
 		Patient p = new Patient();

@@ -40,7 +40,7 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
 	}
 
 	@Test
-	public void testTransactionCreateWithUuidResourceStrategy() throws Exception {
+	public void testTransactionCreateWithUuidResourceStrategy() {
 		myDaoConfig.setResourceServerIdStrategy(DaoConfig.IdStrategyEnum.UUID);
 
 		Organization org = new Organization();
FhirResourceDaoR4Test.java:

@@ -2565,12 +2565,12 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
 		tx.execute(new TransactionCallbackWithoutResult() {
 			@Override
 			protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
-				ResourceTable table = myResourceTableDao.findOne(id.getIdPartAsLong());
+				ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 1L);
 				String newContent = myFhirCtx.newJsonParser().encodeResourceToString(p);
 				newContent = newContent.replace("male", "foo");
 				table.setResource(newContent.getBytes(Charsets.UTF_8));
 				table.setEncoding(ResourceEncodingEnum.JSON);
-				myResourceTableDao.save(table);
+				myResourceHistoryTableDao.save(table);
 			}
 		});
 
FhirSystemDaoR4Test.java:

@@ -3,10 +3,8 @@ package ca.uhn.fhir.jpa.dao.r4;
 import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.jpa.dao.SearchParameterMap;
-import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;
-import ca.uhn.fhir.jpa.entity.ResourceTable;
-import ca.uhn.fhir.jpa.entity.ResourceTag;
-import ca.uhn.fhir.jpa.entity.TagTypeEnum;
+import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
+import ca.uhn.fhir.jpa.entity.*;
 import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
 import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
 import ca.uhn.fhir.model.primitive.IdDt;

@@ -26,6 +24,7 @@ import org.hl7.fhir.r4.model.Observation.ObservationStatus;
 import org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity;
 import org.junit.*;
 import org.mockito.ArgumentCaptor;
+import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.transaction.TransactionDefinition;
 import org.springframework.transaction.TransactionStatus;
 import org.springframework.transaction.support.TransactionCallback;

@@ -443,7 +442,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
 	public void testReindexing() {
 		Patient p = new Patient();
 		p.addName().setFamily("family");
-		final IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
+		final IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualified();
 
 		ValueSet vs = new ValueSet();
 		vs.setUrl("http://foo");

@@ -487,15 +486,19 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
 		template.execute(new TransactionCallback<ResourceTable>() {
 			@Override
 			public ResourceTable doInTransaction(TransactionStatus theStatus) {
-				ResourceTable table = myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
-				table.setEncoding(ResourceEncodingEnum.JSON);
-				table.setIndexStatus(null);
+				ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), id.getVersionIdPartAsLong());
+				resourceHistoryTable.setEncoding(ResourceEncodingEnum.JSON);
 				try {
-					table.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
+					resourceHistoryTable.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
 				} catch (UnsupportedEncodingException e) {
 					throw new Error(e);
 				}
-				myEntityManager.merge(table);
+				myResourceHistoryTableDao.save(resourceHistoryTable);
+
+				ResourceTable table = myResourceTableDao.findOne(id.getIdPartAsLong());
+				table.setIndexStatus(null);
+				myResourceTableDao.save(table);
+
 				return null;
 			}
 		});
pom.xml:

@@ -1026,6 +1026,14 @@
 	<build>
 		<pluginManagement>
 			<plugins>
+				<plugin>
+					<groupId>com.gemnasium</groupId>
+					<artifactId>gemnasium-maven-plugin</artifactId>
+					<version>0.2.0</version>
+					<configuration>
+						<projectSlug>github.com/jamesagnew/hapi-fhir</projectSlug>
+					</configuration>
+				</plugin>
 				<plugin>
 					<groupId>org.basepom.maven</groupId>
 					<artifactId>duplicate-finder-maven-plugin</artifactId>
src/site/xdoc:

@@ -19,6 +19,15 @@
 			and support for draft pre-release versions of FHIR are shown in
 			<span style="background: #EEB; padding: 3px;">YELLOW</span>.
 		</p>
+		<p>
+			Note also that after the release of the FHIR DSTU2 specification, the FHIR
+			standard itself stopped using the DSTUx naming scheme, in favour of naming
+			new releases STUx or simply Rx. Because HAPI FHIR already had draft support
+			for what was then called DSTU3 at this time, we did not update our naming
+			conventions until R4 in order to avoid breaking existing users' code.
+			From the perspective of a user of HAPI FHIR, consider the terms
+			DSTU3 / STU3 / R3 to be interchangeable.
+		</p>
 		<table>
 			<thead>
 				<tr>

@@ -135,6 +144,14 @@
 					<td style="background: #CEC;">3.0.1</td>
 					<td style="background: #EEB;">Draft 3.1.0-12370</td>
 				</tr>
+				<tr>
+					<td>HAPI FHIR 3.1.0</td>
+					<td style="background: #DDD;"></td>
+					<td style="background: #CEC;">1.0.2</td>
+					<td style="background: #EEB;">Draft 1.4.0</td>
+					<td style="background: #CEC;">3.0.1</td>
+					<td style="background: #EEB;">Draft 3.1.0-12370</td>
+				</tr>
 			</tbody>
 		</table>