Start work on removing duplicate resource body

This commit is contained in:
James Agnew 2018-01-04 14:45:17 -05:00
parent b18e71d4f5
commit 2f5ce64755
15 changed files with 360 additions and 412 deletions

View File

@ -911,102 +911,117 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
/**
* Returns true if the resource has changed (either the contents or the tags)
*/
protected boolean populateResourceIntoEntity(IBaseResource theResource, ResourceTable theEntity, boolean theUpdateHash) {
theEntity.setResourceType(toResourceName(theResource));
protected EncodedResource populateResourceIntoEntity(IBaseResource theResource, ResourceTable theEntity, boolean theUpdateHash) {
if (theEntity.getResourceType() == null) {
theEntity.setResourceType(toResourceName(theResource));
}
List<BaseResourceReferenceDt> refs = myContext.newTerser().getAllPopulatedChildElementsOfType(theResource, BaseResourceReferenceDt.class);
for (BaseResourceReferenceDt nextRef : refs) {
if (nextRef.getReference().isEmpty() == false) {
if (nextRef.getReference().hasVersionIdPart()) {
nextRef.setReference(nextRef.getReference().toUnqualifiedVersionless());
if (theResource != null) {
List<BaseResourceReferenceDt> refs = myContext.newTerser().getAllPopulatedChildElementsOfType(theResource, BaseResourceReferenceDt.class);
for (BaseResourceReferenceDt nextRef : refs) {
if (nextRef.getReference().isEmpty() == false) {
if (nextRef.getReference().hasVersionIdPart()) {
nextRef.setReference(nextRef.getReference().toUnqualifiedVersionless());
}
}
}
}
ResourceEncodingEnum encoding = myConfig.getResourceEncoding();
IParser parser = encoding.newParser(myContext);
parser.setDontEncodeElements(EXCLUDE_ELEMENTS_IN_ENCODED);
String encoded = parser.encodeResourceToString(theResource);
theEntity.setEncoding(encoding);
theEntity.setFhirVersion(myContext.getVersion().getVersion());
byte[] bytes;
switch (encoding) {
case JSON:
bytes = encoded.getBytes(Charsets.UTF_8);
break;
default:
case JSONC:
bytes = GZipUtil.compress(encoded);
break;
}
ResourceEncodingEnum encoding;
boolean changed = false;
if (theUpdateHash) {
HashFunction sha256 = Hashing.sha256();
String hashSha256 = sha256.hashBytes(bytes).toString();
if (hashSha256.equals(theEntity.getHashSha256()) == false) {
if (theEntity.getDeleted() == null) {
encoding = myConfig.getResourceEncoding();
IParser parser = encoding.newParser(myContext);
parser.setDontEncodeElements(EXCLUDE_ELEMENTS_IN_ENCODED);
String encoded = parser.encodeResourceToString(theResource);
theEntity.setFhirVersion(myContext.getVersion().getVersion());
switch (encoding) {
case JSON:
bytes = encoded.getBytes(Charsets.UTF_8);
break;
default:
case JSONC:
bytes = GZipUtil.compress(encoded);
break;
}
if (theUpdateHash) {
HashFunction sha256 = Hashing.sha256();
String hashSha256 = sha256.hashBytes(bytes).toString();
if (hashSha256.equals(theEntity.getHashSha256()) == false) {
changed = true;
}
theEntity.setHashSha256(hashSha256);
}
Set<TagDefinition> allDefs = new HashSet<>();
theEntity.setHasTags(false);
Set<TagDefinition> allTagsOld = getAllTagDefinitions(theEntity);
if (theResource instanceof IResource) {
extractTagsHapi((IResource) theResource, theEntity, allDefs);
} else {
extractTagsRi((IAnyResource) theResource, theEntity, allDefs);
}
RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
if (def.isStandardType() == false) {
String profile = def.getResourceProfile("");
if (isNotBlank(profile)) {
TagDefinition tag = getTagOrNull(TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null);
if (tag != null) {
allDefs.add(tag);
theEntity.addTag(tag);
theEntity.setHasTags(true);
}
}
}
ArrayList<ResourceTag> existingTags = new ArrayList<>();
if (theEntity.isHasTags()) {
existingTags.addAll(theEntity.getTags());
}
for (ResourceTag next : existingTags) {
TagDefinition nextDef = next.getTag();
if (!allDefs.contains(nextDef)) {
if (shouldDroppedTagBeRemovedOnUpdate(theEntity, next)) {
theEntity.getTags().remove(next);
}
}
}
Set<TagDefinition> allTagsNew = getAllTagDefinitions(theEntity);
if (!allTagsOld.equals(allTagsNew)) {
changed = true;
}
theEntity.setHashSha256(hashSha256);
} else {
theEntity.setHashSha256(null);
bytes = null;
encoding = ResourceEncodingEnum.DEL;
}
if (changed == false) {
if (theEntity.getResource() == null) {
ResourceHistoryTable currentHistoryVersion = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
if (currentHistoryVersion == null || currentHistoryVersion.getResource() == null) {
changed = true;
} else {
changed = !Arrays.equals(theEntity.getResource(), bytes);
changed = !Arrays.equals(currentHistoryVersion.getResource(), bytes);
}
}
theEntity.setResource(bytes);
EncodedResource retVal = new EncodedResource();
retVal.setEncoding(encoding);
retVal.setResource(bytes);
retVal.setChanged(changed);
Set<TagDefinition> allDefs = new HashSet<TagDefinition>();
theEntity.setHasTags(false);
Set<TagDefinition> allTagsOld = getAllTagDefinitions(theEntity);
if (theResource instanceof IResource) {
extractTagsHapi((IResource) theResource, theEntity, allDefs);
} else {
extractTagsRi((IAnyResource) theResource, theEntity, allDefs);
}
RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
if (def.isStandardType() == false) {
String profile = def.getResourceProfile("");
if (isNotBlank(profile)) {
TagDefinition tag = getTagOrNull(TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null);
if (tag != null) {
allDefs.add(tag);
theEntity.addTag(tag);
theEntity.setHasTags(true);
}
}
}
ArrayList<ResourceTag> existingTags = new ArrayList<ResourceTag>();
if (theEntity.isHasTags()) {
existingTags.addAll(theEntity.getTags());
}
for (ResourceTag next : existingTags) {
TagDefinition nextDef = next.getTag();
if (!allDefs.contains(nextDef)) {
if (shouldDroppedTagBeRemovedOnUpdate(theEntity, next)) {
theEntity.getTags().remove(next);
}
}
}
Set<TagDefinition> allTagsNew = getAllTagDefinitions(theEntity);
if (!allTagsOld.equals(allTagsNew)) {
changed = true;
}
return changed;
return retVal;
}
@SuppressWarnings("unchecked")
@ -1181,6 +1196,18 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
throw new NotImplementedException("");
}
/**
 * Returns the elements of {@code theInput} that do NOT also appear in
 * {@code theToRemove}. Neither argument is modified; when the input is
 * already empty it is returned directly to skip an unnecessary copy.
 */
private <T> Collection<T> removeCommon(Collection<T> theInput, Collection<T> theToRemove) {
	// Passing the same collection as both arguments would be a caller bug:
	// the result would then always be empty
	assert theInput != theToRemove;

	if (theInput.isEmpty()) {
		return theInput;
	}

	ArrayList<T> kept = new ArrayList<>(theInput.size());
	for (T next : theInput) {
		if (!theToRemove.contains(next)) {
			kept.add(next);
		}
	}
	return kept;
}
public void setEntityManager(EntityManager theEntityManager) {
myEntityManager = theEntityManager;
}
@ -1222,28 +1249,33 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
return toResource(resourceType, theEntity, theForHistoryOperation);
}
// protected ResourceTable toEntity(IResource theResource) {
// ResourceTable retVal = new ResourceTable();
//
// populateResourceIntoEntity(theResource, retVal, true);
//
// return retVal;
// }
@SuppressWarnings("unchecked")
@Override
public <R extends IBaseResource> R toResource(Class<R> theResourceType, BaseHasResource theEntity, boolean theForHistoryOperation) {
ResourceHistoryTable history;
if (theEntity instanceof ResourceHistoryTable) {
history = (ResourceHistoryTable) theEntity;
} else {
history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
}
byte[] resourceBytes = history.getResource();
ResourceEncodingEnum resourceEncoding = history.getEncoding();
String resourceText = null;
switch (theEntity.getEncoding()) {
switch (resourceEncoding) {
case JSON:
try {
resourceText = new String(theEntity.getResource(), "UTF-8");
resourceText = new String(resourceBytes, "UTF-8");
} catch (UnsupportedEncodingException e) {
throw new Error("Should not happen", e);
}
break;
case JSONC:
resourceText = GZipUtil.decompress(theEntity.getResource());
resourceText = GZipUtil.decompress(resourceBytes);
break;
case DEL:
break;
}
@ -1267,27 +1299,34 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
}
}
IParser parser = theEntity.getEncoding().newParser(getContext(theEntity.getFhirVersion()));
parser.setParserErrorHandler(new LenientErrorHandler(false).setErrorOnInvalidValue(false));
R retVal;
try {
retVal = parser.parseResource(resourceType, resourceText);
} catch (Exception e) {
StringBuilder b = new StringBuilder();
b.append("Failed to parse database resource[");
b.append(resourceType);
b.append("/");
b.append(theEntity.getIdDt().getIdPart());
b.append(" (pid ");
b.append(theEntity.getId());
b.append(", version ");
b.append(theEntity.getFhirVersion().name());
b.append("): ");
b.append(e.getMessage());
String msg = b.toString();
ourLog.error(msg, e);
throw new DataFormatException(msg, e);
if (resourceEncoding != ResourceEncodingEnum.DEL) {
IParser parser = resourceEncoding.newParser(getContext(theEntity.getFhirVersion()));
parser.setParserErrorHandler(new LenientErrorHandler(false).setErrorOnInvalidValue(false));
try {
retVal = parser.parseResource(resourceType, resourceText);
} catch (Exception e) {
StringBuilder b = new StringBuilder();
b.append("Failed to parse database resource[");
b.append(resourceType);
b.append("/");
b.append(theEntity.getIdDt().getIdPart());
b.append(" (pid ");
b.append(theEntity.getId());
b.append(", version ");
b.append(theEntity.getFhirVersion().name());
b.append("): ");
b.append(e.getMessage());
String msg = b.toString();
ourLog.error(msg, e);
throw new DataFormatException(msg, e);
}
} else {
retVal = (R) myContext.getResourceDefinition(theEntity.getResourceType()).newInstance();
}
if (retVal instanceof IResource) {
@ -1297,6 +1336,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
IAnyResource res = (IAnyResource) retVal;
retVal = populateResourceMetadataRi(resourceType, theEntity, theForHistoryOperation, res);
}
return retVal;
}
@ -1304,11 +1345,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
return myContext.getResourceDefinition(theResourceType).getName();
}
protected String toResourceName(IBaseResource theResource) {
String toResourceName(IBaseResource theResource) {
return myContext.getResourceDefinition(theResource).getName();
}
protected Long translateForcedIdToPid(String theResourceName, String theResourceId) {
Long translateForcedIdToPid(String theResourceName, String theResourceId) {
return translateForcedIdToPids(new IdDt(theResourceName, theResourceId), myForcedIdDao).get(0);
}
@ -1316,7 +1357,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
return translateForcedIdToPids(theId, myForcedIdDao);
}
protected String translatePidIdToForcedId(String theResourceType, Long theId) {
private String translatePidIdToForcedId(String theResourceType, Long theId) {
ForcedId forcedId = myForcedIdDao.findByResourcePid(theId);
if (forcedId != null) {
return forcedId.getResourceType() + '/' + forcedId.getForcedId();
@ -1399,7 +1440,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
Set<ResourceLink> links = null;
Set<String> populatedResourceLinkParameters = Collections.emptySet();
boolean changed;
EncodedResource changed;
if (theDeletedTimestampOrNull != null) {
stringParams = Collections.emptySet();
@ -1417,7 +1458,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
theEntity.setNarrativeTextParsedIntoWords(null);
theEntity.setContentTextParsedIntoWords(null);
theEntity.setHashSha256(null);
changed = true;
changed = populateResourceIntoEntity(theResource, theEntity, true);
} else {
@ -1551,7 +1592,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
theEntity.setParamsUriPopulated(uriParams.isEmpty() == false);
theEntity.setParamsCoords(coordsParams);
theEntity.setParamsCoordsPopulated(coordsParams.isEmpty() == false);
// theEntity.setParamsCompositeStringUnique(compositeStringUniques);
theEntity.setParamsCompositeStringUniquePresent(compositeStringUniques.isEmpty() == false);
theEntity.setResourceLinks(links);
theEntity.setHasLinks(links.isEmpty() == false);
@ -1570,7 +1610,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
}
if (!changed && !theForceUpdate && myConfig.isSuppressUpdatesWithNoChange()) {
if (!changed.isChanged() && !theForceUpdate && myConfig.isSuppressUpdatesWithNoChange()) {
ourLog.info("Resource {} has not changed", theEntity.getIdDt().toUnqualified().getValue());
if (theResource != null) {
populateResourceIdFromEntity(theEntity, theResource);
@ -1631,7 +1671,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
* Create history entry
*/
if (theCreateNewHistoryEntry) {
final ResourceHistoryTable historyEntry = theEntity.toHistory(null);
final ResourceHistoryTable historyEntry = theEntity.toHistory();
historyEntry.setEncoding(changed.getEncoding());
historyEntry.setResource(changed.getResource());
ourLog.info("Saving history entry {}", historyEntry.getIdDt());
myResourceHistoryTableDao.save(historyEntry);
@ -1736,19 +1778,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
return theEntity;
}
private <T> Collection<T> removeCommon(Collection<T> theInput, Collection<T> theToRemove) {
assert theInput != theToRemove;
if (theInput.isEmpty()) {
return theInput;
}
ArrayList<T> retVal = new ArrayList<>(theInput);
retVal.removeAll(theToRemove);
return retVal;
}
protected ResourceTable updateEntity(IBaseResource theResource, ResourceTable entity, Date theDeletedTimestampOrNull, Date theUpdateTime) {
return updateEntity(theResource, entity, theDeletedTimestampOrNull, true, true, theUpdateTime, false, true);
}
@ -1891,19 +1920,19 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
* parameters across a set of search parameters. An example of why
* this is needed:
* <p>
* Let's say we have a unique index on (Patient:gender AND Patient:name).
* Then we pass in <code>SMITH, John</code> with a gender of <code>male</code>.
* Let's say we have a unique index on (Patient:gender AND Patient:name).
* Then we pass in <code>SMITH, John</code> with a gender of <code>male</code>.
* </p>
* <p>
* In this case, because the name parameter matches both first and last name,
* we now need two unique indexes:
* <ul>
* <li>Patient?gender=male&amp;name=SMITH</li>
* <li>Patient?gender=male&amp;name=JOHN</li>
* </ul>
* In this case, because the name parameter matches both first and last name,
* we now need two unique indexes:
* <ul>
* <li>Patient?gender=male&amp;name=SMITH</li>
* <li>Patient?gender=male&amp;name=JOHN</li>
* </ul>
* </p>
* <p>
* So this recursive algorithm calculates those
* So this recursive algorithm calculates those
* </p>
*
* @param theResourceType E.g. <code>Patient
@ -1929,8 +1958,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
Collections.sort(thePartsChoices, new Comparator<List<String>>() {
@Override
public int compare(List<String> o1, List<String> o2) {
String str1=null;
String str2=null;
String str1 = null;
String str2 = null;
if (o1.size() > 0) {
str1 = o1.get(0);
}

View File

@ -0,0 +1,35 @@
package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;
/**
 * Simple value object carrying the serialized form of a resource: the raw
 * bytes, the {@link ResourceEncodingEnum encoding} used to produce them, and
 * a flag indicating whether the content differs from the previously
 * persisted version.
 */
class EncodedResource {

	private ResourceEncodingEnum myEncoding;
	private byte[] myResource;
	private boolean myChanged;

	/**
	 * @return the encoding used to serialize {@link #getResource()}
	 */
	public ResourceEncodingEnum getEncoding() {
		return myEncoding;
	}

	public void setEncoding(ResourceEncodingEnum theEncoding) {
		myEncoding = theEncoding;
	}

	/**
	 * @return the serialized resource body (null when the resource was
	 * deleted and no body was produced — see caller)
	 */
	public byte[] getResource() {
		return myResource;
	}

	public void setResource(byte[] theResource) {
		myResource = theResource;
	}

	/**
	 * @return true if the contents or tags changed relative to the
	 * previously stored version
	 */
	public boolean isChanged() {
		return myChanged;
	}

	public void setChanged(boolean theChanged) {
		myChanged = theChanged;
	}

}

View File

@ -36,11 +36,6 @@ public abstract class BaseHasResource {
@Temporal(TemporalType.TIMESTAMP)
private Date myDeleted;
@Column(name = "RES_ENCODING", nullable = false, length = 5)
@Enumerated(EnumType.STRING)
@OptimisticLock(excluded = true)
private ResourceEncodingEnum myEncoding;
@Column(name = "RES_VERSION", nullable = true, length = 7)
@Enumerated(EnumType.STRING)
@OptimisticLock(excluded = true)
@ -60,11 +55,6 @@ public abstract class BaseHasResource {
@OptimisticLock(excluded = true)
private Date myPublished;
@Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = false)
@Lob()
@OptimisticLock(excluded = true)
private byte[] myResource;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "RES_UPDATED", nullable = false)
@OptimisticLock(excluded = true)
@ -80,13 +70,6 @@ public abstract class BaseHasResource {
myDeleted = theDate;
}
public ResourceEncodingEnum getEncoding() {
return myEncoding;
}
public void setEncoding(ResourceEncodingEnum theEncoding) {
myEncoding = theEncoding;
}
public FhirVersionEnum getFhirVersion() {
return myFhirVersion;
@ -116,16 +99,8 @@ public abstract class BaseHasResource {
}
}
public void setPublished(InstantDt thePublished) {
myPublished = thePublished.getValue();
}
public byte[] getResource() {
return myResource;
}
public void setResource(byte[] theResource) {
myResource = theResource;
public void setPublished(Date thePublished) {
myPublished = thePublished;
}
public abstract String getResourceType();
@ -136,8 +111,8 @@ public abstract class BaseHasResource {
return new InstantDt(myUpdated);
}
public void setUpdated(InstantDt theUpdated) {
myUpdated = theUpdated.getValue();
public void setUpdated(Date theUpdated) {
myUpdated = theUpdated;
}
public Date getUpdatedDate() {
@ -154,12 +129,12 @@ public abstract class BaseHasResource {
myHasTags = theHasTags;
}
public void setPublished(Date thePublished) {
myPublished = thePublished;
public void setPublished(InstantDt thePublished) {
myPublished = thePublished.getValue();
}
public void setUpdated(Date theUpdated) {
myUpdated = theUpdated;
public void setUpdated(InstantDt theUpdated) {
myUpdated = theUpdated.getValue();
}
}

View File

@ -25,11 +25,22 @@ import ca.uhn.fhir.parser.IParser;
public enum ResourceEncodingEnum {
/*
* NB: Constants in this enum must be 5 chars long or less!!!
*
* See ResourceHistoryTable RES_ENCODING column
*/
/** Json */
JSON,
/** Json Compressed */
JSONC;
JSONC,
/**
* Resource was deleted - No contents expected
*/
DEL;
public IParser newParser(FhirContext theContext) {
return theContext.newJsonParser();

View File

@ -20,23 +20,23 @@ package ca.uhn.fhir.jpa.entity;
* #L%
*/
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import org.hibernate.annotations.OptimisticLock;
import javax.persistence.*;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import javax.persistence.*;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
//@formatter:off
@Entity
@Table(name = "HFJ_RES_VER", uniqueConstraints = {
@UniqueConstraint(name="IDX_RESVER_ID_VER", columnNames = { "RES_ID", "RES_VER" })
}, indexes= {
@Index(name="IDX_RESVER_TYPE_DATE", columnList="RES_TYPE,RES_UPDATED"),
@Index(name="IDX_RESVER_ID_DATE", columnList="RES_ID,RES_UPDATED"),
@Index(name="IDX_RESVER_DATE", columnList="RES_UPDATED")
@UniqueConstraint(name = "IDX_RESVER_ID_VER", columnNames = {"RES_ID", "RES_VER"})
}, indexes = {
@Index(name = "IDX_RESVER_TYPE_DATE", columnList = "RES_TYPE,RES_UPDATED"),
@Index(name = "IDX_RESVER_ID_DATE", columnList = "RES_ID,RES_UPDATED"),
@Index(name = "IDX_RESVER_DATE", columnList = "RES_UPDATED")
})
//@formatter:on
public class ResourceHistoryTable extends BaseHasResource implements Serializable {
@ -61,11 +61,20 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
@OneToMany(mappedBy = "myResourceHistory", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
private Collection<ResourceHistoryTag> myTags;
@Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = true)
@Lob()
@OptimisticLock(excluded = true)
private byte[] myResource;
@Column(name = "RES_ENCODING", nullable = false, length = 5)
@Enumerated(EnumType.STRING)
@OptimisticLock(excluded = true)
private ResourceEncodingEnum myEncoding;
public ResourceHistoryTable() {
super();
}
public void addTag(ResourceHistoryTag theTag) {
for (ResourceHistoryTag next : getTags()) {
if (next.getTag().equals(theTag)) {
@ -93,11 +102,23 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
return historyTag;
}
public ResourceEncodingEnum getEncoding() {
return myEncoding;
}
public void setEncoding(ResourceEncodingEnum theEncoding) {
myEncoding = theEncoding;
}
@Override
public Long getId() {
return myId;
}
public void setId(Long theId) {
myId = theId;
}
@Override
public IdDt getIdDt() {
if (getForcedId() == null) {
@ -108,15 +129,31 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
}
}
public byte[] getResource() {
return myResource;
}
public void setResource(byte[] theResource) {
myResource = theResource;
}
public Long getResourceId() {
return myResourceId;
}
public void setResourceId(Long theResourceId) {
myResourceId = theResourceId;
}
@Override
public String getResourceType() {
return myResourceType;
}
public void setResourceType(String theResourceType) {
myResourceType = theResourceType;
}
@Override
public Collection<ResourceHistoryTag> getTags() {
if (myTags == null) {
@ -130,6 +167,10 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
return myResourceVersion;
}
public void setVersion(long theVersion) {
myResourceVersion = theVersion;
}
public boolean hasTag(String theTerm, String theScheme) {
for (ResourceHistoryTag next : getTags()) {
if (next.getTag().getSystem().equals(theScheme) && next.getTag().getCode().equals(theTerm)) {
@ -139,20 +180,4 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
return false;
}
public void setId(Long theId) {
myId = theId;
}
public void setResourceId(Long theResourceId) {
myResourceId = theResourceId;
}
public void setResourceType(String theResourceType) {
myResourceType = theResourceType;
}
public void setVersion(long theVersion) {
myResourceVersion = theVersion;
}
}

View File

@ -24,6 +24,7 @@ import ca.uhn.fhir.jpa.search.IndexNonDeletedInterceptor;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.annotations.OptimisticLock;
@ -39,7 +40,6 @@ import java.util.Set;
import static org.apache.commons.lang3.StringUtils.defaultString;
//@formatter:off
@Indexed(interceptor = IndexNonDeletedInterceptor.class)
@Entity
@Table(name = "HFJ_RESOURCE", uniqueConstraints = {}, indexes = {
@ -49,13 +49,18 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
@Index(name = "IDX_RES_TYPE", columnList = "RES_TYPE"),
@Index(name = "IDX_INDEXSTATUS", columnList = "SP_INDEX_STATUS")
})
//@formatter:on
public class ResourceTable extends BaseHasResource implements Serializable {
static final int RESTYPE_LEN = 30;
private static final int MAX_LANGUAGE_LENGTH = 20;
private static final int MAX_PROFILE_LENGTH = 200;
private static final long serialVersionUID = 1L;
// @Transient
// private transient byte[] myResource;
//
// @Transient
// private transient ResourceEncodingEnum myEncoding;
/**
* Holds the narrative text only - Used for Fulltext searching but not directly stored in the DB
*/
@ -214,6 +219,15 @@ public class ResourceTable extends BaseHasResource implements Serializable {
return tag;
}
// public ResourceEncodingEnum getEncoding() {
// Validate.notNull(myEncoding, "myEncoding is null");
// return myEncoding;
// }
//
// public void setEncoding(ResourceEncodingEnum theEncoding) {
// myEncoding = theEncoding;
// }
public String getHashSha256() {
return myHashSha256;
}
@ -387,6 +401,15 @@ public class ResourceTable extends BaseHasResource implements Serializable {
myProfile = theProfile;
}
// public byte[] getResource() {
// Validate.notNull(myEncoding, "myEncoding is null");
// return myResource;
// }
//
// public void setResource(byte[] theResource) {
// myResource = theResource;
// }
public Collection<ResourceLink> getResourceLinks() {
if (myResourceLinks == null) {
myResourceLinks = new ArrayList<>();
@ -527,8 +550,8 @@ public class ResourceTable extends BaseHasResource implements Serializable {
myNarrativeText = theNarrativeText;
}
public ResourceHistoryTable toHistory(ResourceHistoryTable theResourceHistoryTable) {
ResourceHistoryTable retVal = theResourceHistoryTable != null ? theResourceHistoryTable : new ResourceHistoryTable();
public ResourceHistoryTable toHistory() {
ResourceHistoryTable retVal = new ResourceHistoryTable();
retVal.setResourceId(myId);
retVal.setResourceType(myResourceType);
@ -536,9 +559,9 @@ public class ResourceTable extends BaseHasResource implements Serializable {
retVal.setPublished(getPublished());
retVal.setUpdated(getUpdated());
retVal.setEncoding(getEncoding());
// retVal.setEncoding(getEncoding());
retVal.setFhirVersion(getFhirVersion());
retVal.setResource(getResource());
// retVal.setResource(getResource());
retVal.setDeleted(getDeleted());
retVal.setForcedId(getForcedId());

View File

@ -134,78 +134,6 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest {
}
/**
 * Exercises the reindexing lifecycle: a freshly created resource has index
 * status 1, marking all resources for reindexing clears the status, a
 * reindexing pass restores it, and a resource whose stored body cannot be
 * parsed ends up with status 2 (presumably "indexing failed" — TODO confirm
 * constant meaning) instead of aborting the pass.
 */
@Test
public void testReindexing() {
	// Create one Patient (the resource inspected below) plus a ValueSet so
	// the reindexing pass walks more than a single resource type
	Patient p = new Patient();
	p.addName().addFamily("family");
	final IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();

	ValueSet vs = new ValueSet();
	vs.setUrl("http://foo");
	myValueSetDao.create(vs, mySrd);

	// Freshly created resources are indexed immediately (status 1)
	ResourceTable entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
		@Override
		public ResourceTable doInTransaction(TransactionStatus theStatus) {
			return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
		}
	});
	assertEquals(Long.valueOf(1), entity.getIndexStatus());

	// Marking for reindexing clears the per-resource index status
	mySystemDao.markAllResourcesForReindexing();

	entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
		@Override
		public ResourceTable doInTransaction(TransactionStatus theStatus) {
			return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
		}
	});
	assertEquals(null, entity.getIndexStatus());

	// A reindexing pass restores the indexed status
	mySystemDao.performReindexingPass(null);

	entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
		@Override
		public ResourceTable doInTransaction(TransactionStatus theStatus) {
			return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
		}
	});
	assertEquals(Long.valueOf(1), entity.getIndexStatus());

	// Just make sure this doesn't cause a choke
	mySystemDao.performReindexingPass(100000);

	// Try making the resource unparseable
	TransactionTemplate template = new TransactionTemplate(myTxManager);
	template.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRES_NEW);
	template.execute(new TransactionCallback<ResourceTable>() {
		@Override
		public ResourceTable doInTransaction(TransactionStatus theStatus) {
			ResourceTable table = myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
			table.setEncoding(ResourceEncodingEnum.JSON);
			table.setIndexStatus(null);
			try {
				// Deliberately invalid body: "FOO" is not a real resource type
				table.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
			} catch (UnsupportedEncodingException e) {
				throw new Error(e);
			}
			myEntityManager.merge(table);
			return null;
		}
	});

	// The broken resource should be flagged (status 2), not block the pass
	mySystemDao.performReindexingPass(null);

	entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
		@Override
		public ResourceTable doInTransaction(TransactionStatus theStatus) {
			return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
		}
	});
	assertEquals(Long.valueOf(2), entity.getIndexStatus());

}
@Test
public void testSystemMetaOperation() {

View File

@ -2508,36 +2508,6 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test {
}
/**
 * Can we handle content that was previously saved containing vocabulary that
 * is no longer valid? The stored body is rewritten so a coded field holds an
 * unknown code, and reading it back must still succeed (lenient parsing) and
 * preserve the invalid value.
 */
@Test
public void testResourceInDatabaseContainsInvalidVocabulary() {
	final Patient p = new Patient();
	p.setGender(AdministrativeGender.MALE);
	final IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();

	// Rewrite the persisted resource body directly, replacing the valid
	// gender code "male" with the unknown code "foo", in its own transaction
	TransactionTemplate tx = new TransactionTemplate(myTxManager);
	tx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
	tx.execute(new TransactionCallbackWithoutResult() {
		@Override
		protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
			ResourceTable table = myResourceTableDao.findOne(id.getIdPartAsLong());
			String newContent = myFhirCtx.newJsonParser().encodeResourceToString(p);
			newContent = newContent.replace("male", "foo");
			table.setResource(newContent.getBytes(Charsets.UTF_8));
			table.setEncoding(ResourceEncodingEnum.JSON);
			myResourceTableDao.save(table);
		}
	});

	// Reading back must not fail, and the invalid code survives the round trip
	Patient read = myPatientDao.read(id);
	String string = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(read);
	ourLog.info(string);
	assertThat(string, containsString("value=\"foo\""));
}
@Test
public void testResourceInstanceMetaOperation() {

View File

@ -3,8 +3,6 @@ package ca.uhn.fhir.jpa.dao.dstu3;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
@ -26,15 +24,12 @@ import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.*;
import org.mockito.ArgumentCaptor;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
@ -444,79 +439,6 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
}
}
/**
 * DSTU3 variant of the reindexing lifecycle test: create → status 1, mark
 * all for reindexing → status cleared, reindexing pass → status 1 again, and
 * an unparseable stored body → status 2 (presumably "indexing failed" —
 * TODO confirm constant meaning) without aborting the pass.
 */
@Test
public void testReindexing() {
	// Create one Patient (inspected below) plus a ValueSet so the pass walks
	// more than one resource type
	Patient p = new Patient();
	p.addName().setFamily("family");
	final IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();

	ValueSet vs = new ValueSet();
	vs.setUrl("http://foo");
	myValueSetDao.create(vs, mySrd);

	// Freshly created resources are indexed immediately (status 1)
	ResourceTable entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
		@Override
		public ResourceTable doInTransaction(TransactionStatus theStatus) {
			return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
		}
	});
	assertEquals(Long.valueOf(1), entity.getIndexStatus());

	// Marking for reindexing clears the per-resource index status
	mySystemDao.markAllResourcesForReindexing();

	entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
		@Override
		public ResourceTable doInTransaction(TransactionStatus theStatus) {
			return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
		}
	});
	assertEquals(null, entity.getIndexStatus());

	// A reindexing pass restores the indexed status
	mySystemDao.performReindexingPass(null);

	entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
		@Override
		public ResourceTable doInTransaction(TransactionStatus theStatus) {
			return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
		}
	});
	assertEquals(Long.valueOf(1), entity.getIndexStatus());

	// Just make sure this doesn't cause a choke
	mySystemDao.performReindexingPass(100000);

	// Try making the resource unparseable
	TransactionTemplate template = new TransactionTemplate(myTxManager);
	template.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
	template.execute(new TransactionCallback<ResourceTable>() {
		@Override
		public ResourceTable doInTransaction(TransactionStatus theStatus) {
			ResourceTable table = myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
			table.setEncoding(ResourceEncodingEnum.JSON);
			table.setIndexStatus(null);
			try {
				// Deliberately invalid body: "FOO" is not a real resource type
				table.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
			} catch (UnsupportedEncodingException e) {
				throw new Error(e);
			}
			myEntityManager.merge(table);
			return null;
		}
	});

	// The broken resource should be flagged (status 2), not block the pass
	mySystemDao.performReindexingPass(null);

	entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback<ResourceTable>() {
		@Override
		public ResourceTable doInTransaction(TransactionStatus theStatus) {
			return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
		}
	});
	assertEquals(Long.valueOf(2), entity.getIndexStatus());

}
@Test
public void testSystemMetaOperation() {

View File

@ -178,6 +178,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
@Autowired
protected IResourceTableDao myResourceTableDao;
@Autowired
protected IResourceHistoryTableDao myResourceHistoryTableDao;
@Autowired
protected IResourceTagDao myResourceTagDao;
@Autowired
protected ISearchCoordinatorSvc mySearchCoordinatorSvc;

View File

@ -25,7 +25,7 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
}
@Test
public void testCreateWithUuidResourceStrategy() throws Exception {
public void testCreateWithUuidResourceStrategy() {
myDaoConfig.setResourceServerIdStrategy(DaoConfig.IdStrategyEnum.UUID);
Patient p = new Patient();
@ -40,7 +40,7 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
}
@Test
public void testTransactionCreateWithUuidResourceStrategy() throws Exception {
public void testTransactionCreateWithUuidResourceStrategy() {
myDaoConfig.setResourceServerIdStrategy(DaoConfig.IdStrategyEnum.UUID);
Organization org = new Organization();

View File

@ -2565,12 +2565,12 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
tx.execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
ResourceTable table = myResourceTableDao.findOne(id.getIdPartAsLong());
ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 1L);
String newContent = myFhirCtx.newJsonParser().encodeResourceToString(p);
newContent = newContent.replace("male", "foo");
table.setResource(newContent.getBytes(Charsets.UTF_8));
table.setEncoding(ResourceEncodingEnum.JSON);
myResourceTableDao.save(table);
myResourceHistoryTableDao.save(table);
}
});

View File

@ -3,10 +3,8 @@ package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.primitive.IdDt;
@ -26,6 +24,7 @@ import org.hl7.fhir.r4.model.Observation.ObservationStatus;
import org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity;
import org.junit.*;
import org.mockito.ArgumentCaptor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
@ -443,7 +442,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
public void testReindexing() {
Patient p = new Patient();
p.addName().setFamily("family");
final IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
final IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualified();
ValueSet vs = new ValueSet();
vs.setUrl("http://foo");
@ -487,15 +486,19 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
template.execute(new TransactionCallback<ResourceTable>() {
@Override
public ResourceTable doInTransaction(TransactionStatus theStatus) {
ResourceTable table = myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
table.setEncoding(ResourceEncodingEnum.JSON);
table.setIndexStatus(null);
ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), id.getVersionIdPartAsLong());
resourceHistoryTable.setEncoding(ResourceEncodingEnum.JSON);
try {
table.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
resourceHistoryTable.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new Error(e);
}
myEntityManager.merge(table);
myResourceHistoryTableDao.save(resourceHistoryTable);
ResourceTable table = myResourceTableDao.findOne(id.getIdPartAsLong());
table.setIndexStatus(null);
myResourceTableDao.save(table);
return null;
}
});

View File

@ -1026,6 +1026,14 @@
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>com.gemnasium</groupId>
<artifactId>gemnasium-maven-plugin</artifactId>
<version>0.2.0</version>
<configuration>
<projectSlug>github.com/jamesagnew/hapi-fhir</projectSlug>
</configuration>
</plugin>
<plugin>
<groupId>org.basepom.maven</groupId>
<artifactId>duplicate-finder-maven-plugin</artifactId>

View File

@ -19,6 +19,15 @@
and support for draft pre-release versions of FHIR are shown in
<span style="background: #EEB; padding: 3px;">YELLOW</span>.
</p>
<p>
Note also that after the release of the FHIR DSTU2 specification, the FHIR
standard itself stopped using the DSTUx naming scheme, in favour of naming
new releases STUx or simply Rx. Because HAPI FHIR already had draft support
for what was then called DSTU3 at this time, we did not update our naming
conventions until R4 in order to avoid breaking existing users' code.
From the perspective of a user of HAPI FHIR, consider the terms
DSTU3 / STU3 / R3 to be interchangeable.
</p>
<table>
<thead>
<tr>
@ -135,6 +144,14 @@
<td style="background: #CEC;">3.0.1</td>
<td style="background: #EEB;">Draft 3.1.0-12370</td>
</tr>
<tr>
<td>HAPI FHIR 3.1.0</td>
<td style="background: #DDD;"></td>
<td style="background: #CEC;">1.0.2</td>
<td style="background: #EEB;">Draft 1.4.0</td>
<td style="background: #CEC;">3.0.1</td>
<td style="background: #EEB;">Draft 3.1.0-12370</td>
</tr>
</tbody>
</table>