Squashed commit of the following:

commit 7ff895de77
Author: James Agnew <jamesagnew@gmail.com>
Date:   Fri Oct 6 15:25:06 2017 -0400

    More test fixes

commit c9fee23e48
Author: James Agnew <jamesagnew@gmail.com>
Date:   Fri Oct 6 15:14:52 2017 -0400

    More tests work

commit c796e19458
Author: James Agnew <jamesagnew@gmail.com>
Date:   Fri Oct 6 15:00:26 2017 -0400

    Get tests passing

commit eb2787d30c
Author: James Agnew <jamesagnew@gmail.com>
Date:   Fri Oct 6 14:08:23 2017 -0400

    Add an optimistic lock to the ResourceTable

commit ff85503acb
Author: James <jamesagnew@gmail.com>
Date:   Fri Oct 6 08:56:35 2017 -0400

    Add a test

Parent: b4127674e4
Commit: bacd0bfbbb
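The commits above boil down to one core change: the JPA resource table gains a @Version column so that concurrent writers collide visibly instead of silently overwriting one another, while most other columns are excluded from version bumping via Hibernate's @OptimisticLock. A minimal sketch of that pattern follows; the entity and field names here are illustrative only, not the actual HAPI FHIR classes.

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Version;

import org.hibernate.annotations.OptimisticLock;

// Illustrative sketch only: a JPA @Version column turns every update into a
// compare-and-swap on the version value, so a writer holding a stale copy
// fails instead of overwriting newer data.
@Entity
public class ExampleVersionedEntity {

   @Id
   private Long myId;

   // Managed by the JPA provider; incremented on each successful update.
   @Version
   @Column(name = "RES_VER")
   private long myVersion;

   // Changes to this field alone do not force a version increment, mirroring
   // the @OptimisticLock(excluded = true) annotations added throughout the diff.
   @OptimisticLock(excluded = true)
   @Column(name = "RES_LANGUAGE")
   private String myLanguage;

}

When the version check fails, Hibernate reports a stale-state error (surfaced through JPA as an OptimisticLockException), which is the behaviour the new FhirDaoConcurrencyDstu3Test further down exercises.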
@@ -1011,14 +1011,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
          changed = true;
       }
 
-      if (theResource instanceof IResource) {
-         String title = ResourceMetadataKeyEnum.TITLE.get((IResource) theResource);
-         if (title != null && title.length() > BaseHasResource.MAX_TITLE_LENGTH) {
-            title = title.substring(0, BaseHasResource.MAX_TITLE_LENGTH);
-         }
-         theEntity.setTitle(title);
-      }
-
       return changed;
    }
 
@@ -1052,10 +1044,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
       ResourceMetadataKeyEnum.UPDATED.put(res, theEntity.getUpdated());
       IDao.RESOURCE_PID.put(res, theEntity.getId());
 
-      if (theEntity.getTitle() != null) {
-         ResourceMetadataKeyEnum.TITLE.put(res, theEntity.getTitle());
-      }
-
       Collection<? extends BaseTag> tags = theEntity.getTags();
       if (theEntity.isHasTags()) {
          TagList tagList = new TagList();
@@ -20,52 +20,54 @@ package ca.uhn.fhir.jpa.entity;
  * #L%
  */
 
-import java.util.Collection;
-import java.util.Date;
-
-import javax.persistence.*;
-
 import ca.uhn.fhir.context.FhirVersionEnum;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.model.primitive.InstantDt;
+import org.hibernate.annotations.OptimisticLock;
+
+import javax.persistence.*;
+import java.util.Collection;
+import java.util.Date;
 
 @MappedSuperclass
 public abstract class BaseHasResource {
 
-   public static final int MAX_TITLE_LENGTH = 100;
-
    @Column(name = "RES_DELETED_AT", nullable = true)
    @Temporal(TemporalType.TIMESTAMP)
    private Date myDeleted;
 
    @Column(name = "RES_ENCODING", nullable = false, length = 5)
    @Enumerated(EnumType.STRING)
+   @OptimisticLock(excluded = true)
    private ResourceEncodingEnum myEncoding;
 
    @Column(name = "RES_VERSION", nullable = true, length = 7)
    @Enumerated(EnumType.STRING)
+   @OptimisticLock(excluded = true)
    private FhirVersionEnum myFhirVersion;
 
    @OneToOne(optional = true, fetch = FetchType.EAGER, cascade = {}, orphanRemoval = false)
    @JoinColumn(name = "FORCED_ID_PID")
+   @OptimisticLock(excluded = true)
    private ForcedId myForcedId;
 
    @Column(name = "HAS_TAGS", nullable = false)
+   @OptimisticLock(excluded = true)
    private boolean myHasTags;
 
    @Temporal(TemporalType.TIMESTAMP)
    @Column(name = "RES_PUBLISHED", nullable = false)
+   @OptimisticLock(excluded = true)
    private Date myPublished;
 
    @Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = false)
    @Lob()
+   @OptimisticLock(excluded = true)
    private byte[] myResource;
 
-   @Column(name = "RES_TITLE", nullable = true, length = MAX_TITLE_LENGTH)
-   private String myTitle;
-
    @Temporal(TemporalType.TIMESTAMP)
    @Column(name = "RES_UPDATED", nullable = false)
+   @OptimisticLock(excluded = true)
    private Date myUpdated;
 
    public abstract BaseTag addTag(TagDefinition theDef);
@@ -74,18 +76,36 @@ public abstract class BaseHasResource {
       return myDeleted;
    }
 
+   public void setDeleted(Date theDate) {
+      myDeleted = theDate;
+   }
+
    public ResourceEncodingEnum getEncoding() {
       return myEncoding;
    }
 
+   public void setEncoding(ResourceEncodingEnum theEncoding) {
+      myEncoding = theEncoding;
+   }
+
    public FhirVersionEnum getFhirVersion() {
      return myFhirVersion;
    }
 
+   public void setFhirVersion(FhirVersionEnum theFhirVersion) {
+      myFhirVersion = theFhirVersion;
+   }
+
    public ForcedId getForcedId() {
      return myForcedId;
    }
 
+   public void setForcedId(ForcedId theForcedId) {
+      myForcedId = theForcedId;
+   }
+
+   public abstract Long getId();
+
    public abstract IdDt getIdDt();
 
    public InstantDt getPublished() {
@@ -96,22 +116,30 @@ public abstract class BaseHasResource {
       }
    }
 
+   public void setPublished(InstantDt thePublished) {
+      myPublished = thePublished.getValue();
+   }
+
    public byte[] getResource() {
       return myResource;
    }
 
+   public void setResource(byte[] theResource) {
+      myResource = theResource;
+   }
+
    public abstract String getResourceType();
 
    public abstract Collection<? extends BaseTag> getTags();
 
-   public String getTitle() {
-      return myTitle;
-   }
-
    public InstantDt getUpdated() {
      return new InstantDt(myUpdated);
    }
 
+   public void setUpdated(InstantDt theUpdated) {
+      myUpdated = theUpdated.getValue();
+   }
+
    public Date getUpdatedDate() {
      return myUpdated;
    }
 
@@ -122,24 +150,6 @@ public abstract class BaseHasResource {
       return myHasTags;
    }
 
-   public void setDeleted(Date theDate) {
-      myDeleted = theDate;
-   }
-
-   public abstract Long getId();
-
-   public void setEncoding(ResourceEncodingEnum theEncoding) {
-      myEncoding = theEncoding;
-   }
-
-   public void setFhirVersion(FhirVersionEnum theFhirVersion) {
-      myFhirVersion = theFhirVersion;
-   }
-
-   public void setForcedId(ForcedId theForcedId) {
-      myForcedId = theForcedId;
-   }
-
    public void setHasTags(boolean theHasTags) {
       myHasTags = theHasTags;
    }
@@ -148,24 +158,8 @@ public abstract class BaseHasResource {
       myPublished = thePublished;
    }
 
-   public void setPublished(InstantDt thePublished) {
-      myPublished = thePublished.getValue();
-   }
-
-   public void setResource(byte[] theResource) {
-      myResource = theResource;
-   }
-
-   public void setTitle(String theTitle) {
-      myTitle = theTitle;
-   }
-
    public void setUpdated(Date theUpdated) {
       myUpdated = theUpdated;
    }
 
-   public void setUpdated(InstantDt theUpdated) {
-      myUpdated = theUpdated.getValue();
-   }
-
 }
@@ -37,6 +37,7 @@ import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory;
 import org.apache.lucene.analysis.standard.StandardFilterFactory;
 import org.apache.lucene.analysis.standard.StandardTokenizerFactory;
 import org.hibernate.annotations.ColumnDefault;
+import org.hibernate.annotations.OptimisticLock;
 import org.hibernate.search.annotations.*;
 import org.hibernate.search.annotations.Parameter;
 
@@ -124,12 +125,15 @@ public class ResourceTable extends BaseHasResource implements Serializable {
      @Field(name = "myContentTextNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteNGramAnalyzer")),
      @Field(name = "myContentTextPhonetic", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompletePhoneticAnalyzer"))
    })
+   @OptimisticLock(excluded = true)
    private String myContentText;
 
    @Column(name = "HASH_SHA256", length = 64, nullable = true)
+   @OptimisticLock(excluded = true)
    private String myHashSha256;
 
    @Column(name = "SP_HAS_LINKS")
+   @OptimisticLock(excluded = true)
    private boolean myHasLinks;
 
    @Id
@@ -139,12 +143,15 @@ public class ResourceTable extends BaseHasResource implements Serializable {
    private Long myId;
 
    @OneToMany(mappedBy = "myTargetResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
+   @OptimisticLock(excluded = true)
    private Collection<ResourceLink> myIncomingResourceLinks;
 
    @Column(name = "SP_INDEX_STATUS", nullable = true)
+   @OptimisticLock(excluded = true)
    private Long myIndexStatus;
 
    @Column(name = "RES_LANGUAGE", length = MAX_LANGUAGE_LENGTH, nullable = true)
+   @OptimisticLock(excluded = true)
    private String myLanguage;
 
    /**
@@ -157,69 +164,100 @@ public class ResourceTable extends BaseHasResource implements Serializable {
      @Field(name = "myNarrativeTextNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteNGramAnalyzer")),
      @Field(name = "myNarrativeTextPhonetic", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompletePhoneticAnalyzer"))
    })
+   @OptimisticLock(excluded = true)
    private String myNarrativeText;
 
    @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
+   @OptimisticLock(excluded = true)
    private Collection<ResourceIndexedSearchParamCoords> myParamsCoords;
 
    @Column(name = "SP_COORDS_PRESENT")
+   @OptimisticLock(excluded = true)
    private boolean myParamsCoordsPopulated;
 
    @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
+   @OptimisticLock(excluded = true)
    private Collection<ResourceIndexedSearchParamDate> myParamsDate;
 
    @Column(name = "SP_DATE_PRESENT")
+   @OptimisticLock(excluded = true)
    private boolean myParamsDatePopulated;
 
+   @OptimisticLock(excluded = true)
    @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
    private Collection<ResourceIndexedSearchParamNumber> myParamsNumber;
 
    @Column(name = "SP_NUMBER_PRESENT")
+   @OptimisticLock(excluded = true)
    private boolean myParamsNumberPopulated;
 
    @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
+   @OptimisticLock(excluded = true)
    private Collection<ResourceIndexedSearchParamQuantity> myParamsQuantity;
 
    @Column(name = "SP_QUANTITY_PRESENT")
+   @OptimisticLock(excluded = true)
    private boolean myParamsQuantityPopulated;
 
    @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
+   @OptimisticLock(excluded = true)
    private Collection<ResourceIndexedSearchParamString> myParamsString;
 
    @Column(name = "SP_STRING_PRESENT")
+   @OptimisticLock(excluded = true)
    private boolean myParamsStringPopulated;
 
    @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
+   @OptimisticLock(excluded = true)
    private Collection<ResourceIndexedSearchParamToken> myParamsToken;
 
    @Column(name = "SP_TOKEN_PRESENT")
+   @OptimisticLock(excluded = true)
    private boolean myParamsTokenPopulated;
 
    @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
+   @OptimisticLock(excluded = true)
    private Collection<ResourceIndexedSearchParamUri> myParamsUri;
 
    @Column(name = "SP_URI_PRESENT")
+   @OptimisticLock(excluded = true)
    private boolean myParamsUriPopulated;
 
    @Column(name = "RES_PROFILE", length = MAX_PROFILE_LENGTH, nullable = true)
+   @OptimisticLock(excluded = true)
    private String myProfile;
-   @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
-   private Collection<ResourceIndexedCompositeStringUnique> myParamsCompositeStringUnique;
 
    // Added in 3.0.0 - Should make this a primitive Boolean at some point
+   @OptimisticLock(excluded = true)
    @Column(name = "SP_CMPSTR_UNIQ_PRESENT")
    private Boolean myParamsCompositeStringUniquePresent = false;
 
+   @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
+   @OptimisticLock(excluded = true)
+   private Collection<ResourceIndexedCompositeStringUnique> myParamsCompositeStringUnique;
+
    @OneToMany(mappedBy = "mySourceResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
    @IndexedEmbedded()
+   @OptimisticLock(excluded = true)
    private Collection<ResourceLink> myResourceLinks;
 
    @Column(name = "RES_TYPE", length = RESTYPE_LEN)
    @Field
+   @OptimisticLock(excluded = true)
    private String myResourceType;
 
    @OneToMany(mappedBy = "myResource", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
+   @OptimisticLock(excluded = true)
    private Collection<SearchParamPresent> mySearchParamPresents;
 
    @OneToMany(mappedBy = "myResource", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
+   @OptimisticLock(excluded = true)
    private Set<ResourceTag> myTags;
 
    @Transient
    private transient boolean myUnchangedInCurrentOperation;
 
+   @Version
    @Column(name = "RES_VER")
    private long myVersion;
 
@@ -555,7 +593,6 @@ public class ResourceTable extends BaseHasResource implements Serializable {
      retVal.setResourceType(myResourceType);
      retVal.setVersion(myVersion);
 
-     retVal.setTitle(getTitle());
      retVal.setPublished(getPublished());
      retVal.setUpdated(getUpdated());
      retVal.setEncoding(getEncoding());
@@ -89,7 +89,7 @@ public class SearchParamPresenceSvcImpl implements ISearchParamPresenceSvc {
          searchParam = new SearchParam();
          searchParam.setResourceName(resourceType);
          searchParam.setParamName(paramName);
-         searchParam = mySearchParamDao.saveAndFlush(searchParam);
+         searchParam = mySearchParamDao.save(searchParam);
          ourLog.info("Added search param {} with pid {}", paramName, searchParam.getId());
          // Don't add the newly saved entity to the map in case the save fails
       }
@@ -36,6 +36,7 @@ import ca.uhn.fhir.rest.param.TokenParam;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
 import ca.uhn.fhir.rest.server.interceptor.ServerOperationInterceptorAdapter;
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.lang3.Validate;
 import org.apache.commons.lang3.concurrent.BasicThreadFactory;
 import org.hl7.fhir.exceptions.FHIRException;
@@ -51,8 +52,12 @@ import org.springframework.messaging.MessageHandler;
 import org.springframework.messaging.SubscribableChannel;
 import org.springframework.messaging.support.ExecutorSubscribableChannel;
 import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.transaction.PlatformTransactionManager;
+import org.springframework.transaction.TransactionStatus;
+import org.springframework.transaction.support.TransactionCallbackWithoutResult;
 import org.springframework.transaction.support.TransactionSynchronizationAdapter;
 import org.springframework.transaction.support.TransactionSynchronizationManager;
+import org.springframework.transaction.support.TransactionTemplate;
 
 import javax.annotation.PostConstruct;
 import javax.annotation.PreDestroy;
@@ -87,6 +92,9 @@ public abstract class BaseSubscriptionInterceptor<S extends IBaseResource> exten
    @Autowired(required = false)
    @Qualifier("myEventDefinitionDaoR4")
    private IFhirResourceDao<org.hl7.fhir.r4.model.EventDefinition> myEventDefinitionDaoR4;
+   @Autowired
+   private PlatformTransactionManager myTxManager;
+
    /**
     * Constructor
    */
@@ -368,6 +376,11 @@ public abstract class BaseSubscriptionInterceptor<S extends IBaseResource> exten
       myResourceDaos = theResourceDaos;
    }
 
+   @VisibleForTesting
+   public void setTxManager(PlatformTransactionManager theTxManager) {
+      myTxManager = theTxManager;
+   }
+
    @PostConstruct
    public void start() {
      for (IFhirResourceDao<?> next : myResourceDaos) {
@@ -452,8 +465,14 @@ public abstract class BaseSubscriptionInterceptor<S extends IBaseResource> exten
      registerSubscriptionCheckingSubscriber();
      registerDeliverySubscriber();
 
+      TransactionTemplate transactionTemplate = new TransactionTemplate(myTxManager);
+      transactionTemplate.execute(new TransactionCallbackWithoutResult() {
+         @Override
+         protected void doInTransactionWithoutResult(TransactionStatus status) {
            initSubscriptions();
         }
+      });
+   }
 
    protected void submitResourceModified(final ResourceModifiedMessage theMsg) {
      mySubscriptionActivatingSubscriber.handleMessage(theMsg.getOperationType(), theMsg.getId(myCtx), theMsg.getNewPayload(myCtx));
@@ -32,6 +32,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.messaging.Message;
 import org.springframework.messaging.MessagingException;
+import org.springframework.transaction.support.TransactionSynchronizationAdapter;
+import org.springframework.transaction.support.TransactionSynchronizationManager;
 
 import java.util.concurrent.ConcurrentHashMap;
 
@@ -53,22 +55,27 @@ public class SubscriptionActivatingSubscriber {
      myCtx = theSubscriptionDao.getContext();
    }
 
-   public void activateAndRegisterSubscriptionIfRequired(IBaseResource theSubscription) {
+   public void activateAndRegisterSubscriptionIfRequired(final IBaseResource theSubscription) {
      boolean subscriptionTypeApplies = BaseSubscriptionSubscriber.subscriptionTypeApplies(myCtx, theSubscription, myChannelType);
      if (subscriptionTypeApplies == false) {
        return;
      }
 
-     IPrimitiveType<?> status = myCtx.newTerser().getSingleValueOrNull(theSubscription, BaseSubscriptionInterceptor.SUBSCRIPTION_STATUS, IPrimitiveType.class);
+     final IPrimitiveType<?> status = myCtx.newTerser().getSingleValueOrNull(theSubscription, BaseSubscriptionInterceptor.SUBSCRIPTION_STATUS, IPrimitiveType.class);
      String statusString = status.getValueAsString();
 
-     String requestedStatus = Subscription.SubscriptionStatus.REQUESTED.toCode();
-     String activeStatus = Subscription.SubscriptionStatus.ACTIVE.toCode();
+     final String requestedStatus = Subscription.SubscriptionStatus.REQUESTED.toCode();
+     final String activeStatus = Subscription.SubscriptionStatus.ACTIVE.toCode();
      if (requestedStatus.equals(statusString)) {
+       TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronizationAdapter() {
+         @Override
+         public void afterCommit() {
            status.setValueAsString(activeStatus);
            ourLog.info("Activating and registering subscription {} from status {} to {}", theSubscription.getIdElement().toUnqualified().getValue(), requestedStatus, activeStatus);
            mySubscriptionDao.update(theSubscription);
            mySubscriptionInterceptor.registerSubscription(theSubscription.getIdElement(), theSubscription);
+         }
+       });
      } else if (activeStatus.equals(statusString)) {
        if (!mySubscriptionInterceptor.hasSubscription(theSubscription.getIdElement())) {
          ourLog.info("Registering active subscription {}", theSubscription.getIdElement().toUnqualified().getValue());
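Taken together, the two hunks above make subscription start-up transactional: BaseSubscriptionInterceptor now runs initSubscriptions() through a TransactionTemplate, and SubscriptionActivatingSubscriber defers the actual activation into an afterCommit synchronization. A small self-contained sketch of that combination, using illustrative names rather than the real HAPI FHIR classes, might look like this:

import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionSynchronizationAdapter;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import org.springframework.transaction.support.TransactionTemplate;

// Illustrative sketch only: run database work programmatically inside a
// transaction, and register a callback that fires only after that
// transaction has actually committed.
public class AfterCommitSketch {

   private final PlatformTransactionManager myTxManager;

   public AfterCommitSketch(PlatformTransactionManager theTxManager) {
      myTxManager = theTxManager;
   }

   public void runInTransaction(final Runnable theDatabaseWork, final Runnable theAfterCommitWork) {
      TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
      txTemplate.execute(new TransactionCallbackWithoutResult() {
         @Override
         protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
            // Participates in the transaction opened by the template
            theDatabaseWork.run();

            // Deferred until the commit succeeds, so it never runs for a
            // transaction that ends up rolling back
            TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronizationAdapter() {
               @Override
               public void afterCommit() {
                  theAfterCommitWork.run();
               }
            });
         }
      });
   }
}

TransactionSynchronizationManager.registerSynchronization only works while transaction synchronization is active, which is presumably why the interceptor now needs a PlatformTransactionManager wired in and why the test base classes below expose ourTxManager.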
@@ -8,6 +8,7 @@ import java.util.concurrent.TimeUnit;
 import javax.persistence.EntityManagerFactory;
 import javax.sql.DataSource;
 
+import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
 import org.apache.commons.dbcp2.BasicDataSource;
 import org.hibernate.jpa.HibernatePersistenceProvider;
 import org.springframework.context.annotation.*;
@@ -35,7 +36,7 @@ public class TestDstu3Config extends BaseJavaConfigDstu3 {
    }
 
    @Bean()
-   public DataSource dataSource() {
+   public BasicDataSource basicDataSource() {
      BasicDataSource retVal = new BasicDataSource() {
 
 
@@ -92,13 +93,20 @@ public class TestDstu3Config extends BaseJavaConfigDstu3 {
      * and catch any potential deadlocks caused by database connection
      * starvation
      */
-     int maxThreads = (int) (Math.random() * 6) + 1;
+     int maxThreads = (int) (Math.random() * 6.0) + 1;
      retVal.setMaxTotal(maxThreads);
 
+     return retVal;
+   }
+
+   @Bean()
+   @Primary()
+   public DataSource dataSource() {
+
      DataSource dataSource = ProxyDataSourceBuilder
-        .create(retVal)
+        .create(basicDataSource())
        // .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
-        .logSlowQueryBySlf4j(100, TimeUnit.MILLISECONDS)
+        .logSlowQueryBySlf4j(1000, TimeUnit.MILLISECONDS)
        .countQuery()
        .build();
 
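The dataSource() bean above is split in two so that tests can reach the raw connection pool directly, while the rest of the context keeps injecting the proxied, @Primary DataSource. A minimal sketch of that Spring configuration pattern, with illustrative names rather than the real TestDstu3Config:

import javax.sql.DataSource;

import org.apache.commons.dbcp2.BasicDataSource;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;

// Illustrative sketch only: expose the pool as its own bean so tests can
// autowire BasicDataSource and tweak it, and mark the wrapping DataSource
// bean @Primary so ordinary injection points still get the wrapped one.
@Configuration
public class ExampleDataSourceConfig {

   @Bean
   public BasicDataSource basicDataSource() {
      BasicDataSource pool = new BasicDataSource();
      pool.setMaxTotal(3); // deliberately small, as in the test configs
      return pool;
   }

   @Bean
   @Primary
   public DataSource dataSource() {
      // The real config wraps this in ProxyDataSourceBuilder for slow-query
      // logging; here we simply hand back the pool itself.
      return basicDataSource();
   }
}

With the pool exposed as its own bean, a test such as FhirDaoConcurrencyDstu3Test further down can autowire BasicDataSource and temporarily shrink the pool to provoke contention.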
@@ -94,12 +94,12 @@ public class TestR4Config extends BaseJavaConfigR4 {
      * and catch any potential deadlocks caused by database connection
      * starvation
      */
-     int maxThreads = (int) (Math.random() * 6) + 1;
+     int maxThreads = (int) (Math.random() * 6.0) + 1;
      retVal.setMaxTotal(maxThreads);
 
      DataSource dataSource = ProxyDataSourceBuilder
        .create(retVal)
-        .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
+        // .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
        .logSlowQueryBySlf4j(10, TimeUnit.SECONDS)
        .countQuery(new ThreadQueryCountHolder())
        .build();
@@ -921,7 +921,6 @@ public class FhirResourceDaoDstu2SearchNoFtTest extends BaseJpaDstu2Test {
      List<Patient> patients = toList(myPatientDao.search(params));
      assertEquals(1, patients.size());
      assertEquals(id1.getIdPart(), patients.get(0).getId().getIdPart());
-      assertEquals("P1TITLE", ResourceMetadataKeyEnum.TITLE.get(patients.get(0)));
 
      // Given name shouldn't return for family param
      params = new SearchParameterMap();
@@ -0,0 +1,153 @@
+package ca.uhn.fhir.jpa.dao.dstu3;
+
+import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.jpa.util.StopWatch;
+import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import ca.uhn.fhir.util.TestUtil;
+import com.phloc.commons.compare.ReverseComparator;
+import org.apache.commons.dbcp2.BasicDataSource;
+import org.hl7.fhir.dstu3.model.Bundle;
+import org.hl7.fhir.dstu3.model.Bundle.BundleType;
+import org.hl7.fhir.dstu3.model.Bundle.HTTPVerb;
+import org.hl7.fhir.dstu3.model.IdType;
+import org.hl7.fhir.dstu3.model.Organization;
+import org.hl7.fhir.dstu3.model.Patient;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.util.comparator.ComparableComparator;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
+import java.util.List;
+import java.util.concurrent.Future;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.junit.Assert.*;
+
+public class FhirDaoConcurrencyDstu3Test extends BaseJpaDstu3SystemTest {
+
+   private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirDaoConcurrencyDstu3Test.class);
+
+   @Autowired
+   public BasicDataSource myBasicDataSource;
+   private int myMaxTotal;
+
+   @After
+   public void afterResetConnectionPool() {
+      myBasicDataSource.setMaxTotal(myMaxTotal);
+   }
+
+   @Before
+   public void beforeSetUpConnectionPool() {
+      myMaxTotal = myBasicDataSource.getMaxTotal();
+      myBasicDataSource.setMaxTotal(5);
+   }
+
+   @Test
+   public void testMultipleConcurrentWritesToSameResource() throws InterruptedException {
+
+      ThreadPoolExecutor exec = new ThreadPoolExecutor(10, 10,
+         0L, TimeUnit.MILLISECONDS,
+         new LinkedBlockingQueue<Runnable>());
+
+      final AtomicInteger errors = new AtomicInteger();
+
+      List<Future> futures = new ArrayList<>();
+      for (int i = 0; i < 50; i++) {
+         final Patient p = new Patient();
+         p.setId("PID");
+         p.setActive(true);
+         p.setBirthDate(new Date());
+         p.addIdentifier().setSystem("foo1");
+         p.addIdentifier().setSystem("foo2");
+         p.addIdentifier().setSystem("foo3");
+         p.addIdentifier().setSystem("foo4");
+         p.addName().setFamily("FOO" + i);
+         p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB1");
+         p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB2");
+         p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB3");
+         p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB4");
+         p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB5");
+         p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB6");
+
+         Organization o = new Organization();
+         o.setName("ORG" + i);
+
+         final Bundle t = new Bundle();
+         t.setType(BundleType.TRANSACTION);
+         t.addEntry()
+            .setResource(p)
+            .getRequest()
+            .setUrl("Patient/PID")
+            .setMethod(HTTPVerb.PUT);
+         t.addEntry()
+            .setResource(o)
+            .getRequest()
+            .setUrl("Organization")
+            .setMethod(HTTPVerb.POST);
+
+         if (i == 0) {
+            mySystemDao.transaction(mySrd, t);
+         }
+         futures.add(exec.submit(new Runnable() {
+            @Override
+            public void run() {
+               try {
+                  mySystemDao.transaction(mySrd, t);
+               } catch (Exception e) {
+                  ourLog.error("Failed to update", e);
+                  errors.incrementAndGet();
+               }
+            }
+         }));
+      }
+
+      ourLog.info("Shutting down excutor");
+      StopWatch sw = new StopWatch();
+      for (Future next : futures) {
+         while (!next.isDone()) {
+            Thread.sleep(20);
+         }
+      }
+      exec.shutdown();
+      ourLog.info("Shut down excutor in {}ms", sw.getMillis());
+      ourLog.info("Had {} errors", errors.get());
+
+      Patient currentPatient = myPatientDao.read(new IdType("Patient/PID"));
+      Long currentVersion = currentPatient.getIdElement().getVersionIdPartAsLong();
+      ourLog.info("Current version: {}", currentVersion);
+
+      IBundleProvider historyBundle = myPatientDao.history(new IdType("Patient/PID"), null, null, mySrd);
+      List<IBaseResource> resources = historyBundle.getResources(0, 1000);
+      List<Long> versions = new ArrayList<>();
+      for (IBaseResource next : resources) {
+         versions.add(next.getIdElement().getVersionIdPartAsLong());
+      }
+
+      String message = "Current version is " + currentVersion + " - History is: " + versions;
+      ourLog.info(message);
+
+      Collections.sort(versions, new ReverseComparator<>(new ComparableComparator<Long>()));
+      Long lastVersion = versions.get(0);
+      ourLog.info("Last version: {}", lastVersion);
+
+      //assertEquals(message, currentVersion.intValue(), versions.size());
+      assertEquals(message, currentVersion, lastVersion);
+
+   }
+
+
+   @AfterClass
+   public static void afterClassClearContext() {
+      TestUtil.clearAllStaticFieldsForUnitTest();
+   }
+
+}
@@ -252,6 +252,7 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
      p.setId("Patient/A");
      String id = myPatientDao.update(p).getId().getValue();
      assertThat(id, endsWith("Patient/A/_history/1"));
+      assertEquals("1", myPatientDao.read(new IdType("Patient/A")).getIdElement().getVersionIdPart());
 
      // Second time should not result in an update
      p = new Patient();
@@ -259,6 +260,7 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
      p.setId("Patient/A");
      id = myPatientDao.update(p).getId().getValue();
      assertThat(id, endsWith("Patient/A/_history/1"));
+      assertEquals("1", myPatientDao.read(new IdType("Patient/A")).getIdElement().getVersionIdPart());
 
      // And third time should not result in an update
      p = new Patient();
@@ -266,6 +268,7 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
      p.setId("Patient/A");
      id = myPatientDao.update(p).getId().getValue();
      assertThat(id, endsWith("Patient/A/_history/1"));
+      assertEquals("1", myPatientDao.read(new IdType("Patient/A")).getIdElement().getVersionIdPart());
 
      myPatientDao.read(new IdType("Patient/A"));
      myPatientDao.read(new IdType("Patient/A/_history/1"));
@@ -62,43 +62,6 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
      myDaoConfig.setReuseCachedSearchResultsForMillis(null);
    }
 
-   @Test
-   public void testTransactionWhichFailsPersistsNothing() {
-
-      // Run a transaction which points to that practitioner
-      // in a field that isn't allowed to refer to a practitioner
-      Bundle input = new Bundle();
-      input.setType(BundleType.TRANSACTION);
-
-      Patient pt = new Patient();
-      pt.setId("PT");
-      pt.setActive(true);
-      pt.addName().setFamily("FAMILY");
-      input.addEntry()
-         .setResource(pt)
-         .getRequest().setMethod(HTTPVerb.PUT).setUrl("Patient/PT");
-
-      Observation obs = new Observation();
-      obs.setId("OBS");
-      obs.getCode().addCoding().setSystem("foo").setCode("bar");
-      obs.addPerformer().setReference("Practicioner/AAAAA");
-      input.addEntry()
-         .setResource(obs)
-         .getRequest().setMethod(HTTPVerb.PUT).setUrl("Observation/OBS");
-
-      try {
-         mySystemDao.transaction(mySrd, input);
-         fail();
-      } catch (UnprocessableEntityException e) {
-         assertThat(e.getMessage(), containsString("Resource type 'Practicioner' is not valid for this path"));
-      }
-
-      assertThat(myResourceTableDao.findAll(), empty());
-      assertThat(myResourceIndexedSearchParamStringDao.findAll(), empty());
-
-   }
-
    private Bundle createInputTransactionWithPlaceholderIdInMatchUrl(HTTPVerb theVerb) {
 
      Patient pat = new Patient();
@@ -209,6 +172,11 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
      return null;
    }
 
+   private Bundle loadBundle(String theFileName) throws IOException {
+      String req = IOUtils.toString(FhirSystemDaoDstu3Test.class.getResourceAsStream(theFileName), StandardCharsets.UTF_8);
+      return myFhirCtx.newXmlParser().parseResource(Bundle.class, req);
+   }
+
    @Test
    public void testBatchCreateWithBadRead() {
      Bundle request = new Bundle();
@@ -1222,8 +1190,7 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
 
    @Test
    public void testTransactionCreateWithPutUsingUrl2() throws Exception {
-      String req = IOUtils.toString(FhirSystemDaoDstu3Test.class.getResourceAsStream("/bundle-dstu3.xml"), StandardCharsets.UTF_8);
-      Bundle request = myFhirCtx.newXmlParser().parseResource(Bundle.class, req);
+      Bundle request = loadBundle("/bundle-dstu3.xml");
      mySystemDao.transaction(mySrd, request);
    }
 
@@ -2147,6 +2114,42 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
      }
    }
 
+   @Test
+   public void testTransactionWhichFailsPersistsNothing() {
+
+      // Run a transaction which points to that practitioner
+      // in a field that isn't allowed to refer to a practitioner
+      Bundle input = new Bundle();
+      input.setType(BundleType.TRANSACTION);
+
+      Patient pt = new Patient();
+      pt.setId("PT");
+      pt.setActive(true);
+      pt.addName().setFamily("FAMILY");
+      input.addEntry()
+         .setResource(pt)
+         .getRequest().setMethod(HTTPVerb.PUT).setUrl("Patient/PT");
+
+      Observation obs = new Observation();
+      obs.setId("OBS");
+      obs.getCode().addCoding().setSystem("foo").setCode("bar");
+      obs.addPerformer().setReference("Practicioner/AAAAA");
+      input.addEntry()
+         .setResource(obs)
+         .getRequest().setMethod(HTTPVerb.PUT).setUrl("Observation/OBS");
+
+      try {
+         mySystemDao.transaction(mySrd, input);
+         fail();
+      } catch (UnprocessableEntityException e) {
+         assertThat(e.getMessage(), containsString("Resource type 'Practicioner' is not valid for this path"));
+      }
+
+      assertThat(myResourceTableDao.findAll(), empty());
+      assertThat(myResourceIndexedSearchParamStringDao.findAll(), empty());
+
+   }
+
    /**
    * Format changed, source isn't valid
    */
@@ -2815,6 +2818,7 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
 
    }
 
+
    @AfterClass
    public static void afterClassClearContext() {
      TestUtil.clearAllStaticFieldsForUnitTest();
@@ -303,7 +303,7 @@ public class FhirResourceDaoR4UniqueSearchParamTest extends BaseJpaR4Test {
      Patient pt1 = new Patient();
      pt1.setGender(Enumerations.AdministrativeGender.MALE);
      pt1.setBirthDateElement(new DateType("2011-01-01"));
-      IIdType id1 = myPatientDao.create(pt1).getId().toUnqualifiedVersionless();
+      String id1 = myPatientDao.create(pt1).getId().toUnqualifiedVersionless().getValue();
 
      Patient pt2 = new Patient();
      pt2.setGender(Enumerations.AdministrativeGender.MALE);
@@ -316,7 +316,7 @@ public class FhirResourceDaoR4UniqueSearchParamTest extends BaseJpaR4Test {
      params.add("birthdate", new DateParam("2011-01-01"));
      IBundleProvider results = myPatientDao.search(params);
      String searchId = results.getUuid();
-      assertThat(toUnqualifiedVersionlessIdValues(results), containsInAnyOrder(id1.getValue()));
+      assertThat(toUnqualifiedVersionlessIdValues(results), containsInAnyOrder(id1));
      assertEquals(SearchBuilder.HandlerTypeEnum.UNIQUE_INDEX, SearchBuilder.getLastHandlerMechanismForUnitTest());
 
      // Other order
@@ -326,7 +326,7 @@ public class FhirResourceDaoR4UniqueSearchParamTest extends BaseJpaR4Test {
      params.add("gender", new TokenParam("http://hl7.org/fhir/administrative-gender", "male"));
      results = myPatientDao.search(params);
      assertEquals(searchId, results.getUuid());
-      assertThat(toUnqualifiedVersionlessIdValues(results), containsInAnyOrder(id1.getValue()));
+      assertThat(toUnqualifiedVersionlessIdValues(results), containsInAnyOrder(id1));
      // Null because we just reuse the last search
      assertEquals(null, SearchBuilder.getLastHandlerMechanismForUnitTest());
 
@@ -24,6 +24,7 @@ import org.eclipse.jetty.servlet.ServletHolder;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
+import org.springframework.transaction.PlatformTransactionManager;
 import org.springframework.web.context.ContextLoader;
 import org.springframework.web.context.WebApplicationContext;
 import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
@@ -47,6 +48,7 @@ public abstract class BaseResourceProviderDstu2Test extends BaseJpaDstu2Test {
    protected static GenericWebApplicationContext ourWebApplicationContext;
    protected static SubscriptionRestHookInterceptor ourRestHookSubscriptionInterceptor;
    protected static DatabaseBackedPagingProvider ourPagingProvider;
+   protected static PlatformTransactionManager ourTxManager;
 
    public BaseResourceProviderDstu2Test() {
      super();
@@ -98,6 +100,7 @@ public abstract class BaseResourceProviderDstu2Test extends BaseJpaDstu2Test {
      ourWebApplicationContext.refresh();
 
      ourRestHookSubscriptionInterceptor = ourWebApplicationContext.getBean(SubscriptionRestHookInterceptor.class);
+      ourTxManager = ourWebApplicationContext.getBean(PlatformTransactionManager.class);
 
      proxyHandler.getServletContext().setAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE, ourWebApplicationContext);
@@ -1,33 +1,43 @@
 package ca.uhn.fhir.jpa.stresstest;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-import java.util.List;
-import java.util.UUID;
-
+import ca.uhn.fhir.jpa.provider.dstu3.BaseResourceProviderDstu3Test;
 import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
+import ca.uhn.fhir.util.TestUtil;
+import com.google.common.base.Charsets;
+import com.google.common.collect.Lists;
 import org.apache.commons.io.IOUtils;
 import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpGet;
 import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
-import org.hl7.fhir.dstu3.model.*;
+import org.hl7.fhir.dstu3.model.Bundle;
 import org.hl7.fhir.dstu3.model.Bundle.BundleType;
 import org.hl7.fhir.dstu3.model.Bundle.HTTPVerb;
-import org.junit.*;
+import org.hl7.fhir.dstu3.model.CodeableConcept;
+import org.hl7.fhir.dstu3.model.Coding;
+import org.hl7.fhir.dstu3.model.Patient;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
 
-import com.google.common.base.Charsets;
-import com.google.common.collect.Lists;
-
-import ca.uhn.fhir.jpa.provider.dstu3.BaseResourceProviderDstu3Test;
-import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
-import ca.uhn.fhir.util.TestUtil;
+import java.util.List;
+import java.util.UUID;
+
+import static org.junit.Assert.*;
 
 public class StressTestDstu3Test extends BaseResourceProviderDstu3Test {
 
    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(StressTestDstu3Test.class);
    private RequestValidatingInterceptor myRequestValidatingInterceptor;
 
+   @After
+   public void after() throws Exception {
+      super.after();
+
+      ourRestServer.unregisterInterceptor(myRequestValidatingInterceptor);
+   }
+
    @Before
    public void before() throws Exception {
      super.before();
@@ -38,13 +48,6 @@ public class StressTestDstu3Test extends BaseResourceProviderDstu3Test {
      myRequestValidatingInterceptor.addValidatorModule(module);
    }
 
-   @After
-   public void after() throws Exception {
-      super.after();
-
-      ourRestServer.unregisterInterceptor(myRequestValidatingInterceptor);
-   }
-
    @Test
    public void testMultithreadedSearch() throws Exception {
@@ -75,7 +78,6 @@ public class StressTestDstu3Test extends BaseResourceProviderDstu3Test {
 
    }
 
-
    /**
    * This test prevents a deadlock that was detected with a large number of
    * threads creating resources and blocking on the searchparamcache refreshing
@@ -226,4 +228,5 @@ public class StressTestDstu3Test extends BaseResourceProviderDstu3Test {
      }
    }
 
+
 }
@@ -71,6 +71,7 @@ public class EmailSubscriptionDstu2Test extends BaseResourceProviderDstu2Test {
      mySubscriber.setEmailSender(emailSender);
      mySubscriber.setResourceDaos(myResourceDaos);
      mySubscriber.setFhirContext(myFhirCtx);
+      mySubscriber.setTxManager(ourTxManager);
      mySubscriber.start();
      ourRestServer.registerInterceptor(mySubscriber);
 
@@ -51,7 +51,9 @@ public class RestHookTestR4Test extends BaseResourceProviderR4Test {
    @After
    public void afterUnregisterRestHookListener() {
      for (IIdType next : mySubscriptionIds) {
-        ourClient.delete().resourceById(next).execute();
+        IIdType nextId = next.toUnqualifiedVersionless();
+        ourLog.info("Deleting: {}", nextId);
+        ourClient.delete().resourceById(nextId).execute();
      }
      mySubscriptionIds.clear();
 
@@ -74,4 +74,24 @@ drop table trm_concept cascade constraints;
 drop table trm_concept_pc_link cascade constraints;
 drop table trm_concept_property cascade constraints;
 
+# Delete all resources
+update hfj_res_ver set forced_id_pid = null where res_id in (select res_id from hfj_resource);
+update hfj_resource set forced_id_pid = null where res_id in (select res_id from hfj_resource);
+delete from hfj_history_tag where res_id in (select res_id from hfj_resource);
+delete from hfj_res_ver where res_id in (select res_id from hfj_resource);
+delete from hfj_forced_id where resource_pid in (select res_id from hfj_resource);
+delete from hfj_res_link where src_resource_id in (select res_id from hfj_resource);
+delete from hfj_res_link where target_resource_id in (select res_id from hfj_resource);
+delete from hfj_spidx_coords where res_id in (select res_id from hfj_resource);
+delete from hfj_spidx_date where res_id in (select res_id from hfj_resource);
+delete from hfj_spidx_number where res_id in (select res_id from hfj_resource);
+delete from hfj_spidx_quantity where res_id in (select res_id from hfj_resource);
+delete from hfj_spidx_string where res_id in (select res_id from hfj_resource);
+delete from hfj_spidx_token where res_id in (select res_id from hfj_resource);
+delete from hfj_spidx_uri where res_id in (select res_id from hfj_resource);
+delete from hfj_res_tag where res_id in (select res_id from hfj_resource);
+delete from hfj_search_result where resource_pid in (select res_id from hfj_resource);
+delete from hfj_res_param_present where res_id in (select res_id from hfj_resource);
+delete from hfj_resource where res_id in (select res_id from hfj_resource);
+
@@ -42,6 +42,13 @@
        <![CDATA[<a href="http://hapifhir.io/doc_jpa.html">JPA Page</a>]]>
        for more information.
      </action>
+      <action type="fix">
+        In certain cases in the JPA server, if multiple threads all attempted to
+        update the same resource simultaneously, the optimistic lock failure caused
+        a "gap" in the history numbers to occur. This would then cause a mysterious
+        failure when trying to update this resource further. This has been
+        resolved.
+      </action>
    </release>
    <release version="3.0.0" date="2017-09-27">
      <action type="add">
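For context on the failure mode the new changelog entry describes, here is a small, hypothetical sketch (plain JPA, not the HAPI FHIR API) of what a lost concurrent update looks like to calling code once an entity carries a @Version column: the losing writer gets an exception it can retry, rather than silently producing a gap in the version numbering.

import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.Id;
import javax.persistence.OptimisticLockException;
import javax.persistence.Version;

// Hypothetical illustration: the second of two concurrent writers working
// from the same stale copy fails fast instead of overwriting the other's
// committed change.
public class VersionConflictSketch {

   @Entity
   public static class NoteEntity {
      @Id
      public Long id;

      @Version
      public long version; // bumped by the provider on every successful flush

      public String text;
   }

   public void updateText(EntityManager em, Long id, String newText) {
      try {
         NoteEntity note = em.find(NoteEntity.class, id);
         note.text = newText;
         em.flush(); // version compared here; a stale version throws
      } catch (OptimisticLockException e) {
         // Another transaction committed first; reload and retry instead of
         // blindly overwriting its change.
      }
   }
}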