Add an optimistic lock to the ResourceTable
parent ff85503acb
commit eb2787d30c
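In the diff below, ResourceTable's RES_VER column is mapped as a JPA @Version field and most other columns are marked with Hibernate's @OptimisticLock(excluded = true). As background, here is a minimal standalone sketch of that mechanism; the entity and its fields are illustrative only, not HAPI FHIR's actual classes:

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Version;

import org.hibernate.annotations.OptimisticLock;

// Illustrative only, not HAPI FHIR's actual class. With a @Version field, Hibernate
// appends "AND RES_VER = ?" to every UPDATE; if another transaction committed first,
// no row matches and the flush/commit fails with an optimistic-lock exception instead
// of silently overwriting the newer data.
@Entity
public class ExampleResourceEntity {

	@Id
	@GeneratedValue
	private Long myId;

	// Incremented automatically by Hibernate on each successful versioned update
	@Version
	@Column(name = "RES_VER")
	private long myVersion;

	// A normal column: changing it participates in the optimistic lock and bumps RES_VER
	@Column(name = "RES_TITLE")
	private String myTitle;

	// Excluded from the optimistic lock: changing only this field does not bump RES_VER,
	// so background housekeeping (e.g. reindexing) does not race against real writers
	@Column(name = "SP_INDEX_STATUS")
	@OptimisticLock(excluded = true)
	private Long myIndexStatus;

	public long getVersion() {
		return myVersion;
	}

	public void setTitle(String theTitle) {
		myTitle = theTitle;
	}

	public void setIndexStatus(Long theIndexStatus) {
		myIndexStatus = theIndexStatus;
	}
}

This is presumably why nearly every field in the diff carries @OptimisticLock(excluded = true): only a genuine new resource version should advance RES_VER, while the indexing and search-parameter columns can be rewritten without conflicting with concurrent writers.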
@@ -1011,14 +1011,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
changed = true;
}

if (theResource instanceof IResource) {
String title = ResourceMetadataKeyEnum.TITLE.get((IResource) theResource);
if (title != null && title.length() > BaseHasResource.MAX_TITLE_LENGTH) {
title = title.substring(0, BaseHasResource.MAX_TITLE_LENGTH);
}
theEntity.setTitle(title);
}

return changed;
}

@@ -1052,10 +1044,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
ResourceMetadataKeyEnum.UPDATED.put(res, theEntity.getUpdated());
IDao.RESOURCE_PID.put(res, theEntity.getId());

if (theEntity.getTitle() != null) {
ResourceMetadataKeyEnum.TITLE.put(res, theEntity.getTitle());
}

Collection<? extends BaseTag> tags = theEntity.getTags();
if (theEntity.isHasTags()) {
TagList tagList = new TagList();

@@ -20,52 +20,54 @@ package ca.uhn.fhir.jpa.entity;
* #L%
*/

import java.util.Collection;
import java.util.Date;

import javax.persistence.*;

import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import org.hibernate.annotations.OptimisticLock;

import javax.persistence.*;
import java.util.Collection;
import java.util.Date;

@MappedSuperclass
public abstract class BaseHasResource {

public static final int MAX_TITLE_LENGTH = 100;

@Column(name = "RES_DELETED_AT", nullable = true)
@Temporal(TemporalType.TIMESTAMP)
private Date myDeleted;

@Column(name = "RES_ENCODING", nullable = false, length = 5)
@Enumerated(EnumType.STRING)
@OptimisticLock(excluded = true)
private ResourceEncodingEnum myEncoding;

@Column(name = "RES_VERSION", nullable = true, length = 7)
@Enumerated(EnumType.STRING)
@OptimisticLock(excluded = true)
private FhirVersionEnum myFhirVersion;

@OneToOne(optional = true, fetch = FetchType.EAGER, cascade = {}, orphanRemoval = false)
@JoinColumn(name = "FORCED_ID_PID")
@OptimisticLock(excluded = true)
private ForcedId myForcedId;

@Column(name = "HAS_TAGS", nullable = false)
@OptimisticLock(excluded = true)
private boolean myHasTags;

@Temporal(TemporalType.TIMESTAMP)
@Column(name = "RES_PUBLISHED", nullable = false)
@OptimisticLock(excluded = true)
private Date myPublished;

@Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = false)
@Lob()
@OptimisticLock(excluded = true)
private byte[] myResource;

@Column(name = "RES_TITLE", nullable = true, length = MAX_TITLE_LENGTH)
private String myTitle;

@Temporal(TemporalType.TIMESTAMP)
@Column(name = "RES_UPDATED", nullable = false)
@OptimisticLock(excluded = true)
private Date myUpdated;

public abstract BaseTag addTag(TagDefinition theDef);

@@ -74,18 +76,36 @@ public abstract class BaseHasResource {
return myDeleted;
}

public void setDeleted(Date theDate) {
myDeleted = theDate;
}

public ResourceEncodingEnum getEncoding() {
return myEncoding;
}

public void setEncoding(ResourceEncodingEnum theEncoding) {
myEncoding = theEncoding;
}

public FhirVersionEnum getFhirVersion() {
return myFhirVersion;
}

public void setFhirVersion(FhirVersionEnum theFhirVersion) {
myFhirVersion = theFhirVersion;
}

public ForcedId getForcedId() {
return myForcedId;
}

public void setForcedId(ForcedId theForcedId) {
myForcedId = theForcedId;
}

public abstract Long getId();

public abstract IdDt getIdDt();

public InstantDt getPublished() {

@@ -96,22 +116,30 @@ public abstract class BaseHasResource {
}
}

public void setPublished(InstantDt thePublished) {
myPublished = thePublished.getValue();
}

public byte[] getResource() {
return myResource;
}

public void setResource(byte[] theResource) {
myResource = theResource;
}

public abstract String getResourceType();

public abstract Collection<? extends BaseTag> getTags();

public String getTitle() {
return myTitle;
}

public InstantDt getUpdated() {
return new InstantDt(myUpdated);
}

public void setUpdated(InstantDt theUpdated) {
myUpdated = theUpdated.getValue();
}

public Date getUpdatedDate() {
return myUpdated;
}

@@ -122,24 +150,6 @@ public abstract class BaseHasResource {
return myHasTags;
}

public void setDeleted(Date theDate) {
myDeleted = theDate;
}

public abstract Long getId();

public void setEncoding(ResourceEncodingEnum theEncoding) {
myEncoding = theEncoding;
}

public void setFhirVersion(FhirVersionEnum theFhirVersion) {
myFhirVersion = theFhirVersion;
}

public void setForcedId(ForcedId theForcedId) {
myForcedId = theForcedId;
}

public void setHasTags(boolean theHasTags) {
myHasTags = theHasTags;
}

@@ -148,24 +158,8 @@ public abstract class BaseHasResource {
myPublished = thePublished;
}

public void setPublished(InstantDt thePublished) {
myPublished = thePublished.getValue();
}

public void setResource(byte[] theResource) {
myResource = theResource;
}

public void setTitle(String theTitle) {
myTitle = theTitle;
}

public void setUpdated(Date theUpdated) {
myUpdated = theUpdated;
}

public void setUpdated(InstantDt theUpdated) {
myUpdated = theUpdated.getValue();
}

}

@@ -37,6 +37,7 @@ import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory;
import org.apache.lucene.analysis.standard.StandardFilterFactory;
import org.apache.lucene.analysis.standard.StandardTokenizerFactory;
import org.hibernate.annotations.ColumnDefault;
import org.hibernate.annotations.OptimisticLock;
import org.hibernate.search.annotations.*;
import org.hibernate.search.annotations.Parameter;

@@ -124,12 +125,15 @@ public class ResourceTable extends BaseHasResource implements Serializable {
@Field(name = "myContentTextNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteNGramAnalyzer")),
@Field(name = "myContentTextPhonetic", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompletePhoneticAnalyzer"))
})
@OptimisticLock(excluded = true)
private String myContentText;

@Column(name = "HASH_SHA256", length = 64, nullable = true)
@OptimisticLock(excluded = true)
private String myHashSha256;

@Column(name = "SP_HAS_LINKS")
@OptimisticLock(excluded = true)
private boolean myHasLinks;

@Id

@@ -139,12 +143,15 @@ public class ResourceTable extends BaseHasResource implements Serializable {
private Long myId;

@OneToMany(mappedBy = "myTargetResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@OptimisticLock(excluded = true)
private Collection<ResourceLink> myIncomingResourceLinks;

@Column(name = "SP_INDEX_STATUS", nullable = true)
@OptimisticLock(excluded = true)
private Long myIndexStatus;

@Column(name = "RES_LANGUAGE", length = MAX_LANGUAGE_LENGTH, nullable = true)
@OptimisticLock(excluded = true)
private String myLanguage;

/**

@@ -157,69 +164,100 @@ public class ResourceTable extends BaseHasResource implements Serializable {
@Field(name = "myNarrativeTextNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteNGramAnalyzer")),
@Field(name = "myNarrativeTextPhonetic", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompletePhoneticAnalyzer"))
})
@OptimisticLock(excluded = true)
private String myNarrativeText;

@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@OptimisticLock(excluded = true)
private Collection<ResourceIndexedSearchParamCoords> myParamsCoords;

@Column(name = "SP_COORDS_PRESENT")
@OptimisticLock(excluded = true)
private boolean myParamsCoordsPopulated;

@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@OptimisticLock(excluded = true)
private Collection<ResourceIndexedSearchParamDate> myParamsDate;

@Column(name = "SP_DATE_PRESENT")
@OptimisticLock(excluded = true)
private boolean myParamsDatePopulated;

@OptimisticLock(excluded = true)
@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
private Collection<ResourceIndexedSearchParamNumber> myParamsNumber;

@Column(name = "SP_NUMBER_PRESENT")
@OptimisticLock(excluded = true)
private boolean myParamsNumberPopulated;

@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@OptimisticLock(excluded = true)
private Collection<ResourceIndexedSearchParamQuantity> myParamsQuantity;

@Column(name = "SP_QUANTITY_PRESENT")
@OptimisticLock(excluded = true)
private boolean myParamsQuantityPopulated;

@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@OptimisticLock(excluded = true)
private Collection<ResourceIndexedSearchParamString> myParamsString;

@Column(name = "SP_STRING_PRESENT")
@OptimisticLock(excluded = true)
private boolean myParamsStringPopulated;

@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@OptimisticLock(excluded = true)
private Collection<ResourceIndexedSearchParamToken> myParamsToken;

@Column(name = "SP_TOKEN_PRESENT")
@OptimisticLock(excluded = true)
private boolean myParamsTokenPopulated;

@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@OptimisticLock(excluded = true)
private Collection<ResourceIndexedSearchParamUri> myParamsUri;

@Column(name = "SP_URI_PRESENT")
@OptimisticLock(excluded = true)
private boolean myParamsUriPopulated;

@Column(name = "RES_PROFILE", length = MAX_PROFILE_LENGTH, nullable = true)
@OptimisticLock(excluded = true)
private String myProfile;
@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
private Collection<ResourceIndexedCompositeStringUnique> myParamsCompositeStringUnique;

// Added in 3.0.0 - Should make this a primitive Boolean at some point
@OptimisticLock(excluded = true)
@Column(name = "SP_CMPSTR_UNIQ_PRESENT")
private Boolean myParamsCompositeStringUniquePresent = false;

@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@OptimisticLock(excluded = true)
private Collection<ResourceIndexedCompositeStringUnique> myParamsCompositeStringUnique;

@OneToMany(mappedBy = "mySourceResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@IndexedEmbedded()
@OptimisticLock(excluded = true)
private Collection<ResourceLink> myResourceLinks;

@Column(name = "RES_TYPE", length = RESTYPE_LEN)
@Field
@OptimisticLock(excluded = true)
private String myResourceType;

@OneToMany(mappedBy = "myResource", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
@OptimisticLock(excluded = true)
private Collection<SearchParamPresent> mySearchParamPresents;

@OneToMany(mappedBy = "myResource", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
@OptimisticLock(excluded = true)
private Set<ResourceTag> myTags;

@Transient
private transient boolean myUnchangedInCurrentOperation;

@Version
@Column(name = "RES_VER")
private long myVersion;

@@ -555,7 +593,6 @@ public class ResourceTable extends BaseHasResource implements Serializable {
retVal.setResourceType(myResourceType);
retVal.setVersion(myVersion);

retVal.setTitle(getTitle());
retVal.setPublished(getPublished());
retVal.setUpdated(getUpdated());
retVal.setEncoding(getEncoding());

@@ -8,6 +8,7 @@ import java.util.concurrent.TimeUnit;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;

import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.springframework.context.annotation.*;

@@ -33,9 +34,9 @@ public class TestDstu3Config extends BaseJavaConfigDstu3 {
public DaoConfig daoConfig() {
return new DaoConfig();
}

@Bean()
public DataSource dataSource() {
public BasicDataSource basicDataSource() {
BasicDataSource retVal = new BasicDataSource() {

@@ -48,36 +49,36 @@ public class TestDstu3Config extends BaseJavaConfigDstu3 {
ourLog.error("Exceeded maximum wait for connection", e);
logGetConnectionStackTrace();
// if ("true".equals(System.getProperty("ci"))) {
fail("Exceeded maximum wait for connection: "+ e.toString());
fail("Exceeded maximum wait for connection: "+ e.toString());
// }
// System.exit(1);
retVal = null;
}

try {
throw new Exception();
} catch (Exception e) {
myLastStackTrace = e;
}

return retVal;
}

private void logGetConnectionStackTrace() {
StringBuilder b = new StringBuilder();
b.append("Last connection request stack trace:");
for (StackTraceElement next : myLastStackTrace.getStackTrace()) {
b.append("\n ");
b.append(next.getClassName());
b.append(".");
b.append(next.getMethodName());
b.append("(");
b.append(next.getFileName());
b.append(":");
b.append(next.getLineNumber());
b.append(")");
}
ourLog.info(b.toString());
StringBuilder b = new StringBuilder();
b.append("Last connection request stack trace:");
for (StackTraceElement next : myLastStackTrace.getStackTrace()) {
b.append("\n ");
b.append(next.getClassName());
b.append(".");
b.append(next.getMethodName());
b.append("(");
b.append(next.getFileName());
b.append(":");
b.append(next.getLineNumber());
b.append(")");
}
ourLog.info(b.toString());
}

};

@@ -95,10 +96,17 @@ public class TestDstu3Config extends BaseJavaConfigDstu3 {
int maxThreads = (int) (Math.random() * 6) + 1;
retVal.setMaxTotal(maxThreads);

return retVal;
}

@Bean()
@Primary()
public DataSource dataSource() {

DataSource dataSource = ProxyDataSourceBuilder
.create(retVal)
// .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
.logSlowQueryBySlf4j(100, TimeUnit.MILLISECONDS)
.create(basicDataSource())
.logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
// .logSlowQueryBySlf4j(100, TimeUnit.MILLISECONDS)
.countQuery()
.build();

@@ -0,0 +1,153 @@
package ca.uhn.fhir.jpa.dao.dstu3;

import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.util.StopWatch;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.util.TestUtil;
import com.phloc.commons.compare.ReverseComparator;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Bundle.BundleType;
import org.hl7.fhir.dstu3.model.Bundle.HTTPVerb;
import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.dstu3.model.Organization;
import org.hl7.fhir.dstu3.model.Patient;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.comparator.ComparableComparator;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import static org.junit.Assert.*;

public class FhirDaoConcurrencyDstu3Test extends BaseJpaDstu3SystemTest {

private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirDaoConcurrencyDstu3Test.class);

@Autowired
public BasicDataSource myBasicDataSource;
private int myMaxTotal;

@After
public void afterResetConnectionPool() {
myBasicDataSource.setMaxTotal(myMaxTotal);
}

@Before
public void beforeSetUpConnectionPool() {
myMaxTotal = myBasicDataSource.getMaxTotal();
myBasicDataSource.setMaxTotal(5);
}

@Test
public void testMultipleConcurrentWritesToSameResource() throws InterruptedException {

ThreadPoolExecutor exec = new ThreadPoolExecutor(10, 10,
0L, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<Runnable>());

final AtomicInteger errors = new AtomicInteger();

List<Future> futures = new ArrayList<>();
for (int i = 0; i < 50; i++) {
final Patient p = new Patient();
p.setId("PID");
p.setActive(true);
p.setBirthDate(new Date());
p.addIdentifier().setSystem("foo1");
p.addIdentifier().setSystem("foo2");
p.addIdentifier().setSystem("foo3");
p.addIdentifier().setSystem("foo4");
p.addName().setFamily("FOO" + i);
p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB1");
p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB2");
p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB3");
p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB4");
p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB5");
p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB6");

Organization o = new Organization();
o.setName("ORG" + i);

final Bundle t = new Bundle();
t.setType(BundleType.TRANSACTION);
t.addEntry()
.setResource(p)
.getRequest()
.setUrl("Patient/PID")
.setMethod(HTTPVerb.PUT);
t.addEntry()
.setResource(o)
.getRequest()
.setUrl("Organization")
.setMethod(HTTPVerb.POST);

if (i == 0) {
mySystemDao.transaction(mySrd, t);
}
futures.add(exec.submit(new Runnable() {
@Override
public void run() {
try {
mySystemDao.transaction(mySrd, t);
} catch (Exception e) {
ourLog.error("Failed to update", e);
errors.incrementAndGet();
}
}
}));
}

ourLog.info("Shutting down excutor");
StopWatch sw = new StopWatch();
for (Future next : futures) {
while (!next.isDone()) {
Thread.sleep(20);
}
}
exec.shutdown();
ourLog.info("Shut down excutor in {}ms", sw.getMillis());
ourLog.info("Had {} errors", errors.get());

Patient currentPatient = myPatientDao.read(new IdType("Patient/PID"));
Long currentVersion = currentPatient.getIdElement().getVersionIdPartAsLong();
ourLog.info("Current version: {}", currentVersion);

IBundleProvider historyBundle = myPatientDao.history(new IdType("Patient/PID"),null,null,mySrd);
List<IBaseResource> resources = historyBundle.getResources(0, 1000);
List<Long> versions = new ArrayList<>();
for (IBaseResource next : resources) {
versions.add(next.getIdElement().getVersionIdPartAsLong());
}

String message = "Current version is " + currentVersion + " - History is: " + versions;
ourLog.info(message);

Collections.sort(versions, new ReverseComparator<>(new ComparableComparator<Long>()));
Long lastVersion = versions.get(0);
ourLog.info("Last version: {}", lastVersion);

//assertEquals(message, currentVersion.intValue(), versions.size());
assertEquals(message, currentVersion, lastVersion);

}

@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}

}

@@ -252,6 +252,7 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
p.setId("Patient/A");
String id = myPatientDao.update(p).getId().getValue();
assertThat(id, endsWith("Patient/A/_history/1"));
assertEquals("1", myPatientDao.read(new IdType("Patient/A")).getIdElement().getVersionIdPart());

// Second time should not result in an update
p = new Patient();

@@ -259,6 +260,7 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
p.setId("Patient/A");
id = myPatientDao.update(p).getId().getValue();
assertThat(id, endsWith("Patient/A/_history/1"));
assertEquals("1", myPatientDao.read(new IdType("Patient/A")).getIdElement().getVersionIdPart());

// And third time should not result in an update
p = new Patient();

@@ -266,6 +268,7 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
p.setId("Patient/A");
id = myPatientDao.update(p).getId().getValue();
assertThat(id, endsWith("Patient/A/_history/1"));
assertEquals("1", myPatientDao.read(new IdType("Patient/A")).getIdElement().getVersionIdPart());

myPatientDao.read(new IdType("Patient/A"));
myPatientDao.read(new IdType("Patient/A/_history/1"));

@@ -8,7 +8,6 @@ import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
import ca.uhn.fhir.jpa.util.StopWatch;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;

@@ -38,12 +37,9 @@ import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

@@ -2499,7 +2495,7 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
IdType medOrderId1 = new IdType(outcome.getEntry().get(1).getResponse().getLocation());

/*
* Again!
* Again!
*/

bundle = new Bundle();

@@ -2823,91 +2819,6 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
}

@Test
public void testMultipleConcurrentWritesToSameResource() throws InterruptedException {

ThreadPoolExecutor exec = new ThreadPoolExecutor(10, 10,
0L, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<Runnable>());

final AtomicInteger errors = new AtomicInteger();

List<Future> futures = new ArrayList<>();
for (int i = 0; i < 50; i++) {
final Patient p = new Patient();
p.setId("PID");
p.setActive(true);
p.setBirthDate(new Date());
p.addIdentifier().setSystem("foo1");
p.addIdentifier().setSystem("foo2");
p.addIdentifier().setSystem("foo3");
p.addIdentifier().setSystem("foo4");
p.addName().setFamily("FOO" + i);
p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB1");
p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB2");
p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB3");
p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB4");
p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB5");
p.addName().addGiven("AAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBB6");

Organization o = new Organization();
o.setName("ORG" + i);

final Bundle t = new Bundle();
t.setType(BundleType.TRANSACTION);
t.addEntry()
.setResource(p)
.getRequest()
.setUrl("Patient/PID")
.setMethod(HTTPVerb.PUT);
t.addEntry()
.setResource(o)
.getRequest()
.setUrl("Organization")
.setMethod(HTTPVerb.POST);

if (i == 0) {
mySystemDao.transaction(mySrd, t);
}
futures.add(exec.submit(new Runnable() {
@Override
public void run() {
try {
mySystemDao.transaction(mySrd, t);
} catch (Exception e) {
ourLog.error("Failed to update", e);
errors.incrementAndGet();
}
}
}));
}

ourLog.info("Shutting down excutor");
StopWatch sw = new StopWatch();
for (Future next : futures) {
while (!next.isDone()) {
Thread.sleep(20);
}
}
exec.shutdown();
ourLog.info("Shut down excutor in {}ms", sw.getMillis());
ourLog.info("Had {} errors", errors.get());

Patient currentPatient = myPatientDao.read(new IdType("Patient/PID"));
Long currentVersion = currentPatient.getIdElement().getVersionIdPartAsLong();
ourLog.info("Current version: {}", currentVersion);

IBundleProvider historyBundle = myPatientDao.history(new IdType("Patient/PID"),null,null,mySrd);
Patient lastPatient = (Patient) historyBundle.getResources(0,1).get(0);
Long lastVersion = lastPatient.getIdElement().getVersionIdPartAsLong();
ourLog.info("Last version: {}", lastVersion);

assertEquals(currentVersion, lastVersion);

}

@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();

@@ -37,6 +37,13 @@
<![CDATA[<a href="http://hapifhir.io/doc_jpa.html">JPA Page</a>]]>
for more information.
</action>
<action type="fix">
In certain cases in the JPA server, if multiple threads all attempted to
update the same resource simultaneously, the optimistic lock failure caused
a "gap" in the history numbers to occur. This would then cause a mysterious
failure when trying to update this resource further. This has been
resolved.
</action>
</release>
<release version="3.0.0" date="2017-09-27">
<action type="add">
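The changelog entry above describes the practical consequence of the change: when several threads write the same resource at once, the losers now fail with a version conflict instead of leaving a gap in the history numbering. As a generic illustration of how such a conflict is usually handled by the caller, here is a hedged sketch in plain JPA; it reuses the illustrative ExampleResourceEntity from the sketch near the top of this page and is not HAPI FHIR API:

import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.OptimisticLockException;

// Generic retry-on-conflict helper; illustrative only, not part of HAPI FHIR.
public class OptimisticRetryExample {

	private final EntityManagerFactory myEntityManagerFactory;

	public OptimisticRetryExample(EntityManagerFactory theEntityManagerFactory) {
		myEntityManagerFactory = theEntityManagerFactory;
	}

	public void updateTitleWithRetry(Long theId, String theNewTitle) {
		for (int attempt = 1; ; attempt++) {
			EntityManager em = myEntityManagerFactory.createEntityManager();
			try {
				em.getTransaction().begin();
				// Re-read inside the new transaction so the entity carries the current RES_VER
				ExampleResourceEntity entity = em.find(ExampleResourceEntity.class, theId);
				entity.setTitle(theNewTitle);
				em.flush(); // the versioned UPDATE runs here; a concurrent writer makes it fail
				em.getTransaction().commit();
				return;
			} catch (OptimisticLockException e) {
				if (em.getTransaction().isActive()) {
					em.getTransaction().rollback();
				}
				if (attempt >= 3) {
					throw e; // give up after a few attempts
				}
			} finally {
				em.close();
			}
		}
	}
}

The design choice is the usual one for optimistic locking: rather than holding row locks while users edit, each writer re-reads the latest version, applies its change, and retries (or reports a conflict) if someone else got there first.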