Clean up transactions in JPA
parent ee4ca9c934
commit 04fc0827ef
@@ -70,6 +70,7 @@ public class FhirContext {
	 * Default constructor. In most cases this is the right constructor to use.
	 */
	public FhirContext() {
		super();
	}

	public FhirContext(Class<? extends IResource> theResourceType) {
@@ -69,6 +69,14 @@ public class ResourceReferenceDt
		super(theResource);
	}

	@Override
	public String toString() {
		org.apache.commons.lang3.builder.ToStringBuilder b = new org.apache.commons.lang3.builder.ToStringBuilder(this, org.apache.commons.lang3.builder.ToStringStyle.SHORT_PREFIX_STYLE);
		b.append("reference", getReference().getValueAsString());
		b.append("loaded", getResource() != null);
		return b.toString();
	}

	/**
	 * Constructor which accepts a reference directly (this can be an ID, a partial/relative URL or a complete/absolute
	 * URL)
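For reference, the new toString() uses commons-lang3's SHORT_PREFIX_STYLE, so an unloaded reference prints roughly as follows (the ID here is made up for illustration):

	ResourceReferenceDt ref = new ResourceReferenceDt("Patient/123");
	// With ToStringStyle.SHORT_PREFIX_STYLE this renders along the lines of:
	// ResourceReferenceDt[reference=Patient/123,loaded=false]
	System.out.println(ref);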
@@ -24,6 +24,7 @@ import static org.apache.commons.lang3.StringUtils.*;

import java.math.BigDecimal;

import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
@@ -330,6 +331,27 @@ public class IdDt extends BasePrimitive<String> {
		}
	}

	public IdDt unqualified() {
		return new IdDt(getResourceType(), getUnqualifiedId(), getUnqualifiedVersionId());
	}

	/**
	 * Returns true if this IdDt matches the given IdDt in terms of resource type and ID, but ignores the URL base
	 */
	@SuppressWarnings("deprecation")
	public boolean equalsIgnoreBase(IdDt theId) {
		if (theId == null) {
			return false;
		}
		if (theId.isEmpty()) {
			return isEmpty();
		}
		return ObjectUtils.equals(getResourceType(), theId.getResourceType())
			&& ObjectUtils.equals(getUnqualifiedId(), theId.getUnqualifiedId())
			&& ObjectUtils.equals(getUnqualifiedVersionId(), theId.getUnqualifiedVersionId());
	}

}
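The new unqualified() and equalsIgnoreBase() helpers exist so that IDs can be compared across server base URLs. A hedged sketch of how a caller might use them (it assumes this version of IdDt parses an absolute resource URL into base, type, ID and version; the URLs are invented):

	IdDt local = new IdDt("http://example.com/fhir/Patient/123/_history/2");
	IdDt remote = new IdDt("http://other.example.org/base/Patient/123/_history/2");

	IdDt bare = local.unqualified();               // Patient/123/_history/2, base URL dropped
	boolean same = local.equalsIgnoreBase(remote); // true: type, ID and version all match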
@@ -181,7 +181,9 @@ public class JsonParser extends BaseParser implements IParser {
		for (BundleEntry nextEntry : theBundle.getEntries()) {
			eventWriter.writeStartObject();

			writeTagWithTextNode(eventWriter, "deleted", nextEntry.getDeletedAt());
			if (nextEntry.getDeletedAt() != null && nextEntry.getDeletedAt().isEmpty() == false) {
				writeTagWithTextNode(eventWriter, "deleted", nextEntry.getDeletedAt());
			}
			writeTagWithTextNode(eventWriter, "title", nextEntry.getTitle());
			writeTagWithTextNode(eventWriter, "id", nextEntry.getId());
@@ -1,5 +1,7 @@
package ca.uhn.fhir.rest.gclient;

import ca.uhn.fhir.model.dstu.composite.IdentifierDt;

/*
 * #%L
 * HAPI FHIR Library
@@ -58,6 +60,11 @@ public class TokenParam implements IParam {
			public ICriterion identifier(String theIdentifier) {
				return new TokenCriterion(getParamName(), null, theIdentifier);
			}

			@Override
			public ICriterion identifier(IdentifierDt theIdentifier) {
				return new TokenCriterion(getParamName(), theIdentifier.getSystem().getValueAsString(), theIdentifier.getValue().getValue());
			}
		};
	}

@@ -101,6 +108,15 @@ public class TokenParam implements IParam {
		 * @return A criterion
		 */
		ICriterion code(String theIdentifier);

		/**
		 * Creates a search criterion that matches against the given identifier (system and code if both are present, or whatever is present)
		 *
		 * @param theIdentifier
		 *            The identifier
		 * @return A criterion
		 */
		ICriterion identifier(IdentifierDt theIdentifier);

	}

}
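The identifier(IdentifierDt) overload lets a populated IdentifierDt be passed straight through as a system/code token criterion from the fluent client. A rough usage sketch; Patient.IDENTIFIER, the exactly() step, the two-argument IdentifierDt constructor and the shape of the search chain are assumptions based on the surrounding gclient API, not shown in this commit:

	// client is assumed to be an already-constructed generic client
	IdentifierDt mrn = new IdentifierDt("urn:example:mrn", "12345"); // system/value are made up
	Bundle results = client.search()
			.forResource(Patient.class)
			.where(Patient.IDENTIFIER.exactly().identifier(mrn))
			.execute();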
@@ -1,7 +1,6 @@
package ca.uhn.fhir.parser;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.stringContainsInOrder;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
@@ -24,6 +23,7 @@ import org.hamcrest.core.StringContains;
import org.hamcrest.text.StringContainsInOrder;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.internal.matchers.Not;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.Bundle;
@@ -690,8 +690,14 @@ public class JsonParserTest {
		strings.addAll(Arrays.asList("\"id\":\"2\"", "\"rel\":\"alternate\"", "\"href\":\"http://foo/bar\""));
		strings.addAll(Arrays.asList("\"deleted\":\"" + nowDt.getValueAsString() + "\"", "\"id\":\"Patient/3\""));
		assertThat(bundleString, StringContainsInOrder.stringContainsInOrder(strings));

		b.getEntries().remove(2);
		bundleString = ourCtx.newJsonParser().setPrettyPrint(true).encodeBundleToString(b);
		assertThat(bundleString, not(containsString("deleted")));

	}

	@Test
	public void testSimpleBundleEncode() throws IOException {
@@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.dao;

import static org.apache.commons.lang3.StringUtils.isBlank;

import java.io.UnsupportedEncodingException;
import java.text.Normalizer;
import java.util.ArrayList;
import java.util.Collection;
@@ -34,6 +35,7 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.entity.BaseHasResource;
import ca.uhn.fhir.jpa.entity.BaseTag;
import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.entity.ResourceHistoryTag;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamDate;
@@ -44,6 +46,7 @@ import ca.uhn.fhir.jpa.entity.ResourceLink;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.entity.TagDefinition;
import ca.uhn.fhir.jpa.util.StopWatch;
import ca.uhn.fhir.model.api.IDatatype;
import ca.uhn.fhir.model.api.IPrimitiveDatatype;
import ca.uhn.fhir.model.api.IResource;
@@ -73,7 +76,9 @@ import com.google.common.collect.Collections2;
import com.google.common.collect.Lists;

public abstract class BaseFhirDao {
	private FhirContext myContext = new FhirContext();

	@Autowired(required=true)
	private FhirContext myContext;

	@PersistenceContext(name = "FHIR_UT", type = PersistenceContextType.TRANSACTION, unitName = "FHIR_UT")
	private EntityManager myEntityManager;
@@ -576,11 +581,16 @@ public abstract class BaseFhirDao {
	protected ArrayList<IResource> history(String theResourceName, Long theId, Date theSince, Integer theLimit) {
		List<HistoryTuple> tuples = new ArrayList<HistoryTuple>();

		StopWatch timer = new StopWatch();

		// Get list of IDs
		searchHistoryCurrentVersion(theResourceName, theId, theSince, theLimit, tuples);
		assert tuples.size() < 2 || !tuples.get(tuples.size() - 2).getUpdated().before(tuples.get(tuples.size() - 1).getUpdated());
		ourLog.info("Retrieved {} history IDs from current versions in {} ms", tuples.size(), timer.getMillisAndRestart());

		searchHistoryHistory(theResourceName, theId, theSince, theLimit, tuples);
		assert tuples.size() < 2 || !tuples.get(tuples.size() - 2).getUpdated().before(tuples.get(tuples.size() - 1).getUpdated());
		ourLog.info("Retrieved {} history IDs from previous versions in {} ms", tuples.size(), timer.getMillisAndRestart());

		// Sort merged list
		Collections.sort(tuples, Collections.reverseOrder());
@@ -588,8 +598,23 @@ public abstract class BaseFhirDao {

		// Pull actual resources
		List<BaseHasResource> resEntities = Lists.newArrayList();

		int limit;
		if (theLimit != null && theLimit < myConfig.getHardSearchLimit()) {
			limit = theLimit;
		} else {
			limit = myConfig.getHardSearchLimit();
		}

		if (tuples.size() > limit) {
			tuples = tuples.subList(0, limit);
		}

		searchHistoryCurrentVersion(tuples, resEntities);
		ourLog.info("Loaded history from current versions in {} ms", timer.getMillisAndRestart());

		searchHistoryHistory(tuples, resEntities);
		ourLog.info("Loaded history from previous versions in {} ms", timer.getMillisAndRestart());

		Collections.sort(resEntities, new Comparator<BaseHasResource>() {
			@Override
@@ -598,12 +623,6 @@ public abstract class BaseFhirDao {
			}
		});

		int limit;
		if (theLimit != null && theLimit < myConfig.getHardSearchLimit()) {
			limit = theLimit;
		} else {
			limit = myConfig.getHardSearchLimit();
		}
		if (resEntities.size() > limit) {
			resEntities = resEntities.subList(0, limit);
		}
@@ -651,9 +670,22 @@ public abstract class BaseFhirDao {
		theEntity.setUpdated(new Date());

		theEntity.setResourceType(toResourceName(theResource));
		theEntity.setResource(getContext().newJsonParser().encodeResourceToString(theResource));
		theEntity.setEncoding(EncodingEnum.JSON);

		String encoded = myConfig.getResourceEncoding().newParser(myContext).encodeResourceToString(theResource);
		ResourceEncodingEnum encoding = myConfig.getResourceEncoding();
		theEntity.setEncoding(encoding);
		try {
			switch (encoding) {
			case JSON:
				theEntity.setResource(encoded.getBytes("UTF-8"));
				break;
			case JSONC:
				theEntity.setResource(GZipUtil.compress(encoded));
				break;
			}
		} catch (UnsupportedEncodingException e) {
		}

		TagList tagList = (TagList) theResource.getResourceMetadata().get(ResourceMetadataKeyEnum.TAG_LIST);
		if (tagList != null) {
			for (Tag next : tagList) {
@@ -678,7 +710,20 @@ public abstract class BaseFhirDao {
	}

	protected <T extends IResource> T toResource(Class<T> theResourceType, BaseHasResource theEntity) {
		String resourceText = theEntity.getResource();
		String resourceText = null;
		switch (theEntity.getEncoding()) {
		case JSON:
			try {
				resourceText = new String(theEntity.getResource(), "UTF-8");
			} catch (UnsupportedEncodingException e) {
				throw new Error("Should not happen", e);
			}
			break;
		case JSONC:
			resourceText = GZipUtil.decompress(theEntity.getResource());
			break;
		}

		IParser parser = theEntity.getEncoding().newParser(getContext());
		T retVal = parser.parseResource(theResourceType, resourceText);
		retVal.setId(theEntity.getIdDt());
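updateEntity() and toResource() are now symmetric: the resource body is stored as a byte[], and the configured ResourceEncodingEnum decides whether those bytes are plain UTF-8 JSON or gzip-compressed JSON. A standalone sketch of that round trip (GZipUtil and ResourceEncodingEnum come from this commit; the sample resource text is made up):

	import java.nio.charset.StandardCharsets;

	import ca.uhn.fhir.jpa.dao.GZipUtil;
	import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;

	public class ResourceEncodingRoundTripSketch {
		public static void main(String[] args) {
			String encoded = "{\"resourceType\":\"Patient\"}"; // parser output (made up)

			// Write path (updateEntity): serialize according to the configured encoding
			ResourceEncodingEnum encoding = ResourceEncodingEnum.JSONC;
			byte[] stored = encoding == ResourceEncodingEnum.JSON
					? encoded.getBytes(StandardCharsets.UTF_8)
					: GZipUtil.compress(encoded);

			// Read path (toResource): reverse the same decision
			String restored = encoding == ResourceEncodingEnum.JSON
					? new String(stored, StandardCharsets.UTF_8)
					: GZipUtil.decompress(stored);

			// Only the stored encoding enum tells the DAO how to interpret the bytes
			System.out.println(encoded.equals(restored)); // true
		}
	}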
@@ -1,9 +1,12 @@
package ca.uhn.fhir.jpa.dao;

import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;

public class DaoConfig {

	private int myHardSearchLimit = 1000;
	private int myHardTagListLimit = 1000;
	private ResourceEncodingEnum myResourceEncoding = ResourceEncodingEnum.JSONC;

	public int getHardSearchLimit() {
		return myHardSearchLimit;
@@ -21,4 +24,12 @@ public class DaoConfig {
		myHardTagListLimit = theHardTagListLimit;
	}

	public ResourceEncodingEnum getResourceEncoding() {
		return myResourceEncoding;
	}

	public void setResourceEncoding(ResourceEncodingEnum theResourceEncoding) {
		myResourceEncoding = theResourceEncoding;
	}

}
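The default storage encoding is now compressed JSON (JSONC). A deployment that prefers uncompressed JSON can override it on the myDaoConfig bean that the Spring context files below define; the programmatic equivalent is a one-liner:

	DaoConfig config = new DaoConfig();
	config.setResourceEncoding(ResourceEncodingEnum.JSON); // default after this commit is JSONC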
@@ -34,15 +34,17 @@ public class FhirSystemDao extends BaseFhirDao implements IFhirSystemDao {
	@PersistenceContext()
	private EntityManager myEntityManager;

	private FhirContext myContext = new FhirContext();

	@Transactional(propagation = Propagation.REQUIRED)
	@Override
	public void transaction(List<IResource> theResources) {
		ourLog.info("Beginning transaction with {} resources", theResources.size());
		long start = System.currentTimeMillis();

		FhirTerser terser = myContext.newTerser();
		FhirTerser terser = getContext().newTerser();

		int creations = 0;
		int updates = 0;

		Map<IdDt, IdDt> idConversions = new HashMap<IdDt, IdDt>();
		List<ResourceTable> persistedResources = new ArrayList<ResourceTable>();
		for (IResource nextResource : theResources) {
@@ -72,6 +74,11 @@ public class FhirSystemDao extends BaseFhirDao implements IFhirSystemDao {
				entity = toEntity(nextResource);
				myEntityManager.persist(entity);
				myEntityManager.flush();
				creations++;
				ourLog.info("Resource Type[{}] with ID[{}] does not exist, creating it", resourceName, nextId);
			} else {
				updates++;
				ourLog.info("Resource Type[{}] with ID[{}] exists, updating it", resourceName, nextId);
			}

			IdDt newId = new IdDt(resourceName + '/' + entity.getId());
@@ -80,8 +87,11 @@ public class FhirSystemDao extends BaseFhirDao implements IFhirSystemDao {
			} else if (newId.equals(entity.getId())) {
				ourLog.info("Transaction resource ID[{}] is being updated", newId);
			} else {
				ourLog.info("Transaction resource ID[{}] has been assigned new ID[{}]", nextId, newId);
				idConversions.put(nextId, newId);
				if (!nextId.getUnqualifiedId().startsWith("#")) {
					nextId = new IdDt(resourceName + '/' + nextId.getUnqualifiedId());
					ourLog.info("Transaction resource ID[{}] has been assigned new ID[{}]", nextId, newId);
					idConversions.put(nextId, newId);
				}
			}

			persistedResources.add(entity);
@@ -97,7 +107,7 @@ public class FhirSystemDao extends BaseFhirDao implements IFhirSystemDao {
					ourLog.info(" * Replacing resource ref {} with {}", nextId, newId);
					nextRef.setReference(newId);
				} else {
					ourLog.info(" * Reference [{}] does not exist in bundle", nextId);
					ourLog.debug(" * Reference [{}] does not exist in bundle", nextId);
				}
			}
		}
@@ -107,6 +117,9 @@ public class FhirSystemDao extends BaseFhirDao implements IFhirSystemDao {
			ResourceTable table = persistedResources.get(i);
			updateEntity(resource, table, table.getId() != null);
		}

		long delay = System.currentTimeMillis() - start;
		ourLog.info("Transaction completed in {}ms with {} creations and {} updates", new Object[] {delay, creations, updates});

	}
@@ -0,0 +1,39 @@
package ca.uhn.fhir.jpa.dao;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

import org.apache.commons.io.IOUtils;

import ca.uhn.fhir.parser.DataFormatException;

public class GZipUtil {

	public static String decompress(byte[] theResource) {
		GZIPInputStream is;
		try {
			is = new GZIPInputStream(new ByteArrayInputStream(theResource));
			return IOUtils.toString(is, "UTF-8");
		} catch (IOException e) {
			throw new DataFormatException("Failed to decompress contents", e);
		}
	}

	public static byte[] compress(String theEncoded) {
		try {
			ByteArrayOutputStream os = new ByteArrayOutputStream();
			GZIPOutputStream gos = new GZIPOutputStream(os);
			IOUtils.write(theEncoded, gos, "UTF-8");
			gos.close();
			os.close();
			byte[] retVal = os.toByteArray();
			return retVal;
		} catch (IOException e) {
			throw new DataFormatException("Compress contents", e);
		}
	}

}
@@ -13,14 +13,13 @@ import javax.persistence.TemporalType;

import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.rest.server.EncodingEnum;

@MappedSuperclass
public abstract class BaseHasResource {

	@Column(name = "RES_ENCODING", nullable = false, length=4)
	@Column(name = "RES_ENCODING", nullable = false, length=5)
	@Enumerated(EnumType.STRING)
	private EncodingEnum myEncoding;
	private ResourceEncodingEnum myEncoding;

	@Temporal(TemporalType.TIMESTAMP)
	@Column(name = "RES_PUBLISHED", nullable = false)
@@ -28,13 +27,13 @@ public abstract class BaseHasResource {

	@Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = false)
	@Lob()
	private String myResource;
	private byte[] myResource;

	@Temporal(TemporalType.TIMESTAMP)
	@Column(name = "RES_UPDATED", nullable = false)
	private Date myUpdated;

	public EncodingEnum getEncoding() {
	public ResourceEncodingEnum getEncoding() {
		return myEncoding;
	}
@@ -48,7 +47,7 @@ public abstract class BaseHasResource {
		return new InstantDt(myPublished);
	}

	public String getResource() {
	public byte[] getResource() {
		return myResource;
	}
@@ -58,7 +57,7 @@ public abstract class BaseHasResource {

	public abstract long getVersion();

	public void setEncoding(EncodingEnum theEncoding) {
	public void setEncoding(ResourceEncodingEnum theEncoding) {
		myEncoding = theEncoding;
	}
@@ -70,7 +69,7 @@ public abstract class BaseHasResource {
		myPublished = thePublished.getValue();
	}

	public void setResource(String theResource) {
	public void setResource(byte[] theResource) {
		myResource = theResource;
	}
@@ -0,0 +1,18 @@
package ca.uhn.fhir.jpa.entity;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.IParser;

public enum ResourceEncodingEnum {

	/** Json */
	JSON,

	/** Json Compressed */
	JSONC;

	public IParser newParser(FhirContext theContext) {
		return theContext.newJsonParser();
	}

}
@@ -0,0 +1,15 @@
package ca.uhn.fhir.jpa.util;

public class StopWatch {

	private long myStarted = System.currentTimeMillis();

	public long getMillisAndRestart() {
		long now = System.currentTimeMillis();
		long retVal = now - myStarted;
		myStarted = now;
		return retVal;
	}

}
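getMillisAndRestart() reports the elapsed time and resets the start point in one call, which is what lets history() log per-phase timings from a single instance. A small sketch (doPhaseOne/doPhaseTwo are placeholders):

	StopWatch timer = new StopWatch();

	doPhaseOne();
	long phaseOneMillis = timer.getMillisAndRestart(); // elapsed since construction

	doPhaseTwo();
	long phaseTwoMillis = timer.getMillisAndRestart(); // elapsed since the previous call only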
@@ -5,6 +5,7 @@ import static org.junit.Assert.*;

import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
@@ -163,6 +164,50 @@ public class FhirSystemDaoTest {

	}

	@Test
	public void testTransactionWithUpdate() throws Exception {
		List<IResource> res = new ArrayList<IResource>();

		Patient p1 = new Patient();
		p1.getId().setValue("testTransactionWithUpdateXXX01");
		p1.addIdentifier("system", "testTransactionWithUpdate01");
		res.add(p1);

		Observation p2 = new Observation();
		p2.getId().setValue("testTransactionWithUpdateXXX02");
		p2.getIdentifier().setSystem("system").setValue("testTransactionWithUpdate02");
		p2.setSubject(new ResourceReferenceDt("Patient/testTransactionWithUpdateXXX01"));
		res.add(p2);

		ourSystemDao.transaction(res);

		assertFalse(p1.getId().isEmpty());
		assertNotEquals("testTransactionWithUpdateXXX01", p1.getId().getUnqualifiedVersionId());
		assertFalse(p2.getId().isEmpty());
		assertNotEquals("testTransactionWithUpdateXXX02", p2.getId().getUnqualifiedVersionId());
		assertEquals(p1.getId().unqualified().withoutVersion(), p2.getSubject().getReference());

		IdDt p1id = p1.getId().unqualified().withoutVersion();
		IdDt p1idWithVer = p1.getId().unqualified();
		IdDt p2id = p2.getId().unqualified().withoutVersion();
		IdDt p2idWithVer = p2.getId().unqualified();

		p1.addName().addFamily("Name1");
		p1.setId(p1.getId().unqualified().withoutVersion());

		p2.addReferenceRange().setHigh(123L);
		p2.setId(p2.getId().unqualified().withoutVersion());

		ourSystemDao.transaction(res);

		assertEquals(p1id, p1.getId().unqualified().withoutVersion());
		assertEquals(p2id, p2.getId().unqualified().withoutVersion());
		assertNotEquals(p1idWithVer, p1.getId().unqualified());
		assertNotEquals(p2idWithVer, p2.getId().unqualified());

	}

	@Test
	public void testTransactionFromBundle() throws Exception {
@@ -29,7 +29,6 @@
	<property name="hibernate.connection.password" value="" />
	<property name="hibernate.jdbc.batch_size" value="0" />
	<property name="hibernate.cache.use_minimal_puts" value="false" />
	<property name="hibernate.show_sql" value="true" />
	<property name="hibernate.cache.use_query_cache" value="false" />
	<property name="hibernate.cache.use_second_level_cache" value="false" />
	<property name="hibernate.cache.use_structured_entries" value="false" />
@@ -13,6 +13,8 @@
	<context:annotation-config />
	<context:mbean-server />

	<bean class="ca.uhn.fhir.context.FhirContext"></bean>

	<bean id="myDaoConfig" class="ca.uhn.fhir.jpa.dao.DaoConfig">
	</bean>
@@ -63,7 +65,7 @@
		<property name="persistenceUnitName" value="FHIR_UT" />
		<property name="jpaVendorAdapter">
			<bean class="org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter">
				<property name="showSql" value="true" />
				<property name="showSql" value="false" />
				<property name="generateDdl" value="true" />
				<!-- <property name="databasePlatform" value="org.hibernate.dialect.HSQLDialect" /> -->
				<property name="databasePlatform" value="org.hibernate.dialect.DerbyTenSevenDialect" />
@@ -13,6 +13,8 @@
	<context:annotation-config />
	<context:mbean-server />

	<bean class="ca.uhn.fhir.context.FhirContext"></bean>

	<bean id="myDaoConfig" class="ca.uhn.fhir.jpa.dao.DaoConfig">
	</bean>
@@ -13,7 +13,7 @@
	<context:annotation-config />
	<context:mbean-server />

	<!-- <import resource="classpath:hapi-jpaserver-springbeans.xml" /> -->
	<bean class="ca.uhn.fhir.context.FhirContext"></bean>

	<bean id="myDaoConfig" class="ca.uhn.fhir.jpa.dao.DaoConfig">
	</bean>
@@ -404,6 +404,16 @@ public class ${className}
		return null;
	}
#end
#if ( ${className} == "ResourceReferenceDt" )
	@Override
	public String toString() {
		org.apache.commons.lang3.builder.ToStringBuilder b = new org.apache.commons.lang3.builder.ToStringBuilder(this, org.apache.commons.lang3.builder.ToStringStyle.SHORT_PREFIX_STYLE);
		b.append("reference", getReference().getValueAsString());
		b.append("loaded", getResource() != null);
		return b.toString();
	}

#end
#childExtensionTypes( $childExtensionTypes )

}