Clean up transactions in JPA

This commit is contained in:
jamesagnew 2014-06-06 17:10:31 -04:00
parent ee4ca9c934
commit 04fc0827ef
19 changed files with 284 additions and 31 deletions

View File

@ -70,6 +70,7 @@ public class FhirContext {
* Default constructor. In most cases this is the right constructor to use. * Default constructor. In most cases this is the right constructor to use.
*/ */
public FhirContext() { public FhirContext() {
super();
} }
public FhirContext(Class<? extends IResource> theResourceType) { public FhirContext(Class<? extends IResource> theResourceType) {

View File

@ -69,6 +69,14 @@ public class ResourceReferenceDt
super(theResource); super(theResource);
} }
/**
 * Returns a brief human-readable summary of this reference (the raw reference
 * value, and whether the target resource has actually been loaded) suitable for
 * logging and debugging.
 */
@Override
public String toString() {
	// NOTE: fully qualified class names are used deliberately here — this method is
	// mirrored in the code-generation template for structure classes, which cannot
	// assume extra imports are present
	org.apache.commons.lang3.builder.ToStringBuilder b = new org.apache.commons.lang3.builder.ToStringBuilder(this, org.apache.commons.lang3.builder.ToStringStyle.SHORT_PREFIX_STYLE);
	b.append("reference", getReference().getValueAsString());
	b.append("loaded", getResource()!=null);
	return b.toString();
}
/** /**
* Constructor which accepts a reference directly (this can be an ID, a partial/relative URL or a complete/absolute * Constructor which accepts a reference directly (this can be an ID, a partial/relative URL or a complete/absolute
* URL) * URL)

View File

@ -24,6 +24,7 @@ import static org.apache.commons.lang3.StringUtils.*;
import java.math.BigDecimal; import java.math.BigDecimal;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.Validate;
@ -330,6 +331,27 @@ public class IdDt extends BasePrimitive<String> {
} }
} }
/**
 * Returns a copy of this ID containing only the resource type, unqualified ID, and
 * unqualified version ID — i.e. with any URL base stripped off.
 */
public IdDt unqualified() {
	return new IdDt(getResourceType(), getUnqualifiedId(), getUnqualifiedVersionId());
}
/**
 * Returns true if this IdDt matches the given IdDt in terms of resource type, ID,
 * <b>and version ID</b>, but ignores the URL base. (Note: despite the original
 * summary mentioning only "resource type and ID", the unqualified version ID is
 * compared as well — see the last clause below.)
 *
 * @param theId
 *            The ID to compare against (may be null)
 * @return true if the two IDs are equal, ignoring any URL base
 */
@SuppressWarnings("deprecation")
public boolean equalsIgnoreBase(IdDt theId) {
	// A null argument never matches
	if (theId==null) {
		return false;
	}
	// Two empty IDs are considered equal
	if (theId.isEmpty()) {
		return isEmpty();
	}
	// ObjectUtils.equals is null-safe; it is deprecated in newer commons-lang3
	// releases in favour of java.util.Objects.equals, hence the suppression above
	return
	ObjectUtils.equals(getResourceType(),theId.getResourceType())
	&& ObjectUtils.equals(getUnqualifiedId(),theId.getUnqualifiedId())
	&& ObjectUtils.equals(getUnqualifiedVersionId(),theId.getUnqualifiedVersionId());
}
} }

View File

@ -181,7 +181,9 @@ public class JsonParser extends BaseParser implements IParser {
for (BundleEntry nextEntry : theBundle.getEntries()) { for (BundleEntry nextEntry : theBundle.getEntries()) {
eventWriter.writeStartObject(); eventWriter.writeStartObject();
writeTagWithTextNode(eventWriter, "deleted", nextEntry.getDeletedAt()); if (nextEntry.getDeletedAt() !=null&&nextEntry.getDeletedAt().isEmpty()==false) {
writeTagWithTextNode(eventWriter, "deleted", nextEntry.getDeletedAt());
}
writeTagWithTextNode(eventWriter, "title", nextEntry.getTitle()); writeTagWithTextNode(eventWriter, "title", nextEntry.getTitle());
writeTagWithTextNode(eventWriter, "id", nextEntry.getId()); writeTagWithTextNode(eventWriter, "id", nextEntry.getId());

View File

@ -1,5 +1,7 @@
package ca.uhn.fhir.rest.gclient; package ca.uhn.fhir.rest.gclient;
import ca.uhn.fhir.model.dstu.composite.IdentifierDt;
/* /*
* #%L * #%L
* HAPI FHIR Library * HAPI FHIR Library
@ -58,6 +60,11 @@ public class TokenParam implements IParam {
public ICriterion identifier(String theIdentifier) { public ICriterion identifier(String theIdentifier) {
return new TokenCriterion(getParamName(), null, theIdentifier); return new TokenCriterion(getParamName(), null, theIdentifier);
} }
/**
 * {@inheritDoc}
 */
@Override
public ICriterion identifier(IdentifierDt theIdentifier) {
	// NOTE(review): assumes theIdentifier and its system/value components are
	// non-null — a null system or value component would NPE here. TODO: confirm
	// callers always supply a populated identifier.
	return new TokenCriterion(getParamName(), theIdentifier.getSystem().getValueAsString(), theIdentifier.getValue().getValue());
}
}; };
} }
@ -101,6 +108,15 @@ public class TokenParam implements IParam {
* @return A criterion * @return A criterion
*/ */
ICriterion code(String theIdentifier); ICriterion code(String theIdentifier);
/**
* Creates a search criterion that matches against the given identifier (system and code if both are present, or whatever is present)
*
* @param theIdentifier
* The identifier
* @return A criterion
*/
ICriterion identifier(IdentifierDt theIdentifier);
} }
} }

View File

@ -1,7 +1,6 @@
package ca.uhn.fhir.parser; package ca.uhn.fhir.parser;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.stringContainsInOrder;
import static org.junit.Assert.*; import static org.junit.Assert.*;
import static org.mockito.Matchers.eq; import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
@ -24,6 +23,7 @@ import org.hamcrest.core.StringContains;
import org.hamcrest.text.StringContainsInOrder; import org.hamcrest.text.StringContainsInOrder;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
import org.mockito.internal.matchers.Not;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.Bundle; import ca.uhn.fhir.model.api.Bundle;
@ -691,7 +691,13 @@ public class JsonParserTest {
strings.addAll(Arrays.asList("\"deleted\":\""+nowDt.getValueAsString()+"\"", "\"id\":\"Patient/3\"")); strings.addAll(Arrays.asList("\"deleted\":\""+nowDt.getValueAsString()+"\"", "\"id\":\"Patient/3\""));
assertThat(bundleString, StringContainsInOrder.stringContainsInOrder(strings)); assertThat(bundleString, StringContainsInOrder.stringContainsInOrder(strings));
b.getEntries().remove(2);
bundleString = ourCtx.newJsonParser().setPrettyPrint(true).encodeBundleToString(b);
assertThat(bundleString, not(containsString("deleted")));
} }
@Test @Test
public void testSimpleBundleEncode() throws IOException { public void testSimpleBundleEncode() throws IOException {

View File

@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.dao;
import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isBlank;
import java.io.UnsupportedEncodingException;
import java.text.Normalizer; import java.text.Normalizer;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
@ -34,6 +35,7 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.entity.BaseHasResource; import ca.uhn.fhir.jpa.entity.BaseHasResource;
import ca.uhn.fhir.jpa.entity.BaseTag; import ca.uhn.fhir.jpa.entity.BaseTag;
import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.entity.ResourceHistoryTag; import ca.uhn.fhir.jpa.entity.ResourceHistoryTag;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamDate; import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamDate;
@ -44,6 +46,7 @@ import ca.uhn.fhir.jpa.entity.ResourceLink;
import ca.uhn.fhir.jpa.entity.ResourceTable; import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag; import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.entity.TagDefinition; import ca.uhn.fhir.jpa.entity.TagDefinition;
import ca.uhn.fhir.jpa.util.StopWatch;
import ca.uhn.fhir.model.api.IDatatype; import ca.uhn.fhir.model.api.IDatatype;
import ca.uhn.fhir.model.api.IPrimitiveDatatype; import ca.uhn.fhir.model.api.IPrimitiveDatatype;
import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.api.IResource;
@ -73,7 +76,9 @@ import com.google.common.collect.Collections2;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
public abstract class BaseFhirDao { public abstract class BaseFhirDao {
private FhirContext myContext = new FhirContext();
@Autowired(required=true)
private FhirContext myContext;
@PersistenceContext(name = "FHIR_UT", type = PersistenceContextType.TRANSACTION, unitName = "FHIR_UT") @PersistenceContext(name = "FHIR_UT", type = PersistenceContextType.TRANSACTION, unitName = "FHIR_UT")
private EntityManager myEntityManager; private EntityManager myEntityManager;
@ -576,11 +581,16 @@ public abstract class BaseFhirDao {
protected ArrayList<IResource> history(String theResourceName, Long theId, Date theSince, Integer theLimit) { protected ArrayList<IResource> history(String theResourceName, Long theId, Date theSince, Integer theLimit) {
List<HistoryTuple> tuples = new ArrayList<HistoryTuple>(); List<HistoryTuple> tuples = new ArrayList<HistoryTuple>();
StopWatch timer = new StopWatch();
// Get list of IDs // Get list of IDs
searchHistoryCurrentVersion(theResourceName, theId, theSince, theLimit, tuples); searchHistoryCurrentVersion(theResourceName, theId, theSince, theLimit, tuples);
assert tuples.size() < 2 || !tuples.get(tuples.size() - 2).getUpdated().before(tuples.get(tuples.size() - 1).getUpdated()); assert tuples.size() < 2 || !tuples.get(tuples.size() - 2).getUpdated().before(tuples.get(tuples.size() - 1).getUpdated());
ourLog.info("Retrieved {} history IDs from current versions in {} ms", tuples.size(), timer.getMillisAndRestart());
searchHistoryHistory(theResourceName, theId, theSince, theLimit, tuples); searchHistoryHistory(theResourceName, theId, theSince, theLimit, tuples);
assert tuples.size() < 2 || !tuples.get(tuples.size() - 2).getUpdated().before(tuples.get(tuples.size() - 1).getUpdated()); assert tuples.size() < 2 || !tuples.get(tuples.size() - 2).getUpdated().before(tuples.get(tuples.size() - 1).getUpdated());
ourLog.info("Retrieved {} history IDs from previous versions in {} ms", tuples.size(), timer.getMillisAndRestart());
// Sort merged list // Sort merged list
Collections.sort(tuples, Collections.reverseOrder()); Collections.sort(tuples, Collections.reverseOrder());
@ -588,8 +598,23 @@ public abstract class BaseFhirDao {
// Pull actual resources // Pull actual resources
List<BaseHasResource> resEntities = Lists.newArrayList(); List<BaseHasResource> resEntities = Lists.newArrayList();
int limit;
if (theLimit != null && theLimit < myConfig.getHardSearchLimit()) {
limit = theLimit;
} else {
limit = myConfig.getHardSearchLimit();
}
if (tuples.size() > limit) {
tuples = tuples.subList(0, limit);
}
searchHistoryCurrentVersion(tuples, resEntities); searchHistoryCurrentVersion(tuples, resEntities);
ourLog.info("Loaded history from current versions in {} ms", timer.getMillisAndRestart());
searchHistoryHistory(tuples, resEntities); searchHistoryHistory(tuples, resEntities);
ourLog.info("Loaded history from previous versions in {} ms", timer.getMillisAndRestart());
Collections.sort(resEntities, new Comparator<BaseHasResource>() { Collections.sort(resEntities, new Comparator<BaseHasResource>() {
@Override @Override
@ -598,12 +623,6 @@ public abstract class BaseFhirDao {
} }
}); });
int limit;
if (theLimit != null && theLimit < myConfig.getHardSearchLimit()) {
limit = theLimit;
} else {
limit = myConfig.getHardSearchLimit();
}
if (resEntities.size() > limit) { if (resEntities.size() > limit) {
resEntities = resEntities.subList(0, limit); resEntities = resEntities.subList(0, limit);
} }
@ -651,8 +670,21 @@ public abstract class BaseFhirDao {
theEntity.setUpdated(new Date()); theEntity.setUpdated(new Date());
theEntity.setResourceType(toResourceName(theResource)); theEntity.setResourceType(toResourceName(theResource));
theEntity.setResource(getContext().newJsonParser().encodeResourceToString(theResource));
theEntity.setEncoding(EncodingEnum.JSON); String encoded = myConfig.getResourceEncoding().newParser(myContext).encodeResourceToString(theResource);
ResourceEncodingEnum encoding = myConfig.getResourceEncoding();
theEntity.setEncoding(encoding);
try {
switch (encoding) {
case JSON:
theEntity.setResource(encoded.getBytes("UTF-8"));
break;
case JSONC:
theEntity.setResource(GZipUtil.compress(encoded));
break;
}
} catch (UnsupportedEncodingException e) {
}
TagList tagList = (TagList) theResource.getResourceMetadata().get(ResourceMetadataKeyEnum.TAG_LIST); TagList tagList = (TagList) theResource.getResourceMetadata().get(ResourceMetadataKeyEnum.TAG_LIST);
if (tagList != null) { if (tagList != null) {
@ -678,7 +710,20 @@ public abstract class BaseFhirDao {
} }
protected <T extends IResource> T toResource(Class<T> theResourceType, BaseHasResource theEntity) { protected <T extends IResource> T toResource(Class<T> theResourceType, BaseHasResource theEntity) {
String resourceText = theEntity.getResource(); String resourceText=null;
switch (theEntity.getEncoding()) {
case JSON:
try {
resourceText = new String(theEntity.getResource(), "UTF-8");
} catch (UnsupportedEncodingException e) {
throw new Error("Should not happen", e);
}
break;
case JSONC:
resourceText = GZipUtil.decompress(theEntity.getResource());
break;
}
IParser parser = theEntity.getEncoding().newParser(getContext()); IParser parser = theEntity.getEncoding().newParser(getContext());
T retVal = parser.parseResource(theResourceType, resourceText); T retVal = parser.parseResource(theResourceType, resourceText);
retVal.setId(theEntity.getIdDt()); retVal.setId(theEntity.getIdDt());

View File

@ -1,9 +1,12 @@
package ca.uhn.fhir.jpa.dao; package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;
public class DaoConfig { public class DaoConfig {
private int myHardSearchLimit = 1000; private int myHardSearchLimit = 1000;
private int myHardTagListLimit = 1000; private int myHardTagListLimit = 1000;
private ResourceEncodingEnum myResourceEncoding=ResourceEncodingEnum.JSONC;
public int getHardSearchLimit() { public int getHardSearchLimit() {
return myHardSearchLimit; return myHardSearchLimit;
@ -21,4 +24,12 @@ public class DaoConfig {
myHardTagListLimit = theHardTagListLimit; myHardTagListLimit = theHardTagListLimit;
} }
/**
 * Returns the encoding (e.g. plain JSON or GZIP-compressed JSON) used when storing
 * resource bodies in the database.
 */
public ResourceEncodingEnum getResourceEncoding() {
	return myResourceEncoding;
}

/**
 * Sets the encoding used when storing resource bodies in the database. Resources
 * already stored keep their own per-row encoding, so changing this setting only
 * affects resources persisted after the change.
 */
public void setResourceEncoding(ResourceEncodingEnum theResourceEncoding) {
	myResourceEncoding = theResourceEncoding;
}
} }

View File

@ -34,14 +34,16 @@ public class FhirSystemDao extends BaseFhirDao implements IFhirSystemDao {
@PersistenceContext() @PersistenceContext()
private EntityManager myEntityManager; private EntityManager myEntityManager;
private FhirContext myContext = new FhirContext();
@Transactional(propagation = Propagation.REQUIRED) @Transactional(propagation = Propagation.REQUIRED)
@Override @Override
public void transaction(List<IResource> theResources) { public void transaction(List<IResource> theResources) {
ourLog.info("Beginning transaction with {} resources", theResources.size()); ourLog.info("Beginning transaction with {} resources", theResources.size());
long start = System.currentTimeMillis();
FhirTerser terser = myContext.newTerser(); FhirTerser terser = getContext().newTerser();
int creations = 0;
int updates = 0;
Map<IdDt, IdDt> idConversions = new HashMap<IdDt, IdDt>(); Map<IdDt, IdDt> idConversions = new HashMap<IdDt, IdDt>();
List<ResourceTable> persistedResources = new ArrayList<ResourceTable>(); List<ResourceTable> persistedResources = new ArrayList<ResourceTable>();
@ -72,6 +74,11 @@ public class FhirSystemDao extends BaseFhirDao implements IFhirSystemDao {
entity = toEntity(nextResource); entity = toEntity(nextResource);
myEntityManager.persist(entity); myEntityManager.persist(entity);
myEntityManager.flush(); myEntityManager.flush();
creations++;
ourLog.info("Resource Type[{}] with ID[{}] does not exist, creating it", resourceName, nextId);
} else {
updates++;
ourLog.info("Resource Type[{}] with ID[{}] exists, updating it", resourceName, nextId);
} }
IdDt newId = new IdDt(resourceName + '/' + entity.getId()); IdDt newId = new IdDt(resourceName + '/' + entity.getId());
@ -80,8 +87,11 @@ public class FhirSystemDao extends BaseFhirDao implements IFhirSystemDao {
} else if (newId.equals(entity.getId())) { } else if (newId.equals(entity.getId())) {
ourLog.info("Transaction resource ID[{}] is being updated", newId); ourLog.info("Transaction resource ID[{}] is being updated", newId);
} else { } else {
ourLog.info("Transaction resource ID[{}] has been assigned new ID[{}]", nextId, newId); if (!nextId.getUnqualifiedId().startsWith("#")) {
idConversions.put(nextId, newId); nextId = new IdDt(resourceName + '/' + nextId.getUnqualifiedId());
ourLog.info("Transaction resource ID[{}] has been assigned new ID[{}]", nextId, newId);
idConversions.put(nextId, newId);
}
} }
persistedResources.add(entity); persistedResources.add(entity);
@ -97,7 +107,7 @@ public class FhirSystemDao extends BaseFhirDao implements IFhirSystemDao {
ourLog.info(" * Replacing resource ref {} with {}", nextId, newId); ourLog.info(" * Replacing resource ref {} with {}", nextId, newId);
nextRef.setReference(newId); nextRef.setReference(newId);
} else { } else {
ourLog.info(" * Reference [{}] does not exist in bundle", nextId); ourLog.debug(" * Reference [{}] does not exist in bundle", nextId);
} }
} }
} }
@ -108,6 +118,9 @@ public class FhirSystemDao extends BaseFhirDao implements IFhirSystemDao {
updateEntity(resource, table, table.getId() != null); updateEntity(resource, table, table.getId() != null);
} }
long delay = System.currentTimeMillis() - start;
ourLog.info("Transaction completed in {}ms with {} creations and {} updates", new Object[] {delay, creations, updates});
} }
@Override @Override

View File

@ -0,0 +1,39 @@
package ca.uhn.fhir.jpa.dao;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import org.apache.commons.io.IOUtils;
import ca.uhn.fhir.parser.DataFormatException;
/**
 * Utility for GZIP-compressing and decompressing resource bodies, using UTF-8 as
 * the character encoding in both directions.
 */
public class GZipUtil {

	/**
	 * Decompresses GZIP compressed bytes back into the original string (UTF-8).
	 *
	 * @param theResource
	 *            The GZIP compressed bytes to decompress
	 * @return The decompressed string
	 * @throws DataFormatException
	 *             If the bytes are not valid GZIP content or decompression fails
	 */
	public static String decompress(byte[] theResource) {
		GZIPInputStream is = null;
		try {
			is = new GZIPInputStream(new ByteArrayInputStream(theResource));
			return IOUtils.toString(is, "UTF-8");
		} catch (IOException e) {
			throw new DataFormatException("Failed to decompress contents", e);
		} finally {
			// Close the stream on all paths (the original leaked it even on success)
			IOUtils.closeQuietly(is);
		}
	}

	/**
	 * Compresses a string into GZIP compressed bytes (UTF-8).
	 *
	 * @param theEncoded
	 *            The string to compress
	 * @return The GZIP compressed bytes
	 * @throws DataFormatException
	 *             If compression fails
	 */
	public static byte[] compress(String theEncoded) {
		ByteArrayOutputStream os = new ByteArrayOutputStream();
		GZIPOutputStream gos = null;
		try {
			gos = new GZIPOutputStream(os);
			IOUtils.write(theEncoded, gos, "UTF-8");
			// close() (not just flush()) is required so the GZIP trailer is written
			gos.close();
			return os.toByteArray();
		} catch (IOException e) {
			// Fixed: original message was just "Compress contents"
			throw new DataFormatException("Failed to compress contents", e);
		} finally {
			// No-op if close() above already succeeded; ensures closure on failure
			IOUtils.closeQuietly(gos);
		}
	}

}

View File

@ -13,14 +13,13 @@ import javax.persistence.TemporalType;
import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt; import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.rest.server.EncodingEnum;
@MappedSuperclass @MappedSuperclass
public abstract class BaseHasResource { public abstract class BaseHasResource {
@Column(name = "RES_ENCODING", nullable = false, length=4) @Column(name = "RES_ENCODING", nullable = false, length=5)
@Enumerated(EnumType.STRING) @Enumerated(EnumType.STRING)
private EncodingEnum myEncoding; private ResourceEncodingEnum myEncoding;
@Temporal(TemporalType.TIMESTAMP) @Temporal(TemporalType.TIMESTAMP)
@Column(name = "RES_PUBLISHED", nullable = false) @Column(name = "RES_PUBLISHED", nullable = false)
@ -28,13 +27,13 @@ public abstract class BaseHasResource {
@Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = false) @Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = false)
@Lob() @Lob()
private String myResource; private byte[] myResource;
@Temporal(TemporalType.TIMESTAMP) @Temporal(TemporalType.TIMESTAMP)
@Column(name = "RES_UPDATED", nullable = false) @Column(name = "RES_UPDATED", nullable = false)
private Date myUpdated; private Date myUpdated;
public EncodingEnum getEncoding() { public ResourceEncodingEnum getEncoding() {
return myEncoding; return myEncoding;
} }
@ -48,7 +47,7 @@ public abstract class BaseHasResource {
return new InstantDt(myPublished); return new InstantDt(myPublished);
} }
public String getResource() { public byte[] getResource() {
return myResource; return myResource;
} }
@ -58,7 +57,7 @@ public abstract class BaseHasResource {
public abstract long getVersion(); public abstract long getVersion();
public void setEncoding(EncodingEnum theEncoding) { public void setEncoding(ResourceEncodingEnum theEncoding) {
myEncoding = theEncoding; myEncoding = theEncoding;
} }
@ -70,7 +69,7 @@ public abstract class BaseHasResource {
myPublished = thePublished.getValue(); myPublished = thePublished.getValue();
} }
public void setResource(String theResource) { public void setResource(byte[] theResource) {
myResource = theResource; myResource = theResource;
} }

View File

@ -0,0 +1,18 @@
package ca.uhn.fhir.jpa.entity;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.IParser;
/**
 * Defines how resource bodies are serialized for storage in the database. Both
 * options serialize the resource as JSON; JSONC additionally GZIP-compresses the
 * serialized form (the compression itself is applied by the DAO layer, not by the
 * parser returned here).
 */
public enum ResourceEncodingEnum {

	/** Json */
	JSON,

	/** Json Compressed */
	JSONC;

	/**
	 * Returns a parser suitable for this encoding. Both constants return the JSON
	 * parser, since JSONC is simply GZIP-compressed JSON.
	 */
	public IParser newParser(FhirContext theContext) {
		return theContext.newJsonParser();
	}

}

View File

@ -0,0 +1,15 @@
package ca.uhn.fhir.jpa.util;
/**
 * Minimal wall-clock timer. Starts counting when constructed; each call to
 * {@link #getMillisAndRestart()} reports the elapsed time and restarts the count.
 */
public class StopWatch {

	private long myStarted = System.currentTimeMillis();

	/**
	 * Returns the number of milliseconds elapsed since this stopwatch was created
	 * (or since the previous call to this method), then restarts the timer.
	 */
	public long getMillisAndRestart() {
		long previousStart = myStarted;
		myStarted = System.currentTimeMillis();
		return myStarted - previousStart;
	}

}

View File

@ -5,6 +5,7 @@ import static org.junit.Assert.*;
import java.io.InputStream; import java.io.InputStream;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
@ -164,6 +165,50 @@ public class FhirSystemDaoTest {
} }
/**
 * Creates two resources (one referencing the other) with client assigned IDs via a
 * transaction, then resubmits modified copies using the server assigned IDs and
 * verifies the second transaction performs updates (same IDs, new versions).
 */
@Test
public void testTransactionWithUpdate() throws Exception {
	List<IResource> res = new ArrayList<IResource>();

	// A patient with a client assigned ID...
	Patient p1 = new Patient();
	p1.getId().setValue("testTransactionWithUpdateXXX01");
	p1.addIdentifier("system", "testTransactionWithUpdate01");
	res.add(p1);

	// ...and an observation referencing the patient by that client assigned ID
	Observation p2 = new Observation();
	p2.getId().setValue("testTransactionWithUpdateXXX02");
	p2.getIdentifier().setSystem("system").setValue("testTransactionWithUpdate02");
	p2.setSubject(new ResourceReferenceDt("Patient/testTransactionWithUpdateXXX01"));
	res.add(p2);

	ourSystemDao.transaction(res);

	// Both resources should now carry server assigned IDs
	assertFalse(p1.getId().isEmpty());
	// NOTE(review): these compare the original *resource* ID string against the new
	// *version* ID — presumably intended to prove a server assigned ID was given;
	// confirm getUnqualifiedId() wasn't meant instead
	assertNotEquals("testTransactionWithUpdateXXX01", p1.getId().getUnqualifiedVersionId());
	assertFalse(p2.getId().isEmpty());
	assertNotEquals("testTransactionWithUpdateXXX02", p2.getId().getUnqualifiedVersionId());

	// The observation's subject reference should have been rewritten to the
	// patient's newly assigned ID
	assertEquals(p1.getId().unqualified().withoutVersion(), p2.getSubject().getReference());

	// Capture the post-create IDs (with and without version) for later comparison
	IdDt p1id = p1.getId().unqualified().withoutVersion();
	IdDt p1idWithVer = p1.getId().unqualified();
	IdDt p2id = p2.getId().unqualified().withoutVersion();
	IdDt p2idWithVer = p2.getId().unqualified();

	// Modify both resources and resubmit with version-less server assigned IDs,
	// which should cause updates rather than creates
	p1.addName().addFamily("Name1");
	p1.setId(p1.getId().unqualified().withoutVersion());
	p2.addReferenceRange().setHigh(123L);
	p2.setId(p2.getId().unqualified().withoutVersion());

	ourSystemDao.transaction(res);

	// Resource IDs must be unchanged, but the version IDs must have advanced
	assertEquals(p1id, p1.getId().unqualified().withoutVersion());
	assertEquals(p2id, p2.getId().unqualified().withoutVersion());
	assertNotEquals(p1idWithVer, p1.getId().unqualified());
	assertNotEquals(p2idWithVer, p2.getId().unqualified());
}
@Test @Test
public void testTransactionFromBundle() throws Exception { public void testTransactionFromBundle() throws Exception {

View File

@ -29,7 +29,6 @@
<property name="hibernate.connection.password" value="" /> <property name="hibernate.connection.password" value="" />
<property name="hibernate.jdbc.batch_size" value="0" /> <property name="hibernate.jdbc.batch_size" value="0" />
<property name="hibernate.cache.use_minimal_puts" value="false" /> <property name="hibernate.cache.use_minimal_puts" value="false" />
<property name="hibernate.show_sql" value="true" />
<property name="hibernate.cache.use_query_cache" value="false" /> <property name="hibernate.cache.use_query_cache" value="false" />
<property name="hibernate.cache.use_second_level_cache" value="false" /> <property name="hibernate.cache.use_second_level_cache" value="false" />
<property name="hibernate.cache.use_structured_entries" value="false" /> <property name="hibernate.cache.use_structured_entries" value="false" />

View File

@ -13,6 +13,8 @@
<context:annotation-config /> <context:annotation-config />
<context:mbean-server /> <context:mbean-server />
<bean class="ca.uhn.fhir.context.FhirContext"></bean>
<bean id="myDaoConfig" class="ca.uhn.fhir.jpa.dao.DaoConfig"> <bean id="myDaoConfig" class="ca.uhn.fhir.jpa.dao.DaoConfig">
</bean> </bean>
@ -63,7 +65,7 @@
<property name="persistenceUnitName" value="FHIR_UT" /> <property name="persistenceUnitName" value="FHIR_UT" />
<property name="jpaVendorAdapter"> <property name="jpaVendorAdapter">
<bean class="org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter"> <bean class="org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter">
<property name="showSql" value="true" /> <property name="showSql" value="false" />
<property name="generateDdl" value="true" /> <property name="generateDdl" value="true" />
<!-- <property name="databasePlatform" value="org.hibernate.dialect.HSQLDialect" /> --> <!-- <property name="databasePlatform" value="org.hibernate.dialect.HSQLDialect" /> -->
<property name="databasePlatform" value="org.hibernate.dialect.DerbyTenSevenDialect" /> <property name="databasePlatform" value="org.hibernate.dialect.DerbyTenSevenDialect" />

View File

@ -13,6 +13,8 @@
<context:annotation-config /> <context:annotation-config />
<context:mbean-server /> <context:mbean-server />
<bean class="ca.uhn.fhir.context.FhirContext"></bean>
<bean id="myDaoConfig" class="ca.uhn.fhir.jpa.dao.DaoConfig"> <bean id="myDaoConfig" class="ca.uhn.fhir.jpa.dao.DaoConfig">
</bean> </bean>

View File

@ -13,7 +13,7 @@
<context:annotation-config /> <context:annotation-config />
<context:mbean-server /> <context:mbean-server />
<!-- <import resource="classpath:hapi-jpaserver-springbeans.xml" /> --> <bean class="ca.uhn.fhir.context.FhirContext"></bean>
<bean id="myDaoConfig" class="ca.uhn.fhir.jpa.dao.DaoConfig"> <bean id="myDaoConfig" class="ca.uhn.fhir.jpa.dao.DaoConfig">
</bean> </bean>

View File

@ -404,6 +404,16 @@ public class ${className}
return null; return null;
} }
#end #end
#if ( ${className} == "ResourceReferenceDt" )
@Override
public String toString() {
org.apache.commons.lang3.builder.ToStringBuilder b = new org.apache.commons.lang3.builder.ToStringBuilder(this, org.apache.commons.lang3.builder.ToStringStyle.SHORT_PREFIX_STYLE);
b.append("reference", getReference().getValueAsString());
b.append("loaded", getResource()!=null);
return b.toString();
}
#end
#childExtensionTypes( $childExtensionTypes ) #childExtensionTypes( $childExtensionTypes )
} }