Performance enhancements to JPA

parent 9ac6014a3c
commit 56a71f9222
@@ -1659,63 +1659,63 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
       */
      if (thePerformIndexing) {

-        for (ResourceIndexedSearchParamString next : existingStringParams) {
+        for (ResourceIndexedSearchParamString next : removeCommon(existingStringParams, stringParams)) {
            myEntityManager.remove(next);
         }
-        for (ResourceIndexedSearchParamString next : stringParams) {
+        for (ResourceIndexedSearchParamString next : removeCommon(stringParams, existingStringParams)) {
            myEntityManager.persist(next);
         }

-        for (ResourceIndexedSearchParamToken next : existingTokenParams) {
+        for (ResourceIndexedSearchParamToken next : removeCommon(existingTokenParams, tokenParams)) {
            myEntityManager.remove(next);
         }
-        for (ResourceIndexedSearchParamToken next : tokenParams) {
+        for (ResourceIndexedSearchParamToken next : removeCommon(tokenParams, existingTokenParams)) {
            myEntityManager.persist(next);
         }

-        for (ResourceIndexedSearchParamNumber next : existingNumberParams) {
+        for (ResourceIndexedSearchParamNumber next : removeCommon(existingNumberParams, numberParams)) {
            myEntityManager.remove(next);
         }
-        for (ResourceIndexedSearchParamNumber next : numberParams) {
+        for (ResourceIndexedSearchParamNumber next : removeCommon(numberParams, existingNumberParams)) {
            myEntityManager.persist(next);
         }

-        for (ResourceIndexedSearchParamQuantity next : existingQuantityParams) {
+        for (ResourceIndexedSearchParamQuantity next : removeCommon(existingQuantityParams, quantityParams)) {
            myEntityManager.remove(next);
         }
-        for (ResourceIndexedSearchParamQuantity next : quantityParams) {
+        for (ResourceIndexedSearchParamQuantity next : removeCommon(quantityParams, existingQuantityParams)) {
            myEntityManager.persist(next);
         }

         // Store date SP's
-        for (ResourceIndexedSearchParamDate next : existingDateParams) {
+        for (ResourceIndexedSearchParamDate next : removeCommon(existingDateParams, dateParams)) {
            myEntityManager.remove(next);
         }
-        for (ResourceIndexedSearchParamDate next : dateParams) {
+        for (ResourceIndexedSearchParamDate next : removeCommon(dateParams, existingDateParams)) {
            myEntityManager.persist(next);
         }

         // Store URI SP's
-        for (ResourceIndexedSearchParamUri next : existingUriParams) {
+        for (ResourceIndexedSearchParamUri next : removeCommon(existingUriParams, uriParams)) {
            myEntityManager.remove(next);
         }
-        for (ResourceIndexedSearchParamUri next : uriParams) {
+        for (ResourceIndexedSearchParamUri next : removeCommon(uriParams, existingUriParams)) {
            myEntityManager.persist(next);
         }

         // Store Coords SP's
-        for (ResourceIndexedSearchParamCoords next : existingCoordsParams) {
+        for (ResourceIndexedSearchParamCoords next : removeCommon(existingCoordsParams, coordsParams)) {
            myEntityManager.remove(next);
         }
-        for (ResourceIndexedSearchParamCoords next : coordsParams) {
+        for (ResourceIndexedSearchParamCoords next : removeCommon(coordsParams, existingCoordsParams)) {
            myEntityManager.persist(next);
         }

         // Store resource links
-        for (ResourceLink next : existingResourceLinks) {
+        for (ResourceLink next : removeCommon(existingResourceLinks, links)) {
            myEntityManager.remove(next);
         }
-        for (ResourceLink next : links) {
+        for (ResourceLink next : removeCommon(links, existingResourceLinks)) {
            myEntityManager.persist(next);
         }
         // make sure links are indexed
@@ -1753,6 +1753,19 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
      return theEntity;
   }

+  private <T> Collection<T> removeCommon(Collection<T> theInput, Collection<T> theToRemove) {
+     assert theInput != theToRemove;
+
+     if (theInput.isEmpty()) {
+        return theInput;
+     }
+
+     ArrayList<T> retVal = new ArrayList<>(theInput);
+     retVal.removeAll(theToRemove);
+     return retVal;
+  }
+
+
   protected ResourceTable updateEntity(IBaseResource theResource, ResourceTable entity, Date theDeletedTimestampOrNull, Date theUpdateTime) {
      return updateEntity(theResource, entity, theDeletedTimestampOrNull, true, true, theUpdateTime, false, true);
   }
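The hunk above replaces the previous delete-everything-then-reinsert behaviour with a set difference: only index rows that the updated resource no longer produces are removed, and only genuinely new rows are persisted. A minimal, self-contained sketch of the idea (the string "rows" and sample values are illustrative only, not part of the commit, and the approach assumes the index entities compare equal by content via equals()/hashCode(), otherwise removeAll() would never find a match):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

public class RemoveCommonSketch {

   // Same shape as the helper added in BaseHapiFhirDao: returns the elements of
   // theInput that are NOT also present in theToRemove.
   static <T> Collection<T> removeCommon(Collection<T> theInput, Collection<T> theToRemove) {
      if (theInput.isEmpty()) {
         return theInput;
      }
      ArrayList<T> retVal = new ArrayList<>(theInput);
      retVal.removeAll(theToRemove);
      return retVal;
   }

   public static void main(String[] args) {
      // Pretend these are the index rows currently in the database and the rows
      // calculated from the updated resource contents.
      List<String> existing = Arrays.asList("name=SMITH", "active=true", "gender=male");
      List<String> calculated = Arrays.asList("name=SMITH", "active=true", "given=JOHN");

      // Only "gender=male" is deleted and only "given=JOHN" is inserted;
      // the two unchanged rows generate no SQL at all.
      System.out.println("rows to delete: " + removeCommon(existing, calculated));
      System.out.println("rows to insert: " + removeCommon(calculated, existing));
   }
}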
@@ -34,7 +34,7 @@ public abstract class BaseJpaSystemProviderDstu2Plus<T, MT> extends BaseJpaSyste
      @OperationParam(name="status")
   })
   public IBaseResource markAllResourcesForReindexing() {
-     int count = getDao().markAllResourcesForReindexing();
+     Integer count = getDao().markAllResourcesForReindexing();

      IBaseParameters retVal = ParametersUtil.newInstance(getContext());

@@ -48,11 +48,16 @@ public abstract class BaseJpaSystemProviderDstu2Plus<T, MT> extends BaseJpaSyste
      @OperationParam(name="status")
   })
   public IBaseResource performReindexingPass() {
-     int count = getDao().performReindexingPass(1000);
+     Integer count = getDao().performReindexingPass(1000);

      IBaseParameters retVal = ParametersUtil.newInstance(getContext());

-     IPrimitiveType<?> string = ParametersUtil.createString(getContext(), "Indexed " + count + " resources");
+     IPrimitiveType<?> string;
+     if (count == null) {
+        string = ParametersUtil.createString(getContext(), "Index pass already proceeding");
+     } else {
+        string = ParametersUtil.createString(getContext(), "Indexed " + count + " resources");
+     }
      ParametersUtil.addParameterToParameters(getContext(), retVal, string, "status");

      return retVal;
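Widening the return type from int to Integer makes null a meaningful value: it signals that a reindexing pass is already in progress, so no count is available. A hypothetical caller (names are illustrative, not from the commit) would branch the same way the provider above does:

public class ReindexStatusSketch {

   // Mirrors the provider logic: null means a pass is already running,
   // otherwise the value is the number of resources indexed by this call.
   static String describe(Integer count) {
      if (count == null) {
         return "Index pass already proceeding";
      }
      return "Indexed " + count + " resources";
   }

   public static void main(String[] args) {
      System.out.println(describe(null)); // Index pass already proceeding
      System.out.println(describe(1000)); // Indexed 1000 resources
   }
}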
@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.config;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
 import ca.uhn.fhir.validation.ResultSeverityEnum;
+import net.ttddyy.dsproxy.listener.ThreadQueryCountHolder;
 import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
 import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
 import org.apache.commons.dbcp2.BasicDataSource;
@@ -100,7 +101,7 @@ public class TestR4Config extends BaseJavaConfigR4 {
         .create(retVal)
         .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
         .logSlowQueryBySlf4j(10, TimeUnit.SECONDS)
-        .countQuery()
+        .countQuery(new ThreadQueryCountHolder())
         .build();

      return dataSource;
@@ -119,7 +120,7 @@ public class TestR4Config extends BaseJavaConfigR4 {

   private Properties jpaProperties() {
      Properties extraProperties = new Properties();
-     extraProperties.put("hibernate.jdbc.batch_size", "50");
+     extraProperties.put("hibernate.jdbc.batch_size", "1");
      extraProperties.put("hibernate.format_sql", "false");
      extraProperties.put("hibernate.show_sql", "false");
      extraProperties.put("hibernate.hbm2ddl.auto", "update");
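The TestR4Config changes above wrap the test datasource with a ThreadQueryCountHolder so that datasource-proxy tallies every statement and the tests can read the totals back through QueryCountHolder. A rough end-to-end sketch of that wiring, assuming an arbitrary in-memory H2 database (the JDBC URL, driver, and table are placeholders, not from the commit):

import java.sql.Connection;
import java.sql.Statement;
import javax.sql.DataSource;

import net.ttddyy.dsproxy.QueryCountHolder;
import net.ttddyy.dsproxy.listener.ThreadQueryCountHolder;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;

public class QueryCountSketch {

   public static void main(String[] args) throws Exception {
      // Placeholder in-memory database; any JDBC URL/driver would do here.
      BasicDataSource plain = new BasicDataSource();
      plain.setDriverClassName("org.h2.Driver");
      plain.setUrl("jdbc:h2:mem:sketch");

      // Same wiring as TestR4Config: every statement that flows through the proxy
      // is counted per thread and exposed via the static QueryCountHolder.
      DataSource proxied = ProxyDataSourceBuilder
         .create(plain)
         .countQuery(new ThreadQueryCountHolder())
         .build();

      QueryCountHolder.clear();
      try (Connection conn = proxied.getConnection(); Statement stmt = conn.createStatement()) {
         stmt.execute("create table foo (id int)");
         stmt.execute("insert into foo values (1)");
      }

      // Tests can now assert on how much SQL a code path actually issued.
      System.out.println("inserts: " + QueryCountHolder.getGrandTotal().getInsert());
      System.out.println("deletes: " + QueryCountHolder.getGrandTotal().getDelete());
   }
}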
@@ -8,6 +8,7 @@ import static org.mockito.Mockito.verify;

 import java.util.*;

+import net.ttddyy.dsproxy.QueryCountHolder;
 import org.hl7.fhir.r4.model.*;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
@@ -28,19 +29,10 @@ import ca.uhn.fhir.util.TestUtil;
 public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {
   private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4UpdateTest.class);

-  @Test
-  public void testReCreateMatchResource() {
-
-     CodeSystem codeSystem = new CodeSystem();
-     codeSystem.setUrl("http://foo");
-     IIdType id = myCodeSystemDao.create(codeSystem).getId().toUnqualifiedVersionless();
-
-     myCodeSystemDao.delete(id);
-
-     codeSystem = new CodeSystem();
-     codeSystem.setUrl("http://foo");
-     myCodeSystemDao.update(codeSystem, "Patient?name=FAM").getId().toUnqualifiedVersionless();
-
+  @After
+  public void afterResetDao() {
+     myDaoConfig.setResourceMetaCountHardLimit(new DaoConfig().getResourceMetaCountHardLimit());
+     myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
   }

   @Test
@@ -112,59 +104,6 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {

   }

-  @After
-  public void afterResetDao() {
-     myDaoConfig.setResourceMetaCountHardLimit(new DaoConfig().getResourceMetaCountHardLimit());
-  }
-
-  @Test
-  public void testHardMetaCapIsEnforcedOnCreate() {
-     myDaoConfig.setResourceMetaCountHardLimit(3);
-
-     IIdType id;
-     {
-        Patient patient = new Patient();
-        patient.getMeta().addTag().setSystem("http://foo").setCode("1");
-        patient.getMeta().addTag().setSystem("http://foo").setCode("2");
-        patient.getMeta().addTag().setSystem("http://foo").setCode("3");
-        patient.getMeta().addTag().setSystem("http://foo").setCode("4");
-        patient.setActive(true);
-        try {
-           id = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
-           fail();
-        } catch (UnprocessableEntityException e) {
-           assertEquals("Resource contains 4 meta entries (tag/profile/security label), maximum is 3", e.getMessage());
-        }
-     }
-  }
-
-  @Test
-  public void testHardMetaCapIsEnforcedOnMetaAdd() {
-     myDaoConfig.setResourceMetaCountHardLimit(3);
-
-     IIdType id;
-     {
-        Patient patient = new Patient();
-        patient.setActive(true);
-        id = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
-     }
-
-     {
-        Meta meta = new Meta();
-        meta.addTag().setSystem("http://foo").setCode("1");
-        meta.addTag().setSystem("http://foo").setCode("2");
-        meta.addTag().setSystem("http://foo").setCode("3");
-        meta.addTag().setSystem("http://foo").setCode("4");
-        try {
-           myPatientDao.metaAddOperation(id, meta, null);
-           fail();
-        } catch (UnprocessableEntityException e) {
-           assertEquals("Resource contains 4 meta entries (tag/profile/security label), maximum is 3", e.getMessage());
-        }
-
-     }
-  }
-
   @Test
   public void testDuplicateTagsOnAddTagsIgnored() {
      IIdType id;
@@ -179,7 +118,7 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {
      meta.addTag().setSystem("http://foo").setCode("bar").setDisplay("Val2");
      meta.addTag().setSystem("http://foo").setCode("bar").setDisplay("Val3");
      myPatientDao.metaAddOperation(id, meta, null);

      // Do a read
      {
         Patient patient = myPatientDao.read(id, mySrd);
@@ -190,7 +129,7 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {
      }

   }

   @Test
   public void testDuplicateTagsOnUpdateIgnored() {
      IIdType id;
@@ -209,7 +148,7 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {
      patient.getMeta().addTag().setSystem("http://foo").setCode("bar").setDisplay("Val3");
      myPatientDao.update(patient, mySrd).getId().toUnqualifiedVersionless();
      }

      // Do a read on second version
      {
         Patient patient = myPatientDao.read(id, mySrd);
@@ -243,6 +182,54 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {

   }

+  @Test
+  public void testHardMetaCapIsEnforcedOnCreate() {
+     myDaoConfig.setResourceMetaCountHardLimit(3);
+
+     IIdType id;
+     {
+        Patient patient = new Patient();
+        patient.getMeta().addTag().setSystem("http://foo").setCode("1");
+        patient.getMeta().addTag().setSystem("http://foo").setCode("2");
+        patient.getMeta().addTag().setSystem("http://foo").setCode("3");
+        patient.getMeta().addTag().setSystem("http://foo").setCode("4");
+        patient.setActive(true);
+        try {
+           id = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
+           fail();
+        } catch (UnprocessableEntityException e) {
+           assertEquals("Resource contains 4 meta entries (tag/profile/security label), maximum is 3", e.getMessage());
+        }
+     }
+  }
+
+  @Test
+  public void testHardMetaCapIsEnforcedOnMetaAdd() {
+     myDaoConfig.setResourceMetaCountHardLimit(3);
+
+     IIdType id;
+     {
+        Patient patient = new Patient();
+        patient.setActive(true);
+        id = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
+     }
+
+     {
+        Meta meta = new Meta();
+        meta.addTag().setSystem("http://foo").setCode("1");
+        meta.addTag().setSystem("http://foo").setCode("2");
+        meta.addTag().setSystem("http://foo").setCode("3");
+        meta.addTag().setSystem("http://foo").setCode("4");
+        try {
+           myPatientDao.metaAddOperation(id, meta, null);
+           fail();
+        } catch (UnprocessableEntityException e) {
+           assertEquals("Resource contains 4 meta entries (tag/profile/security label), maximum is 3", e.getMessage());
+        }
+
+     }
+  }
+
   @Test
   public void testMultipleUpdatesWithNoChangesDoesNotResultInAnUpdateForDiscreteUpdates() {

@@ -291,6 +278,21 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {

   }

+  @Test
+  public void testReCreateMatchResource() {
+
+     CodeSystem codeSystem = new CodeSystem();
+     codeSystem.setUrl("http://foo");
+     IIdType id = myCodeSystemDao.create(codeSystem).getId().toUnqualifiedVersionless();
+
+     myCodeSystemDao.delete(id);
+
+     codeSystem = new CodeSystem();
+     codeSystem.setUrl("http://foo");
+     myCodeSystemDao.update(codeSystem, "Patient?name=FAM").getId().toUnqualifiedVersionless();
+
+  }
+
   @Test
   public void testUpdateAndGetHistoryResource() throws InterruptedException {
      Patient patient = new Patient();
|
@ -662,6 +664,31 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testUpdateReusesIndexes() {
|
||||||
|
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
|
||||||
|
|
||||||
|
QueryCountHolder.clear();
|
||||||
|
|
||||||
|
Patient pt = new Patient();
|
||||||
|
pt.setActive(true);
|
||||||
|
pt.addName().setFamily("FAMILY1").addGiven("GIVEN1A").addGiven("GIVEN1B");
|
||||||
|
IIdType id = myPatientDao.create(pt).getId().toUnqualifiedVersionless();
|
||||||
|
|
||||||
|
ourLog.info("Now have {} deleted", QueryCountHolder.getGrandTotal().getDelete());
|
||||||
|
ourLog.info("Now have {} inserts", QueryCountHolder.getGrandTotal().getInsert());
|
||||||
|
QueryCountHolder.clear();
|
||||||
|
|
||||||
|
pt.setId(id);
|
||||||
|
pt.getNameFirstRep().addGiven("GIVEN1C");
|
||||||
|
myPatientDao.update(pt);
|
||||||
|
|
||||||
|
ourLog.info("Now have {} deleted", QueryCountHolder.getGrandTotal().getDelete());
|
||||||
|
ourLog.info("Now have {} inserts", QueryCountHolder.getGrandTotal().getInsert());
|
||||||
|
assertEquals(0, QueryCountHolder.getGrandTotal().getDelete());
|
||||||
|
assertEquals(4, QueryCountHolder.getGrandTotal().getInsert());
|
||||||
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testUpdateUnknownNumericIdFails() {
|
public void testUpdateUnknownNumericIdFails() {
|
||||||
Patient p = new Patient();
|
Patient p = new Patient();
|
||||||
|
|
|
@@ -6,6 +6,13 @@
      <title>HAPI FHIR Changelog</title>
   </properties>
   <body>
+     <release version="3.1.0" date="TBD">
+        <action type="add">
+           A performance enhancement to the JPA server has been made which reduces the number
+           of changes to index tables when updating a resource with contents that
+           only make minor changes
+        </action>
+     </release>
      <release version="3.0.0" date="2017-09-27">
         <action type="add">
            Support for FHIR R4 (current working draft) has been <![CDATA[<b>added</b>]]>