Move query count tests to consolidate them, and avoid an accidental rewrite of existing indexes in some conditions
James Agnew 2018-10-24 17:54:58 -03:00
parent f601b212ad
commit 794d9145e9
20 changed files with 3736 additions and 89 deletions

View File

@@ -31,7 +31,7 @@ import java.util.List;
* @author james
*
*/
public class StringClientParam extends BaseClientParam implements IParam {
private final String myParamName;

View File

@@ -2086,6 +2086,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
*/
if (thePerformIndexing) {
calculateHashes(stringParams);
for (ResourceIndexedSearchParamString next : removeCommon(existingStringParams, stringParams)) {
next.setDaoConfig(myConfig);
myEntityManager.remove(next);
@@ -2095,6 +2096,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
myEntityManager.persist(next);
}
calculateHashes(tokenParams);
for (ResourceIndexedSearchParamToken next : removeCommon(existingTokenParams, tokenParams)) {
myEntityManager.remove(next);
theEntity.getParamsToken().remove(next);
@@ -2103,6 +2105,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
myEntityManager.persist(next);
}
calculateHashes(numberParams);
for (ResourceIndexedSearchParamNumber next : removeCommon(existingNumberParams, numberParams)) {
myEntityManager.remove(next);
theEntity.getParamsNumber().remove(next);
@@ -2111,6 +2114,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
myEntityManager.persist(next);
}
calculateHashes(quantityParams);
for (ResourceIndexedSearchParamQuantity next : removeCommon(existingQuantityParams, quantityParams)) {
myEntityManager.remove(next);
theEntity.getParamsQuantity().remove(next);
@@ -2120,6 +2124,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
// Store date SP's
calculateHashes(dateParams);
for (ResourceIndexedSearchParamDate next : removeCommon(existingDateParams, dateParams)) {
myEntityManager.remove(next);
theEntity.getParamsDate().remove(next);
@@ -2129,6 +2134,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
// Store URI SP's
calculateHashes(uriParams);
for (ResourceIndexedSearchParamUri next : removeCommon(existingUriParams, uriParams)) {
myEntityManager.remove(next);
theEntity.getParamsUri().remove(next);
@@ -2138,6 +2144,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
// Store Coords SP's
calculateHashes(coordsParams);
for (ResourceIndexedSearchParamCoords next : removeCommon(existingCoordsParams, coordsParams)) {
myEntityManager.remove(next);
theEntity.getParamsCoords().remove(next);
@@ -2187,6 +2194,12 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return theEntity;
}
private void calculateHashes(Collection<? extends BaseResourceIndexedSearchParam> theStringParams) {
for (BaseResourceIndexedSearchParam next : theStringParams) {
next.calculateHashes();
}
}
protected ResourceTable updateEntity(RequestDetails theRequest, IBaseResource theResource, ResourceTable entity, Date theDeletedTimestampOrNull, Date theUpdateTime) {
return updateEntity(theRequest, theResource, entity, theDeletedTimestampOrNull, true, true, theUpdateTime, false, true);

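The calculateHashes(...) calls added above are what the commit title refers to: the removeCommon(...) diffing of existing versus freshly extracted index rows only behaves if the new rows already carry their hash values; otherwise unchanged rows look different and are deleted and re-inserted on every update. A minimal sketch of that kind of set difference, assuming (as a simplification, not HAPI's actual implementation) that the index entities' equals() covers the hashed columns:

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.List;

    class IndexDiffSketch {
        // Entries present in both collections are left untouched in the database; anything
        // remaining in "existing" is removed and anything only in "new" is inserted. If the
        // hashes were still null on the freshly extracted entities, nothing would compare
        // equal and every index row would be rewritten on every update.
        static <T> List<T> removeCommon(Collection<T> theExisting, Collection<T> theNew) {
            List<T> retVal = new ArrayList<>(theExisting);
            retVal.removeAll(theNew);
            return retVal;
        }
    }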
View File

@@ -155,6 +155,7 @@ public class DaoConfig {
private boolean myValidateSearchParameterExpressionsOnSave = true;
private List<Integer> mySearchPreFetchThresholds = Arrays.asList(500, 2000, -1);
private List<WarmCacheEntry> myWarmCacheEntries = new ArrayList<>();
private boolean myDisableHashBasedSearches;
/**
* Constructor
@@ -1383,6 +1384,34 @@ public class DaoConfig {
return mySearchPreFetchThresholds;
}
/**
* If set to <code>true</code> (default is false), the server will not use
* hash-based searches. These searches were introduced in HAPI FHIR 3.5.0
* and are the new default way of searching. However, they require a very
* large data migration if an existing system has a large amount of data,
* so this setting can be used to keep the old search mechanism while data
* is migrated.
*
* @since 3.6.0
*/
public boolean getDisableHashBasedSearches() {
return myDisableHashBasedSearches;
}
/**
* If set to <code>true</code> (default is false), the server will not use
* hash-based searches. These searches were introduced in HAPI FHIR 3.5.0
* and are the new default way of searching. However, they require a very
* large data migration if an existing system has a large amount of data,
* so this setting can be used to keep the old search mechanism while data
* is migrated.
*
* @since 3.6.0
*/
public void setDisableHashBasedSearches(boolean theDisableHashBasedSearches) {
myDisableHashBasedSearches = theDisableHashBasedSearches;
}
public enum IndexEnabledEnum {
ENABLED,
DISABLED

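The new setting is intended for installations that have not yet migrated their index data to the hashed columns. A minimal usage sketch (only DaoConfig and the new setter come from this commit; the surrounding wiring is assumed):

    // Illustrative only: keep the pre-3.5.0 column-based search path while index data
    // is still being migrated, then flip the flag back once reindexing has completed.
    DaoConfig daoConfig = new DaoConfig();
    daoConfig.setDisableHashBasedSearches(true);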
View File

@@ -55,6 +55,7 @@ import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
@@ -94,6 +95,7 @@ public class SearchBuilder implements ISearchBuilder {
private static SearchParameterMap ourLastHandlerParamsForUnitTest;
private static String ourLastHandlerThreadForUnitTest;
private static boolean ourTrackHandlersForUnitTest;
private final boolean myDontUseHashesForSearch;
protected IResourceTagDao myResourceTagDao;
private IResourceSearchViewDao myResourceSearchViewDao;
private List<Long> myAlsoIncludePids;
@@ -130,6 +132,7 @@ public class SearchBuilder implements ISearchBuilder {
myEntityManager = theEntityManager;
myFulltextSearchSvc = theFulltextSearchSvc;
myCallingDao = theDao;
myDontUseHashesForSearch = theDao.getConfig().getDisableHashBasedSearches();
myResourceIndexedSearchParamUriDao = theResourceIndexedSearchParamUriDao;
myForcedIdDao = theForcedIdDao;
myTerminologySvc = theTerminologySvc;
@@ -304,6 +307,15 @@ public class SearchBuilder implements ISearchBuilder {
}
private void addPredicateParamMissing(String theResourceName, String theParamName, boolean theMissing) {
// if (myDontUseHashesForSearch) {
// Join<ResourceTable, SearchParamPresent> paramPresentJoin = myResourceTableRoot.join("mySearchParamPresents", JoinType.LEFT);
// Join<Object, Object> paramJoin = paramPresentJoin.join("mySearchParam", JoinType.LEFT);
//
// myPredicates.add(myBuilder.equal(paramJoin.get("myResourceName"), theResourceName));
// myPredicates.add(myBuilder.equal(paramJoin.get("myParamName"), theParamName));
// myPredicates.add(myBuilder.equal(paramPresentJoin.get("myPresent"), !theMissing));
// }
Join<ResourceTable, SearchParamPresent> paramPresentJoin = myResourceTableRoot.join("mySearchParamPresents", JoinType.LEFT);
Expression<Long> hashPresence = paramPresentJoin.get("myHashPresence").as(Long.class);
@@ -841,10 +853,18 @@ public class SearchBuilder implements ISearchBuilder {
} else {
long hashUri = ResourceIndexedSearchParamUri.calculateHashUri(theResourceName, theParamName, value);
Predicate hashPredicate = myBuilder.equal(join.get("myHashUri"), hashUri);
codePredicates.add(hashPredicate);
if (myDontUseHashesForSearch) {
Predicate predicate = myBuilder.equal(join.get("myUri").as(String.class), value);
codePredicates.add(predicate);
} else {
long hashUri = ResourceIndexedSearchParamUri.calculateHashUri(theResourceName, theParamName, value);
Predicate hashPredicate = myBuilder.equal(join.get("myHashUri"), hashUri);
codePredicates.add(hashPredicate);
}
}
} else {
@@ -868,6 +888,13 @@ public class SearchBuilder implements ISearchBuilder {
}
private Predicate combineParamIndexPredicateWithParamNamePredicate(String theResourceName, String theParamName, From<?, ? extends BaseResourceIndexedSearchParam> theFrom, Predicate thePredicate) {
if (myDontUseHashesForSearch) {
Predicate resourceTypePredicate = myBuilder.equal(theFrom.get("myResourceType"), theResourceName);
Predicate paramNamePredicate = myBuilder.equal(theFrom.get("myParamName"), theParamName);
Predicate outerPredicate = myBuilder.and(resourceTypePredicate, paramNamePredicate, thePredicate);
return outerPredicate;
}
long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName);
Predicate hashIdentityPredicate = myBuilder.equal(theFrom.get("myHashIdentity"), hashIdentity);
return myBuilder.and(hashIdentityPredicate, thePredicate);
@@ -1079,6 +1106,37 @@ public class SearchBuilder implements ISearchBuilder {
throw new IllegalArgumentException("Invalid quantity type: " + theParam.getClass());
}
if (myDontUseHashesForSearch) {
Predicate system = null;
if (!isBlank(systemValue)) {
system = theBuilder.equal(theFrom.get("mySystem"), systemValue);
}
Predicate code = null;
if (!isBlank(unitsValue)) {
code = theBuilder.equal(theFrom.get("myUnits"), unitsValue);
}
cmpValue = ObjectUtils.defaultIfNull(cmpValue, ParamPrefixEnum.EQUAL);
final Expression<BigDecimal> path = theFrom.get("myValue");
String invalidMessageName = "invalidQuantityPrefix";
Predicate num = createPredicateNumeric(theResourceName, null, theFrom, theBuilder, theParam, cmpValue, valueValue, path, invalidMessageName);
Predicate singleCode;
if (system == null && code == null) {
singleCode = num;
} else if (system == null) {
singleCode = theBuilder.and(code, num);
} else if (code == null) {
singleCode = theBuilder.and(system, num);
} else {
singleCode = theBuilder.and(system, code, num);
}
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
}
Predicate hashPredicate;
if (!isBlank(systemValue) && !isBlank(unitsValue)) {
long hash = ResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits(theResourceName, theParamName, systemValue, unitsValue);
@@ -1130,6 +1188,31 @@ public class SearchBuilder implements ISearchBuilder {
+ ResourceIndexedSearchParamString.MAX_LENGTH + "): " + rawSearchTerm);
}
if (myDontUseHashesForSearch) {
String likeExpression = BaseHapiFhirDao.normalizeString(rawSearchTerm);
if (myCallingDao.getConfig().isAllowContainsSearches()) {
if (theParameter instanceof StringParam) {
if (((StringParam) theParameter).isContains()) {
likeExpression = createLeftAndRightMatchLikeExpression(likeExpression);
} else {
likeExpression = createLeftMatchLikeExpression(likeExpression);
}
} else {
likeExpression = createLeftMatchLikeExpression(likeExpression);
}
} else {
likeExpression = createLeftMatchLikeExpression(likeExpression);
}
Predicate singleCode = theBuilder.like(theFrom.get("myValueNormalized").as(String.class), likeExpression);
if (theParameter instanceof StringParam && ((StringParam) theParameter).isExact()) {
Predicate exactCode = theBuilder.equal(theFrom.get("myValueExact"), rawSearchTerm);
singleCode = theBuilder.and(singleCode, exactCode);
}
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
}
boolean exactMatch = theParameter instanceof StringParam && ((StringParam) theParameter).isExact();
if (exactMatch) {
@@ -1234,6 +1317,92 @@ public class SearchBuilder implements ISearchBuilder {
return new BooleanStaticAssertionPredicate((CriteriaBuilderImpl) theBuilder, false);
}
if (myDontUseHashesForSearch) {
ArrayList<Predicate> singleCodePredicates = new ArrayList<Predicate>();
if (codes != null) {
List<Predicate> orPredicates = new ArrayList<Predicate>();
Map<String, List<VersionIndependentConcept>> map = new HashMap<String, List<VersionIndependentConcept>>();
for (VersionIndependentConcept nextCode : codes) {
List<VersionIndependentConcept> systemCodes = map.get(nextCode.getSystem());
if (null == systemCodes) {
systemCodes = new ArrayList<>();
map.put(nextCode.getSystem(), systemCodes);
}
systemCodes.add(nextCode);
}
// Use "in" in case of large numbers of codes due to param modifiers
final Path<String> systemExpression = theFrom.get("mySystem");
final Path<String> valueExpression = theFrom.get("myValue");
for (Map.Entry<String, List<VersionIndependentConcept>> entry : map.entrySet()) {
CriteriaBuilder.In<String> codePredicate = theBuilder.in(valueExpression);
boolean haveAtLeastOneCode = false;
for (VersionIndependentConcept nextCode : entry.getValue()) {
if (isNotBlank(nextCode.getCode())) {
codePredicate.value(nextCode.getCode());
haveAtLeastOneCode = true;
}
}
if (entry.getKey() != null) {
Predicate systemPredicate = theBuilder.equal(systemExpression, entry.getKey());
if (haveAtLeastOneCode) {
orPredicates.add(theBuilder.and(systemPredicate, codePredicate));
} else {
orPredicates.add(systemPredicate);
}
} else {
orPredicates.add(codePredicate);
}
}
Predicate or = theBuilder.or(orPredicates.toArray(new Predicate[0]));
if (modifier == TokenParamModifier.NOT) {
or = theBuilder.not(or);
}
singleCodePredicates.add(or);
} else {
/*
* Ok, this is a normal query
*/
if (StringUtils.isNotBlank(system)) {
if (modifier != null && modifier == TokenParamModifier.NOT) {
singleCodePredicates.add(theBuilder.notEqual(theFrom.get("mySystem"), system));
} else {
singleCodePredicates.add(theBuilder.equal(theFrom.get("mySystem"), system));
}
} else if (system == null) {
// don't check the system
} else {
// If the system is "", we only match on null systems
singleCodePredicates.add(theBuilder.isNull(theFrom.get("mySystem")));
}
if (StringUtils.isNotBlank(code)) {
if (modifier != null && modifier == TokenParamModifier.NOT) {
singleCodePredicates.add(theBuilder.notEqual(theFrom.get("myValue"), code));
} else {
singleCodePredicates.add(theBuilder.equal(theFrom.get("myValue"), code));
}
} else {
/*
* As of HAPI FHIR 1.5, if the client searched for a token with a system but no specified value this means to
* match all tokens with the given value.
*
* I'm not sure I agree with this, but hey.. FHIR-I voted and this was the result :)
*/
// singleCodePredicates.add(theBuilder.isNull(theFrom.get("myValue")));
}
}
Predicate singleCode = theBuilder.and(toArray(singleCodePredicates));
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
}
/*
* Note: A null system value means "match any system", but
* an empty-string system value means "match values that
@@ -1607,9 +1776,14 @@ public class SearchBuilder implements ISearchBuilder {
if (param.getParamType() == RestSearchParameterTypeEnum.REFERENCE) {
thePredicates.add(join.get("mySourcePath").as(String.class).in(param.getPathsSplit()));
} else {
Long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(myResourceName, theSort.getParamName());
Predicate joinParam1 = theBuilder.equal(join.get("myHashIdentity"), hashIdentity);
thePredicates.add(joinParam1);
if (myDontUseHashesForSearch) {
Predicate joinParam1 = theBuilder.equal(join.get("myParamName"), theSort.getParamName());
thePredicates.add(joinParam1);
} else {
Long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(myResourceName, theSort.getParamName());
Predicate joinParam1 = theBuilder.equal(join.get("myHashIdentity"), hashIdentity);
thePredicates.add(joinParam1);
}
}
} else {
ourLog.debug("Reusing join for {}", theSort.getParamName());
@@ -1668,7 +1842,7 @@ public class SearchBuilder implements ISearchBuilder {
//-- preload all tags with tag definition if any
Map<Long, Collection<ResourceTag>> tagMap = getResourceTagMap(resourceSearchViewList);
Long resourceId = null;
Long resourceId;
for (ResourceSearchView next : resourceSearchViewList) {
Class<? extends IBaseResource> resourceType = context.getResourceDefinition(next.getResourceType()).getImplementingClass();
@@ -1706,7 +1880,7 @@ public class SearchBuilder implements ISearchBuilder {
private Map<Long, Collection<ResourceTag>> getResourceTagMap(Collection<ResourceSearchView> theResourceSearchViewList) {
List<Long> idList = new ArrayList<Long>(theResourceSearchViewList.size());
List<Long> idList = new ArrayList<>(theResourceSearchViewList.size());
//-- find all resource has tags
for (ResourceSearchView resource : theResourceSearchViewList) {

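Taken together, the SearchBuilder changes above give each parameter predicate two shapes. A condensed sketch of the recurring pattern (simplified from the diff; the Criteria API variables are the ones the surrounding hunks already declare):

    // Legacy path: match the plain resource-type and parameter-name columns, as before
    // HAPI FHIR 3.5.0. Hash path: compare a single precomputed hash-identity column.
    Predicate identity;
    if (myDontUseHashesForSearch) {
        identity = myBuilder.and(
            myBuilder.equal(theFrom.get("myResourceType"), theResourceName),
            myBuilder.equal(theFrom.get("myParamName"), theParamName));
    } else {
        long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName);
        identity = myBuilder.equal(theFrom.get("myHashIdentity"), hashIdentity);
    }
    return myBuilder.and(identity, thePredicate);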
View File

@@ -129,6 +129,8 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
public abstract IQueryParameterType toQueryParameterType();
public abstract void calculateHashes();
public static long calculateHashIdentity(String theResourceType, String theParamName) {
return hash(theResourceType, theParamName);
}

View File

@@ -67,6 +67,7 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
setLongitude(theLongitude);
}
@Override
@PrePersist
public void calculateHashes() {
if (myHashIdentity == null) {

View File

@@ -83,6 +83,7 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
myOriginalValue = theOriginalValue;
}
@Override
@PrePersist
public void calculateHashes() {
if (myHashIdentity == null) {

View File

@@ -69,6 +69,7 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
setValue(theValue);
}
@Override
@PrePersist
public void calculateHashes() {
if (myHashIdentity == null) {

View File

@@ -95,6 +95,7 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc
setUnits(theUnits);
}
@Override
@PrePersist
public void calculateHashes() {
if (myHashIdentity == null) {

View File

@@ -161,6 +161,7 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
myHashIdentity = theHashIdentity;
}
@Override
@PrePersist
public void calculateHashes() {
if (myHashNormalizedPrefix == null && myDaoConfig != null) {

View File

@@ -108,6 +108,7 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
setValue(theValue);
}
@Override
@PrePersist
public void calculateHashes() {
if (myHashSystem == null) {

View File

@@ -82,6 +82,7 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara
setUri(theUri);
}
@Override
@PrePersist
public void calculateHashes() {
if (myHashUri == null) {

View File

@@ -39,6 +39,7 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.server.IResourceProvider;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.util.ParametersUtil;
@@ -307,7 +308,7 @@ public class SubscriptionTriggeringProvider implements IResourceProvider, Applic
ourLog.info("Triggering job[{}] search {} requesting resources {} - {}", theJobDetails.getJobId(), theJobDetails.getCurrentSearchUuid(), fromIndex, toIndex);
List<Long> resourceIds = mySearchCoordinatorSvc.getResources(theJobDetails.getCurrentSearchUuid(), fromIndex, toIndex);
ourLog.info("Triggering job[{}] delivering {} resources", theJobDetails.getJobId(), theJobDetails.getCurrentSearchUuid(), fromIndex, toIndex);
ourLog.info("Triggering job[{}] delivering {} resources", theJobDetails.getJobId(), resourceIds.size());
int highestIndexSubmitted = theJobDetails.getCurrentSearchLastUploadedIndex();
for (Long next : resourceIds) {
@@ -374,9 +375,22 @@ public class SubscriptionTriggeringProvider implements IResourceProvider, Applic
msg.setNewPayload(myFhirContext, theResourceToTrigger);
return myExecutorService.submit(()->{
for (BaseSubscriptionInterceptor<?> next : mySubscriptionInterceptorList) {
next.submitResourceModified(msg);
for (int i = 0; ; i++) {
try {
for (BaseSubscriptionInterceptor<?> next : mySubscriptionInterceptorList) {
next.submitResourceModified(msg);
}
break;
} catch (Exception e) {
if (i >= 3) {
throw new InternalErrorException(e);
}
ourLog.warn("Exception while retriggering subscriptions (going to sleep and retry): {}", e.toString());
Thread.sleep(1000);
}
}
return null;
});

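The retry loop added to the subscription retriggering above is easier to read in isolation. A hedged sketch of the same behaviour pulled out as a helper (the helper name and Runnable parameter are illustrative; the committed code inlines the loop in the submitted Callable):

    // Retry delivery up to three times, one second apart, before giving up and
    // wrapping the last failure in an InternalErrorException.
    private void deliverWithRetry(Runnable theDelivery) throws InterruptedException {
        for (int i = 0; ; i++) {
            try {
                theDelivery.run();
                return;
            } catch (Exception e) {
                if (i >= 3) {
                    throw new InternalErrorException(e);
                }
                ourLog.warn("Exception while retriggering subscriptions (going to sleep and retry): {}", e.toString());
                Thread.sleep(1000);
            }
        }
    }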
View File

@@ -74,7 +74,6 @@ public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc {
private void deleteSearch(final Long theSearchPid) {
mySearchDao.findById(theSearchPid).ifPresent(searchToDelete -> {
ourLog.info("Deleting search {}/{} - Created[{}] -- Last returned[{}]", searchToDelete.getId(), searchToDelete.getUuid(), new InstantType(searchToDelete.getCreated()), new InstantType(searchToDelete.getSearchLastReturned()));
mySearchIncludeDao.deleteForSearch(searchToDelete.getId());
/*
@@ -93,7 +92,10 @@ public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc {
// Only delete if we don't have results left in this search
if (resultPids.getNumberOfElements() < max) {
ourLog.info("Deleting search {}/{} - Created[{}] -- Last returned[{}]", searchToDelete.getId(), searchToDelete.getUuid(), new InstantType(searchToDelete.getCreated()), new InstantType(searchToDelete.getSearchLastReturned()));
mySearchDao.deleteByPid(searchToDelete.getId());
} else {
ourLog.info("Purged {} search results for deleted search {}/{}", resultPids.getSize(), searchToDelete.getId(), searchToDelete.getUuid());
}
});
}

View File

@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.config;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import net.ttddyy.dsproxy.listener.SingleQueryCountHolder;
import net.ttddyy.dsproxy.listener.ThreadQueryCountHolder;
import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
@@ -107,12 +108,18 @@ public class TestR4Config extends BaseJavaConfigR4 {
.create(retVal)
.logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
.logSlowQueryBySlf4j(10, TimeUnit.SECONDS)
.countQuery(new ThreadQueryCountHolder())
// .countQuery(new ThreadQueryCountHolder())
.countQuery(singleQueryCountHolder())
.build();
return dataSource;
}
@Bean
public SingleQueryCountHolder singleQueryCountHolder() {
return new SingleQueryCountHolder();
}
@Override
@Bean()
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {

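With the proxy datasource now counting into a shared SingleQueryCountHolder bean rather than a per-thread holder, tests can read totals back regardless of which pooled thread ran the SQL. A hedged usage sketch (the logger is an assumption; the empty-string datasource key is the one the consolidated tests below use):

    // Read the accumulated statement counts for the unnamed datasource and log them.
    QueryCount counts = singleQueryCountHolder().getQueryCountMap().get("");
    ourLog.info("select={} insert={} update={} delete={}",
        counts.getSelect(), counts.getInsert(), counts.getUpdate(), counts.getDelete());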
View File

@@ -4,7 +4,6 @@ import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.util.TestUtil;
import net.ttddyy.dsproxy.listener.ThreadQueryCountHolder;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.IdType;
@@ -18,10 +17,9 @@ import org.slf4j.LoggerFactory;
import java.io.IOException;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.matchesPattern;
import static org.junit.Assert.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoR4CreateTest.class);
@@ -37,22 +35,22 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
Patient p = myFhirCtx.newXmlParser().parseResource(Patient.class, input);
String id = myPatientDao.create(p).getId().toUnqualifiedVersionless().getValue();
SearchParameterMap map= new SearchParameterMap();
SearchParameterMap map = new SearchParameterMap();
map.setLoadSynchronous(true);
map.add(Patient.SP_FAMILY, new StringParam(""));
assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(map)), contains(id));
map= new SearchParameterMap();
map = new SearchParameterMap();
map.setLoadSynchronous(true);
map.add(Patient.SP_GIVEN, new StringParam(""));
assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(map)), contains(id));
map= new SearchParameterMap();
map = new SearchParameterMap();
map.setLoadSynchronous(true);
map.add(Patient.SP_GIVEN, new StringParam("준수"));
assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(map)), contains(id));
map= new SearchParameterMap();
map = new SearchParameterMap();
map.setLoadSynchronous(true);
map.add(Patient.SP_GIVEN, new StringParam("")); // rightmost character only
assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(map)), empty());
@@ -60,7 +58,7 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
}
@Test
public void testCreateWithUuidResourceStrategy() throws Exception {
public void testCreateWithUuidResourceStrategy() {
myDaoConfig.setResourceServerIdStrategy(DaoConfig.IdStrategyEnum.UUID);
Patient p = new Patient();
@@ -110,26 +108,6 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
assertThat(output.getEntry().get(1).getResponse().getLocation(), matchesPattern("Patient/[a-z0-9]{8}-.*"));
}
@Test
public void testWritesPerformMinimalSqlStatements() {
Patient p = new Patient();
p.addIdentifier().setSystem("sys1").setValue("val1");
p.addIdentifier().setSystem("sys2").setValue("val2");
ourLog.info("** About to perform write");
new ThreadQueryCountHolder().getOrCreateQueryCount("").setInsert(0);
new ThreadQueryCountHolder().getOrCreateQueryCount("").setUpdate(0);
myPatientDao.create(p);
ourLog.info("** Done performing write");
ourLog.info("Inserts: {}", new ThreadQueryCountHolder().getOrCreateQueryCount("").getInsert());
ourLog.info("Updates: {}", new ThreadQueryCountHolder().getOrCreateQueryCount("").getUpdate());
}

View File

@@ -1,14 +1,19 @@
package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.util.TestUtil;
import net.ttddyy.dsproxy.QueryCount;
import net.ttddyy.dsproxy.QueryCountHolder;
import net.ttddyy.dsproxy.listener.SingleQueryCountHolder;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.Patient;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.TestPropertySource;
import static org.junit.Assert.assertEquals;
@@ -18,6 +23,8 @@ import static org.junit.Assert.assertEquals;
})
public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4QueryCountTest.class);
@Autowired
private SingleQueryCountHolder myCountHolder;
@After
public void afterResetDao() {
@@ -25,22 +32,87 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
}
@Test
public void testWritesPerformMinimalSqlStatements() {
Patient p = new Patient();
p.addIdentifier().setSystem("sys1").setValue("val1");
p.addIdentifier().setSystem("sys2").setValue("val2");
ourLog.info("** About to perform write");
myCountHolder.clear();
IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();
ourLog.info("** Done performing write");
assertEquals(6, getQueryCount().getInsert());
assertEquals(0, getQueryCount().getUpdate());
/*
* Now update the value
*/
p = new Patient();
p.setId(id);
p.addIdentifier().setSystem("sys1").setValue("val3");
p.addIdentifier().setSystem("sys2").setValue("val4");
ourLog.info("** About to perform write 2");
myCountHolder.clear();
myPatientDao.update(p).getId().toUnqualifiedVersionless();
ourLog.info("** Done performing write 2");
assertEquals(2, getQueryCount().getInsert());
assertEquals(1, getQueryCount().getUpdate());
assertEquals(1, getQueryCount().getDelete());
}
@Test
public void testSearch() {
for (int i = 0; i < 20; i++) {
Patient p = new Patient();
p.addIdentifier().setSystem("sys1").setValue("val" + i);
myPatientDao.create(p);
}
myCountHolder.clear();
ourLog.info("** About to perform search");
IBundleProvider search = myPatientDao.search(new SearchParameterMap());
ourLog.info("** About to retrieve resources");
search.getResources(0, 20);
ourLog.info("** Done retrieving resources");
assertEquals(4, getQueryCount().getSelect());
assertEquals(2, getQueryCount().getInsert());
assertEquals(1, getQueryCount().getUpdate());
assertEquals(0, getQueryCount().getDelete());
}
private QueryCount getQueryCount() {
return myCountHolder.getQueryCountMap().get("");
}
@Test
public void testCreateClientAssignedId() {
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
QueryCountHolder.clear();
myCountHolder.clear();
ourLog.info("** Starting Update Non-Existing resource with client assigned ID");
Patient p = new Patient();
p.setId("A");
p.getPhotoFirstRep().setCreationElement(new DateTimeType("2011")); // non-indexed field
myPatientDao.update(p).getId().toUnqualifiedVersionless();
assertEquals(1, QueryCountHolder.getGrandTotal().getSelect());
assertEquals(1, getQueryCount().getSelect());
assertEquals(4, QueryCountHolder.getGrandTotal().getInsert());
assertEquals(4, getQueryCount().getInsert());
assertEquals(0, QueryCountHolder.getGrandTotal().getDelete());
assertEquals(0, getQueryCount().getDelete());
// Because of the forced ID's bidirectional link HFJ_RESOURCE <-> HFJ_FORCED_ID
assertEquals(1, QueryCountHolder.getGrandTotal().getUpdate());
assertEquals(1, getQueryCount().getUpdate());
runInTransaction(() -> {
assertEquals(1, myResourceTableDao.count());
assertEquals(1, myResourceHistoryTableDao.count());
@@ -50,17 +122,17 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
// Ok how about an update
QueryCountHolder.clear();
myCountHolder.clear();
ourLog.info("** Starting Update Existing resource with client assigned ID");
p = new Patient();
p.setId("A");
p.getPhotoFirstRep().setCreationElement(new DateTimeType("2012")); // non-indexed field
myPatientDao.update(p).getId().toUnqualifiedVersionless();
assertEquals(5, QueryCountHolder.getGrandTotal().getSelect());
assertEquals(5, getQueryCount().getSelect());
assertEquals(1, QueryCountHolder.getGrandTotal().getInsert());
assertEquals(1, getQueryCount().getInsert());
assertEquals(0, QueryCountHolder.getGrandTotal().getDelete());
assertEquals(0, getQueryCount().getDelete());
assertEquals(1, QueryCountHolder.getGrandTotal().getUpdate());
assertEquals(1, getQueryCount().getUpdate());
runInTransaction(() -> {
assertEquals(1, myResourceTableDao.count());
assertEquals(2, myResourceHistoryTableDao.count());
@@ -75,24 +147,24 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
public void testOneRowPerUpdate() {
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
QueryCountHolder.clear();
myCountHolder.clear();
Patient p = new Patient();
p.getPhotoFirstRep().setCreationElement(new DateTimeType("2011")); // non-indexed field
IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();
assertEquals(3, QueryCountHolder.getGrandTotal().getInsert());
assertEquals(3, getQueryCount().getInsert());
runInTransaction(() -> {
assertEquals(1, myResourceTableDao.count());
assertEquals(1, myResourceHistoryTableDao.count());
});
QueryCountHolder.clear();
myCountHolder.clear();
p = new Patient();
p.setId(id);
p.getPhotoFirstRep().setCreationElement(new DateTimeType("2012")); // non-indexed field
myPatientDao.update(p).getId().toUnqualifiedVersionless();
assertEquals(1, QueryCountHolder.getGrandTotal().getInsert());
assertEquals(1, getQueryCount().getInsert());
runInTransaction(() -> {
assertEquals(1, myResourceTableDao.count());
assertEquals(2, myResourceHistoryTableDao.count());
@@ -101,6 +173,34 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
}
@Test
public void testUpdateReusesIndexes() {
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
myCountHolder.clear();
Patient pt = new Patient();
pt.setActive(true);
pt.addName().setFamily("FAMILY1").addGiven("GIVEN1A").addGiven("GIVEN1B");
IIdType id = myPatientDao.create(pt).getId().toUnqualifiedVersionless();
ourLog.info("Now have {} deleted", getQueryCount().getDelete());
ourLog.info("Now have {} inserts", getQueryCount().getInsert());
myCountHolder.clear();
ourLog.info("** About to update");
pt.setId(id);
pt.getNameFirstRep().addGiven("GIVEN1C");
myPatientDao.update(pt);
ourLog.info("Now have {} deleted", getQueryCount().getDelete());
ourLog.info("Now have {} inserts", getQueryCount().getInsert());
assertEquals(0, getQueryCount().getDelete());
assertEquals(2, getQueryCount().getInsert());
}
@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();

View File

@@ -668,33 +668,6 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {
}
@Test
public void testUpdateReusesIndexes() {
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
QueryCountHolder.clear();
Patient pt = new Patient();
pt.setActive(true);
pt.addName().setFamily("FAMILY1").addGiven("GIVEN1A").addGiven("GIVEN1B");
IIdType id = myPatientDao.create(pt).getId().toUnqualifiedVersionless();
ourLog.info("Now have {} deleted", QueryCountHolder.getGrandTotal().getDelete());
ourLog.info("Now have {} inserts", QueryCountHolder.getGrandTotal().getInsert());
QueryCountHolder.clear();
ourLog.info("** About to update");
pt.setId(id);
pt.getNameFirstRep().addGiven("GIVEN1C");
myPatientDao.update(pt);
ourLog.info("Now have {} deleted", QueryCountHolder.getGrandTotal().getDelete());
ourLog.info("Now have {} inserts", QueryCountHolder.getGrandTotal().getInsert());
assertEquals(0, QueryCountHolder.getGrandTotal().getDelete());
assertEquals(4, QueryCountHolder.getGrandTotal().getInsert());
}
@Test
public void testUpdateUnknownNumericIdFails() {
Patient p = new Patient();

View File

@@ -116,6 +116,11 @@
<![CDATA[<a href="https://github.com/hapifhir/hapi-fhir-jpaserver-starter/blob/master/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfig.java#L62">the example project</a>]]>
if they are not already.
</action>
<action type="fix">
When updating resources in the JPA server, a bug caused index table entries to be refreshed
sometimes even though the index value hadn't changed. This issue did not cause incorrect search
results but had an effect on write performance. This has been corrected.
</action>
</release>
<release version="3.5.0" date="2018-09-17">