Ja inline references (#1252)

* Use a single IN clause for OR reference searches

* Add changelog

* One more attempt at getting the free port util to actually work... sigh

* One more tweak to free ports

* Bundle fix
This commit is contained in:
James Agnew 2019-03-26 10:42:42 +01:00 committed by GitHub
parent 9a54d7086e
commit e85bec2858
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
22 changed files with 1024 additions and 656 deletions

View File

@@ -24,10 +24,12 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import java.io.IOException; import java.io.IOException;
import java.net.DatagramSocket;
import java.net.InetSocketAddress; import java.net.InetSocketAddress;
import java.net.ServerSocket; import java.net.ServerSocket;
import java.net.Socket; import java.net.Socket;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.List; import java.util.List;
/** /**
@ -38,38 +40,38 @@ import java.util.List;
* for a long time (potentially lots of them!) and will leave your system low on * for a long time (potentially lots of them!) and will leave your system low on
* ports if you put it into production. * ports if you put it into production.
* </b></p> * </b></p>
* * <p>
* How it works: * How it works:
* * <p>
* We have lots of tests that need a free port because they want to open up * We have lots of tests that need a free port because they want to open up
* a server, and need the port to be unique and unused so that the tests can * a server, and need the port to be unique and unused so that the tests can
* run multithreaded. This turns out to just be an awful problem to solve for * run multithreaded. This turns out to just be an awful problem to solve for
* lots of reasons: * lots of reasons:
* * <p>
* 1. You can request a free port from the OS by calling <code>new ServerSocket(0);</code> * 1. You can request a free port from the OS by calling <code>new ServerSocket(0);</code>
* and this seems to work 99% of the time, but occasionally on a heavily loaded * and this seems to work 99% of the time, but occasionally on a heavily loaded
* server if two processes ask at the exact same time they will receive the * server if two processes ask at the exact same time they will receive the
* same port assignment, and one will fail. * same port assignment, and one will fail.
* 2. Tests run in separate processes, so we can't just rely on keeping a collection * 2. Tests run in separate processes, so we can't just rely on keeping a collection
* of assigned ports or anything like that. * of assigned ports or anything like that.
* * <p>
* So we solve this like this: * So we solve this like this:
* * <p>
* At random, this class will pick a "control port" and bind it. A control port * At random, this class will pick a "control port" and bind it. A control port
* is just a randomly chosen port that is a multiple of 100. If we can bind * is just a randomly chosen port that is a multiple of 100. If we can bind
* successfully to that port, we now own the range of "n+1 to n+99". If we can't * successfully to that port, we now own the range of "n+1 to n+99". If we can't
* bind that port, it means some other process has probably taken it so * bind that port, it means some other process has probably taken it so
* we'll just try again until we find an available control port. * we'll just try again until we find an available control port.
* * <p>
* Assuming we successfully bind a control port, we'll give out any available * Assuming we successfully bind a control port, we'll give out any available
* ports in the range "n+1 to n+99" until we've exhausted the whole set, and * ports in the range "n+1 to n+99" until we've exhausted the whole set, and
* then we'll pick another control port (if we actually get asked for over * then we'll pick another control port (if we actually get asked for over
* 100 ports.. this should be a rare event). * 100 ports.. this should be a rare event).
* * <p>
* This mechanism has the benefit of (fingers crossed) being bulletproof * This mechanism has the benefit of (fingers crossed) being bulletproof
* in terms of its ability to give out ports that are actually free, thereby * in terms of its ability to give out ports that are actually free, thereby
* preventing random test failures. * preventing random test failures.
* * <p>
* This mechanism has the drawback of never giving up a control port once * This mechanism has the drawback of never giving up a control port once
* it has assigned one. To be clear, this class is deliberately leaking * it has assigned one. To be clear, this class is deliberately leaking
* resources. Again, no production use! * resources. Again, no production use!
@@ -106,6 +108,32 @@ public class PortUtil {
myCurrentControlSocketPort = null; myCurrentControlSocketPort = null;
} }
/**
 * Returns <code>true</code> if the given port can currently be bound for
 * both TCP (via {@link ServerSocket}) and UDP (via {@link DatagramSocket}).
 * <p>
 * Note that this check is inherently racy: another process may grab the
 * port between this probe and any subsequent bind by the caller.
 *
 * @param port the port number to probe
 * @return <code>true</code> if both a TCP and a UDP socket could be bound
 */
private static boolean isAvailable(int port) {
	// try-with-resources closes both sockets (in reverse declaration order)
	// on every path, replacing the original hand-rolled finally block. A
	// failure to close is caught by the same IOException handler and treated
	// as "not available" - in practice close() on an unconnected socket
	// does not fail.
	try (ServerSocket ss = new ServerSocket(port);
		  DatagramSocket ds = new DatagramSocket(port)) {
		// NOTE(review): setReuseAddress() after a successful bind has
		// undefined effect per the Javadoc; kept for parity with the
		// original code.
		ss.setReuseAddress(true);
		ds.setReuseAddress(true);
		return true;
	} catch (IOException e) {
		// A bind failed - the port is in use (or we lack permission)
		return false;
	}
}
/** /**
* Clear and release all control sockets * Clear and release all control sockets
*/ */
@ -149,48 +177,54 @@ public class PortUtil {
int nextCandidatePort = myCurrentControlSocketPort + myCurrentOffset; int nextCandidatePort = myCurrentControlSocketPort + myCurrentOffset;
// Try to open a port on this socket and use it // Try to open a port on this socket and use it
try (ServerSocket server = new ServerSocket()) { // try (ServerSocket server = new ServerSocket()) {
server.setReuseAddress(true); // server.setReuseAddress(true);
server.bind(new InetSocketAddress("localhost", nextCandidatePort)); // server.bind(new InetSocketAddress("localhost", nextCandidatePort));
try (Socket client = new Socket()) { // try (Socket client = new Socket()) {
client.setReuseAddress(true); // client.setReuseAddress(true);
client.connect(new InetSocketAddress("localhost", nextCandidatePort)); // client.connect(new InetSocketAddress("localhost", nextCandidatePort));
} // }
} catch (IOException e) { // } catch (IOException e) {
// continue;
// }
if (!isAvailable(nextCandidatePort)) {
continue; continue;
} }
// Log who asked for the port, just in case that's useful // Log who asked for the port, just in case that's useful
StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace(); StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();
StackTraceElement previousElement = stackTraceElements[2]; StackTraceElement previousElement = Arrays.stream(stackTraceElements)
.filter(t -> !t.toString().contains("PortUtil.") && !t.toString().contains("getStackTrace"))
.findFirst()
.orElse(stackTraceElements[2]);
ourLog.info("Returned available port {} for: {}", nextCandidatePort, previousElement.toString()); ourLog.info("Returned available port {} for: {}", nextCandidatePort, previousElement.toString());
/* // /*
* This is an attempt to make sure the port is actually // * This is an attempt to make sure the port is actually
* free before releasing it. For whatever reason on Linux // * free before releasing it. For whatever reason on Linux
* it seems like even after we close the ServerSocket there // * it seems like even after we close the ServerSocket there
* is a short while where it is not possible to bind the // * is a short while where it is not possible to bind the
* port, even though it should be released by then. // * port, even though it should be released by then.
* // *
* I don't have any solid evidence that this is a good // * I don't have any solid evidence that this is a good
* way to do this, but it seems to help... // * way to do this, but it seems to help...
*/ // */
for (int i = 0; i < 10; i++) { // for (int i = 0; i < 10; i++) {
try (Socket client = new Socket()) { // try (Socket client = new Socket()) {
client.setReuseAddress(true); // client.setReuseAddress(true);
client.connect(new InetSocketAddress(nextCandidatePort), 1000); // client.connect(new InetSocketAddress(nextCandidatePort), 1000);
ourLog.info("Socket still seems open"); // ourLog.info("Socket still seems open");
Thread.sleep(250); // Thread.sleep(250);
} catch (Exception e) { // } catch (Exception e) {
break; // break;
} // }
} // }
//
try { // try {
Thread.sleep(250); // Thread.sleep(250);
} catch (InterruptedException theE) { // } catch (InterruptedException theE) {
// ignore // // ignore
} // }
return nextCandidatePort; return nextCandidatePort;

View File

@ -48,7 +48,7 @@ public class PortUtilTest {
int tasksCount = 20; int tasksCount = 20;
ExecutorService pool = Executors.newFixedThreadPool(tasksCount); ExecutorService pool = Executors.newFixedThreadPool(tasksCount);
int portsPerTaskCount = 51; int portsPerTaskCount = 151;
for (int i = 0; i < tasksCount; i++) { for (int i = 0; i < tasksCount; i++) {
pool.submit(() -> { pool.submit(() -> {
PortUtil portUtil = new PortUtil(); PortUtil portUtil = new PortUtil();

View File

@ -1002,6 +1002,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
throw new ResourceNotFoundException(theId); throw new ResourceNotFoundException(theId);
} }
validateGivenIdIsAppropriateToRetrieveResource(theId, entity); validateGivenIdIsAppropriateToRetrieveResource(theId, entity);
entity.setTransientForcedId(theId.getIdPart());
return entity; return entity;
} }

View File

@ -391,7 +391,8 @@ public class SearchBuilder implements ISearchBuilder {
Join<ResourceTable, ResourceLink> join = createJoin(JoinEnum.REFERENCE, theParamName); Join<ResourceTable, ResourceLink> join = createJoin(JoinEnum.REFERENCE, theParamName);
List<Predicate> codePredicates = new ArrayList<>(); List<IIdType> targetIds = new ArrayList<>();
List<String> targetQualifiedUrls = new ArrayList<>();
for (int orIdx = 0; orIdx < theList.size(); orIdx++) { for (int orIdx = 0; orIdx < theList.size(); orIdx++) {
IQueryParameterType nextOr = theList.get(orIdx); IQueryParameterType nextOr = theList.get(orIdx);
@ -400,39 +401,73 @@ public class SearchBuilder implements ISearchBuilder {
ReferenceParam ref = (ReferenceParam) nextOr; ReferenceParam ref = (ReferenceParam) nextOr;
if (isBlank(ref.getChain())) { if (isBlank(ref.getChain())) {
/*
* Handle non-chained search, e.g. Patient?organization=Organization/123
*/
IIdType dt = new IdDt(ref.getBaseUrl(), ref.getResourceType(), ref.getIdPart(), null); IIdType dt = new IdDt(ref.getBaseUrl(), ref.getResourceType(), ref.getIdPart(), null);
if (dt.hasBaseUrl()) { if (dt.hasBaseUrl()) {
if (myDaoConfig.getTreatBaseUrlsAsLocal().contains(dt.getBaseUrl())) { if (myDaoConfig.getTreatBaseUrlsAsLocal().contains(dt.getBaseUrl())) {
dt = dt.toUnqualified(); dt = dt.toUnqualified();
targetIds.add(dt);
} else { } else {
ourLog.debug("Searching for resource link with target URL: {}", dt.getValue()); targetQualifiedUrls.add(dt.getValue());
Predicate eq = myBuilder.equal(join.get("myTargetResourceUrl"), dt.getValue());
codePredicates.add(eq);
continue;
} }
} else {
targetIds.add(dt);
} }
List<Long> targetPid; } else {
try {
targetPid = myIdHelperService.translateForcedIdToPids(dt);
} catch (ResourceNotFoundException e) {
// Use a PID that will never exist
targetPid = Collections.singletonList(-1L);
}
for (Long next : targetPid) {
ourLog.debug("Searching for resource link with target PID: {}", next);
/*
* Handle chained search, e.g. Patient?organization.name=Kwik-e-mart
*/
addPredicateReferenceWithChain(theResourceName, theParamName, theList, join, new ArrayList<>(), ref);
return;
}
} else {
throw new IllegalArgumentException("Invalid token type (expecting ReferenceParam): " + nextOr.getClass());
}
}
List<Predicate> codePredicates = new ArrayList<>();
// Resources by ID
List<Long> targetPids = myIdHelperService.translateForcedIdToPids(targetIds);
if (!targetPids.isEmpty()) {
ourLog.debug("Searching for resource link with target PIDs: {}", targetPids);
Predicate pathPredicate = createResourceLinkPathPredicate(theResourceName, theParamName, join); Predicate pathPredicate = createResourceLinkPathPredicate(theResourceName, theParamName, join);
Predicate pidPredicate = myBuilder.equal(join.get("myTargetResourcePid"), next); Predicate pidPredicate = join.get("myTargetResourcePid").in(targetPids);
codePredicates.add(myBuilder.and(pathPredicate, pidPredicate)); codePredicates.add(myBuilder.and(pathPredicate, pidPredicate));
} }
} else { // Resources by fully qualified URL
if (!targetQualifiedUrls.isEmpty()) {
ourLog.debug("Searching for resource link with target URLs: {}", targetQualifiedUrls);
Predicate eq = join.get("myTargetResourceUrl").in(targetQualifiedUrls);
codePredicates.add(eq);
}
if (codePredicates.size() > 0) {
myPredicates.add(myBuilder.or(toArray(codePredicates)));
} else {
// Add a predicate that will never match
Predicate pidPredicate = join.get("myTargetResourcePid").in(-1L);
myPredicates.clear();
myPredicates.add(pidPredicate);
}
}
private void addPredicateReferenceWithChain(String theResourceName, String theParamName, List<? extends IQueryParameterType> theList, Join<ResourceTable, ResourceLink> theJoin, List<Predicate> theCodePredicates, ReferenceParam theRef) {
final List<Class<? extends IBaseResource>> resourceTypes; final List<Class<? extends IBaseResource>> resourceTypes;
String resourceId; String resourceId;
if (!ref.getValue().matches("[a-zA-Z]+/.*")) { if (!theRef.getValue().matches("[a-zA-Z]+/.*")) {
RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName); RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName);
resourceTypes = new ArrayList<>(); resourceTypes = new ArrayList<>();
@ -490,16 +525,16 @@ public class SearchBuilder implements ISearchBuilder {
} }
} }
resourceId = ref.getValue(); resourceId = theRef.getValue();
} else { } else {
try { try {
RuntimeResourceDefinition resDef = myContext.getResourceDefinition(ref.getResourceType()); RuntimeResourceDefinition resDef = myContext.getResourceDefinition(theRef.getResourceType());
resourceTypes = new ArrayList<>(1); resourceTypes = new ArrayList<>(1);
resourceTypes.add(resDef.getImplementingClass()); resourceTypes.add(resDef.getImplementingClass());
resourceId = ref.getIdPart(); resourceId = theRef.getIdPart();
} catch (DataFormatException e) { } catch (DataFormatException e) {
throw new InvalidRequestException("Invalid resource type: " + ref.getResourceType()); throw new InvalidRequestException("Invalid resource type: " + theRef.getResourceType());
} }
} }
@ -507,7 +542,7 @@ public class SearchBuilder implements ISearchBuilder {
for (Class<? extends IBaseResource> nextType : resourceTypes) { for (Class<? extends IBaseResource> nextType : resourceTypes) {
String chain = ref.getChain(); String chain = theRef.getChain();
String remainingChain = null; String remainingChain = null;
int chainDotIndex = chain.indexOf('.'); int chainDotIndex = chain.indexOf('.');
if (chainDotIndex != -1) { if (chainDotIndex != -1) {
@ -555,29 +590,18 @@ public class SearchBuilder implements ISearchBuilder {
Subquery<Long> subQ = createLinkSubquery(foundChainMatch, chain, subResourceName, orValues); Subquery<Long> subQ = createLinkSubquery(foundChainMatch, chain, subResourceName, orValues);
Predicate pathPredicate = createResourceLinkPathPredicate(theResourceName, theParamName, join); Predicate pathPredicate = createResourceLinkPathPredicate(theResourceName, theParamName, theJoin);
Predicate pidPredicate = join.get("myTargetResourcePid").in(subQ); Predicate pidPredicate = theJoin.get("myTargetResourcePid").in(subQ);
Predicate andPredicate = myBuilder.and(pathPredicate, pidPredicate); Predicate andPredicate = myBuilder.and(pathPredicate, pidPredicate);
codePredicates.add(andPredicate); theCodePredicates.add(andPredicate);
} }
if (!foundChainMatch) { if (!foundChainMatch) {
throw new InvalidRequestException(myContext.getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "invalidParameterChain", theParamName + '.' + ref.getChain())); throw new InvalidRequestException(myContext.getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "invalidParameterChain", theParamName + '.' + theRef.getChain()));
} }
myPredicates.add(myBuilder.or(toArray(codePredicates))); myPredicates.add(myBuilder.or(toArray(theCodePredicates)));
return;
}
} else {
throw new IllegalArgumentException("Invalid token type (expecting ReferenceParam): " + nextOr.getClass());
}
}
myPredicates.add(myBuilder.or(toArray(codePredicates)));
} }
private Subquery<Long> createLinkSubquery(boolean theFoundChainMatch, String theChain, String theSubResourceName, List<IQueryParameterType> theOrValues) { private Subquery<Long> createLinkSubquery(boolean theFoundChainMatch, String theChain, String theSubResourceName, List<IQueryParameterType> theOrValues) {

View File

@ -31,15 +31,14 @@ import ca.uhn.fhir.jpa.model.entity.ForcedId;
public interface IForcedIdDao extends JpaRepository<ForcedId, Long> { public interface IForcedIdDao extends JpaRepository<ForcedId, Long> {
@Query("SELECT f FROM ForcedId f WHERE myForcedId = :forced_id") // FIXME: JA We should log a performance warning if this is used since it's not indexed
public List<ForcedId> findByForcedId(@Param("forced_id") String theForcedId); @Query("SELECT f.myResourcePid FROM ForcedId f WHERE myForcedId IN (:forced_id)")
List<Long> findByForcedId(@Param("forced_id") Collection<String> theForcedId);
@Query("SELECT f FROM ForcedId f WHERE myResourceType = :resource_type AND myForcedId = :forced_id") @Query("SELECT f.myResourcePid FROM ForcedId f WHERE myResourceType = :resource_type AND myForcedId IN (:forced_id)")
public List<ForcedId> findByTypeAndForcedId(@Param("resource_type") String theResourceType, @Param("forced_id") String theForcedId); List<Long> findByTypeAndForcedId(@Param("resource_type") String theResourceType, @Param("forced_id") Collection<String> theForcedId);
@Query("SELECT f FROM ForcedId f WHERE f.myResourcePid = :resource_pid") @Query("SELECT f FROM ForcedId f WHERE f.myResourcePid = :resource_pid")
public ForcedId findByResourcePid(@Param("resource_pid") Long theResourcePid); ForcedId findByResourcePid(@Param("resource_pid") Long theResourcePid);
@Query("SELECT f FROM ForcedId f WHERE f.myResourcePid in (:pids)")
Collection<ForcedId> findByResourcePids(@Param("pids") Collection<Long> pids);
} }

View File

@ -62,6 +62,7 @@ public class DaoResourceLinkResolver implements IResourceLinkResolver {
Long valueOf; Long valueOf;
try { try {
valueOf = myIdHelperService.translateForcedIdToPid(theTypeString, theId); valueOf = myIdHelperService.translateForcedIdToPid(theTypeString, theId);
ourLog.trace("Translated {}/{} to resource PID {}", theType, theId, valueOf);
} catch (ResourceNotFoundException e) { } catch (ResourceNotFoundException e) {
if (myDaoConfig.isEnforceReferentialIntegrityOnWrite() == false) { if (myDaoConfig.isEnforceReferentialIntegrityOnWrite() == false) {
return null; return null;
@ -86,7 +87,9 @@ public class DaoResourceLinkResolver implements IResourceLinkResolver {
throw new InvalidRequestException("Resource " + resName + "/" + theId + " not found, specified in path: " + theNextPathsUnsplit); throw new InvalidRequestException("Resource " + resName + "/" + theId + " not found, specified in path: " + theNextPathsUnsplit);
} }
ourLog.trace("Resource PID {} is of type {}", valueOf, target.getResourceType());
if (!theTypeString.equals(target.getResourceType())) { if (!theTypeString.equals(target.getResourceType())) {
ourLog.error("Resource {} with PID {} was not of type {}", target.getIdDt().getValue(), target.getId(), theTypeString);
throw new UnprocessableEntityException( throw new UnprocessableEntityException(
"Resource contains reference to " + theNextId.getValue() + " but resource with ID " + theNextId.getIdPart() + " is actually of type " + target.getResourceType()); "Resource contains reference to " + theNextId.getValue() + " but resource with ID " + theNextId.getIdPart() + " is actually of type " + target.getResourceType());
} }

View File

@ -23,16 +23,19 @@ package ca.uhn.fhir.jpa.dao.index;
import ca.uhn.fhir.jpa.dao.DaoConfig; import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao; import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.model.entity.ForcedId; import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.model.dstu2.resource.Specimen;
import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.MultimapBuilder;
import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import java.util.ArrayList; import java.util.*;
import java.util.Collections;
import java.util.List; import static org.apache.commons.lang3.StringUtils.isBlank;
@Service @Service
public class IdHelperService { public class IdHelperService {
@ -45,37 +48,54 @@ public class IdHelperService {
myForcedIdDao.delete(forcedId); myForcedIdDao.delete(forcedId);
} }
public Long translateForcedIdToPid(String theResourceName, String theResourceId) { public Long translateForcedIdToPid(String theResourceName, String theResourceId) throws ResourceNotFoundException {
return translateForcedIdToPids(myDaoConfig, new IdDt(theResourceName, theResourceId), myForcedIdDao).get(0); // We only pass 1 input in so only 0..1 will come back
IdDt id = new IdDt(theResourceName, theResourceId);
List<Long> matches = translateForcedIdToPids(myDaoConfig, myForcedIdDao, Collections.singletonList(id));
assert matches.size() <= 1;
if (matches.isEmpty()) {
throw new ResourceNotFoundException(id);
}
return matches.get(0);
} }
public List<Long> translateForcedIdToPids(IIdType theId) { public List<Long> translateForcedIdToPids(Collection<IIdType> theId) {
return IdHelperService.translateForcedIdToPids(myDaoConfig, theId, myForcedIdDao); return IdHelperService.translateForcedIdToPids(myDaoConfig, myForcedIdDao, theId);
} }
static List<Long> translateForcedIdToPids(DaoConfig theDaoConfig, IIdType theId, IForcedIdDao theForcedIdDao) { static List<Long> translateForcedIdToPids(DaoConfig theDaoConfig, IForcedIdDao theForcedIdDao, Collection<IIdType> theId) {
Validate.isTrue(theId.hasIdPart()); theId.forEach(id -> Validate.isTrue(id.hasIdPart()));
if (theDaoConfig.getResourceClientIdStrategy() != DaoConfig.ClientIdStrategyEnum.ANY && isValidPid(theId)) { if (theId.isEmpty()) {
return Collections.singletonList(theId.getIdPartAsLong()); return Collections.emptyList();
}
List<Long> retVal = new ArrayList<>();
ListMultimap<String, String> typeToIds = MultimapBuilder.hashKeys().arrayListValues().build();
for (IIdType nextId : theId) {
if (theDaoConfig.getResourceClientIdStrategy() != DaoConfig.ClientIdStrategyEnum.ANY && isValidPid(nextId)) {
retVal.add(nextId.getIdPartAsLong());
} else { } else {
List<ForcedId> forcedId; if (nextId.hasResourceType()) {
if (theId.hasResourceType()) { typeToIds.put(nextId.getResourceType(), nextId.getIdPart());
forcedId = theForcedIdDao.findByTypeAndForcedId(theId.getResourceType(), theId.getIdPart());
} else { } else {
forcedId = theForcedIdDao.findByForcedId(theId.getIdPart()); typeToIds.put("", nextId.getIdPart());
}
}
} }
if (!forcedId.isEmpty()) { for (Map.Entry<String, Collection<String>> nextEntry : typeToIds.asMap().entrySet()) {
List<Long> retVal = new ArrayList<>(forcedId.size()); String nextResourceType = nextEntry.getKey();
for (ForcedId next : forcedId) { Collection<String> nextIds = nextEntry.getValue();
retVal.add(next.getResourcePid()); if (isBlank(nextResourceType)) {
retVal.addAll(theForcedIdDao.findByForcedId(nextIds));
} else {
retVal.addAll(theForcedIdDao.findByTypeAndForcedId(nextResourceType, nextIds));
} }
}
return retVal; return retVal;
} else {
throw new ResourceNotFoundException(theId);
}
}
} }
public String translatePidIdToForcedId(String theResourceType, Long theId) { public String translatePidIdToForcedId(String theResourceType, Long theId) {

View File

@ -193,8 +193,7 @@ public class SearchParamWithInlineReferencesExtractor {
for (String nextQueryString : queryStringsToPopulate) { for (String nextQueryString : queryStringsToPopulate) {
if (isNotBlank(nextQueryString)) { if (isNotBlank(nextQueryString)) {
// FIXME: JA change to trace ourLog.trace("Adding composite unique SP: {}", nextQueryString);
ourLog.info("Adding composite unique SP: {}", nextQueryString);
theParams.myCompositeStringUniques.add(new ResourceIndexedCompositeStringUnique(theEntity, nextQueryString)); theParams.myCompositeStringUniques.add(new ResourceIndexedCompositeStringUnique(theEntity, nextQueryString));
} }
} }

View File

@ -24,22 +24,36 @@ import net.ttddyy.dsproxy.ExecutionInfo;
import net.ttddyy.dsproxy.QueryInfo; import net.ttddyy.dsproxy.QueryInfo;
import net.ttddyy.dsproxy.proxy.ParameterSetOperation; import net.ttddyy.dsproxy.proxy.ParameterSetOperation;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder; import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.engine.jdbc.internal.BasicFormatterImpl; import org.hibernate.engine.jdbc.internal.BasicFormatterImpl;
import java.util.ArrayList; import java.util.*;
import java.util.Collections;
import java.util.List;
import java.util.Queue;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.trim;
public abstract class BaseCaptureQueriesListener implements ProxyDataSourceBuilder.SingleQueryExecution { public abstract class BaseCaptureQueriesListener implements ProxyDataSourceBuilder.SingleQueryExecution {
// Off by default: capturing a stack trace for every query is expensive.
private boolean myCaptureQueryStackTrace;

/**
 * Returns whether a full stack trace is recorded alongside each captured query.
 * <p>
 * This has an impact on performance! Use with caution.
 */
public boolean isCaptureQueryStackTrace() {
	return myCaptureQueryStackTrace;
}

/**
 * Controls whether a full stack trace is recorded alongside each captured
 * query, retrievable later via the query's stack-trace accessor.
 * <p>
 * This has an impact on performance! Use with caution.
 */
public void setCaptureQueryStackTrace(boolean theCaptureQueryStackTrace) {
	myCaptureQueryStackTrace = theCaptureQueryStackTrace;
}
@Override @Override
public void execute(ExecutionInfo theExecutionInfo, List<QueryInfo> theQueryInfoList) { public void execute(ExecutionInfo theExecutionInfo, List<QueryInfo> theQueryInfoList) {
final Queue<Query> queryList = provideQueryList(); final Queue<Query> queryList = provideQueryList();
for (QueryInfo next : theQueryInfoList) { for (QueryInfo next : theQueryInfoList) {
String sql = StringUtils.trim(next.getQuery()); String sql = trim(next.getQuery());
List<String> params; List<String> params;
if (next.getParametersList().size() > 0 && next.getParametersList().get(0).size() > 0) { if (next.getParametersList().size() > 0 && next.getParametersList().get(0).size() > 0) {
List<ParameterSetOperation> values = next List<ParameterSetOperation> values = next
@ -53,9 +67,14 @@ public abstract class BaseCaptureQueriesListener implements ProxyDataSourceBuild
params = Collections.emptyList(); params = Collections.emptyList();
} }
StackTraceElement[] stackTraceElements = null;
if (isCaptureQueryStackTrace()) {
stackTraceElements = Thread.currentThread().getStackTrace();
}
long elapsedTime = theExecutionInfo.getElapsedTime(); long elapsedTime = theExecutionInfo.getElapsedTime();
long startTime = System.currentTimeMillis() - elapsedTime; long startTime = System.currentTimeMillis() - elapsedTime;
queryList.add(new Query(sql, params, startTime, elapsedTime)); queryList.add(new Query(sql, params, startTime, elapsedTime, stackTraceElements));
} }
} }
@ -67,12 +86,14 @@ public abstract class BaseCaptureQueriesListener implements ProxyDataSourceBuild
private final List<String> myParams; private final List<String> myParams;
private final long myQueryTimestamp; private final long myQueryTimestamp;
private final long myElapsedTime; private final long myElapsedTime;
private final StackTraceElement[] myStackTrace;
Query(String theSql, List<String> theParams, long theQueryTimestamp, long theElapsedTime) { Query(String theSql, List<String> theParams, long theQueryTimestamp, long theElapsedTime, StackTraceElement[] theStackTraceElements) {
mySql = theSql; mySql = theSql;
myParams = Collections.unmodifiableList(theParams); myParams = Collections.unmodifiableList(theParams);
myQueryTimestamp = theQueryTimestamp; myQueryTimestamp = theQueryTimestamp;
myElapsedTime = theElapsedTime; myElapsedTime = theElapsedTime;
myStackTrace = theStackTraceElements;
} }
public long getQueryTimestamp() { public long getQueryTimestamp() {
@ -113,10 +134,13 @@ public abstract class BaseCaptureQueriesListener implements ProxyDataSourceBuild
} }
} }
return retVal; return trim(retVal);
} }
public StackTraceElement[] getStackTrace() {
return myStackTrace;
}
} }
} }

View File

@ -30,6 +30,7 @@ import org.slf4j.LoggerFactory;
import java.util.*; import java.util.*;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream;
/** /**
* This is a query listener designed to be plugged into a {@link ProxyDataSourceBuilder proxy DataSource}. * This is a query listener designed to be plugged into a {@link ProxyDataSourceBuilder proxy DataSource}.
@@ -70,27 +71,77 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe
return Collections.unmodifiableList(retVal); return Collections.unmodifiableList(retVal);
} }
/**
 * Filters the captured queries down to those issued on the calling thread
 * whose SQL begins with the given (lowercase) prefix - oldest first.
 */
private List<Query> getQueriesForCurrentThreadStartingWith(String theStart) {
	// Delegate to the general filter, scoped to the current thread's name
	return getQueriesStartingWith(theStart, Thread.currentThread().getName());
}
/**
 * Filters the captured queries by SQL prefix (compared case-insensitively)
 * and, optionally, by the name of the thread that issued them.
 *
 * @param theStart      lowercase SQL prefix to match, e.g. "select"
 * @param theThreadName thread name to match, or <code>null</code> to
 *                      accept queries from every thread
 */
private List<Query> getQueriesStartingWith(String theStart, String theThreadName) {
	List<Query> matches = new ArrayList<>();
	for (Query next : getCapturedQueries()) {
		boolean threadOk = theThreadName == null || next.getThreadName().equals(theThreadName);
		if (threadOk && next.getSql(false, false).toLowerCase().startsWith(theStart)) {
			matches.add(next);
		}
	}
	return matches;
}
/**
 * Filters the captured queries (from all threads) down to those whose SQL
 * begins with the given (lowercase) prefix - oldest first.
 */
private List<Query> getQueriesStartingWith(String theStart) {
	// A null thread name means "do not filter by thread"
	return getQueriesStartingWith(theStart, null);
}
/**
 * Returns all captured SELECT queries, from all threads - Index 0 is oldest
 */
public List<Query> getSelectQueries() {
	return getQueriesStartingWith("select");
}
/**
 * Returns all captured INSERT queries, from all threads - Index 0 is oldest
 */
public List<Query> getInsertQueries() {
	return getQueriesStartingWith("insert");
}
/**
 * Returns all captured UPDATE queries, from all threads - Index 0 is oldest
 */
public List<Query> getUpdateQueries() {
	return getQueriesStartingWith("update");
}
/**
 * Returns all captured DELETE queries, from all threads - Index 0 is oldest
 */
public List<Query> getDeleteQueries() {
	return getQueriesStartingWith("delete");
}
/** /**
* Returns all SELECT queries executed on the current thread - Index 0 is oldest * Returns all SELECT queries executed on the current thread - Index 0 is oldest
*/ */
public List<Query> getSelectQueriesForCurrentThread() { public List<Query> getSelectQueriesForCurrentThread() {
String currentThreadName = Thread.currentThread().getName(); return getQueriesForCurrentThreadStartingWith("select");
return getCapturedQueries()
.stream()
.filter(t -> t.getThreadName().equals(currentThreadName))
.filter(t -> t.getSql(false, false).toLowerCase().contains("select"))
.collect(Collectors.toList());
} }
/** /**
* Returns all INSERT queries executed on the current thread - Index 0 is oldest * Returns all INSERT queries executed on the current thread - Index 0 is oldest
*/ */
public List<Query> getInsertQueriesForCurrentThread() { public List<Query> getInsertQueriesForCurrentThread() {
return getCapturedQueries() return getQueriesForCurrentThreadStartingWith("insert");
.stream() }
.filter(t -> t.getThreadName().equals(Thread.currentThread().getName()))
.filter(t -> t.getSql(false, false).toLowerCase().contains("insert")) /**
.collect(Collectors.toList()); * Returns all UPDATE queries executed on the current thread - Index 0 is oldest
*/
public List<Query> getUpdateQueriesForCurrentThread() {
return getQueriesForCurrentThreadStartingWith("update");
}
/**
* Returns all UPDATE queries executed on the current thread - Index 0 is oldest
*/
public List<Query> getDeleteQueriesForCurrentThread() {
return getQueriesForCurrentThreadStartingWith("delete");
} }
/** /**
@ -104,6 +155,17 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe
ourLog.info("Select Queries:\n{}", String.join("\n", queries)); ourLog.info("Select Queries:\n{}", String.join("\n", queries));
} }
/**
* Log all captured SELECT queries
*/
public void logSelectQueries() {
List<String> queries = getSelectQueries()
.stream()
.map(CircularQueueCaptureQueriesListener::formatQueryAsSql)
.collect(Collectors.toList());
ourLog.info("Select Queries:\n{}", String.join("\n", queries));
}
/** /**
* Log first captured SELECT query * Log first captured SELECT query
*/ */
@ -127,9 +189,67 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe
ourLog.info("Insert Queries:\n{}", String.join("\n", queries)); ourLog.info("Insert Queries:\n{}", String.join("\n", queries));
} }
/**
* Log all captured INSERT queries
*/
public void logInsertQueries() {
List<String> queries = getInsertQueries()
.stream()
.map(CircularQueueCaptureQueriesListener::formatQueryAsSql)
.collect(Collectors.toList());
ourLog.info("Insert Queries:\n{}", String.join("\n", queries));
}
	/** Returns the number of captured SELECT queries across all threads. */
	public int countSelectQueries() {
		return getSelectQueries().size();
	}
	/** Returns the number of captured INSERT queries across all threads. */
	public int countInsertQueries() {
		return getInsertQueries().size();
	}
	/** Returns the number of captured UPDATE queries across all threads. */
	public int countUpdateQueries() {
		return getUpdateQueries().size();
	}
	/** Returns the number of captured DELETE queries across all threads. */
	public int countDeleteQueries() {
		return getDeleteQueries().size();
	}
	/** Returns the number of SELECT queries captured on the calling thread. */
	public int countSelectQueriesForCurrentThread() {
		return getSelectQueriesForCurrentThread().size();
	}
	/** Returns the number of INSERT queries captured on the calling thread. */
	public int countInsertQueriesForCurrentThread() {
		return getInsertQueriesForCurrentThread().size();
	}
	/** Returns the number of UPDATE queries captured on the calling thread. */
	public int countUpdateQueriesForCurrentThread() {
		return getUpdateQueriesForCurrentThread().size();
	}
	/** Returns the number of DELETE queries captured on the calling thread. */
	public int countDeleteQueriesForCurrentThread() {
		return getDeleteQueriesForCurrentThread().size();
	}
private static String formatQueryAsSql(Query theQuery) { private static String formatQueryAsSql(Query theQuery) {
String formattedSql = theQuery.getSql(true, true); String formattedSql = theQuery.getSql(true, true);
return "Query at " + new InstantType(new Date(theQuery.getQueryTimestamp())).getValueAsString() + " took " + StopWatch.formatMillis(theQuery.getElapsedTime()) + " on Thread: " + theQuery.getThreadName() + "\nSQL:\n" + formattedSql; StringBuilder b = new StringBuilder();
b.append("Query at ");
b.append(new InstantType(new Date(theQuery.getQueryTimestamp())).getValueAsString());
b.append(" took ").append(StopWatch.formatMillis(theQuery.getElapsedTime()));
b.append(" on Thread: ").append(theQuery.getThreadName());
b.append("\nSQL:\n").append(formattedSql);
if (theQuery.getStackTrace() != null) {
b.append("\nStack:\n ");
Stream<String> stackTraceStream = Arrays.stream(theQuery.getStackTrace())
.map(StackTraceElement::toString)
.filter(t->t.startsWith("ca."));
b.append(stackTraceStream.collect(Collectors.joining("\n ")));
}
b.append("\n");
return b.toString();
} }
} }

View File

@ -1,348 +0,0 @@
package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.util.TestUtil;
import net.ttddyy.dsproxy.QueryCount;
import net.ttddyy.dsproxy.listener.SingleQueryCountHolder;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.TestPropertySource;
import static org.junit.Assert.assertEquals;
/**
 * Verifies that JPA DAO create/update/search operations issue the expected
 * (minimal) number of SQL statements, as counted by the
 * {@link SingleQueryCountHolder} datasource proxy.
 */
@TestPropertySource(properties = {
	"scheduling_disabled=true"
})
public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4QueryCountTest.class);
	// Accumulates per-datasource SQL statement counts; cleared before each measured section
	@Autowired
	private SingleQueryCountHolder myCountHolder;
	@After
	public void afterResetDao() {
		// Restore config defaults mutated by individual tests
		myDaoConfig.setResourceMetaCountHardLimit(new DaoConfig().getResourceMetaCountHardLimit());
		myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
	}
	@Test
	public void testWritesPerformMinimalSqlStatements() {
		Patient p = new Patient();
		p.addIdentifier().setSystem("sys1").setValue("val1");
		p.addIdentifier().setSystem("sys2").setValue("val2");
		ourLog.info("** About to perform write");
		myCountHolder.clear();
		IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();
		ourLog.info("** Done performing write");
		assertEquals(6, getQueryCount().getInsert());
		assertEquals(0, getQueryCount().getUpdate());
		/*
		 * Now update the value
		 */
		p = new Patient();
		p.setId(id);
		p.addIdentifier().setSystem("sys1").setValue("val3");
		p.addIdentifier().setSystem("sys2").setValue("val4");
		ourLog.info("** About to perform write 2");
		myCountHolder.clear();
		myPatientDao.update(p).getId().toUnqualifiedVersionless();
		ourLog.info("** Done performing write 2");
		assertEquals(1, getQueryCount().getInsert());
		assertEquals(2, getQueryCount().getUpdate());
		assertEquals(0, getQueryCount().getDelete());
	}
	@Test
	public void testSearch() {
		for (int i = 0; i < 20; i++) {
			Patient p = new Patient();
			p.addIdentifier().setSystem("sys1").setValue("val" + i);
			myPatientDao.create(p);
		}
		myCountHolder.clear();
		ourLog.info("** About to perform search");
		IBundleProvider search = myPatientDao.search(new SearchParameterMap());
		ourLog.info("** About to retrieve resources");
		search.getResources(0, 20);
		ourLog.info("** Done retrieving resources");
		assertEquals(4, getQueryCount().getSelect());
		assertEquals(2, getQueryCount().getInsert());
		assertEquals(1, getQueryCount().getUpdate());
		assertEquals(0, getQueryCount().getDelete());
	}
	// Counts are aggregated under the unnamed ("") datasource key
	private QueryCount getQueryCount() {
		return myCountHolder.getQueryCountMap().get("");
	}
	@Test
	public void testCreateClientAssignedId() {
		myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
		myCountHolder.clear();
		ourLog.info("** Starting Update Non-Existing resource with client assigned ID");
		Patient p = new Patient();
		p.setId("A");
		p.getPhotoFirstRep().setCreationElement(new DateTimeType("2011")); // non-indexed field
		myPatientDao.update(p).getId().toUnqualifiedVersionless();
		assertEquals(1, getQueryCount().getSelect());
		assertEquals(4, getQueryCount().getInsert());
		assertEquals(0, getQueryCount().getDelete());
		// Because of the forced ID's bidirectional link HFJ_RESOURCE <-> HFJ_FORCED_ID
		assertEquals(1, getQueryCount().getUpdate());
		runInTransaction(() -> {
			assertEquals(1, myResourceTableDao.count());
			assertEquals(1, myResourceHistoryTableDao.count());
			assertEquals(1, myForcedIdDao.count());
			assertEquals(1, myResourceIndexedSearchParamTokenDao.count());
		});
		// Ok how about an update
		myCountHolder.clear();
		ourLog.info("** Starting Update Existing resource with client assigned ID");
		p = new Patient();
		p.setId("A");
		p.getPhotoFirstRep().setCreationElement(new DateTimeType("2012")); // non-indexed field
		myPatientDao.update(p).getId().toUnqualifiedVersionless();
		assertEquals(4, getQueryCount().getSelect());
		assertEquals(1, getQueryCount().getInsert());
		assertEquals(0, getQueryCount().getDelete());
		assertEquals(1, getQueryCount().getUpdate());
		runInTransaction(() -> {
			assertEquals(1, myResourceTableDao.count());
			assertEquals(2, myResourceHistoryTableDao.count());
			assertEquals(1, myForcedIdDao.count());
			assertEquals(1, myResourceIndexedSearchParamTokenDao.count());
		});
	}
	@Test
	public void testOneRowPerUpdate() {
		myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
		myCountHolder.clear();
		Patient p = new Patient();
		p.getPhotoFirstRep().setCreationElement(new DateTimeType("2011")); // non-indexed field
		IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();
		assertEquals(3, getQueryCount().getInsert());
		runInTransaction(() -> {
			assertEquals(1, myResourceTableDao.count());
			assertEquals(1, myResourceHistoryTableDao.count());
		});
		myCountHolder.clear();
		p = new Patient();
		p.setId(id);
		p.getPhotoFirstRep().setCreationElement(new DateTimeType("2012")); // non-indexed field
		myPatientDao.update(p).getId().toUnqualifiedVersionless();
		assertEquals(1, getQueryCount().getInsert());
		runInTransaction(() -> {
			assertEquals(1, myResourceTableDao.count());
			assertEquals(2, myResourceHistoryTableDao.count());
		});
	}
	@Test
	public void testUpdateReusesIndexes() {
		myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
		myCountHolder.clear();
		Patient pt = new Patient();
		pt.setActive(true);
		pt.addName().setFamily("FAMILY1").addGiven("GIVEN1A").addGiven("GIVEN1B");
		IIdType id = myPatientDao.create(pt).getId().toUnqualifiedVersionless();
		myCountHolder.clear();
		ourLog.info("** About to update");
		pt.setId(id);
		pt.getNameFirstRep().addGiven("GIVEN1C");
		myPatientDao.update(pt);
		// Existing string index rows should be reused, so no deletes
		assertEquals(0, getQueryCount().getDelete());
		assertEquals(2, getQueryCount().getInsert());
	}
	@Test
	public void testUpdateReusesIndexesString() {
		myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
		SearchParameterMap m1 = new SearchParameterMap().add("family", new StringParam("family1")).setLoadSynchronous(true);
		SearchParameterMap m2 = new SearchParameterMap().add("family", new StringParam("family2")).setLoadSynchronous(true);
		myCountHolder.clear();
		Patient pt = new Patient();
		pt.addName().setFamily("FAMILY1");
		IIdType id = myPatientDao.create(pt).getId().toUnqualifiedVersionless();
		myCountHolder.clear();
		assertEquals(1, myPatientDao.search(m1).size().intValue());
		assertEquals(0, myPatientDao.search(m2).size().intValue());
		ourLog.info("** About to update");
		pt = new Patient();
		pt.setId(id);
		pt.addName().setFamily("FAMILY2");
		myPatientDao.update(pt);
		assertEquals(0, getQueryCount().getDelete());
		assertEquals(1, getQueryCount().getInsert()); // Add an entry to HFJ_RES_VER
		assertEquals(2, getQueryCount().getUpdate()); // Update SPIDX_STRING and HFJ_RESOURCE
		// Searches should now reflect the updated index value
		assertEquals(0, myPatientDao.search(m1).size().intValue());
		assertEquals(1, myPatientDao.search(m2).size().intValue());
	}
	@Test
	public void testUpdateReusesIndexesToken() {
		myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
		SearchParameterMap m1 = new SearchParameterMap().add("gender", new TokenParam("male")).setLoadSynchronous(true);
		SearchParameterMap m2 = new SearchParameterMap().add("gender", new TokenParam("female")).setLoadSynchronous(true);
		myCountHolder.clear();
		Patient pt = new Patient();
		pt.setGender(Enumerations.AdministrativeGender.MALE);
		IIdType id = myPatientDao.create(pt).getId().toUnqualifiedVersionless();
		assertEquals(0, getQueryCount().getSelect());
		assertEquals(0, getQueryCount().getDelete());
		assertEquals(3, getQueryCount().getInsert());
		assertEquals(0, getQueryCount().getUpdate());
		assertEquals(1, myPatientDao.search(m1).size().intValue());
		assertEquals(0, myPatientDao.search(m2).size().intValue());
		/*
		 * Change a value
		 */
		ourLog.info("** About to update");
		myCountHolder.clear();
		pt = new Patient();
		pt.setId(id);
		pt.setGender(Enumerations.AdministrativeGender.FEMALE);
		myPatientDao.update(pt);
		/*
		 * Current SELECTs:
		 *   Select the resource from HFJ_RESOURCE
		 *   Select the version from HFJ_RES_VER
		 *   Select the current token indexes
		 */
		assertEquals(3, getQueryCount().getSelect());
		assertEquals(0, getQueryCount().getDelete());
		assertEquals(1, getQueryCount().getInsert()); // Add an entry to HFJ_RES_VER
		assertEquals(2, getQueryCount().getUpdate()); // Update SPIDX_STRING and HFJ_RESOURCE
		assertEquals(0, myPatientDao.search(m1).size().intValue());
		assertEquals(1, myPatientDao.search(m2).size().intValue());
		myCountHolder.clear();
		/*
		 * Drop a value
		 */
		ourLog.info("** About to update again");
		pt = new Patient();
		pt.setId(id);
		myPatientDao.update(pt);
		assertEquals(1, getQueryCount().getDelete());
		assertEquals(1, getQueryCount().getInsert());
		assertEquals(1, getQueryCount().getUpdate());
		assertEquals(0, myPatientDao.search(m1).size().intValue());
		assertEquals(0, myPatientDao.search(m2).size().intValue());
	}
	@Test
	public void testUpdateReusesIndexesResourceLink() {
		Organization org1 = new Organization();
		org1.setName("org1");
		IIdType orgId1 = myOrganizationDao.create(org1).getId().toUnqualifiedVersionless();
		Organization org2 = new Organization();
		org2.setName("org2");
		IIdType orgId2 = myOrganizationDao.create(org2).getId().toUnqualifiedVersionless();
		myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
		SearchParameterMap m1 = new SearchParameterMap().add("organization", new ReferenceParam(orgId1.getValue())).setLoadSynchronous(true);
		SearchParameterMap m2 = new SearchParameterMap().add("organization", new ReferenceParam(orgId2.getValue())).setLoadSynchronous(true);
		myCountHolder.clear();
		Patient pt = new Patient();
		pt.getManagingOrganization().setReference(orgId1.getValue());
		IIdType id = myPatientDao.create(pt).getId().toUnqualifiedVersionless();
		myCountHolder.clear();
		assertEquals(1, myPatientDao.search(m1).size().intValue());
		assertEquals(0, myPatientDao.search(m2).size().intValue());
		ourLog.info("** About to update");
		pt = new Patient();
		pt.setId(id);
		pt.getManagingOrganization().setReference(orgId2.getValue());
		myPatientDao.update(pt);
		assertEquals(0, getQueryCount().getDelete());
		assertEquals(1, getQueryCount().getInsert()); // Add an entry to HFJ_RES_VER
		assertEquals(2, getQueryCount().getUpdate()); // Update SPIDX_STRING and HFJ_RESOURCE
		assertEquals(0, myPatientDao.search(m1).size().intValue());
		assertEquals(1, myPatientDao.search(m2).size().intValue());
	}
	@AfterClass
	public static void afterClassClearContext() {
		TestUtil.clearAllStaticFieldsForUnitTest();
	}
}

View File

@ -2166,14 +2166,20 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
ourLog.info("P1[{}] P2[{}] O1[{}] O2[{}] D1[{}]", patientId01, patientId02, obsId01, obsId02, drId01); ourLog.info("P1[{}] P2[{}] O1[{}] O2[{}] D1[{}]", patientId01, patientId02, obsId01, obsId02, drId01);
List<Observation> result = toList( List<Observation> result;
myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam("testSearchResourceLinkWithTextLogicalId01"))));
// With an ID that exists
result = toList(myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam("testSearchResourceLinkWithTextLogicalId01"))));
assertEquals(1, result.size()); assertEquals(1, result.size());
assertEquals(obsId01.getIdPart(), result.get(0).getIdElement().getIdPart()); assertEquals(obsId01.getIdPart(), result.get(0).getIdElement().getIdPart());
// Now with an alphanumeric ID that doesn't exist
myCaptureQueriesListener.clear();
result = toList(myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam("testSearchResourceLinkWithTextLogicalId99")))); result = toList(myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam("testSearchResourceLinkWithTextLogicalId99"))));
assertEquals(0, result.size()); myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(result.toString(),0, result.size());
// And with a numeric ID that doesn't exist
result = toList(myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam("999999999999999")))); result = toList(myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam("999999999999999"))));
assertEquals(0, result.size()); assertEquals(0, result.size());

View File

@ -1,37 +1,47 @@
package ca.uhn.fhir.jpa.dao.r4; package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.dao.DaoConfig; import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchStatusEnum; import ca.uhn.fhir.jpa.entity.SearchStatusEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl; import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.SearchTotalModeEnum; import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
import ca.uhn.fhir.rest.api.SortSpec; import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.SummaryEnum; import ca.uhn.fhir.rest.api.SummaryEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam; import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.util.TestUtil; import ca.uhn.fhir.util.TestUtil;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.r4.model.Patient; import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.*;
import org.junit.After; import org.junit.After;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
import org.springframework.aop.framework.AopProxyUtils; import org.springframework.aop.framework.AopProxyUtils;
import org.springframework.scheduling.concurrent.ThreadPoolExecutorFactoryBean; import org.springframework.scheduling.concurrent.ThreadPoolExecutorFactoryBean;
import org.springframework.test.context.TestPropertySource;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future; import java.util.concurrent.Future;
import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.leftPad; import static org.apache.commons.lang3.StringUtils.leftPad;
import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.Assert.*; import static org.junit.Assert.*;
@TestPropertySource(properties = {
"scheduling_disabled=true"
})
@SuppressWarnings({"unchecked", "deprecation", "Duplicates"}) @SuppressWarnings({"unchecked", "deprecation", "Duplicates"})
public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
@ -41,6 +51,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
@Before @Before
public void before() { public void before() {
mySearchCoordinatorSvcImpl = (SearchCoordinatorSvcImpl) AopProxyUtils.getSingletonTarget(mySearchCoordinatorSvc); mySearchCoordinatorSvcImpl = (SearchCoordinatorSvcImpl) AopProxyUtils.getSingletonTarget(mySearchCoordinatorSvc);
myCaptureQueriesListener.setCaptureQueryStackTrace(true);
} }
@After @After
@ -48,10 +59,10 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
mySearchCoordinatorSvcImpl.setLoadingThrottleForUnitTests(null); mySearchCoordinatorSvcImpl.setLoadingThrottleForUnitTests(null);
mySearchCoordinatorSvcImpl.setSyncSizeForUnitTests(SearchCoordinatorSvcImpl.DEFAULT_SYNC_SIZE); mySearchCoordinatorSvcImpl.setSyncSizeForUnitTests(SearchCoordinatorSvcImpl.DEFAULT_SYNC_SIZE);
myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds()); myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
myCaptureQueriesListener.setCaptureQueryStackTrace(false);
} }
@Before private void create200Patients() {
public void start() {
runInTransaction(() -> { runInTransaction(() -> {
for (int i = 0; i < 200; i++) { for (int i = 0; i < 200; i++) {
Patient p = new Patient(); Patient p = new Patient();
@ -65,6 +76,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
@Test @Test
public void testFetchCountOnly() { public void testFetchCountOnly() {
create200Patients();
myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(20, 50, 190)); myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(20, 50, 190));
@ -82,6 +94,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
@Test @Test
public void testFetchCountWithMultipleIndexesOnOneResource() { public void testFetchCountWithMultipleIndexesOnOneResource() {
create200Patients();
// Already have 200, let's add number 201 with a bunch of similar names // Already have 200, let's add number 201 with a bunch of similar names
Patient p = new Patient(); Patient p = new Patient();
@ -136,6 +149,8 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
@Test @Test
public void testFetchTotalAccurateForSlowLoading() { public void testFetchTotalAccurateForSlowLoading() {
create200Patients();
mySearchCoordinatorSvcImpl.setLoadingThrottleForUnitTests(25); mySearchCoordinatorSvcImpl.setLoadingThrottleForUnitTests(25);
mySearchCoordinatorSvcImpl.setSyncSizeForUnitTests(10); mySearchCoordinatorSvcImpl.setSyncSizeForUnitTests(10);
@ -164,6 +179,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
@Test @Test
public void testFetchCountAndData() { public void testFetchCountAndData() {
create200Patients();
myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(20, 50, 190)); myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(20, 50, 190));
@ -200,6 +216,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
@Test @Test
public void testFetchRightUpToActualNumberExistingThenFetchAnotherPage() { public void testFetchRightUpToActualNumberExistingThenFetchAnotherPage() {
create200Patients();
myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(200, -1)); myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(200, -1));
@ -254,6 +271,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
@Test @Test
public void testFetchOnlySmallBatches() { public void testFetchOnlySmallBatches() {
create200Patients();
myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(20, 50, 190)); myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(20, 50, 190));
@ -379,6 +397,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
@Test @Test
public void testFetchMoreThanFirstPageSizeInFirstPage() { public void testFetchMoreThanFirstPageSizeInFirstPage() {
create200Patients();
myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(20, -1)); myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(20, -1));
@ -414,6 +433,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
@Test @Test
public void testFetchUnlimited() { public void testFetchUnlimited() {
create200Patients();
myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(20, -1)); myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(20, -1));
@ -472,7 +492,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
@Test @Test
public void testFetchSecondBatchInManyThreads() throws Throwable { public void testFetchSecondBatchInManyThreads() throws Throwable {
create200Patients();
myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(20, -1)); myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(20, -1));
/* /*
@ -541,6 +561,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
@Test @Test
public void testSearchThatOnlyReturnsASmallResult() { public void testSearchThatOnlyReturnsASmallResult() {
create200Patients();
myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(20, 50, 190)); myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(20, 50, 190));
@ -568,6 +589,439 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
} }
	/**
	 * A search with a big list of OR clauses for references should use a single
	 * SELECT ... WHERE ... IN clause rather than a whole bunch of SQL ORs.
	 */
	@Test
	public void testReferenceOrLinksUseInList() {
		// Create five organizations and one patient pointing at each
		List<Long> ids = new ArrayList<>();
		for (int i = 0; i < 5; i++) {
			Organization org = new Organization();
			org.setActive(true);
			ids.add(myOrganizationDao.create(org).getId().getIdPartAsLong());
		}
		for (int i = 0; i < 5; i++) {
			Patient pt = new Patient();
			pt.setManagingOrganization(new Reference("Organization/" + ids.get(i)));
			myPatientDao.create(pt).getId().getIdPartAsLong();
		}
		myCaptureQueriesListener.clear();
		SearchParameterMap map = new SearchParameterMap();
		map.add(Patient.SP_ORGANIZATION, new ReferenceOrListParam()
			.addOr(new ReferenceParam("Organization/" + ids.get(0)))
			.addOr(new ReferenceParam("Organization/" + ids.get(1)))
			.addOr(new ReferenceParam("Organization/" + ids.get(2)))
			.addOr(new ReferenceParam("Organization/" + ids.get(3)))
			.addOr(new ReferenceParam("Organization/" + ids.get(4)))
		);
		map.setLoadSynchronous(true);
		IBundleProvider search = myPatientDao.search(map);
		myCaptureQueriesListener.logSelectQueriesForCurrentThread();
		List<String> queries = myCaptureQueriesListener
			.getSelectQueriesForCurrentThread()
			.stream()
			.map(t -> t.getSql(true, false))
			.collect(Collectors.toList());
		// The search path should appear only once, with all targets in a single IN clause
		String resultingQueryNotFormatted = queries.get(0);
		assertEquals(resultingQueryNotFormatted, 1, StringUtils.countMatches(resultingQueryNotFormatted, "Patient.managingOrganization"));
		assertThat(resultingQueryNotFormatted, containsString("TARGET_RESOURCE_ID in ('" + ids.get(0) + "' , '" + ids.get(1) + "' , '" + ids.get(2) + "' , '" + ids.get(3) + "' , '" + ids.get(4) + "')"));
		// Ensure that the search actually worked
		assertEquals(5, search.size().intValue());
	}
	// Restore config defaults mutated by individual tests
	@After
	public void afterResetDao() {
		myDaoConfig.setResourceMetaCountHardLimit(new DaoConfig().getResourceMetaCountHardLimit());
		myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
	}
	/** Verifies that a create and an update each issue a minimal number of SQL statements. */
	@Test
	public void testWritesPerformMinimalSqlStatements() {
		Patient p = new Patient();
		p.addIdentifier().setSystem("sys1").setValue("val1");
		p.addIdentifier().setSystem("sys2").setValue("val2");
		ourLog.info("** About to perform write");
		myCaptureQueriesListener.clear();
		IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();
		ourLog.info("** Done performing write");
		assertEquals(6, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
		assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
		/*
		 * Now update the value
		 */
		p = new Patient();
		p.setId(id);
		p.addIdentifier().setSystem("sys1").setValue("val3");
		p.addIdentifier().setSystem("sys2").setValue("val4");
		ourLog.info("** About to perform write 2");
		myCaptureQueriesListener.clear();
		myPatientDao.update(p).getId().toUnqualifiedVersionless();
		ourLog.info("** Done performing write 2");
		assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
		assertEquals(2, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
		assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
	}
	/** Verifies SQL statement counts for an asynchronous search plus result retrieval. */
	@Test
	public void testSearch() {
		create200Patients();
		for (int i = 0; i < 20; i++) {
			Patient p = new Patient();
			p.addIdentifier().setSystem("sys1").setValue("val" + i);
			myPatientDao.create(p);
		}
		myCaptureQueriesListener.clear();
		ourLog.info("** About to perform search");
		IBundleProvider search = myPatientDao.search(new SearchParameterMap().setLoadSynchronous(false));
		ourLog.info("** About to retrieve resources");
		search.getResources(0, 20);
		ourLog.info("** Done retrieving resources");
		myCaptureQueriesListener.logSelectQueriesForCurrentThread();
		assertEquals(4, myCaptureQueriesListener.countSelectQueries());
		// Batches of 30 are written for each query - so 9 inserts total
		assertEquals(9, myCaptureQueriesListener.countInsertQueries());
		assertEquals(1, myCaptureQueriesListener.countUpdateQueries());
		assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
		// The async search runs on a worker thread, so the calling thread only sees the reads
		assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
		assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
		assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
		assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
	}
	/** Verifies SQL counts when creating, then updating, a resource with a client-assigned ID. */
	@Test
	public void testCreateClientAssignedId() {
		myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
		myCaptureQueriesListener.clear();
		ourLog.info("** Starting Update Non-Existing resource with client assigned ID");
		Patient p = new Patient();
		p.setId("A");
		p.getPhotoFirstRep().setCreationElement(new DateTimeType("2011")); // non-indexed field
		myPatientDao.update(p).getId().toUnqualifiedVersionless();
		assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
		assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
		assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
		// Because of the forced ID's bidirectional link HFJ_RESOURCE <-> HFJ_FORCED_ID
		assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
		runInTransaction(() -> {
			assertEquals(1, myResourceTableDao.count());
			assertEquals(1, myResourceHistoryTableDao.count());
			assertEquals(1, myForcedIdDao.count());
			assertEquals(1, myResourceIndexedSearchParamTokenDao.count());
		});
		// Ok how about an update
		myCaptureQueriesListener.clear();
		ourLog.info("** Starting Update Existing resource with client assigned ID");
		p = new Patient();
		p.setId("A");
		p.getPhotoFirstRep().setCreationElement(new DateTimeType("2012")); // non-indexed field
		myPatientDao.update(p).getId().toUnqualifiedVersionless();
		myCaptureQueriesListener.logSelectQueriesForCurrentThread();
		assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
		assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
		assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
		assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
		runInTransaction(() -> {
			assertEquals(1, myResourceTableDao.count());
			assertEquals(2, myResourceHistoryTableDao.count());
			assertEquals(1, myForcedIdDao.count());
			assertEquals(1, myResourceIndexedSearchParamTokenDao.count());
		});
	}
	/** Verifies that an update adds exactly one HFJ_RES_VER history row. */
	@Test
	public void testOneRowPerUpdate() {
		myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
		myCaptureQueriesListener.clear();
		Patient p = new Patient();
		p.getPhotoFirstRep().setCreationElement(new DateTimeType("2011")); // non-indexed field
		IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();
		assertEquals(3, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
		runInTransaction(() -> {
			assertEquals(1, myResourceTableDao.count());
			assertEquals(1, myResourceHistoryTableDao.count());
		});
		myCaptureQueriesListener.clear();
		p = new Patient();
		p.setId(id);
		p.getPhotoFirstRep().setCreationElement(new DateTimeType("2012")); // non-indexed field
		myPatientDao.update(p).getId().toUnqualifiedVersionless();
		assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
		runInTransaction(() -> {
			assertEquals(1, myResourceTableDao.count());
			assertEquals(2, myResourceHistoryTableDao.count());
		});
	}
@Test
public void testUpdateReusesIndexes() {
	// With missing-field indexing off, an update that only extends an existing
	// name should not delete and recreate index rows
	myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);

	myCaptureQueriesListener.clear();
	Patient patient = new Patient();
	patient.setActive(true);
	patient.addName().setFamily("FAMILY1").addGiven("GIVEN1A").addGiven("GIVEN1B");
	IIdType patientId = myPatientDao.create(patient).getId().toUnqualifiedVersionless();

	myCaptureQueriesListener.clear();
	ourLog.info("** About to update");

	patient.setId(patientId);
	patient.getNameFirstRep().addGiven("GIVEN1C");
	myPatientDao.update(patient);

	// No index rows are removed; only the new given-name row and history row are inserted
	assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
	assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
}
@Test
public void testUpdateReusesIndexesString() {
	// Verify that changing a string search param value updates the existing
	// HFJ_SPIDX_STRING row in place rather than deleting and re-inserting it
	myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
	SearchParameterMap searchFamily1 = new SearchParameterMap().add("family", new StringParam("family1")).setLoadSynchronous(true);
	SearchParameterMap searchFamily2 = new SearchParameterMap().add("family", new StringParam("family2")).setLoadSynchronous(true);

	myCaptureQueriesListener.clear();
	Patient patient = new Patient();
	patient.addName().setFamily("FAMILY1");
	IIdType patientId = myPatientDao.create(patient).getId().toUnqualifiedVersionless();

	myCaptureQueriesListener.clear();
	assertEquals(1, myPatientDao.search(searchFamily1).size().intValue());
	assertEquals(0, myPatientDao.search(searchFamily2).size().intValue());

	ourLog.info("** About to update");
	patient = new Patient();
	patient.setId(patientId);
	patient.addName().setFamily("FAMILY2");
	myPatientDao.update(patient);

	assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
	assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); // Add an entry to HFJ_RES_VER
	assertEquals(2, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); // Update SPIDX_STRING and HFJ_RESOURCE
	assertEquals(0, myPatientDao.search(searchFamily1).size().intValue());
	assertEquals(1, myPatientDao.search(searchFamily2).size().intValue());
}
@Test
public void testUpdateReusesIndexesToken() {
	// Verifies that changing (and later dropping) a token search parameter value
	// reuses the existing HFJ_SPIDX_TOKEN row where possible instead of
	// deleting and re-inserting it.
	myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
	SearchParameterMap m1 = new SearchParameterMap().add("gender", new TokenParam("male")).setLoadSynchronous(true);
	SearchParameterMap m2 = new SearchParameterMap().add("gender", new TokenParam("female")).setLoadSynchronous(true);
	myCaptureQueriesListener.clear();
	Patient pt = new Patient();
	pt.setGender(Enumerations.AdministrativeGender.MALE);
	IIdType id = myPatientDao.create(pt).getId().toUnqualifiedVersionless();
	// Initial create: inserts only (resource, history, and token index rows)
	assertEquals(0, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
	assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
	assertEquals(3, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
	assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
	assertEquals(1, myPatientDao.search(m1).size().intValue());
	assertEquals(0, myPatientDao.search(m2).size().intValue());
	/*
	 * Change a value
	 */
	ourLog.info("** About to update");
	myCaptureQueriesListener.clear();
	pt = new Patient();
	pt.setId(id);
	pt.setGender(Enumerations.AdministrativeGender.FEMALE);
	myPatientDao.update(pt);
	/*
	 * Current SELECTs:
	 * Select the resource from HFJ_RESOURCE
	 * Select the version from HFJ_RES_VER
	 * Select the current token indexes
	 */
	assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
	assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
	assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); // Add an entry to HFJ_RES_VER
	assertEquals(2, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); // Update the token index row and HFJ_RESOURCE (NOTE(review): earlier comment said SPIDX_STRING, likely copied from the string test - confirm it is HFJ_SPIDX_TOKEN here)
	assertEquals(0, myPatientDao.search(m1).size().intValue());
	assertEquals(1, myPatientDao.search(m2).size().intValue());
	myCaptureQueriesListener.clear();
	/*
	 * Drop a value
	 */
	ourLog.info("** About to update again");
	pt = new Patient();
	pt.setId(id);
	myPatientDao.update(pt);
	// Removing the gender entirely deletes the now-orphaned index row
	assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
	assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
	assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
	assertEquals(0, myPatientDao.search(m1).size().intValue());
	assertEquals(0, myPatientDao.search(m2).size().intValue());
}
@Test
public void testUpdateReusesIndexesResourceLink() {
	// Verifies that repointing a reference (managingOrganization) from one target
	// to another updates the existing HFJ_RES_LINK row in place rather than
	// deleting and re-inserting it.
	Organization org1 = new Organization();
	org1.setName("org1");
	IIdType orgId1 = myOrganizationDao.create(org1).getId().toUnqualifiedVersionless();
	Organization org2 = new Organization();
	org2.setName("org2");
	IIdType orgId2 = myOrganizationDao.create(org2).getId().toUnqualifiedVersionless();
	myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
	SearchParameterMap m1 = new SearchParameterMap().add("organization", new ReferenceParam(orgId1.getValue())).setLoadSynchronous(true);
	SearchParameterMap m2 = new SearchParameterMap().add("organization", new ReferenceParam(orgId2.getValue())).setLoadSynchronous(true);
	myCaptureQueriesListener.clear();
	Patient pt = new Patient();
	pt.getManagingOrganization().setReference(orgId1.getValue());
	IIdType id = myPatientDao.create(pt).getId().toUnqualifiedVersionless();
	myCaptureQueriesListener.clear();
	assertEquals(1, myPatientDao.search(m1).size().intValue());
	assertEquals(0, myPatientDao.search(m2).size().intValue());
	ourLog.info("** About to update");
	pt = new Patient();
	pt.setId(id);
	pt.getManagingOrganization().setReference(orgId2.getValue());
	myPatientDao.update(pt);
	assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
	assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); // Add an entry to HFJ_RES_VER
	assertEquals(2, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); // Update the reference index row and HFJ_RESOURCE (NOTE(review): earlier comment said SPIDX_STRING, likely copied from the string test - confirm it is HFJ_RES_LINK here)
	assertEquals(0, myPatientDao.search(m1).size().intValue());
	assertEquals(1, myPatientDao.search(m2).size().intValue());
}
@Test
public void testReferenceOrLinksUseInList_ForcedIds() {
	// Verifies that a reference search with multiple OR values against targets
	// that use client-assigned (forced) IDs generates a single SQL IN clause,
	// both for the forced-ID resolution query and for the search itself.

	// Create five organizations with client-assigned IDs ORG0..ORG4
	List<String> ids = new ArrayList<>();
	for (int i = 0; i < 5; i++) {
		Organization org = new Organization();
		org.setId("ORG" + i);
		org.setActive(true);
		runInTransaction(() -> {
			IIdType id = myOrganizationDao.update(org).getId();
			ids.add(id.getIdPart());
		});
	}

	runInTransaction(() -> {
		for (ResourceTable next : myResourceTableDao.findAll()) {
			ourLog.info("Resource pid {} of type {}", next.getId(), next.getResourceType());
		}
	});

	// Create one patient referencing each organization
	for (int i = 0; i < 5; i++) {
		Patient pt = new Patient();
		pt.setManagingOrganization(new Reference("Organization/" + ids.get(i)));
		myPatientDao.create(pt);
	}

	myCaptureQueriesListener.clear();
	SearchParameterMap map = new SearchParameterMap();
	map.add(Patient.SP_ORGANIZATION, new ReferenceOrListParam()
		.addOr(new ReferenceParam("Organization/" + ids.get(0)))
		.addOr(new ReferenceParam("Organization/" + ids.get(1)))
		.addOr(new ReferenceParam("Organization/" + ids.get(2)))
		.addOr(new ReferenceParam("Organization/" + ids.get(3)))
		.addOr(new ReferenceParam("Organization/" + ids.get(4)))
	);
	map.setLoadSynchronous(true);
	IBundleProvider search = myPatientDao.search(map);

	myCaptureQueriesListener.logSelectQueriesForCurrentThread();
	List<String> queries = myCaptureQueriesListener
		.getSelectQueriesForCurrentThread()
		.stream()
		.map(t -> t.getSql(true, false))
		.collect(Collectors.toList());

	// Forced ID resolution uses a single IN clause
	String resultingQueryNotFormatted = queries.get(0);
	assertThat(resultingQueryNotFormatted, containsString("RESOURCE_TYPE='Organization'"));
	assertThat(resultingQueryNotFormatted, containsString("FORCED_ID in ('ORG0' , 'ORG1' , 'ORG2' , 'ORG3' , 'ORG4')"));

	// The search itself references the path once and uses a single IN clause
	resultingQueryNotFormatted = queries.get(1);
	assertEquals(resultingQueryNotFormatted, 1, StringUtils.countMatches(resultingQueryNotFormatted, "Patient.managingOrganization"));
	assertThat(resultingQueryNotFormatted, matchesPattern(".*TARGET_RESOURCE_ID in \\('[0-9]+' , '[0-9]+' , '[0-9]+' , '[0-9]+' , '[0-9]+'\\).*"));

	// Ensure that the search actually worked
	assertEquals(5, search.size().intValue());
}
@AfterClass @AfterClass
public static void afterClassClearContext() { public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest(); TestUtil.clearAllStaticFieldsForUnitTest();

View File

@ -1843,10 +1843,9 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
assertEquals(id.withVersion("3").getValue(), history.getEntry().get(0).getResource().getId()); assertEquals(id.withVersion("3").getValue(), history.getEntry().get(0).getResource().getId());
assertEquals(1, ((Patient) history.getEntry().get(0).getResource()).getName().size()); assertEquals(1, ((Patient) history.getEntry().get(0).getResource()).getName().size());
assertEquals(id.withVersion("2").getValue(), history.getEntry().get(1).getResource().getId());
assertEquals(HTTPVerb.DELETE, history.getEntry().get(1).getRequest().getMethodElement().getValue()); assertEquals(HTTPVerb.DELETE, history.getEntry().get(1).getRequest().getMethodElement().getValue());
assertEquals("http://localhost:" + ourPort + "/fhir/context/Patient/" + id.getIdPart() + "/_history/2", history.getEntry().get(1).getRequest().getUrl()); assertEquals("http://localhost:" + ourPort + "/fhir/context/Patient/" + id.getIdPart() + "/_history/2", history.getEntry().get(1).getRequest().getUrl());
assertEquals(0, ((Patient) history.getEntry().get(1).getResource()).getName().size()); assertEquals(null, history.getEntry().get(1).getResource());
assertEquals(id.withVersion("1").getValue(), history.getEntry().get(2).getResource().getId()); assertEquals(id.withVersion("1").getValue(), history.getEntry().get(2).getResource().getId());
assertEquals(1, ((Patient) history.getEntry().get(2).getResource()).getName().size()); assertEquals(1, ((Patient) history.getEntry().get(2).getResource()).getName().size());

View File

@ -2161,15 +2161,16 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
patient.setId(id); patient.setId(id);
ourClient.update().resource(patient).execute(); ourClient.update().resource(patient).execute();
ourLog.info("Res ID: {}", id);
Bundle history = ourClient.history().onInstance(id).andReturnBundle(Bundle.class).prettyPrint().summaryMode(SummaryEnum.DATA).execute(); Bundle history = ourClient.history().onInstance(id).andReturnBundle(Bundle.class).prettyPrint().summaryMode(SummaryEnum.DATA).execute();
assertEquals(3, history.getEntry().size()); assertEquals(3, history.getEntry().size());
assertEquals(id.withVersion("3").getValue(), history.getEntry().get(0).getResource().getId()); assertEquals(id.withVersion("3").getValue(), history.getEntry().get(0).getResource().getId());
assertEquals(1, ((Patient) history.getEntry().get(0).getResource()).getName().size()); assertEquals(1, ((Patient) history.getEntry().get(0).getResource()).getName().size());
assertEquals(id.withVersion("2").getValue(), history.getEntry().get(1).getResource().getId());
assertEquals(HTTPVerb.DELETE, history.getEntry().get(1).getRequest().getMethodElement().getValue()); assertEquals(HTTPVerb.DELETE, history.getEntry().get(1).getRequest().getMethodElement().getValue());
assertEquals("http://localhost:" + ourPort + "/fhir/context/Patient/" + id.getIdPart() + "/_history/2", history.getEntry().get(1).getRequest().getUrl()); assertEquals("http://localhost:" + ourPort + "/fhir/context/Patient/" + id.getIdPart() + "/_history/2", history.getEntry().get(1).getRequest().getUrl());
assertEquals(0, ((Patient) history.getEntry().get(1).getResource()).getName().size()); assertEquals(null, history.getEntry().get(1).getResource());
assertEquals(id.withVersion("1").getValue(), history.getEntry().get(2).getResource().getId()); assertEquals(id.withVersion("1").getValue(), history.getEntry().get(2).getResource().getId());
assertEquals(1, ((Patient) history.getEntry().get(2).getResource()).getName().size()); assertEquals(1, ((Patient) history.getEntry().get(2).getResource()).getName().size());

View File

@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt; import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.rest.api.Constants;
import org.hibernate.annotations.OptimisticLock; import org.hibernate.annotations.OptimisticLock;
import javax.persistence.*; import javax.persistence.*;
@ -61,6 +62,22 @@ public abstract class BaseHasResource implements IBaseResourceEntity {
@OptimisticLock(excluded = true) @OptimisticLock(excluded = true)
private Date myUpdated; private Date myUpdated;
/**
* This is stored as an optimization to avoid needing to query for this
* after an update
*/
@Transient
private transient String myTransientForcedId;
public String getTransientForcedId() {
return myTransientForcedId;
}
public void setTransientForcedId(String theTransientForcedId) {
myTransientForcedId = theTransientForcedId;
}
public abstract BaseTag addTag(TagDefinition theDef); public abstract BaseTag addTag(TagDefinition theDef);
@Override @Override
@ -94,7 +111,16 @@ public abstract class BaseHasResource implements IBaseResourceEntity {
public abstract Long getId(); public abstract Long getId();
@Override @Override
public abstract IdDt getIdDt(); public IdDt getIdDt() {
if (getForcedId() == null) {
Long id = getResourceId();
return new IdDt(getResourceType() + '/' + id + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
} else {
// Avoid a join query if possible
String forcedId = getTransientForcedId() != null ? getTransientForcedId() : getForcedId().getForcedId();
return new IdDt(getResourceType() + '/' + forcedId + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
}
}
@Override @Override
public InstantDt getPublished() { public InstantDt getPublished() {
@ -109,6 +135,10 @@ public abstract class BaseHasResource implements IBaseResourceEntity {
myPublished = thePublished; myPublished = thePublished;
} }
public void setPublished(InstantDt thePublished) {
myPublished = thePublished.getValue();
}
@Override @Override
public abstract Long getResourceId(); public abstract Long getResourceId();
@ -126,6 +156,10 @@ public abstract class BaseHasResource implements IBaseResourceEntity {
myUpdated = theUpdated; myUpdated = theUpdated;
} }
public void setUpdated(InstantDt theUpdated) {
myUpdated = theUpdated.getValue();
}
@Override @Override
public Date getUpdatedDate() { public Date getUpdatedDate() {
return myUpdated; return myUpdated;
@ -143,12 +177,4 @@ public abstract class BaseHasResource implements IBaseResourceEntity {
myHasTags = theHasTags; myHasTags = theHasTags;
} }
public void setPublished(InstantDt thePublished) {
myPublished = thePublished.getValue();
}
public void setUpdated(InstantDt theUpdated) {
myUpdated = theUpdated.getValue();
}
} }

View File

@ -51,7 +51,7 @@ public class ForcedId {
private Long myId; private Long myId;
@JoinColumn(name = "RESOURCE_PID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_FORCEDID_RESOURCE")) @JoinColumn(name = "RESOURCE_PID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_FORCEDID_RESOURCE"))
@OneToOne() @OneToOne(fetch = FetchType.LAZY)
private ResourceTable myResource; private ResourceTable myResource;
@Column(name = "RESOURCE_PID", nullable = false, updatable = false, insertable = false) @Column(name = "RESOURCE_PID", nullable = false, updatable = false, insertable = false)

View File

@ -124,16 +124,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
myId = theId; myId = theId;
} }
@Override
public IdDt getIdDt() {
if (getForcedId() == null) {
Long id = myResourceId;
return new IdDt(myResourceType + '/' + id + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
} else {
return new IdDt(getForcedId().getResourceType() + '/' + getForcedId().getForcedId() + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
}
}
public byte[] getResource() { public byte[] getResource() {
return myResource; return myResource;
} }

View File

@ -260,16 +260,6 @@ public class ResourceTable extends BaseHasResource implements Serializable {
myId = theId; myId = theId;
} }
@Override
public IdDt getIdDt() {
if (getForcedId() == null) {
Long id = myId;
return new IdDt(myResourceType + '/' + id + '/' + Constants.PARAM_HISTORY + '/' + myVersion);
} else {
return new IdDt(getForcedId().getResourceType() + '/' + getForcedId().getForcedId() + '/' + Constants.PARAM_HISTORY + '/' + myVersion);
}
}
public Long getIndexStatus() { public Long getIndexStatus() {
return myIndexStatus; return myIndexStatus;
} }
@ -577,6 +567,7 @@ public class ResourceTable extends BaseHasResource implements Serializable {
retVal.setResourceId(myId); retVal.setResourceId(myId);
retVal.setResourceType(myResourceType); retVal.setResourceType(myResourceType);
retVal.setVersion(myVersion); retVal.setVersion(myVersion);
retVal.setTransientForcedId(getTransientForcedId());
retVal.setPublished(getPublished()); retVal.setPublished(getPublished());
retVal.setUpdated(getUpdated()); retVal.setUpdated(getUpdated());

View File

@ -121,6 +121,10 @@ public class Dstu3BundleFactory implements IVersionSpecificBundleFactory {
entry.getRequest().getMethodElement().setValueAsString(httpVerb); entry.getRequest().getMethodElement().setValueAsString(httpVerb);
entry.getRequest().getUrlElement().setValue(next.getId()); entry.getRequest().getUrlElement().setValue(next.getId());
} }
if ("DELETE".equals(httpVerb)) {
entry.setResource(null);
}
} }
/* /*
@ -212,6 +216,9 @@ public class Dstu3BundleFactory implements IVersionSpecificBundleFactory {
entry.getRequest().setUrl(id.getValue()); entry.getRequest().setUrl(id.getValue());
} }
} }
if ("DELETE".equals(httpVerb)) {
entry.setResource(null);
}
String searchMode = ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.get(nextAsResource); String searchMode = ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.get(nextAsResource);
if (searchMode != null) { if (searchMode != null) {

View File

@ -123,6 +123,9 @@ public class R4BundleFactory implements IVersionSpecificBundleFactory {
entry.getRequest().getMethodElement().setValueAsString(httpVerb); entry.getRequest().getMethodElement().setValueAsString(httpVerb);
entry.getRequest().getUrlElement().setValue(next.getId()); entry.getRequest().getUrlElement().setValue(next.getId());
} }
if ("DELETE".equals(httpVerb)) {
entry.setResource(null);
}
} }
/* /*
@ -216,6 +219,9 @@ public class R4BundleFactory implements IVersionSpecificBundleFactory {
entry.getRequest().setUrl(id.getValue()); entry.getRequest().setUrl(id.getValue());
} }
} }
if ("DELETE".equals(httpVerb)) {
entry.setResource(null);
}
// Populate Response // Populate Response
if ("1".equals(id.getVersionIdPart())) { if ("1".equals(id.getVersionIdPart())) {

View File

@ -100,6 +100,18 @@
a REFERENCE search parameter was used with arguments that consisted of a REFERENCE search parameter was used with arguments that consisted of
unqualified resource IDs. unqualified resource IDs.
</action> </action>
<action type="add">
When performing a search in the JPA server where one of the parameters is a
reference with multiple values (e.g. Patient?organization=A,B) the generated
SQL was previously a set of OR clauses and this has been collapsed into a single
IN clause for better performance.
</action>
<action type="fix">
When returning the results of a history operation from a HAPI FHIR server,
any entries with a method of DELETE contained a stub resource in
Bundle.entry.resource, even though the FHIR spec states that this field
should be empty. This was corrected.
</action>
</release> </release>
<release version="3.7.0" date="2019-02-06" description="Gale"> <release version="3.7.0" date="2019-02-06" description="Gale">
<action type="add"> <action type="add">