Merge branch 'master' into date-match-bug

This commit is contained in:
Gary Graham 2020-02-25 17:26:53 -05:00
commit df9e86ec66
14 changed files with 213 additions and 26 deletions

View File

@ -0,0 +1,6 @@
---
type: perf
issue: 1726
title: When performing date range searches in the JPA server, the server was generating extra
unnecessary joins in the generated SQL. This has been streamlined, which should result in
faster searches when performing date range queries.

View File

@ -33,6 +33,7 @@ import ca.uhn.fhir.jpa.subscription.dbmatcher.CompositeInMemoryDaoSubscriptionMa
import ca.uhn.fhir.jpa.subscription.dbmatcher.DaoSubscriptionMatcher;
import ca.uhn.fhir.jpa.subscription.module.cache.LinkedBlockingQueueSubscribableChannelFactory;
import ca.uhn.fhir.jpa.subscription.module.channel.ISubscribableChannelFactory;
import ca.uhn.fhir.jpa.subscription.module.channel.SubscriptionChannelFactory;
import ca.uhn.fhir.jpa.subscription.module.matcher.ISubscriptionMatcher;
import ca.uhn.fhir.jpa.subscription.module.matcher.InMemorySubscriptionMatcher;
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
@ -204,10 +205,15 @@ public abstract class BaseConfig {
* Create a @Primary @Bean if you need a different implementation
*/
@Bean
public ISubscribableChannelFactory linkedBlockingQueueSubscribableChannelFactory() {
public ISubscribableChannelFactory subscribableChannelFactory() {
return new LinkedBlockingQueueSubscribableChannelFactory();
}
@Bean
public SubscriptionChannelFactory subscriptionChannelFactory() {
return new SubscriptionChannelFactory();
}
@Bean
@Primary
public ISubscriptionMatcher subscriptionMatcherCompositeInMemoryDatabase() {
@ -280,6 +286,4 @@ public abstract class BaseConfig {
private static HapiFhirHibernateJpaDialect hibernateJpaDialect(HapiLocalizer theLocalizer) {
return new HapiFhirHibernateJpaDialect(theLocalizer);
}
}

View File

@ -41,13 +41,17 @@ import javax.persistence.criteria.Join;
import javax.persistence.criteria.Predicate;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Component
@Scope("prototype")
public class PredicateBuilderDate extends BasePredicateBuilder implements IPredicateBuilder {
private static final Logger ourLog = LoggerFactory.getLogger(PredicateBuilderDate.class);
private Map<String, Join<ResourceTable, ResourceIndexedSearchParamDate>> myJoinMap;
PredicateBuilderDate(SearchBuilder theSearchBuilder) {
super(theSearchBuilder);
}
@ -58,7 +62,18 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
List<? extends IQueryParameterType> theList,
SearchFilterParser.CompareOperation operation) {
Join<ResourceTable, ResourceIndexedSearchParamDate> join = createJoin(SearchBuilderJoinEnum.DATE, theParamName);
boolean newJoin = false;
if (myJoinMap == null) {
myJoinMap = new HashMap<>();
}
String key = theResourceName + " " + theParamName;
Join<ResourceTable, ResourceIndexedSearchParamDate> join = myJoinMap.get(key);
if (join == null) {
join = createJoin(SearchBuilderJoinEnum.DATE, theParamName);
myJoinMap.put(key, join);
newJoin = true;
}
if (theList.get(0).getMissing() != null) {
Boolean missing = theList.get(0).getMissing();
@ -79,7 +94,14 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
}
Predicate orPredicates = myBuilder.or(toArray(codePredicates));
myQueryRoot.addPredicate(orPredicates);
if (newJoin) {
Predicate identityAndValuePredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, orPredicates);
myQueryRoot.addPredicate(identityAndValuePredicate);
} else {
myQueryRoot.addPredicate(orPredicates);
}
return orPredicates;
}
@ -88,12 +110,13 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
String theParamName,
CriteriaBuilder theBuilder,
From<?, ResourceIndexedSearchParamDate> theFrom) {
return createPredicateDate(theParam,
Predicate predicateDate = createPredicateDate(theParam,
theResourceName,
theParamName,
theBuilder,
theFrom,
null);
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, predicateDate);
}
private Predicate createPredicateDate(IQueryParameterType theParam,
@ -101,7 +124,7 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
String theParamName,
CriteriaBuilder theBuilder,
From<?, ResourceIndexedSearchParamDate> theFrom,
SearchFilterParser.CompareOperation operation) {
SearchFilterParser.CompareOperation theOperation) {
Predicate p;
if (theParam instanceof DateParam) {
@ -111,7 +134,7 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
p = createPredicateDateFromRange(theBuilder,
theFrom,
range,
operation);
theOperation);
} else {
// TODO: handle missing date param?
p = null;
@ -121,12 +144,12 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
p = createPredicateDateFromRange(theBuilder,
theFrom,
range,
operation);
theOperation);
} else {
throw new IllegalArgumentException("Invalid token type: " + theParam.getClass());
}
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, p);
return p;
}
private boolean isNullOrDayPrecision(DateParam theDateParam) {

View File

@ -3258,6 +3258,105 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
}
}
@Test
// Verifies that multiple AND clauses against the same date search parameter reuse a
// single SQL join (and a single hash_identity predicate) instead of generating one
// join per clause, by counting occurrences of "join"/"hash_identity"/"sp_value_low"
// in the captured SQL.
public void testSearchWithDateAndReusesExistingJoin() {
// Add a search parameter to Observation.issued, so that between that one
// and the existing one on Observation.effective, we have 2 date search parameters
// on the same resource
{
SearchParameter sp = new SearchParameter();
sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
sp.addBase("Observation");
sp.setType(Enumerations.SearchParamType.DATE);
sp.setCode("issued");
sp.setExpression("Observation.issued");
mySearchParameterDao.create(sp);
// Force the registry to pick up the new SearchParameter before searching
mySearchParamRegistry.forceRefresh();
}
// Dates are reversed on these two observations
// (issued/effective swapped) so each search below matches exactly one of them
IIdType obsId1;
{
Observation obs = new Observation();
obs.setIssuedElement(new InstantType("2020-06-06T12:00:00Z"));
obs.setEffective(new InstantType("2019-06-06T12:00:00Z"));
obsId1 = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
}
IIdType obsId2;
{
Observation obs = new Observation();
obs.setIssuedElement(new InstantType("2019-06-06T12:00:00Z"));
obs.setEffective(new InstantType("2020-06-06T12:00:00Z"));
obsId2 = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
}
// Add two with a period
IIdType obsId3;
{
Observation obs = new Observation();
obs.setEffective(new Period().setStartElement(new DateTimeType("2000-06-06T12:00:00Z")).setEndElement(new DateTimeType("2001-06-06T12:00:00Z")));
obsId3 = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
}
IIdType obsId4;
{
Observation obs = new Observation();
obs.setEffective(new Period().setStartElement(new DateTimeType("2001-01-01T12:00:00Z")).setEndElement(new DateTimeType("2002-01-01T12:00:00Z")));
obsId4 = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
}
// Two AND instances of 1 SP
// Expect: both clauses share one join and one hash_identity predicate,
// with one sp_value_low comparison per clause (2 total)
{
myCaptureQueriesListener.clear();
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add("issued", new DateParam("ge2020-06-05"));
params.add("issued", new DateParam("lt2020-06-07"));
List<IIdType> patients = toUnqualifiedVersionlessIds(myObservationDao.search(params));
assertThat(patients.toString(), patients, contains(obsId1));
String searchQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search query:\n{}", searchQuery);
assertEquals(searchQuery, 1, StringUtils.countMatches(searchQuery.toLowerCase(), "join"));
assertEquals(searchQuery, 1, StringUtils.countMatches(searchQuery.toLowerCase(), "hash_identity"));
assertEquals(searchQuery, 2, StringUtils.countMatches(searchQuery.toLowerCase(), "sp_value_low"));
}
// Two AND instances of 1 SP and 1 instance of another
// Expect: one join/hash_identity per distinct parameter (2 each), still no
// per-clause join duplication (4 sp_value_low comparisons for 4 clauses)
{
myCaptureQueriesListener.clear();
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add("issued", new DateParam("ge2020-06-05"));
params.add("issued", new DateParam("lt2020-06-07"));
params.add("date", new DateParam("gt2019-06-05"));
params.add("date", new DateParam("lt2019-06-07"));
List<IIdType> patients = toUnqualifiedVersionlessIds(myObservationDao.search(params));
assertThat(patients.toString(), patients, contains(obsId1));
String searchQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search query:\n{}", searchQuery);
assertEquals(searchQuery, 2, StringUtils.countMatches(searchQuery.toLowerCase(), "join"));
assertEquals(searchQuery, 2, StringUtils.countMatches(searchQuery.toLowerCase(), "hash_identity"));
assertEquals(searchQuery, 4, StringUtils.countMatches(searchQuery.toLowerCase(), "sp_value_low"));
}
// Period search
// Single clause: exactly one join, one hash_identity, one sp_value_low
{
myCaptureQueriesListener.clear();
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add("date", new DateParam("lt2002-01-01T12:00:00Z"));
List<IIdType> patients = toUnqualifiedVersionlessIds(myObservationDao.search(params));
assertThat(patients.toString(), patients, containsInAnyOrder(obsId3, obsId4));
String searchQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search query:\n{}", searchQuery);
assertEquals(searchQuery, 1, StringUtils.countMatches(searchQuery.toLowerCase(), "join"));
assertEquals(searchQuery, 1, StringUtils.countMatches(searchQuery.toLowerCase(), "hash_identity"));
assertEquals(searchQuery, 1, StringUtils.countMatches(searchQuery.toLowerCase(), "sp_value_low"));
}
}
@Test
public void testSearchWithFetchSizeDefaultMaximum() {
myDaoConfig.setFetchSizeDefaultMaximum(5);

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.migrate;
*/
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.InitializeSchemaTask;
import com.google.common.annotations.VisibleForTesting;
import org.flywaydb.core.Flyway;
import org.flywaydb.core.api.MigrationInfoService;
@ -87,7 +88,11 @@ public class FlywayMigrator extends BaseMigrator {
@Override
public void addTasks(List<BaseTask> theTasks) {
theTasks.forEach(this::addTask);
if ("true".equals(System.getProperty("unit_test_mode"))) {
theTasks.stream().filter(task -> task instanceof InitializeSchemaTask).forEach(this::addTask);
} else {
theTasks.forEach(this::addTask);
}
}
@Override

View File

@ -108,7 +108,9 @@ public abstract class BaseTask<T extends BaseTask> {
JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
try {
int changesCount = jdbcTemplate.update(theSql, theArguments);
logInfo(ourLog, "SQL \"{}\" returned {}", theSql, changesCount);
if (!"true".equals(System.getProperty("unit_test_mode"))) {
logInfo(ourLog, "SQL \"{}\" returned {}", theSql, changesCount);
}
return changesCount;
} catch (DataAccessException e) {
if (myFailureAllowed) {

View File

@ -34,12 +34,13 @@ import java.util.Set;
public class InitializeSchemaTask extends BaseTask<InitializeSchemaTask> {
private static final Logger ourLog = LoggerFactory.getLogger(InitializeSchemaTask.class);
private final ISchemaInitializationProvider mySchemaInitializationProvider;
public InitializeSchemaTask(String theProductVersion, String theSchemaVersion, ISchemaInitializationProvider theSchemaInitializationProvider) {
super(theProductVersion, theSchemaVersion);
mySchemaInitializationProvider = theSchemaInitializationProvider;
setDescription("Initialize schema");
setDescription("Initialize schema for " + mySchemaInitializationProvider.getSchemaDescription());
}
@Override
@ -58,13 +59,15 @@ public class InitializeSchemaTask extends BaseTask<InitializeSchemaTask> {
return;
}
logInfo(ourLog, "Initializing schema for {}", driverType);
logInfo(ourLog, "Initializing {} schema for {}", driverType, mySchemaInitializationProvider.getSchemaDescription());
List<String> sqlStatements = mySchemaInitializationProvider.getSqlStatements(driverType);
for (String nextSql : sqlStatements) {
executeSql(null, nextSql);
}
logInfo(ourLog, "{} schema for {} initialized successfully", driverType, mySchemaInitializationProvider.getSchemaDescription());
}
@Override
@ -77,4 +80,8 @@ public class InitializeSchemaTask extends BaseTask<InitializeSchemaTask> {
protected void generateHashCode(HashCodeBuilder theBuilder) {
theBuilder.append(mySchemaInitializationProvider);
}
public ISchemaInitializationProvider getSchemaInitializationProvider() {
return mySchemaInitializationProvider;
}
}

View File

@ -908,10 +908,10 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
}
private void init330() { // 20180114 - 20180329
protected void init330() { // 20180114 - 20180329
Builder version = forVersion(VersionEnum.V3_3_0);
version.initializeSchema("20180115.0", new SchemaInitializationProvider("/ca/uhn/hapi/fhir/jpa/docs/database", "HFJ_RESOURCE"));
version.initializeSchema("20180115.0", new SchemaInitializationProvider("HAPI FHIR", "/ca/uhn/hapi/fhir/jpa/docs/database", "HFJ_RESOURCE"));
Builder.BuilderWithTableName hfjResource = version.onTable("HFJ_RESOURCE");
version.startSectionWithMessage("Starting work on table: " + hfjResource.getTableName());

View File

@ -37,14 +37,17 @@ import static org.apache.commons.lang3.StringUtils.isBlank;
public class SchemaInitializationProvider implements ISchemaInitializationProvider {
private final String mySchemaFileClassPath;
private String mySchemaFileClassPath;
private String mySchemaDescription;
private final String mySchemaExistsIndicatorTable;
/**
* @param theSchemaFileClassPath pathname to script used to initialize schema
* @param theSchemaExistsIndicatorTable a table name we can use to determine if this schema has already been initialized
*/
public SchemaInitializationProvider(String theSchemaFileClassPath, String theSchemaExistsIndicatorTable) {
public SchemaInitializationProvider(String theSchemaDescription, String theSchemaFileClassPath, String theSchemaExistsIndicatorTable) {
mySchemaDescription = theSchemaDescription;
mySchemaFileClassPath = theSchemaFileClassPath;
mySchemaExistsIndicatorTable = theSchemaExistsIndicatorTable;
}
@ -110,5 +113,21 @@ public class SchemaInitializationProvider implements ISchemaInitializationProvid
public String getSchemaExistsIndicatorTable() {
return mySchemaExistsIndicatorTable;
}
public SchemaInitializationProvider setSchemaFileClassPath(String theSchemaFileClassPath) {
mySchemaFileClassPath = theSchemaFileClassPath;
return this;
}
@Override
public String getSchemaDescription() {
return mySchemaDescription;
}
@Override
public SchemaInitializationProvider setSchemaDescription(String theSchemaDescription) {
mySchemaDescription = theSchemaDescription;
return this;
}
}

View File

@ -81,6 +81,13 @@ public class BaseMigrationTasks<T extends Enum> {
return retval;
}
protected BaseTask getTaskWithVersion(String theFlywayVersion) {
return myTasks.values().stream()
.filter(task -> theFlywayVersion.equals(task.getFlywayVersion()))
.findFirst()
.get();
}
void validate(Collection<BaseTask> theTasks) {
for (BaseTask task: theTasks) {
task.validateVersion();

View File

@ -25,7 +25,12 @@ import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import java.util.List;
public interface ISchemaInitializationProvider {
List<String> getSqlStatements(DriverTypeEnum theDriverType);
String getSchemaExistsIndicatorTable();
String getSchemaDescription();
ISchemaInitializationProvider setSchemaDescription(String theSchemaDescription);
}

View File

@ -40,6 +40,16 @@ public class InitializeSchemaTaskTest extends BaseTest {
return "DONT_MATCH_ME";
}
@Override
public String getSchemaDescription() {
return "TEST";
}
@Override
public ISchemaInitializationProvider setSchemaDescription(String theSchemaDescription) {
return this;
}
@Override
public boolean equals(Object theO) {
if (this == theO) return true;

View File

@ -24,17 +24,11 @@ import ca.uhn.fhir.jpa.subscription.module.ResourceModifiedMessage;
import ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.messaging.SubscribableChannel;
import org.springframework.stereotype.Component;
@Component
public class SubscriptionChannelFactory {
private ISubscribableChannelFactory mySubscribableChannelFactory;
@Autowired
public SubscriptionChannelFactory(ISubscribableChannelFactory theSubscribableChannelFactory) {
mySubscribableChannelFactory = theSubscribableChannelFactory;
}
private ISubscribableChannelFactory mySubscribableChannelFactory;
public SubscribableChannel newDeliveryChannel(String theChannelName) {
return mySubscribableChannelFactory.createSubscribableChannel(theChannelName, ResourceDeliveryMessage.class, mySubscribableChannelFactory.getDeliveryChannelConcurrentConsumers());

View File

@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.subscription.module.config;
import ca.uhn.fhir.interceptor.executor.InterceptorService;
import ca.uhn.fhir.jpa.subscription.module.cache.LinkedBlockingQueueSubscribableChannelFactory;
import ca.uhn.fhir.jpa.subscription.module.channel.ISubscribableChannelFactory;
import ca.uhn.fhir.jpa.subscription.module.channel.SubscriptionChannelFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
@ -33,7 +34,7 @@ import org.springframework.scheduling.annotation.EnableScheduling;
@ComponentScan(basePackages = {"ca.uhn.fhir.jpa.subscription.module"})
public abstract class BaseSubscriptionConfig {
@Bean
public ISubscribableChannelFactory blockingQueueSubscriptionDeliveryChannelFactory() {
public ISubscribableChannelFactory subscribableChannelFactory() {
return new LinkedBlockingQueueSubscribableChannelFactory();
}
@ -41,4 +42,9 @@ public abstract class BaseSubscriptionConfig {
public InterceptorService interceptorRegistry() {
return new InterceptorService("hapi-fhir-jpa-subscription");
}
@Bean
public SubscriptionChannelFactory subscriptionChannelFactory() {
return new SubscriptionChannelFactory();
}
}