Revert pointless SQL optimization (#1977)

* Revert pointless SQL optimization
* Fix tests

parent 50ae07acaa
commit 5c14a6c217
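In essence, the reverted "optimization" special-cased single-value token and reference lookups to emit an equality predicate instead of an IN predicate. The sketch below illustrates the shape of the change against the JPA Criteria API; the column expression, value list, and helper class are hypothetical stand-ins, not the actual HAPI FHIR code, and only the equal-vs-in choice mirrors the diff that follows.

import java.util.List;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.Expression;
import javax.persistence.criteria.Predicate;

// Hypothetical helper showing the before/after predicate construction.
class TokenPredicateSketch {

	// Before the revert: a single value was special-cased to an equality predicate.
	Predicate beforeRevert(CriteriaBuilder cb, Expression<Long> hashField, List<Long> values) {
		if (values.size() == 1) {
			return cb.equal(hashField, values.get(0)); // renders roughly as HASH_VALUE=?
		}
		return hashField.in(values); // renders roughly as HASH_VALUE in (?, ?, ...)
	}

	// After the revert: always build an IN predicate, even for one value. Per the note
	// added in the PredicateBuilderToken hunk below, the equality form caused a
	// performance regression on Postgres.
	Predicate afterRevert(Expression<Long> hashField, List<Long> values) {
		return hashField.in(values); // renders roughly as HASH_VALUE in (?)
	}
}
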
@@ -28,8 +28,9 @@ import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

import javax.transaction.Transactional;
import java.util.Arrays;
import java.util.Optional;

@@ -39,49 +40,54 @@ import java.util.Optional;
public class BulkExportJobParameterValidator implements JobParametersValidator {
@Autowired
private IBulkExportJobDao myBulkExportJobDao;
@Autowired
private PlatformTransactionManager myTransactionManager;

@Override
@Transactional
public void validate(JobParameters theJobParameters) throws JobParametersInvalidException {
if (theJobParameters == null) {
throw new JobParametersInvalidException("This job needs Parameters: [readChunkSize], [jobUUID], [filters], [outputFormat], [resourceTypes]");
}

StringBuilder errorBuilder = new StringBuilder();
Long readChunkSize = theJobParameters.getLong("readChunkSize");
if (readChunkSize == null || readChunkSize < 1) {
errorBuilder.append("There must be a valid number for readChunkSize, which is at least 1. ");
}
String jobUUID = theJobParameters.getString("jobUUID");
Optional<BulkExportJobEntity> oJob = myBulkExportJobDao.findByJobId(jobUUID);
if (!StringUtils.isBlank(jobUUID) && !oJob.isPresent()) {
errorBuilder.append("There is no persisted job that exists with UUID: " + jobUUID + ". ");
}

boolean hasExistingJob = oJob.isPresent();
//Check for to-be-created parameters.
if (!hasExistingJob) {
String resourceTypes = theJobParameters.getString("resourceTypes");
if (StringUtils.isBlank(resourceTypes)) {
errorBuilder.append("You must include [resourceTypes] as a Job Parameter");
} else {
String[] resourceArray = resourceTypes.split(",");
Arrays.stream(resourceArray).filter(resourceType -> resourceType.equalsIgnoreCase("Binary"))
.findFirst()
.ifPresent(resourceType -> {
errorBuilder.append("Bulk export of Binary resources is forbidden");
});
TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
String errorMessage = txTemplate.execute(tx -> {
StringBuilder errorBuilder = new StringBuilder();
Long readChunkSize = theJobParameters.getLong("readChunkSize");
if (readChunkSize == null || readChunkSize < 1) {
errorBuilder.append("There must be a valid number for readChunkSize, which is at least 1. ");
}

String outputFormat = theJobParameters.getString("outputFormat");
if (!StringUtils.isBlank(outputFormat) && !Constants.CT_FHIR_NDJSON.equals(outputFormat)) {
errorBuilder.append("The only allowed format for Bulk Export is currently " + Constants.CT_FHIR_NDJSON);
String jobUUID = theJobParameters.getString("jobUUID");
Optional<BulkExportJobEntity> oJob = myBulkExportJobDao.findByJobId(jobUUID);
if (!StringUtils.isBlank(jobUUID) && !oJob.isPresent()) {
errorBuilder.append("There is no persisted job that exists with UUID: " + jobUUID + ". ");
}

}
String errorMessage = errorBuilder.toString();
boolean hasExistingJob = oJob.isPresent();
//Check for to-be-created parameters.
if (!hasExistingJob) {
String resourceTypes = theJobParameters.getString("resourceTypes");
if (StringUtils.isBlank(resourceTypes)) {
errorBuilder.append("You must include [resourceTypes] as a Job Parameter");
} else {
String[] resourceArray = resourceTypes.split(",");
Arrays.stream(resourceArray).filter(resourceType -> resourceType.equalsIgnoreCase("Binary"))
.findFirst()
.ifPresent(resourceType -> {
errorBuilder.append("Bulk export of Binary resources is forbidden");
});
}

String outputFormat = theJobParameters.getString("outputFormat");
if (!StringUtils.isBlank(outputFormat) && !Constants.CT_FHIR_NDJSON.equals(outputFormat)) {
errorBuilder.append("The only allowed format for Bulk Export is currently " + Constants.CT_FHIR_NDJSON);
}

}
return errorBuilder.toString();
});

if (!StringUtils.isEmpty(errorMessage)) {
throw new JobParametersInvalidException(errorMessage);
}

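The validator hunk above trades the declarative @Transactional annotation for an explicit TransactionTemplate, so the DAO lookup runs inside a programmatically managed transaction. A minimal, self-contained sketch of that Spring pattern follows; the repository interface and the validation rule are invented stand-ins, and only the TransactionTemplate usage mirrors the diff.

import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

// Minimal sketch of programmatic transaction management with TransactionTemplate.
// MyDao is hypothetical; it stands in for IBulkExportJobDao.
class ProgrammaticTxSketch {

	private final PlatformTransactionManager txManager;
	private final MyDao dao;

	ProgrammaticTxSketch(PlatformTransactionManager txManager, MyDao dao) {
		this.txManager = txManager;
		this.dao = dao;
	}

	String validate(String jobUuid) {
		TransactionTemplate txTemplate = new TransactionTemplate(txManager);
		// execute(...) opens a transaction, runs the callback, and commits
		// (or rolls back if the callback throws).
		return txTemplate.execute(tx -> {
			boolean exists = dao.existsByJobId(jobUuid); // runs inside the transaction
			return exists ? "" : "There is no persisted job that exists with UUID: " + jobUuid;
		});
	}

	interface MyDao {
		boolean existsByJobId(String jobUuid);
	}
}

Compared with @Transactional, the template makes the transaction boundary explicit at the call site rather than relying on Spring's proxy-based annotation handling.
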
@@ -497,11 +497,6 @@ class PredicateBuilderReference extends BasePredicateBuilder {
}
}

// one value
if (path.size() == 1) {
return myCriteriaBuilder.equal(from.get("mySourcePath").as(String.class), path.get(0));
}

// multiple values
return from.get("mySourcePath").in(path);
}

@@ -366,12 +366,12 @@ class PredicateBuilderToken extends BasePredicateBuilder implements IPredicateBu
break;
}

Predicate predicate;
if (values.size() == 1) {
predicate = myCriteriaBuilder.equal(hashField, values.get(0));
} else {
predicate = hashField.in(values);
}
/*
* Note: At one point we had an IF-ELSE here that did an equals if there was only 1 value, and an IN if there
* was more than 1. This caused a performance regression for some reason in Postgres though. So maybe simpler
* is better..
*/
Predicate predicate = hashField.in(values);

if (theModifier == TokenParamModifier.NOT) {
Predicate identityPredicate = theBuilder.equal(theFrom.get("myHashIdentity").as(Long.class), BaseResourceIndexedSearchParam.calculateHashIdentity(getPartitionSettings(), theRequestPartitionId, theResourceName, theParamName));

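The test updates later in this commit show the effect of the hunk above on the generated SQL: single-token searches now produce an IN clause rather than an equality comparison. A rough illustration follows, using the HASH_SYS_AND_VALUE literal from the SearchWithInterceptorR4Test assertion below; the surrounding SELECT is abbreviated and hypothetical.

// Illustrative only: approximate WHERE fragments the two predicate styles produce.
class GeneratedSqlShapes {
	// Before the revert (single value special-cased to equality):
	static final String BEFORE = "WHERE HASH_SYS_AND_VALUE='3788488238034018567'";
	// After the revert (always an IN list, even for a single value):
	static final String AFTER = "WHERE HASH_SYS_AND_VALUE in ('3788488238034018567')";
}
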
@@ -697,7 +697,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
myCaptureQueriesListener.logSelectQueries();

String selectQuery = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, true);
assertThat(selectQuery, containsString("HASH_VALUE="));
assertThat(selectQuery, containsString("HASH_VALUE"));
assertThat(selectQuery, not(containsString("HASH_SYS")));

}

@@ -398,7 +398,7 @@ public class FhirResourceDaoR4UniqueSearchParamTest extends BaseJpaR4Test {
unformattedSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
assertThat(unformattedSql, stringContainsInOrder(
"IDX_STRING='Patient?identifier=urn%7C111'",
"HASH_SYS_AND_VALUE='-3122824860083758210'"
"HASH_SYS_AND_VALUE in ('-3122824860083758210')"
));
assertThat(unformattedSql, not(containsString(("RES_DELETED_AT"))));
assertThat(unformattedSql, not(containsString(("RES_TYPE"))));

@@ -535,7 +535,7 @@ public class FhirResourceDaoR4UniqueSearchParamTest extends BaseJpaR4Test {
unformattedSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
assertThat(unformattedSql, stringContainsInOrder(
"IDX_STRING='ServiceRequest?identifier=sys%7C111&patient=Patient%2F" + ptId.getIdPart() + "&performer=Practitioner%2F" + practId.getIdPart() + "'",
"HASH_SYS_AND_VALUE='6795110643554413877'"
"HASH_SYS_AND_VALUE in ('6795110643554413877')"
));
assertThat(unformattedSql, not(containsString(("RES_DELETED_AT"))));
assertThat(unformattedSql, not(containsString(("RES_TYPE"))));

@@ -556,8 +556,8 @@ public class FhirResourceDaoR4UniqueSearchParamTest extends BaseJpaR4Test {
assertThat(toUnqualifiedVersionlessIdValues(outcome), containsInAnyOrder(srId));
unformattedSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
assertThat(unformattedSql, stringContainsInOrder(
"SRC_PATH='ServiceRequest.subject.where(resolve() is Patient)'",
"SRC_PATH='ServiceRequest.performer'"
"SRC_PATH in ('ServiceRequest.subject.where(resolve() is Patient)')",
"SRC_PATH in ('ServiceRequest.performer')"
));
assertThat(unformattedSql, not(containsString(("RES_DELETED_AT"))));
assertThat(unformattedSql, not(containsString(("RES_TYPE"))));

@@ -1704,15 +1704,14 @@ public class PartitioningR4Test extends BaseJpaR4SystemTest {

@Test
public void testSearch_TagNotParam_SearchAllPartitions() {
IIdType patientIdNull = createPatient(withPartition(null), withActiveTrue(), withTag("http://system", "code"));
IIdType patientId1 = createPatient(withPartition(1), withActiveTrue(), withTag("http://system", "code"));
IIdType patientIdNull = createPatient(withPartition(null), withActiveTrue(), withTag("http://system", "code"), withIdentifier("http://foo", "bar"));
IIdType patientId1 = createPatient(withPartition(1), withActiveTrue(), withTag("http://system", "code"), withIdentifier("http://foo", "bar"));
IIdType patientId2 = createPatient(withPartition(2), withActiveTrue(), withTag("http://system", "code"));
createPatient(withPartition(null), withActiveTrue(), withTag("http://system", "code2"));
createPatient(withPartition(1), withActiveTrue(), withTag("http://system", "code2"));
createPatient(withPartition(2), withActiveTrue(), withTag("http://system", "code2"));

addReadAllPartitions();

myCaptureQueriesListener.clear();
SearchParameterMap map = new SearchParameterMap();
map.add(Constants.PARAM_TAG, new TokenParam("http://system", "code2").setModifier(TokenParamModifier.NOT));

@@ -1725,6 +1724,26 @@ public class PartitioningR4Test extends BaseJpaR4SystemTest {
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(0, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(1, StringUtils.countMatches(searchSql, "TAG_SYSTEM='http://system'"));

// And with another param

addReadAllPartitions();
myCaptureQueriesListener.clear();
map = new SearchParameterMap();
map.add(Constants.PARAM_TAG, new TokenParam("http://system", "code2").setModifier(TokenParamModifier.NOT));
map.add(Patient.SP_IDENTIFIER, new TokenParam("http://foo", "bar"));
map.setLoadSynchronous(true);
results = myPatientDao.search(map);
ids = toUnqualifiedVersionlessIds(results);
assertThat(ids, Matchers.contains(patientIdNull, patientId1));

searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(0, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(1, StringUtils.countMatches(searchSql, "TAG_SYSTEM='http://system'"));
assertEquals(1, StringUtils.countMatches(searchSql, "myparamsto1_.HASH_SYS_AND_VALUE in"));

}

@Test

@@ -1947,10 +1966,10 @@ public class PartitioningR4Test extends BaseJpaR4SystemTest {
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(ids, Matchers.contains(observationId));

String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "myresource1_.PARTITION_ID='1'"));
assertEquals(1, StringUtils.countMatches(searchSql, "myresource1_.SRC_PATH='Observation.subject'"));
assertEquals(1, StringUtils.countMatches(searchSql, "myresource1_.SRC_PATH in ('Observation.subject')"));
assertEquals(1, StringUtils.countMatches(searchSql, "myresource1_.TARGET_RESOURCE_ID='" + patientId.getIdPartAsLong() + "'"));
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));

@@ -1985,10 +2004,10 @@ public class PartitioningR4Test extends BaseJpaR4SystemTest {
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(ids, Matchers.contains(observationId));

String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "myresource1_.PARTITION_ID is null"));
assertEquals(1, StringUtils.countMatches(searchSql, "myresource1_.SRC_PATH='Observation.subject'"));
assertEquals(1, StringUtils.countMatches(searchSql, "myresource1_.SRC_PATH in ('Observation.subject')"));
assertEquals(1, StringUtils.countMatches(searchSql, "myresource1_.TARGET_RESOURCE_ID='" + patientId.getIdPartAsLong() + "'"));
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));

@@ -68,7 +68,7 @@ public class SearchWithInterceptorR4Test extends BaseJpaR4Test {
String query = list.get(0).getSql(true, false);
ourLog.info("Query: {}", query);

assertThat(query, containsString("HASH_SYS_AND_VALUE='3788488238034018567'"));
assertThat(query, containsString("HASH_SYS_AND_VALUE in ('3788488238034018567')"));

} finally {
myInterceptorRegistry.unregisterInterceptor(interceptor);