Reduce SQL queries for _include (#4007)

* Rework includes processing for reuse

* Optimize include

* Test fixes

* Bump version down

* Fixed

* Update

* Test fixes

* Start to add javadoc

* Test fixes
This commit is contained in:
James Agnew 2022-09-25 15:22:06 -04:00 committed by GitHub
parent 3935b78083
commit 3bab3544ec
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
37 changed files with 422 additions and 199 deletions

View File

@ -276,20 +276,6 @@ public class RuntimeSearchParam {
return retVal;
}
/**
 * Returns {@code true} if the given FHIRPath expression could apply to
 * resources of the given type.
 *
 * @param theResourceName the resource type name, e.g. {@code "Patient"}
 * @param thePath         the FHIRPath expression to test
 * @return {@code true} if the path is rooted at the given resource type, at one
 *         of the base types ({@code Resource}/{@code DomainResource}), or is an
 *         unqualified (lowercase-leading) element path
 */
private boolean pathMatchesResourceType(String theResourceName, String thePath) {
	if (thePath.isEmpty()) {
		// Guard: charAt(0) below would throw StringIndexOutOfBoundsException
		return false;
	}
	if (thePath.startsWith(theResourceName + ".")) {
		return true;
	}
	// FIX: was "Resouce." (typo) — paths rooted at the Resource base type never matched
	if (thePath.startsWith("Resource.") || thePath.startsWith("DomainResource.")) {
		return true;
	}
	// A lowercase leading character means an unqualified element path, which
	// can apply to any resource type
	if (Character.isLowerCase(thePath.charAt(0))) {
		return true;
	}
	return false;
}
public enum RuntimeSearchParamStatusEnum {
ACTIVE,
DRAFT,
@ -297,6 +283,51 @@ public class RuntimeSearchParam {
UNKNOWN
}
/**
 * Tests whether a given FHIRPath expression <i>could</i> possibly apply to
 * the given resource type.
 *
 * @param theResourceName the resource type name, e.g. {@code "Patient"}
 * @param thePath         the FHIRPath expression to examine; may contain leading
 *                        parentheses and whitespace before the root identifier
 * @return {@code true} if the expression's root identifier is the given resource
 *         type, one of the base types ({@code Resource}/{@code DomainResource}),
 *         or a lowercase (unqualified) element name
 */
static boolean pathMatchesResourceType(String theResourceName, String thePath) {
	// Scan forward past any non-letter prefix (parentheses, spaces) to the first
	// letter. Note: the scan deliberately stops one character before the end.
	for (int charIndex = 0; charIndex < thePath.length() - 1; charIndex++) {
		char currentChar = thePath.charAt(charIndex);
		if (Character.isLowerCase(currentChar)) {
			// Unqualified element path - can apply to any resource type
			return true;
		}
		if (!Character.isLetter(currentChar)) {
			continue;
		}
		// First letter found and it is uppercase: it must name this resource
		// type or one of the base types all resources inherit from
		if (fhirPathExpressionStartsWith(theResourceName, thePath, charIndex)
			|| fhirPathExpressionStartsWith("Resource", thePath, charIndex)
			|| fhirPathExpressionStartsWith("DomainResource", thePath, charIndex)) {
			return true;
		}
		return false;
	}
	return false;
}
/**
 * Returns {@code true} if {@code thePath} contains {@code theResourceName} at
 * {@code theStartingIndex}, followed (after optional spaces) by a {@code '.'}
 * separator — i.e. the expression is rooted at that resource type.
 */
private static boolean fhirPathExpressionStartsWith(String theResourceName, String thePath, int theStartingIndex) {
	if (!thePath.startsWith(theResourceName, theStartingIndex) || thePath.length() <= theResourceName.length()) {
		return false;
	}
	// Skip any spaces between the type name and the '.' (e.g. "Patient . name")
	int cursor = theStartingIndex + theResourceName.length();
	while (cursor < thePath.length()) {
		char currentChar = thePath.charAt(cursor);
		if (currentChar == '.') {
			return true;
		}
		if (currentChar != ' ') {
			return false;
		}
		cursor++;
	}
	return false;
}
public static class Component {
private final String myExpression;
private final String myReference;

View File

@ -0,0 +1,51 @@
package ca.uhn.fhir.context;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class RuntimeSearchParamTest {

	/**
	 * Verifies {@link RuntimeSearchParam#pathMatchesResourceType} against a
	 * variety of FHIRPath expressions, including parenthesized and
	 * whitespace-padded forms.
	 * Note: exact-duplicate rows from the original data set were removed
	 * (they added runtime without adding coverage).
	 */
	@ParameterizedTest
	@CsvSource({
		"true, Patient, Patient.identifier",
		"true, Patient, Resource.identifier",
		"true, Patient, DomainResource.identifier",
		"true, Patient, (Patient.identifier)",
		"true, Patient, (Patient.identifier )",
		"true, Patient, (Resource.identifier)",
		"true, Patient, (DomainResource.identifier)",
		"true, Patient, (DomainResource.identifier )",
		"true, Patient, ((Patient.identifier))",
		"true, Patient, ((Patient.identifier ))",
		"true, Patient, ((Patient.identifier ) )",
		"true, Patient, (( Patient.identifier))",
		"true, Patient, ( ( Patient.identifier))",
		"true, Patient, ((Resource.identifier))",
		"true, Patient, (( Resource.identifier))",
		"true, Patient, ((DomainResource.identifier))",
		"true, Patient, (( DomainResource.identifier))",
		"true, Patient, (( DomainResource. identifier))",
		"true, Patient, (( DomainResource . identifier))",
		"true, Patient, identifier",
		"true, Patient, (identifier)",
		"false, Patient, Observation.identifier",
		"false, Patient, PatientFoo.identifier",
		"false, Patient, Patient",
		"false, Patient, PatientFoo",
		"false, Patient, ((Observation.identifier)",
		"false, Patient, ((Observation.identifier))",
		"false, Patient, (( Observation.identifier))"
	})
	public void getPathMatchesResourceType(boolean theShouldMatch, String theResourceType, String thePath) {
		assertEquals(theShouldMatch, RuntimeSearchParam.pathMatchesResourceType(theResourceType, thePath));
	}
}

View File

@ -0,0 +1,6 @@
---
type: perf
issue: 4007
title: "Processing for `_include` and `_revinclude` parameters in the JPA server has been streamlined, which should
improve performance on systems where includes are heavily used."

View File

@ -43,7 +43,7 @@ public interface ISearchDao extends JpaRepository<Search, Long>, IHapiFhirJpaRep
@Query("SELECT s.myId FROM Search s WHERE s.myDeleted = TRUE")
Slice<Long> findDeleted(Pageable thePage);
@Query("SELECT s FROM Search s WHERE s.myResourceType = :type AND mySearchQueryStringHash = :hash AND (s.myCreated > :cutoff) AND s.myDeleted = FALSE AND s.myStatus <> 'FAILED'")
@Query("SELECT s FROM Search s WHERE s.myResourceType = :type AND s.mySearchQueryStringHash = :hash AND (s.myCreated > :cutoff) AND s.myDeleted = FALSE AND s.myStatus <> 'FAILED'")
Collection<Search> findWithCutoffOrExpiry(@Param("type") String theResourceType, @Param("hash") int theHashCode, @Param("cutoff") Date theCreatedCutoff);
@Query("SELECT COUNT(s) FROM Search s WHERE s.myDeleted = TRUE")

View File

@ -15,6 +15,7 @@ import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.persistence.Basic;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
@ -99,7 +100,7 @@ public class Search implements ICachedSearchDetails, Serializable {
@SequenceGenerator(name = "SEQ_SEARCH", sequenceName = "SEQ_SEARCH")
@Column(name = "PID")
private Long myId;
@OneToMany(mappedBy = "mySearch")
@OneToMany(mappedBy = "mySearch", cascade = CascadeType.ALL)
private Collection<SearchInclude> myIncludes;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "LAST_UPDATED_HIGH", nullable = true, insertable = true, updatable = false)

View File

@ -1027,7 +1027,7 @@ public class SearchBuilder implements ISearchBuilder {
* so it can't be Collections.emptySet() or some such thing
*/
@Override
public Set<ResourcePersistentId> loadIncludes(FhirContext theContext, EntityManager theEntityManager, Collection<ResourcePersistentId> theMatches, Set<Include> theIncludes,
public Set<ResourcePersistentId> loadIncludes(FhirContext theContext, EntityManager theEntityManager, Collection<ResourcePersistentId> theMatches, Collection<Include> theIncludes,
boolean theReverseMode, DateRangeParam theLastUpdated, String theSearchIdOrDescription, RequestDetails theRequest, Integer theMaxCount) {
if (theMatches.size() == 0) {
return new HashSet<>();
@ -1132,6 +1132,8 @@ public class SearchBuilder implements ISearchBuilder {
} else {
List<String> paths;
// Start replace
RuntimeSearchParam param;
String resType = nextInclude.getParamType();
if (isBlank(resType)) {
@ -1154,7 +1156,8 @@ public class SearchBuilder implements ISearchBuilder {
continue;
}
paths = theReverseMode ? param.getPathsSplitForResourceType(resType) : param.getPathsSplit();
paths = param.getPathsSplitForResourceType(resType);
// end replace
String targetResourceType = defaultString(nextInclude.getParamTargetType(), null);
for (String nextPath : paths) {
@ -1224,10 +1227,10 @@ public class SearchBuilder implements ISearchBuilder {
q.setParameter("target_resource_types", param.getTargets());
}
List<Tuple> results = q.getResultList();
if (theMaxCount != null) {
q.setMaxResults(theMaxCount);
}
List<Tuple> results = q.getResultList();
for (Tuple result : results) {
if (result != null) {
Long resourceId = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_ID_ALIAS)));

View File

@ -90,15 +90,7 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc {
@Transactional(Transactional.TxType.REQUIRED)
@Override
public Search save(Search theSearch) {
Search newSearch;
if (theSearch.getId() == null) {
newSearch = mySearchDao.save(theSearch);
for (SearchInclude next : theSearch.getIncludes()) {
mySearchIncludeDao.save(next);
}
} else {
newSearch = mySearchDao.save(theSearch);
}
Search newSearch = mySearchDao.save(theSearch);
return newSearch;
}

View File

@ -14,7 +14,6 @@ import ca.uhn.fhir.mdm.util.MdmResourceUtil;
import ca.uhn.fhir.mdm.util.MessageHelper;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.hl7.fhir.r4.model.Patient;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
@ -23,10 +22,8 @@ import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import static org.junit.jupiter.api.Assertions.assertEquals;
@ -52,7 +49,7 @@ class MdmLinkCreateSvcImplTest {
MdmLinkCreateSvcImpl myMdmLinkCreateSvc = new MdmLinkCreateSvcImpl();
@Test
public void testCreateLink(){
public void testCreateLink() {
ArgumentCaptor<IMdmLink> mdmLinkCaptor = ArgumentCaptor.forClass(IMdmLink.class);
when(myMdmLinkDaoSvc.save(mdmLinkCaptor.capture())).thenReturn(new MdmLink());

View File

@ -239,12 +239,16 @@ public class SearchParameterMap implements Serializable {
b.append(Constants.PARAM_INCLUDE_QUALIFIER_RECURSE);
}
b.append('=');
b.append(UrlUtil.escapeUrlParam(nextInclude.getParamType()));
b.append(':');
b.append(UrlUtil.escapeUrlParam(nextInclude.getParamName()));
if (isNotBlank(nextInclude.getParamTargetType())) {
if (Constants.INCLUDE_STAR.equals(nextInclude.getValue())) {
b.append(Constants.INCLUDE_STAR);
} else {
b.append(UrlUtil.escapeUrlParam(nextInclude.getParamType()));
b.append(':');
b.append(nextInclude.getParamTargetType());
b.append(UrlUtil.escapeUrlParam(nextInclude.getParamName()));
if (isNotBlank(nextInclude.getParamTargetType())) {
b.append(':');
b.append(nextInclude.getParamTargetType());
}
}
}
}
@ -587,6 +591,13 @@ public class SearchParameterMap implements Serializable {
return myIncludes != null && !myIncludes.isEmpty();
}
/**
* @since 6.2.0
*/
public boolean hasRevIncludes() {
return myRevIncludes != null && !myRevIncludes.isEmpty();
}
@Override
public String toString() {
ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);

View File

@ -39,6 +39,27 @@ class SearchParameterMapTest {
assertEquals("?_lastUpdated=ge2021-05-31", map.toNormalizedQueryString(ourFhirContext));
}
@Test
void toNormalizedQueryString_IncludeNormal() {
	// A typed include should render as _include=<type>:<name>
	SearchParameterMap searchParams = new SearchParameterMap();
	searchParams.addInclude(new Include("Patient:name"));
	assertEquals("?_include=Patient:name", searchParams.toNormalizedQueryString(ourFhirContext));
}
@Test
void toNormalizedQueryString_IncludeStar() {
	// A bare wildcard include should render as _include=* (not escaped)
	SearchParameterMap searchParams = new SearchParameterMap();
	searchParams.addInclude(new Include("*"));
	assertEquals("?_include=*", searchParams.toNormalizedQueryString(ourFhirContext));
}
@Test
void toNormalizedQueryString_IncludeTypedStar() {
	// A type-qualified wildcard include should render as _include=<type>:*
	SearchParameterMap searchParams = new SearchParameterMap();
	searchParams.addInclude(new Include("Patient:*"));
	assertEquals("?_include=Patient:*", searchParams.toNormalizedQueryString(ourFhirContext));
}
@Test
void toNormalizedQueryStringUpper() {
SearchParameterMap map = new SearchParameterMap();

View File

@ -12,6 +12,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.search.PersistedJpaSearchFirstPageBundleProvider;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl;
import ca.uhn.fhir.jpa.util.SqlQuery;
@ -20,6 +21,7 @@ import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.SimpleBundleProvider;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor;
import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum;
import ca.uhn.fhir.util.BundleBuilder;
@ -917,6 +919,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.clear();
IBundleProvider outcome = myPatientDao.search(map);
assertEquals(SimpleBundleProvider.class, outcome.getClass());
assertThat(toUnqualifiedVersionlessIdValues(outcome), containsInAnyOrder(
"Patient/P1", "CareTeam/CT1-0", "CareTeam/CT1-1", "CareTeam/CT1-2",
"Patient/P2", "CareTeam/CT2-0", "CareTeam/CT2-1", "CareTeam/CT2-2"
@ -929,6 +932,115 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
}
@Test
public void testSearchWithMultipleIncludes_Async() {
	// Setup: one Observation linked to a Patient and an Encounter
	createPatient(withId("A"), withFamily("Hello"));
	createEncounter(withId("E"), withIdentifier("http://foo", "bar"));
	createObservation(withId("O"), withSubject("Patient/A"), withEncounter("Encounter/E"));

	// Test: asynchronous (paged) search carrying several _include parameters
	myCaptureQueriesListener.clear();
	SearchParameterMap searchMap = new SearchParameterMap();
	searchMap.addInclude(Observation.INCLUDE_ENCOUNTER);
	searchMap.addInclude(Observation.INCLUDE_PATIENT);
	searchMap.addInclude(Observation.INCLUDE_SUBJECT);
	IBundleProvider searchResults = myObservationDao.search(searchMap, mySrd);
	assertEquals(PersistedJpaSearchFirstPageBundleProvider.class, searchResults.getClass());
	List<String> resultIds = toUnqualifiedVersionlessIdValues(searchResults);
	assertThat(resultIds, containsInAnyOrder("Patient/A", "Encounter/E", "Observation/O"));

	// Verify: pin the SQL query counts for the async code path, plus the
	// persisted search/include rows created by the paged search
	myCaptureQueriesListener.logSelectQueriesForCurrentThread();
	assertEquals(7, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
	assertEquals(3, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
	assertEquals(1, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
	assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
	runInTransaction(() -> {
		assertEquals(1, mySearchEntityDao.count());
		assertEquals(3, mySearchIncludeEntityDao.count());
	});
}
@Test
public void testSearchWithMultipleIncludesRecurse_Async() {
	// Setup: one Observation linked to a Patient and an Encounter
	createPatient(withId("A"), withFamily("Hello"));
	createEncounter(withId("E"), withIdentifier("http://foo", "bar"));
	createObservation(withId("O"), withSubject("Patient/A"), withEncounter("Encounter/E"));

	// Test: asynchronous search where every _include is :iterate (recursive)
	myCaptureQueriesListener.clear();
	SearchParameterMap searchMap = new SearchParameterMap();
	searchMap.addInclude(Observation.INCLUDE_ENCOUNTER.asRecursive());
	searchMap.addInclude(Observation.INCLUDE_PATIENT.asRecursive());
	searchMap.addInclude(Observation.INCLUDE_SUBJECT.asRecursive());
	List<String> resultIds = toUnqualifiedVersionlessIdValues(myObservationDao.search(searchMap, mySrd));
	assertThat(resultIds, containsInAnyOrder("Patient/A", "Encounter/E", "Observation/O"));

	// Verify: recursive includes cost extra selects vs the non-recursive case
	myCaptureQueriesListener.logSelectQueriesForCurrentThread();
	assertEquals(10, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
	assertEquals(3, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
	assertEquals(1, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
	assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
}
@Test
public void testSearchWithMultipleIncludes_Sync() {
	// Setup: one Observation linked to a Patient and an Encounter
	createPatient(withId("A"), withFamily("Hello"));
	createEncounter(withId("E"), withIdentifier("http://foo", "bar"));
	createObservation(withId("O"), withSubject("Patient/A"), withEncounter("Encounter/E"));

	// Test: synchronous (non-paged) search with several _include parameters
	myCaptureQueriesListener.clear();
	SearchParameterMap searchMap = new SearchParameterMap();
	searchMap.setLoadSynchronous(true);
	searchMap.addInclude(Observation.INCLUDE_ENCOUNTER);
	searchMap.addInclude(Observation.INCLUDE_PATIENT);
	searchMap.addInclude(Observation.INCLUDE_SUBJECT);
	List<String> resultIds = toUnqualifiedVersionlessIdValues(myObservationDao.search(searchMap, mySrd));
	assertThat(resultIds, containsInAnyOrder("Patient/A", "Encounter/E", "Observation/O"));

	// Verify: the sync path persists nothing, so only selects are expected
	myCaptureQueriesListener.logSelectQueriesForCurrentThread();
	assertEquals(5, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
	assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
	assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
	assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
}
@Test
public void testSearchWithMultipleIncludesRecurse_Sync() {
	// Setup: one Observation linked to a Patient and an Encounter
	createPatient(withId("A"), withFamily("Hello"));
	createEncounter(withId("E"), withIdentifier("http://foo", "bar"));
	createObservation(withId("O"), withSubject("Patient/A"), withEncounter("Encounter/E"));

	// Test: synchronous search where every _include is :iterate (recursive)
	myCaptureQueriesListener.clear();
	SearchParameterMap searchMap = new SearchParameterMap();
	searchMap.setLoadSynchronous(true);
	searchMap.addInclude(Observation.INCLUDE_ENCOUNTER.asRecursive());
	searchMap.addInclude(Observation.INCLUDE_PATIENT.asRecursive());
	searchMap.addInclude(Observation.INCLUDE_SUBJECT.asRecursive());
	List<String> resultIds = toUnqualifiedVersionlessIdValues(myObservationDao.search(searchMap, mySrd));
	assertThat(resultIds, containsInAnyOrder("Patient/A", "Encounter/E", "Observation/O"));

	// Verify: recursive includes cost extra selects; sync path persists nothing
	myCaptureQueriesListener.logSelectQueriesForCurrentThread();
	assertEquals(8, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
	assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
	assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
	assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
}
@Test
public void testTransactionWithMultipleCreates() {
myDaoConfig.setMassIngestionMode(true);

View File

@ -876,7 +876,7 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
List<IIdType> actual = toUnqualifiedVersionlessIds(resp);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(actual, containsInAnyOrder(orgId, medId, patId, moId, patId2));
assertEquals(7, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(6, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
// Specific patient ID with linked stuff
request = mock(HttpServletRequest.class);

View File

@ -2,7 +2,6 @@ package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.data.ISearchIncludeDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchResult;
@ -30,8 +29,6 @@ public class SearchCoordinatorSvcImplTest extends BaseJpaR4Test {
@Autowired
private ISearchResultDao mySearchResultDao;
@Autowired
private ISearchIncludeDao mySearchIncludeDao;
@Autowired
private ISearchCoordinatorSvc mySearchCoordinator;
@ -52,7 +49,6 @@ public class SearchCoordinatorSvcImplTest extends BaseJpaR4Test {
runInTransaction(()->{
mySearchResultDao.deleteAll();
mySearchIncludeDao.deleteAll();
mySearchDao.deleteAll();
});
runInTransaction(()->{

View File

@ -1,10 +1,8 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.data.ISearchIncludeDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchInclude;
import ca.uhn.fhir.jpa.entity.SearchResult;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@ -42,8 +40,6 @@ public class StaleSearchDeletingSvcR4Test extends BaseResourceProviderR4Test {
private ISearchDao mySearchEntityDao;
@Autowired
private ISearchResultDao mySearchResultDao;
@Autowired
private ISearchIncludeDao mySearchIncludeDao;
@Override
@AfterEach()
@ -136,18 +132,14 @@ public class StaleSearchDeletingSvcR4Test extends BaseResourceProviderR4Test {
mySearchResultDao.save(sr);
}
SearchInclude si = new SearchInclude(search, "Patient:name", false, false);
mySearchIncludeDao.save(si);
});
// It should take two passes to delete the search fully
runInTransaction(()->assertEquals(1, mySearchEntityDao.count()));
runInTransaction(() -> assertEquals(1, mySearchEntityDao.count()));
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
runInTransaction(()->assertEquals(1, mySearchEntityDao.count()));
runInTransaction(() -> assertEquals(1, mySearchEntityDao.count()));
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
runInTransaction(()->assertEquals(0, mySearchEntityDao.count()));
runInTransaction(() -> assertEquals(0, mySearchEntityDao.count()));
}
@ -166,13 +158,13 @@ public class StaleSearchDeletingSvcR4Test extends BaseResourceProviderR4Test {
});
// It should take one pass to delete the search fully
runInTransaction(()-> {
runInTransaction(() -> {
assertEquals(1, mySearchEntityDao.count());
});
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
runInTransaction(()-> {
runInTransaction(() -> {
assertEquals(0, mySearchEntityDao.count());
});
}
@ -198,15 +190,15 @@ public class StaleSearchDeletingSvcR4Test extends BaseResourceProviderR4Test {
});
// Should not delete right now
runInTransaction(()->assertEquals(1, mySearchEntityDao.count()));
runInTransaction(() -> assertEquals(1, mySearchEntityDao.count()));
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
runInTransaction(()->assertEquals(1, mySearchEntityDao.count()));
runInTransaction(() -> assertEquals(1, mySearchEntityDao.count()));
sleepAtLeast(1100);
// Now it's time to delete
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
runInTransaction(()->assertEquals(0, mySearchEntityDao.count()));
runInTransaction(() -> assertEquals(0, mySearchEntityDao.count()));
}

View File

@ -57,6 +57,7 @@ import ca.uhn.fhir.jpa.dao.data.IResourceReindexJobDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.data.ISearchIncludeDao;
import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.dao.data.ITagDefinitionDao;
@ -237,6 +238,9 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
protected ITermCodeSystemStorageSvc myTermCodeSystemStorageSvc;
@Autowired
protected ISearchDao mySearchEntityDao;
@Autowired
protected ISearchIncludeDao mySearchIncludeEntityDao;
@Autowired
protected ISearchResultDao mySearchResultDao;
@Autowired

View File

@ -74,18 +74,6 @@ public class BulkExportCreateReportStep implements IReductionStepWorker<BulkExpo
return RunOutcome.SUCCESS;
}
private static String getOriginatingRequestUrl(@Nonnull StepExecutionDetails<BulkExportJobParameters, BulkExportBinaryFileId> theStepExecutionDetails, BulkExportJobResults results) {
IJobInstance instance = theStepExecutionDetails.getInstance();
String url = "";
if (instance instanceof JobInstance) {
JobInstance jobInstance = (JobInstance) instance;
BulkExportJobParameters parameters = jobInstance.getParameters(BulkExportJobParameters.class);
String originalRequestUrl = parameters.getOriginalRequestUrl();
url = originalRequestUrl;
}
return url;
}
@Nonnull
@Override
public ChunkOutcome consume(ChunkExecutionDetails<BulkExportJobParameters,
@ -101,4 +89,16 @@ public class BulkExportCreateReportStep implements IReductionStepWorker<BulkExpo
return ChunkOutcome.SUCCESS();
}
/**
 * Extracts the original request URL from the job instance's parameters.
 * Returns an empty string when the instance is not a concrete {@code JobInstance}.
 * NOTE(review): the {@code results} parameter is unused here.
 */
private static String getOriginatingRequestUrl(@Nonnull StepExecutionDetails<BulkExportJobParameters, BulkExportBinaryFileId> theStepExecutionDetails, BulkExportJobResults results) {
	IJobInstance instance = theStepExecutionDetails.getInstance();
	if (instance instanceof JobInstance) {
		JobInstance jobInstance = (JobInstance) instance;
		BulkExportJobParameters parameters = jobInstance.getParameters(BulkExportJobParameters.class);
		return parameters.getOriginalRequestUrl();
	}
	return "";
}
}

View File

@ -25,10 +25,10 @@ import ca.uhn.fhir.batch2.importpull.models.Batch2BulkImportPullJobParameters;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import org.apache.commons.lang3.StringUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
@ -45,7 +45,7 @@ public class BulkImportParameterValidator implements IJobParametersValidator<Bat
@Nullable
@Override
public List<String> validate(@NotNull Batch2BulkImportPullJobParameters theParameters) {
public List<String> validate(@Nonnull Batch2BulkImportPullJobParameters theParameters) {
ourLog.info("BulkImportPull parameter validation begin");
ArrayList<String> errors = new ArrayList<>();

View File

@ -30,9 +30,10 @@ import ca.uhn.fhir.batch2.importpull.models.Batch2BulkImportPullJobParameters;
import ca.uhn.fhir.batch2.importpull.models.BulkImportFilePartitionResult;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import javax.annotation.Nonnull;
import static org.slf4j.LoggerFactory.getLogger;
public class FetchPartitionedFilesStep implements IFirstJobStepWorker<Batch2BulkImportPullJobParameters, BulkImportFilePartitionResult> {
@ -44,11 +45,11 @@ public class FetchPartitionedFilesStep implements IFirstJobStepWorker<Batch2Bulk
myBulkDataImportSvc = theBulkDataImportSvc;
}
@NotNull
@Nonnull
@Override
public RunOutcome run(
@NotNull StepExecutionDetails<Batch2BulkImportPullJobParameters, VoidModel> theStepExecutionDetails,
@NotNull IJobDataSink<BulkImportFilePartitionResult> theDataSink
@Nonnull StepExecutionDetails<Batch2BulkImportPullJobParameters, VoidModel> theStepExecutionDetails,
@Nonnull IJobDataSink<BulkImportFilePartitionResult> theDataSink
) throws JobExecutionFailedException {
String jobId = theStepExecutionDetails.getParameters().getJobId();

View File

@ -34,10 +34,10 @@ import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.util.IoUtil;
import com.google.common.io.LineReader;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import java.io.IOException;
import java.io.StringReader;
@ -51,13 +51,13 @@ public class ReadInResourcesFromFileStep implements IJobStepWorker<Batch2BulkImp
myBulkDataImportSvc = theBulkDataImportSvc;
}
// because we are using an unstable google api
// because we are using an unstable Google api
@SuppressWarnings("UnstableApiUsage")
@NotNull
@Nonnull
@Override
public RunOutcome run(
@NotNull StepExecutionDetails<Batch2BulkImportPullJobParameters, BulkImportFilePartitionResult> theStepExecutionDetails,
@NotNull IJobDataSink<BulkImportRecord> theDataSink
@Nonnull StepExecutionDetails<Batch2BulkImportPullJobParameters, BulkImportFilePartitionResult> theStepExecutionDetails,
@Nonnull IJobDataSink<BulkImportRecord> theDataSink
) throws JobExecutionFailedException {
String jobId = theStepExecutionDetails.getParameters().getJobId();
int fileIndex = theStepExecutionDetails.getData().getFileIndex();

View File

@ -35,10 +35,11 @@ import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.parser.IParser;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
public class WriteBundleForImportStep implements ILastJobStepWorker<Batch2BulkImportPullJobParameters, BulkImportRecord> {
private static final Logger ourLog = LoggerFactory.getLogger(WriteBundleForImportStep.class);
@ -53,11 +54,11 @@ public class WriteBundleForImportStep implements ILastJobStepWorker<Batch2BulkIm
}
@SuppressWarnings({"SwitchStatementWithTooFewBranches", "rawtypes", "unchecked"})
@NotNull
@Nonnull
@Override
public RunOutcome run(
@NotNull StepExecutionDetails<Batch2BulkImportPullJobParameters, BulkImportRecord> theStepExecutionDetails,
@NotNull IJobDataSink<VoidModel> theDataSink
@Nonnull StepExecutionDetails<Batch2BulkImportPullJobParameters, BulkImportRecord> theStepExecutionDetails,
@Nonnull IJobDataSink<VoidModel> theDataSink
) throws JobExecutionFailedException {
BulkImportRecord record = theStepExecutionDetails.getData();

View File

@ -28,7 +28,8 @@ import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemDeleteJobSvc;
import ca.uhn.fhir.jpa.term.models.CodeSystemVersionPIDResult;
import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteJobParameters;
import org.jetbrains.annotations.NotNull;
import javax.annotation.Nonnull;
public class DeleteCodeSystemConceptsByVersionStep implements IJobStepWorker<TermCodeSystemDeleteJobParameters, CodeSystemVersionPIDResult, CodeSystemVersionPIDResult> {
@ -38,11 +39,11 @@ public class DeleteCodeSystemConceptsByVersionStep implements IJobStepWorker<Ter
myITermCodeSystemSvc = theCodeSystemDeleteJobSvc;
}
@NotNull
@Nonnull
@Override
public RunOutcome run(
@NotNull StepExecutionDetails<TermCodeSystemDeleteJobParameters, CodeSystemVersionPIDResult> theStepExecutionDetails,
@NotNull IJobDataSink<CodeSystemVersionPIDResult> theDataSink
@Nonnull StepExecutionDetails<TermCodeSystemDeleteJobParameters, CodeSystemVersionPIDResult> theStepExecutionDetails,
@Nonnull IJobDataSink<CodeSystemVersionPIDResult> theDataSink
) throws JobExecutionFailedException {
CodeSystemVersionPIDResult versionPidResult = theStepExecutionDetails.getData();

View File

@ -31,7 +31,8 @@ import ca.uhn.fhir.batch2.model.ChunkOutcome;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemDeleteJobSvc;
import ca.uhn.fhir.jpa.term.models.CodeSystemVersionPIDResult;
import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteJobParameters;
import org.jetbrains.annotations.NotNull;
import javax.annotation.Nonnull;
public class DeleteCodeSystemStep implements IReductionStepWorker<TermCodeSystemDeleteJobParameters, CodeSystemVersionPIDResult, VoidModel> {
@ -41,11 +42,11 @@ public class DeleteCodeSystemStep implements IReductionStepWorker<TermCodeSystem
myITermCodeSystemSvc = theCodeSystemDeleteJobSvc;
}
@NotNull
@Nonnull
@Override
public RunOutcome run(
@NotNull StepExecutionDetails<TermCodeSystemDeleteJobParameters, CodeSystemVersionPIDResult> theStepExecutionDetails,
@NotNull IJobDataSink<VoidModel> theDataSink
@Nonnull StepExecutionDetails<TermCodeSystemDeleteJobParameters, CodeSystemVersionPIDResult> theStepExecutionDetails,
@Nonnull IJobDataSink<VoidModel> theDataSink
) throws JobExecutionFailedException {
// final step
long codeId = theStepExecutionDetails.getParameters().getTermPid();
@ -56,7 +57,7 @@ public class DeleteCodeSystemStep implements IReductionStepWorker<TermCodeSystem
return RunOutcome.SUCCESS;
}
@NotNull
@Nonnull
@Override
public ChunkOutcome consume(ChunkExecutionDetails<TermCodeSystemDeleteJobParameters, CodeSystemVersionPIDResult> theChunkDetails) {
/*

View File

@ -28,7 +28,8 @@ import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemDeleteJobSvc;
import ca.uhn.fhir.jpa.term.models.CodeSystemVersionPIDResult;
import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteJobParameters;
import org.jetbrains.annotations.NotNull;
import javax.annotation.Nonnull;
public class DeleteCodeSystemVersionStep implements IJobStepWorker<TermCodeSystemDeleteJobParameters, CodeSystemVersionPIDResult, CodeSystemVersionPIDResult> {
@ -38,11 +39,11 @@ public class DeleteCodeSystemVersionStep implements IJobStepWorker<TermCodeSyste
myITermCodeSystemSvc = theCodeSystemDeleteJobSvc;
}
@NotNull
@Nonnull
@Override
public RunOutcome run(
@NotNull StepExecutionDetails<TermCodeSystemDeleteJobParameters, CodeSystemVersionPIDResult> theStepExecutionDetails,
@NotNull IJobDataSink<CodeSystemVersionPIDResult> theDataSink
@Nonnull StepExecutionDetails<TermCodeSystemDeleteJobParameters, CodeSystemVersionPIDResult> theStepExecutionDetails,
@Nonnull IJobDataSink<CodeSystemVersionPIDResult> theDataSink
) throws JobExecutionFailedException {
CodeSystemVersionPIDResult versionPidResult = theStepExecutionDetails.getData();

View File

@ -29,8 +29,8 @@ import ca.uhn.fhir.batch2.api.VoidModel;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemDeleteJobSvc;
import ca.uhn.fhir.jpa.term.models.CodeSystemVersionPIDResult;
import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteJobParameters;
import org.jetbrains.annotations.NotNull;
import javax.annotation.Nonnull;
import java.util.Iterator;
public class ReadTermConceptVersionsStep implements IFirstJobStepWorker<TermCodeSystemDeleteJobParameters, CodeSystemVersionPIDResult> {
@ -41,11 +41,11 @@ public class ReadTermConceptVersionsStep implements IFirstJobStepWorker<TermCode
myITermCodeSystemSvc = theCodeSystemDeleteJobSvc;
}
@NotNull
@Nonnull
@Override
public RunOutcome run(
@NotNull StepExecutionDetails<TermCodeSystemDeleteJobParameters, VoidModel> theStepExecutionDetails,
@NotNull IJobDataSink<CodeSystemVersionPIDResult> theDataSink
@Nonnull StepExecutionDetails<TermCodeSystemDeleteJobParameters, VoidModel> theStepExecutionDetails,
@Nonnull IJobDataSink<CodeSystemVersionPIDResult> theDataSink
) throws JobExecutionFailedException {
TermCodeSystemDeleteJobParameters parameters = theStepExecutionDetails.getParameters();

View File

@ -22,9 +22,9 @@ package ca.uhn.fhir.batch2.jobs.termcodesystem.codesystemdelete;
import ca.uhn.fhir.batch2.api.IJobParametersValidator;
import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteJobParameters;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
@ -32,7 +32,7 @@ public class TermCodeSystemDeleteJobParametersValidator implements IJobParameter
@Nullable
@Override
public List<String> validate(@NotNull TermCodeSystemDeleteJobParameters theParameters) {
public List<String> validate(@Nonnull TermCodeSystemDeleteJobParameters theParameters) {
List<String> errors = new ArrayList<>();
if (theParameters.getTermPid() <= 0) {
errors.add("Invalid Term Code System PID " + theParameters.getTermPid());

View File

@ -29,7 +29,8 @@ import ca.uhn.fhir.batch2.api.VoidModel;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemDeleteJobSvc;
import ca.uhn.fhir.jpa.term.models.CodeSystemVersionPIDResult;
import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteVersionJobParameters;
import org.jetbrains.annotations.NotNull;
import javax.annotation.Nonnull;
public class DeleteCodeSystemVersionFinalStep implements ILastJobStepWorker<TermCodeSystemDeleteVersionJobParameters, CodeSystemVersionPIDResult> {
@ -39,11 +40,11 @@ public class DeleteCodeSystemVersionFinalStep implements ILastJobStepWorker<Term
myTermCodeSystemSvc = theCodeSystemDeleteJobSvc;
}
@NotNull
@Nonnull
@Override
public RunOutcome run(
@NotNull StepExecutionDetails<TermCodeSystemDeleteVersionJobParameters, CodeSystemVersionPIDResult> theStepExecutionDetails,
@NotNull IJobDataSink<VoidModel> theDataSink
@Nonnull StepExecutionDetails<TermCodeSystemDeleteVersionJobParameters, CodeSystemVersionPIDResult> theStepExecutionDetails,
@Nonnull IJobDataSink<VoidModel> theDataSink
) throws JobExecutionFailedException {
long versionPid = theStepExecutionDetails.getParameters().getCodeSystemVersionPid();

View File

@ -29,7 +29,8 @@ import ca.uhn.fhir.batch2.api.VoidModel;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemDeleteJobSvc;
import ca.uhn.fhir.jpa.term.models.CodeSystemVersionPIDResult;
import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteVersionJobParameters;
import org.jetbrains.annotations.NotNull;
import javax.annotation.Nonnull;
public class DeleteCodeSystemVersionFirstStep implements IFirstJobStepWorker<TermCodeSystemDeleteVersionJobParameters, CodeSystemVersionPIDResult> {
@ -39,11 +40,11 @@ public class DeleteCodeSystemVersionFirstStep implements IFirstJobStepWorker<Ter
myTermCodeSystemSvc = theCodeSystemDeleteJobSvc;
}
@NotNull
@Nonnull
@Override
public RunOutcome run(
@NotNull StepExecutionDetails<TermCodeSystemDeleteVersionJobParameters, VoidModel> theStepExecutionDetails,
@NotNull IJobDataSink<CodeSystemVersionPIDResult> theDataSink
@Nonnull StepExecutionDetails<TermCodeSystemDeleteVersionJobParameters, VoidModel> theStepExecutionDetails,
@Nonnull IJobDataSink<CodeSystemVersionPIDResult> theDataSink
) throws JobExecutionFailedException {
long versionId = theStepExecutionDetails.getParameters().getCodeSystemVersionPid();

View File

@ -22,9 +22,9 @@ package ca.uhn.fhir.batch2.jobs.termcodesystem.codesystemversiondelete;
import ca.uhn.fhir.batch2.api.IJobParametersValidator;
import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteVersionJobParameters;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
@ -32,7 +32,7 @@ public class DeleteCodeSystemVersionParameterValidator implements IJobParameters
@Nullable
@Override
public List<String> validate(@NotNull TermCodeSystemDeleteVersionJobParameters theParameters) {
public List<String> validate(@Nonnull TermCodeSystemDeleteVersionJobParameters theParameters) {
ArrayList<String> errors = new ArrayList<>();
long versionPID = theParameters.getCodeSystemVersionPid();

View File

@ -48,7 +48,7 @@ public interface ISearchBuilder {
void loadResourcesByPid(Collection<ResourcePersistentId> thePids, Collection<ResourcePersistentId> theIncludedPids, List<IBaseResource> theResourceListToPopulate, boolean theForHistoryOperation, RequestDetails theDetails);
Set<ResourcePersistentId> loadIncludes(FhirContext theContext, EntityManager theEntityManager, Collection<ResourcePersistentId> theMatches, Set<Include> theRevIncludes, boolean theReverseMode,
Set<ResourcePersistentId> loadIncludes(FhirContext theContext, EntityManager theEntityManager, Collection<ResourcePersistentId> theMatches, Collection<Include> theRevIncludes, boolean theReverseMode,
DateRangeParam theLastUpdated, String theSearchIdOrDescription, RequestDetails theRequest, Integer theMaxCount);
/**

View File

@ -15,13 +15,13 @@ import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.Extension;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.SearchParameter;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import javax.annotation.Nonnull;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
@ -37,29 +37,23 @@ import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
public class SearchParameterValidatingInterceptorTest {
static final FhirContext ourFhirContext = FhirContext.forR4();
public static final String UPLIFT_URL = "https://some-url";
static final FhirContext ourFhirContext = FhirContext.forR4();
static String ID1 = "ID1";
static String ID2 = "ID2";
@Mock
RequestDetails myRequestDetails;
@Mock
IFhirResourceDao myIFhirResourceDao;
@Mock
DaoRegistry myDaoRegistry;
@Mock
IIdHelperService myIdHelperService;
SearchParamValidatingInterceptor mySearchParamValidatingInterceptor;
SearchParameter myExistingSearchParameter;
static String ID1 = "ID1";
static String ID2 = "ID2";
@BeforeEach
public void beforeEach(){
public void beforeEach() {
mySearchParamValidatingInterceptor = new SearchParamValidatingInterceptor();
mySearchParamValidatingInterceptor.setFhirContext(ourFhirContext);
@ -73,7 +67,7 @@ public class SearchParameterValidatingInterceptorTest {
}
@Test
public void whenValidatingInterceptorCalledForNonSearchParamResoucre_thenIsAllowed(){
public void whenValidatingInterceptorCalledForNonSearchParamResoucre_thenIsAllowed() {
Patient patient = new Patient();
mySearchParamValidatingInterceptor.resourcePreCreate(patient, null);
@ -81,7 +75,7 @@ public class SearchParameterValidatingInterceptorTest {
}
@Test
public void whenCreatingNonOverlappingSearchParam_thenIsAllowed(){
public void whenCreatingNonOverlappingSearchParam_thenIsAllowed() {
when(myDaoRegistry.getResourceDao(eq(SearchParamValidatingInterceptor.SEARCH_PARAM))).thenReturn(myIFhirResourceDao);
setPersistedSearchParameterIds(emptyList());
@ -93,7 +87,7 @@ public class SearchParameterValidatingInterceptorTest {
}
@Test
public void whenCreatingOverlappingSearchParam_thenExceptionIsThrown(){
public void whenCreatingOverlappingSearchParam_thenExceptionIsThrown() {
when(myDaoRegistry.getResourceDao(eq(SearchParamValidatingInterceptor.SEARCH_PARAM))).thenReturn(myIFhirResourceDao);
setPersistedSearchParameterIds(asList(myExistingSearchParameter));
@ -103,14 +97,14 @@ public class SearchParameterValidatingInterceptorTest {
try {
mySearchParamValidatingInterceptor.resourcePreCreate(newSearchParam, myRequestDetails);
fail();
}catch (UnprocessableEntityException e){
} catch (UnprocessableEntityException e) {
assertTrue(e.getMessage().contains("2131"));
}
}
@Test
public void whenUsingPutOperationToCreateNonOverlappingSearchParam_thenIsAllowed(){
public void whenUsingPutOperationToCreateNonOverlappingSearchParam_thenIsAllowed() {
when(myDaoRegistry.getResourceDao(eq(SearchParamValidatingInterceptor.SEARCH_PARAM))).thenReturn(myIFhirResourceDao);
setPersistedSearchParameterIds(emptyList());
@ -121,7 +115,7 @@ public class SearchParameterValidatingInterceptorTest {
}
@Test
public void whenUsingPutOperationToCreateOverlappingSearchParam_thenExceptionIsThrown(){
public void whenUsingPutOperationToCreateOverlappingSearchParam_thenExceptionIsThrown() {
when(myDaoRegistry.getResourceDao(eq(SearchParamValidatingInterceptor.SEARCH_PARAM))).thenReturn(myIFhirResourceDao);
setPersistedSearchParameterIds(asList(myExistingSearchParameter));
@ -131,13 +125,13 @@ public class SearchParameterValidatingInterceptorTest {
try {
mySearchParamValidatingInterceptor.resourcePreUpdate(null, newSearchParam, myRequestDetails);
fail();
}catch (UnprocessableEntityException e){
} catch (UnprocessableEntityException e) {
assertTrue(e.getMessage().contains("2125"));
}
}
@Test
public void whenUpdateSearchParam_thenIsAllowed(){
public void whenUpdateSearchParam_thenIsAllowed() {
when(myDaoRegistry.getResourceDao(eq(SearchParamValidatingInterceptor.SEARCH_PARAM))).thenReturn(myIFhirResourceDao);
setPersistedSearchParameterIds(asList(myExistingSearchParameter));
@ -172,12 +166,12 @@ public class SearchParameterValidatingInterceptorTest {
try {
mySearchParamValidatingInterceptor.resourcePreUpdate(null, newSearchParam, myRequestDetails);
fail();
}catch (UnprocessableEntityException e){
} catch (UnprocessableEntityException e) {
assertTrue(e.getMessage().contains("2125"));
}
}
@NotNull
@Nonnull
private SearchParameter buildSearchParameterWithUpliftExtension(String theID) {
SearchParameter newSearchParam = buildSearchParameterWithId(theID);
@ -198,7 +192,7 @@ public class SearchParameterValidatingInterceptorTest {
return newSearchParam;
}
private void setPersistedSearchParameterIds(List<SearchParameter> theSearchParams){
private void setPersistedSearchParameterIds(List<SearchParameter> theSearchParams) {
List<ResourcePersistentId> resourcePersistentIds = theSearchParams
.stream()
.map(SearchParameter::getId)
@ -206,6 +200,7 @@ public class SearchParameterValidatingInterceptorTest {
.collect(Collectors.toList());
when(myIFhirResourceDao.searchForIds(any(), any())).thenReturn(resourcePersistentIds);
}
private void setPersistedSearchParameters(List<SearchParameter> theSearchParams) {
when(myIFhirResourceDao.search(any(), any())).thenReturn(new SimpleBundleProvider(theSearchParams));
}

View File

@ -33,10 +33,12 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.ICompositeType;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.InstantType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Collection;
import java.util.Date;
@ -179,6 +181,10 @@ public interface ITestDataBuilder {
return t -> t.getMeta().setLastUpdated(new InstantType(theIsoDate).getValue());
}
default IIdType createEncounter(Consumer<IBaseResource>... theModifiers) {
return createResource("Encounter", theModifiers);
}
default IIdType createObservation(Consumer<IBaseResource>... theModifiers) {
return createResource("Observation", theModifiers);
}
@ -190,7 +196,6 @@ public interface ITestDataBuilder {
default IBaseResource buildPatient(Consumer<IBaseResource>... theModifiers) {
return buildResource("Patient", theModifiers);
}
default IIdType createPatient(Consumer<IBaseResource>... theModifiers) {
return createResource("Patient", theModifiers);
}
@ -223,13 +228,26 @@ public interface ITestDataBuilder {
default Consumer<IBaseResource> withSubject(@Nullable IIdType theSubject) {
return withReference("subject", theSubject);
}
default Consumer<IBaseResource> withSubject(@Nullable String theSubject) {
return withSubject(new IdType(theSubject));
}
default Consumer<IBaseResource> withEncounter(@Nullable String theEncounter) {
return withReference("encounter", new IdType(theEncounter));
}
@Nonnull
private Consumer<IBaseResource> withReference(String theReferenceName, @Nullable IIdType theReferenceValue) {
return t -> {
if (theSubject != null) {
if (theReferenceValue != null && theReferenceValue.getValue() != null) {
IBaseReference reference = (IBaseReference) getFhirContext().getElementDefinition("Reference").newInstance();
reference.setReference(theSubject.getValue());
reference.setReference(theReferenceValue.getValue());
RuntimeResourceDefinition resourceDef = getFhirContext().getResourceDefinition(t.getClass());
resourceDef.getChildByName("subject").getMutator().addValue(t, reference);
resourceDef.getChildByName(theReferenceName).getMutator().addValue(t, reference);
}
};
}
@ -301,27 +319,11 @@ public interface ITestDataBuilder {
}
default Consumer<IBaseResource> withObservationHasMember(@Nullable IIdType theHasMember) {
return t -> {
if (theHasMember != null) {
IBaseReference reference = (IBaseReference) getFhirContext().getElementDefinition("Reference").newInstance();
reference.setReference(theHasMember.getValue());
RuntimeResourceDefinition resourceDef = getFhirContext().getResourceDefinition(t.getClass());
resourceDef.getChildByName("hasMember").getMutator().addValue(t, reference);
}
};
return withReference("hasMember", theHasMember);
}
default Consumer<IBaseResource> withOrganization(@Nullable IIdType theHasMember) {
return t -> {
if (theHasMember != null) {
IBaseReference reference = (IBaseReference) getFhirContext().getElementDefinition("Reference").newInstance();
reference.setReference(theHasMember.getValue());
RuntimeResourceDefinition resourceDef = getFhirContext().getResourceDefinition(t.getClass());
resourceDef.getChildByName("managingOrganization").getMutator().addValue(t, reference);
}
};
return withReference("managingOrganization", theHasMember);
}
// todo mb extract these to something like TestDataBuilderBacking. Maybe split out create* into child interface since people skip it.

View File

@ -21,7 +21,6 @@ package ca.uhn.fhir.test.utilities;
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorService;
@ -30,7 +29,6 @@ import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.FifoMemoryPagingProvider;
import ca.uhn.fhir.rest.server.IResourceProvider;
import ca.uhn.fhir.rest.server.IServerAddressStrategy;
import ca.uhn.fhir.rest.server.IncomingRequestAddressStrategy;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.provider.HashMapResourceProvider;
@ -69,11 +67,11 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa
public RestServerR4Helper(boolean theInitialize) {
super(FhirContext.forR4Cached());
myRestServer = new MyRestfulServer(myFhirContext);
if(theInitialize){
if (theInitialize) {
try {
myRestServer.initialize();
} catch (ServletException e) {
throw new RuntimeException(Msg.code(2110)+"Failed to initialize server", e);
throw new RuntimeException(Msg.code(2110) + "Failed to initialize server", e);
}
}
}
@ -186,6 +184,10 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa
return myRestServer.getObservationResourceProvider();
}
public void setObservationResourceProvider(HashMapResourceProvider<Observation> theResourceProvider) {
myRestServer.setObservationResourceProvider(theResourceProvider);
}
@Override
public HashMapResourceProvider<Patient> getPatientResourceProvider() {
return myRestServer.getPatientResourceProvider();
@ -210,7 +212,7 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa
@Override
public IIdType createPatient(IBaseResource theBaseResource) {
return myRestServer.getPatientResourceProvider().store((Patient) theBaseResource);
return myRestServer.getPatientResourceProvider().store((Patient) theBaseResource);
}
@Override
@ -223,7 +225,7 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa
@Override
public IIdType createObservation(IBaseResource theBaseResource) {
return myRestServer.getObservationResourceProvider().store((Observation) theBaseResource);
return myRestServer.getObservationResourceProvider().store((Observation) theBaseResource);
}
public List<String> getRequestUrls() {
@ -234,10 +236,6 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa
return myRestServer.myRequestVerbs;
}
public void setObservationResourceProvider(HashMapResourceProvider<Observation> theResourceProvider) {
myRestServer.setObservationResourceProvider(theResourceProvider);
}
public List<Map<String, String>> getRequestHeaders() {
return myRestServer.myRequestHeaders;
}
@ -247,20 +245,20 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa
}
@Override
public void setServerAddressStrategy(IServerAddressStrategy theServerAddressStrategy){
public void setServerAddressStrategy(IServerAddressStrategy theServerAddressStrategy) {
myRestServer.setServerAddressStrategy(theServerAddressStrategy);
}
private static class MyRestfulServer extends RestfulServer {
private final List<String> myRequestUrls = Collections.synchronizedList(new ArrayList<>());
private final List<String> myRequestVerbs = Collections.synchronizedList(new ArrayList<>());
private final List<Map<String, String>> myRequestHeaders = Collections.synchronizedList(new ArrayList<>());
private boolean myFailNextPut;
private HashMapResourceProvider<Patient> myPatientResourceProvider;
private HashMapResourceProvider<Observation> myObservationResourceProvider;
private HashMapResourceProvider<Organization> myOrganizationResourceProvider;
private HashMapResourceProvider<ConceptMap> myConceptMapResourceProvider;
private RestServerDstu3Helper.MyPlainProvider myPlainProvider;
private final List<String> myRequestUrls = Collections.synchronizedList(new ArrayList<>());
private final List<String> myRequestVerbs = Collections.synchronizedList(new ArrayList<>());
private final List<Map<String, String>> myRequestHeaders= Collections.synchronizedList(new ArrayList<>());
public MyRestfulServer(FhirContext theFhirContext) {
super(theFhirContext);
@ -297,7 +295,7 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa
super.service(theReq, theResp);
}
private Map<String,String> pullOutHeaders(HttpServletRequest theReq) {
private Map<String, String> pullOutHeaders(HttpServletRequest theReq) {
Enumeration<String> headerNames = theReq.getHeaderNames();
Map<String, String> headers = new HashMap<>();
while (headerNames.hasMoreElements()) {
@ -321,6 +319,14 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa
return myObservationResourceProvider;
}
public void setObservationResourceProvider(HashMapResourceProvider<Observation> theResourceProvider) {
myObservationResourceProvider.getStoredResources().forEach(o -> theResourceProvider.store(o));
unregisterProvider(myObservationResourceProvider);
registerProvider(theResourceProvider);
myObservationResourceProvider = theResourceProvider;
}
public HashMapResourceProvider<Organization> getOrganizationResourceProvider() {
return myOrganizationResourceProvider;
}
@ -329,6 +335,14 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa
return myConceptMapResourceProvider;
}
public void setConceptMapResourceProvider(HashMapResourceProvider<ConceptMap> theResourceProvider) {
myConceptMapResourceProvider.getStoredResources().forEach(c -> theResourceProvider.store(c));
unregisterProvider(myConceptMapResourceProvider);
registerProvider(theResourceProvider);
myConceptMapResourceProvider = theResourceProvider;
}
public HashMapResourceProvider<Patient> getPatientResourceProvider() {
return myPatientResourceProvider;
}
@ -353,23 +367,6 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa
setPagingProvider(new FifoMemoryPagingProvider(20));
}
public void setObservationResourceProvider(HashMapResourceProvider<Observation> theResourceProvider) {
myObservationResourceProvider.getStoredResources().forEach(o -> theResourceProvider.store(o));
unregisterProvider(myObservationResourceProvider);
registerProvider(theResourceProvider);
myObservationResourceProvider = theResourceProvider;
}
public void setConceptMapResourceProvider(HashMapResourceProvider<ConceptMap> theResourceProvider) {
myConceptMapResourceProvider.getStoredResources().forEach(c -> theResourceProvider.store(c));
unregisterProvider(myConceptMapResourceProvider);
registerProvider(theResourceProvider);
myConceptMapResourceProvider = theResourceProvider;
}
public class MyHashMapResourceProvider<T extends IBaseResource> extends HashMapResourceProvider<T> {
public MyHashMapResourceProvider(FhirContext theContext, Class theType) {
super(theContext, theType);
@ -378,7 +375,7 @@ public class RestServerR4Helper extends BaseRestServerHelper implements BeforeEa
@Override
public MethodOutcome update(T theResource, String theConditional, RequestDetails theRequestDetails) {
if (myFailNextPut) {
throw new PreconditionFailedException(Msg.code(2111)+"Failed update operation");
throw new PreconditionFailedException(Msg.code(2111) + "Failed update operation");
}
return super.update(theResource, theConditional, theRequestDetails);
}

View File

@ -6,7 +6,6 @@ import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.context.support.ValidationSupportContext;
import ca.uhn.fhir.i18n.HapiLocalizer;
import org.hl7.fhir.r5.model.Resource;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;

View File

@ -23,7 +23,6 @@ import java.util.Map;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class NpmPackageValidationSupportTest {
@ -34,6 +33,7 @@ public class NpmPackageValidationSupportTest {
"dummyBinary1.txt", "myDummyContent1".getBytes(),
"dummyBinary2.txt", "myDummyContent2".getBytes()
);
@Test
public void testValidateWithPackage() throws IOException {

View File

@ -2013,7 +2013,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>3.1.2</version>
<version>3.2.0</version>
<dependencies>
<dependency>
<groupId>com.puppycrawl.tools</groupId>

View File

@ -15,6 +15,12 @@
<property name="format" value="System\.out\.println"/>
<property name="ignoreComments" value="true"/>
</module>
<module name="RegexpSinglelineJava">
<property name="format" value="org\.jetbrains\.annotations\.NotNull"/>
</module>
<module name="RegexpSinglelineJava">
<property name="format" value="org\.jetbrains\.annotations\.Nullable"/>
</module>
<module name="AbstractClassName">
<property name="format" value="^(Base|Abstract).+$"/>
</module>

View File

@ -156,6 +156,6 @@
<properties>
<!--<kotlin.compiler.incremental>true</kotlin.compiler.incremental>-->
<kotlin.version>1.5.31</kotlin.version>
<kotlin.version>1.6.21</kotlin.version>
</properties>
</project>