Mergeback of rel_6_8 (#5146)
* version bump
* Bump to core release 6.0.22 (#5028)
* Bump to core release 6.0.16
* Bump to core version 6.0.20
* Fix errors thrown as a result of VersionSpecificWorkerContextWrapper
* Bump to core 6.0.22
* Resolve 5126 hfj res ver prov might cause migration error on db that automatically indexes the primary key (#5127)
* dropped old index FK_RESVERPROV_RES_PID on RES_PID column before adding IDX_RESVERPROV_RES_PID
* added changelog
* changed to valid version number
* changed to valid version number, need to be ordered by version number...
* 5123 - Use DEFAULT partition for server-based requests if none specified (#5124)

5123 - Use DEFAULT partition for server-based requests if none specified

* consent remove all suppresses next link in bundle (#5119)
* added FIXME with source of issue
* added FIXME with root cause
* added FIXME with root cause
* Providing solution to the issue and removing fixmes.
* Providing changelog
* auto-formatting.
* Adding new test.
* Adding a new test for standard paging
* let's try this and see if it works...?
* fix tests
* cleanup to trigger a new run
* fixing tests

---------

Co-authored-by: Ken Stevens <ken@smilecdr.com>
Co-authored-by: peartree <etienne.poirier@smilecdr.com>

* 5117 MDM Score for No Match Fields Should Not Be Included in Total Score (#5118)
* fix, test, changelog
* fix, test, changelog

---------

Co-authored-by: justindar <justin.dar@smilecdr.com>

* Rename file to force IT mode

---------

Co-authored-by: dotasek <david.otasek@smilecdr.com>
Co-authored-by: TynerGjs <132295567+TynerGjs@users.noreply.github.com>
Co-authored-by: Steve Corbett <137920358+steve-corbett-smilecdr@users.noreply.github.com>
Co-authored-by: Ken Stevens <khstevens@gmail.com>
Co-authored-by: Ken Stevens <ken@smilecdr.com>
Co-authored-by: peartree <etienne.poirier@smilecdr.com>
Co-authored-by: jdar8 <69840459+jdar8@users.noreply.github.com>
Co-authored-by: justindar <justin.dar@smilecdr.com>
This commit is contained in:
parent 2b91f0a232
commit 5eda18f500

@@ -19,18 +19,45 @@
*/
package ca.uhn.fhir.util;
|
||||
|
||||
import java.util.StringTokenizer;
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import org.apache.commons.lang3.ArrayUtils;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.NoSuchElementException;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
|
||||
public class UrlPathTokenizer {
|
||||
|
||||
private final StringTokenizer myTok;
|
||||
private String[] tokens;
|
||||
private int curPos;
|
||||
|
||||
public UrlPathTokenizer(String theRequestPath) {
|
||||
myTok = new StringTokenizer(theRequestPath, "/");
|
||||
if (theRequestPath == null) {
|
||||
theRequestPath = "";
|
||||
}
|
||||
tokens = removeBlanksAndSanitize(theRequestPath.split("/"));
|
||||
curPos = 0;
|
||||
}
|
||||
|
||||
public boolean hasMoreTokens() {
|
||||
return myTok.hasMoreTokens();
|
||||
return curPos < tokens.length;
|
||||
}
|
||||
|
||||
public int countTokens() {
|
||||
return tokens.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the next token without updating the current position.
|
||||
* Will throw NoSuchElementException if there are no more tokens.
|
||||
*/
|
||||
public String peek() {
|
||||
if (!hasMoreTokens()) {
|
||||
throw new NoSuchElementException(Msg.code(2420) + "Attempt to retrieve URL token out of bounds");
|
||||
}
|
||||
return tokens[curPos];
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -43,6 +70,22 @@ public class UrlPathTokenizer {
|
|||
* @see UrlUtil#unescape(String)
|
||||
*/
|
||||
public String nextTokenUnescapedAndSanitized() {
|
||||
return UrlUtil.sanitizeUrlPart(UrlUtil.unescape(myTok.nextToken()));
|
||||
String token = peek();
|
||||
curPos++;
|
||||
return token;
|
||||
}
|
||||
|
||||
/**
|
||||
* Given an array of Strings, this method will return all the non-blank entries in that
|
||||
* array, after running sanitizeUrlPart() and unescape() on them.
|
||||
*/
|
||||
private static String[] removeBlanksAndSanitize(String[] theInput) {
|
||||
List<String> output = new ArrayList<>();
|
||||
for (String s : theInput) {
|
||||
if (!isBlank(s)) {
|
||||
output.add(UrlUtil.sanitizeUrlPart(UrlUtil.unescape(s)));
|
||||
}
|
||||
}
|
||||
return output.toArray(ArrayUtils.EMPTY_STRING_ARRAY);
|
||||
}
|
||||
}
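
For context, a minimal usage sketch of the reworked tokenizer above (the wrapper class and sample path are ours, purely illustrative); it relies only on the methods shown in this hunk: tokens are split, unescaped, and sanitized once at construction, and peek() inspects the next token without consuming it.

```java
import ca.uhn.fhir.util.UrlPathTokenizer;

// Hypothetical example class, not part of this commit.
public class UrlPathTokenizerExample {
	public static void main(String[] args) {
		// Tokens are split on "/", unescaped and sanitized up front; blank segments are dropped.
		UrlPathTokenizer tokenizer = new UrlPathTokenizer("TENANT-A//Patient/123");

		System.out.println(tokenizer.countTokens()); // 3 (the empty segment is removed)
		System.out.println(tokenizer.peek());        // "TENANT-A" - peek() does not consume the token
		while (tokenizer.hasMoreTokens()) {
			System.out.println(tokenizer.nextTokenUnescapedAndSanitized());
		}
		// Calling peek() or nextTokenUnescapedAndSanitized() now would throw NoSuchElementException.
	}
}
```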
|
||||
|
|
|
@@ -0,0 +1,5 @@
---
type: fix
issue: 5117
title: "Previously, all MDM field scores, including `NO_MATCH`es, were included in the final total MDM score. This has
  now been fixed so that only `MATCH`ed fields are included in the total MDM score."
@@ -0,0 +1,6 @@
---
type: fix
issue: 5119
jira: SMILE-7090
title: "Previously, when the consent service would remove all resources to be returned, the response bundle would
  not provide the previous/next link(s). This has been corrected."
@@ -0,0 +1,4 @@
---
type: fix
issue: 5126
title: "Previously, updating from HAPI FHIR 6.6.0 to 6.8.0 could cause a migration error. This has now been fixed."
@@ -126,8 +126,13 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
.withColumns("RES_VER_PID")
|
||||
.failureAllowed();
|
||||
|
||||
// drop the index for any database that has RES_PID column already indexed from previous migrations
|
||||
version.onTable("HFJ_RES_VER_PROV")
|
||||
.addIndex("20230510.2", "IDX_RESVERPROV_RES_PID")
|
||||
.dropIndex("20230510.2", "FK_RESVERPROV_RES_PID")
|
||||
.failureAllowed();
|
||||
|
||||
version.onTable("HFJ_RES_VER_PROV")
|
||||
.addIndex("20230510.3", "IDX_RESVERPROV_RES_PID")
|
||||
.unique(false)
|
||||
.withColumns("RES_PID");
|
||||
|
||||
|
|
|
@@ -336,8 +336,13 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
|
|||
|
||||
String dirName = "package";
|
||||
NpmPackage.NpmPackageFolder packageFolder = npmPackage.getFolders().get(dirName);
|
||||
for (Map.Entry<String, List<String>> nextTypeToFiles :
|
||||
packageFolder.getTypes().entrySet()) {
|
||||
Map<String, List<String>> packageFolderTypes = null;
|
||||
try {
|
||||
packageFolderTypes = packageFolder.getTypes();
|
||||
} catch (IOException e) {
|
||||
throw new InternalErrorException(Msg.code(2371) + e);
|
||||
}
|
||||
for (Map.Entry<String, List<String>> nextTypeToFiles : packageFolderTypes.entrySet()) {
|
||||
String nextType = nextTypeToFiles.getKey();
|
||||
for (String nextFile : nextTypeToFiles.getValue()) {
|
||||
|
||||
|
|
|
@@ -49,8 +49,13 @@ public class PackageResourceParsingSvc {
|
|||
return Collections.emptyList();
|
||||
}
|
||||
ArrayList<IBaseResource> resources = new ArrayList<>();
|
||||
List<String> filesForType =
|
||||
thePkg.getFolders().get("package").getTypes().get(theType);
|
||||
List<String> filesForType = null;
|
||||
try {
|
||||
filesForType = thePkg.getFolders().get("package").getTypes().get(theType);
|
||||
} catch (IOException e) {
|
||||
throw new InternalErrorException(
|
||||
Msg.code(2370) + "Cannot install resource of type " + theType + ": Could not get types", e);
|
||||
}
|
||||
if (filesForType != null) {
|
||||
for (String file : filesForType) {
|
||||
try {
|
||||
|
|
|
@@ -15,6 +15,7 @@ import static org.hamcrest.Matchers.containsInAnyOrder;
|
|||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.empty;
|
||||
import static org.hamcrest.Matchers.emptyOrNullString;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
|
@@ -125,6 +126,36 @@ public class ForceOffsetSearchModeInterceptorTest extends BaseResourceProviderR4
|
|||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPagingNextLink_whenAllResourcesHaveBeenReturned_willNotBePresent(){
|
||||
|
||||
myServer.setDefaultPageSize(5);
|
||||
|
||||
for (int i = 0; i < 10; i++) {
|
||||
createPatient(withId("A" + i), withActiveTrue());
|
||||
}
|
||||
|
||||
Bundle outcome = myClient
|
||||
.search()
|
||||
.forResource("Patient")
|
||||
.where(Patient.ACTIVE.exactly().code("true"))
|
||||
.returnBundle(Bundle.class)
|
||||
.execute();
|
||||
|
||||
assertThat(outcome.getEntry(), hasSize(5));
|
||||
|
||||
Bundle secondPageBundle = myClient.loadPage().next(outcome).execute();
|
||||
|
||||
assertThat(secondPageBundle.getEntry(), hasSize(5));
|
||||
|
||||
Bundle thirdPageBundle = myClient.loadPage().next(secondPageBundle).execute();
|
||||
|
||||
assertThat(thirdPageBundle.getEntry(), hasSize(0));
|
||||
assertNull(thirdPageBundle.getLink("next"), () -> thirdPageBundle.getLink("next").getUrl());
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearch_WithExplicitCount() {
|
||||
|
@@ -179,7 +210,6 @@ public class ForceOffsetSearchModeInterceptorTest extends BaseResourceProviderR4
|
|||
assertEquals(1, myCaptureQueriesListener.countCommits());
|
||||
assertEquals(0, myCaptureQueriesListener.countRollbacks());
|
||||
|
||||
assertThat(outcome.getLink(Constants.LINK_NEXT).getUrl(), containsString("Patient?_count=7&_offset=14&active=true"));
|
||||
assertThat(outcome.getLink(Constants.LINK_PREVIOUS).getUrl(), containsString("Patient?_count=7&_offset=0&active=true"));
|
||||
|
||||
}
|
||||
|
|
|
@@ -84,9 +84,9 @@ import static org.mockito.ArgumentMatchers.any;
|
|||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class ConsentInterceptorResourceProviderR4Test extends BaseResourceProviderR4Test {
|
||||
public class ConsentInterceptorResourceProviderR4IT extends BaseResourceProviderR4Test {
|
||||
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(ConsentInterceptorResourceProviderR4Test.class);
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(ConsentInterceptorResourceProviderR4IT.class);
|
||||
private List<String> myObservationIds;
|
||||
private List<String> myPatientIds;
|
||||
private List<String> myObservationIdsOddOnly;
|
||||
|
@@ -745,6 +745,42 @@ public class ConsentInterceptorResourceProviderR4Test extends BaseResourceProvid
|
|||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPaging_whenResourceViewingIsRejected_responseBundleWillHaveNextLink(){
|
||||
// given
|
||||
create50Observations();
|
||||
|
||||
myConsentInterceptor = new ConsentInterceptor(new ConsentSvcRejectWillSeeResource());
|
||||
myServer.getRestfulServer().getInterceptorService().registerInterceptor(myConsentInterceptor);
|
||||
|
||||
// when
|
||||
Bundle results = myClient.search().forResource(Observation.class).count(10).returnBundle(Bundle.class).execute();
|
||||
assertThat(results.getEntry(), hasSize(0));
|
||||
|
||||
// then
|
||||
String nextUrl = BundleUtil.getLinkUrlOfType(myFhirContext, results, "next");
|
||||
assertThat(nextUrl, containsString("_getpagesoffset=10"));
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPaging_whenResourceViewingIsRejected_secondPageWillHavePreviousLink(){
|
||||
// given
|
||||
create50Observations();
|
||||
|
||||
myConsentInterceptor = new ConsentInterceptor(new ConsentSvcRejectWillSeeResource());
|
||||
myServer.getRestfulServer().getInterceptorService().registerInterceptor(myConsentInterceptor);
|
||||
|
||||
// when
|
||||
Bundle results = myClient.search().forResource(Observation.class).count(10).returnBundle(Bundle.class).execute();
|
||||
Bundle nextResults = myClient.loadPage().next(results).execute();
|
||||
|
||||
// then
|
||||
String previous = BundleUtil.getLinkUrlOfType(myFhirContext, nextResults, "previous");
|
||||
assertThat(previous, containsString("_getpagesoffset=0"));
|
||||
|
||||
}
|
||||
|
||||
private void createPatientAndOrg() {
|
||||
myPatientIds = new ArrayList<>();
|
||||
|
||||
|
@@ -1062,5 +1098,15 @@ public class ConsentInterceptorResourceProviderR4Test extends BaseResourceProvid
|
|||
|
||||
}
|
||||
|
||||
private static class ConsentSvcRejectWillSeeResource implements IConsentService {
|
||||
@Override
|
||||
public ConsentOutcome willSeeResource(RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) {
|
||||
if("Bundle".equals(theResource.fhirType())){
|
||||
return new ConsentOutcome(ConsentOperationStatusEnum.PROCEED);
|
||||
}
|
||||
return new ConsentOutcome(ConsentOperationStatusEnum.REJECT);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@@ -2482,12 +2482,8 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
|
|||
"Patient/A/_history/1"
|
||||
));
|
||||
|
||||
history = myClient
|
||||
.loadPage()
|
||||
.next(history)
|
||||
.execute();
|
||||
|
||||
assertEquals(0, history.getEntry().size());
|
||||
// we got them all
|
||||
assertNull(history.getLink("next"));
|
||||
|
||||
/*
|
||||
* Try with a date offset
|
||||
|
|
|
@@ -28,7 +28,7 @@ class CdsHooksContextBooterTest {
|
|||
@Test
|
||||
void validateJsonThrowsExceptionWhenInputIsInvalid() {
|
||||
// setup
|
||||
final String expected = "Invalid JSON: Unrecognized token 'abc': was expecting (JSON String, Number, Array, Object or token 'null', 'true' or 'false')\n" +
|
||||
final String expected = "HAPI-2378: Invalid JSON: Unrecognized token 'abc': was expecting (JSON String, Number, Array, Object or token 'null', 'true' or 'false')\n" +
|
||||
" at [Source: (String)\"abc\"; line: 1, column: 4]";
|
||||
// execute
|
||||
final UnprocessableEntityException actual = assertThrows(UnprocessableEntityException.class, () -> myFixture.validateJson("abc"));
|
||||
|
|
|
@@ -93,7 +93,7 @@ class CdsPrefetchFhirClientSvcTest {
|
|||
IBaseResource srq = myCdsPrefetchFhirClientSvc.resourceFromUrl(cdsServiceRequestJson, "1234");
|
||||
fail("should throw, no resource present");
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals("Unable to translate url 1234 into a resource or a bundle.", e.getMessage());
|
||||
assertEquals("HAPI-2384: Unable to translate url 1234 into a resource or a bundle.", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -106,7 +106,7 @@ class CdsPrefetchFhirClientSvcTest {
|
|||
IBaseResource srq = myCdsPrefetchFhirClientSvc.resourceFromUrl(cdsServiceRequestJson, "/1234");
|
||||
fail("should throw, no resource present");
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals("Failed to resolve /1234. Url does not start with a resource type.", e.getMessage());
|
||||
assertEquals("HAPI-2383: Failed to resolve /1234. Url does not start with a resource type.", e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -36,7 +36,7 @@ class PrefetchTemplateUtilTest {
|
|||
PrefetchTemplateUtil.substituteTemplate(template, context, FhirContext.forR4());
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals("Either request context was empty or it did not provide a value for key <userId>. Please make sure you are including a context with valid keys.", e.getMessage());
|
||||
assertEquals("HAPI-2375: Either request context was empty or it did not provide a value for key <userId>. Please make sure you are including a context with valid keys.", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -50,7 +50,7 @@ class PrefetchTemplateUtilTest {
|
|||
PrefetchTemplateUtil.substituteTemplate(template, context, FhirContext.forR4());
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals("Either request context was empty or it did not provide a value for key <userId>. Please make sure you are including a context with valid keys.", e.getMessage());
|
||||
assertEquals("HAPI-2375: Either request context was empty or it did not provide a value for key <userId>. Please make sure you are including a context with valid keys.", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -63,7 +63,7 @@ class PrefetchTemplateUtilTest {
|
|||
PrefetchTemplateUtil.substituteTemplate(template, context, FhirContext.forR4());
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals("Request context did not provide a value for key <draftOrders>. Available keys in context are: [patientId]", e.getMessage());
|
||||
assertEquals("HAPI-2372: Request context did not provide a value for key <draftOrders>. Available keys in context are: [patientId]", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -119,7 +119,7 @@ class PrefetchTemplateUtilTest {
|
|||
PrefetchTemplateUtil.substituteTemplate(template, context, FhirContext.forR4());
|
||||
fail("substituteTemplate call was successful with a null context field.");
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals("Request context did not provide for resource(s) matching template. ResourceType missing is: ServiceRequest", e.getMessage());
|
||||
assertEquals("HAPI-2373: Request context did not provide for resource(s) matching template. ResourceType missing is: ServiceRequest", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -134,7 +134,7 @@ class PrefetchTemplateUtilTest {
|
|||
PrefetchTemplateUtil.substituteTemplate(template, context, FhirContext.forR4());
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals("Request context did not provide valid " + fhirContextR4.getVersion().getVersion() + " Bundle resource for template key <draftOrders>" , e.getMessage());
|
||||
assertEquals("HAPI-2374: Request context did not provide valid " + fhirContextR4.getVersion().getVersion() + " Bundle resource for template key <draftOrders>", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -151,13 +151,13 @@ public class MdmResourceMatcherSvc {
|
|||
fieldComparator.getName(),
|
||||
matchEvaluation.score,
|
||||
vector);
|
||||
score += matchEvaluation.score;
|
||||
} else {
|
||||
ourLog.trace(
|
||||
"No match: Matcher {} did not match (score: {}).",
|
||||
fieldComparator.getName(),
|
||||
matchEvaluation.score);
|
||||
}
|
||||
score += matchEvaluation.score;
|
||||
appliedRuleCount += 1;
|
||||
}
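
The fix above moves the `score +=` accumulation into the matched branch, so NO_MATCH evaluations no longer inflate the total. A self-contained sketch of the corrected accumulation, using made-up field names and scores rather than the actual MDM types:

```java
// Hypothetical illustration of the #5117 behaviour change; not HAPI code.
public class MdmTotalScoreSketch {
	public static void main(String[] args) {
		double[] fieldScores = {0.816, 0.2, 0.5};
		boolean[] fieldMatched = {true, false, true};

		double total = 0.0;
		for (int i = 0; i < fieldScores.length; i++) {
			// Only MATCHed fields contribute to the total score;
			// previously every field's score was added unconditionally.
			if (fieldMatched[i]) {
				total += fieldScores[i];
			}
		}
		System.out.println(total); // 1.316 - the 0.2 NO_MATCH score is no longer counted
	}
}
```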
|
||||
|
||||
|
|
|
@@ -57,4 +57,19 @@ public class MdmResourceMatcherSvcR4Test extends BaseMdmRulesR4Test {
|
|||
patient3.addName().addGiven("Henry");
|
||||
assertMatchResult(MdmMatchResultEnum.NO_MATCH, 0L, 0.0, false, false, myMdmResourceMatcherSvc.getMatchResult(myJohn, patient3));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testScoreOnlySummedWhenMatchFieldMatches() {
|
||||
MdmMatchOutcome outcome = myMdmResourceMatcherSvc.getMatchResult(myJohn, myJohny);
|
||||
assertMatchResult(MdmMatchResultEnum.POSSIBLE_MATCH, 1L, 0.816, false, false, outcome);
|
||||
|
||||
myJohn.addName().setFamily("Smith");
|
||||
myJohny.addName().setFamily("htims");
|
||||
outcome = myMdmResourceMatcherSvc.getMatchResult(myJohn, myJohny);
|
||||
assertMatchResult(MdmMatchResultEnum.POSSIBLE_MATCH, 1L, 0.816, false, false, outcome);
|
||||
|
||||
myJohny.addName().setFamily("Smith");
|
||||
outcome = myMdmResourceMatcherSvc.getMatchResult(myJohn, myJohny);
|
||||
assertMatchResult(MdmMatchResultEnum.MATCH, 3L, 1.816, false, false, outcome);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -234,6 +234,7 @@ public class ResponseBundleBuilder {
|
|||
theResponseBundleRequest.includes,
|
||||
RestfulServerUtils.prettyPrintResponse(server, theResponseBundleRequest.requestDetails),
|
||||
theResponseBundleRequest.bundleType);
|
||||
|
||||
retval.setSelf(theResponseBundleRequest.linkSelf);
|
||||
|
||||
if (bundleProvider.getCurrentPageOffset() != null) {
|
||||
|
@@ -264,21 +265,22 @@ public class ResponseBundleBuilder {
|
|||
// Paging without caching
|
||||
// We're doing offset pages
|
||||
int requestedToReturn = theResponsePage.numToReturn;
|
||||
if (server.getPagingProvider() == null && pageRequest.offset != null) {
|
||||
// There is no paging provider at all, so assume we're querying up to all the results we need every time
|
||||
|
||||
if (pageRequest.offset != null) {
|
||||
requestedToReturn += pageRequest.offset;
|
||||
}
|
||||
|
||||
if (theResponsePage.numTotalResults == null || requestedToReturn < theResponsePage.numTotalResults) {
|
||||
if (!theResponsePage.resourceList.isEmpty()) {
|
||||
retval.setNext(RestfulServerUtils.createOffsetPagingLink(
|
||||
retval,
|
||||
theResponseBundleRequest.requestDetails.getRequestPath(),
|
||||
theResponseBundleRequest.requestDetails.getTenantId(),
|
||||
ObjectUtils.defaultIfNull(pageRequest.offset, 0) + theResponsePage.numToReturn,
|
||||
theResponsePage.numToReturn,
|
||||
theResponseBundleRequest.getRequestParameters()));
|
||||
}
|
||||
|
||||
retval.setNext(RestfulServerUtils.createOffsetPagingLink(
|
||||
retval,
|
||||
theResponseBundleRequest.requestDetails.getRequestPath(),
|
||||
theResponseBundleRequest.requestDetails.getTenantId(),
|
||||
ObjectUtils.defaultIfNull(pageRequest.offset, 0) + theResponsePage.numToReturn,
|
||||
theResponsePage.numToReturn,
|
||||
theResponseBundleRequest.getRequestParameters()));
|
||||
}
|
||||
|
||||
if (pageRequest.offset != null && pageRequest.offset > 0) {
|
||||
int start = Math.max(0, pageRequest.offset - theResponsePage.pageSize);
|
||||
retval.setPrev(RestfulServerUtils.createOffsetPagingLink(
|
||||
|
@@ -289,6 +291,7 @@ public class ResponseBundleBuilder {
|
|||
theResponsePage.pageSize,
|
||||
theResponseBundleRequest.getRequestParameters()));
|
||||
}
|
||||
|
||||
} else if (StringUtils.isNotBlank(bundleProvider.getCurrentPageId())) {
|
||||
// We're doing named pages
|
||||
final String uuid = bundleProvider.getUuid();
|
||||
|
@@ -300,6 +303,7 @@ public class ResponseBundleBuilder {
|
|||
bundleProvider.getNextPageId(),
|
||||
theResponseBundleRequest.getRequestParameters()));
|
||||
}
|
||||
|
||||
if (StringUtils.isNotBlank(bundleProvider.getPreviousPageId())) {
|
||||
retval.setPrev(RestfulServerUtils.createPagingLink(
|
||||
retval,
|
||||
|
@@ -308,37 +312,33 @@ public class ResponseBundleBuilder {
|
|||
bundleProvider.getPreviousPageId(),
|
||||
theResponseBundleRequest.getRequestParameters()));
|
||||
}
|
||||
|
||||
} else if (theResponsePage.searchId != null) {
|
||||
/*
|
||||
* We're doing offset pages - Note that we only return paging links if we actually
|
||||
* included some results in the response. We do this to avoid situations where
|
||||
* people have faked the offset number to some huge number to avoid them getting
|
||||
* back paging links that don't make sense.
|
||||
*/
|
||||
if (theResponsePage.size() > 0) {
|
||||
if (theResponsePage.numTotalResults == null
|
||||
|| theResponseBundleRequest.offset + theResponsePage.numToReturn
|
||||
< theResponsePage.numTotalResults) {
|
||||
retval.setNext((RestfulServerUtils.createPagingLink(
|
||||
retval,
|
||||
theResponseBundleRequest.requestDetails,
|
||||
theResponsePage.searchId,
|
||||
theResponseBundleRequest.offset + theResponsePage.numToReturn,
|
||||
theResponsePage.numToReturn,
|
||||
theResponseBundleRequest.getRequestParameters())));
|
||||
}
|
||||
if (theResponseBundleRequest.offset > 0) {
|
||||
int start = Math.max(0, theResponseBundleRequest.offset - theResponsePage.pageSize);
|
||||
retval.setPrev(RestfulServerUtils.createPagingLink(
|
||||
retval,
|
||||
theResponseBundleRequest.requestDetails,
|
||||
theResponsePage.searchId,
|
||||
start,
|
||||
theResponsePage.pageSize,
|
||||
theResponseBundleRequest.getRequestParameters()));
|
||||
}
|
||||
|
||||
if (theResponsePage.numTotalResults == null
|
||||
|| theResponseBundleRequest.offset + theResponsePage.numToReturn
|
||||
< theResponsePage.numTotalResults) {
|
||||
retval.setNext((RestfulServerUtils.createPagingLink(
|
||||
retval,
|
||||
theResponseBundleRequest.requestDetails,
|
||||
theResponsePage.searchId,
|
||||
theResponseBundleRequest.offset + theResponsePage.numToReturn,
|
||||
theResponsePage.numToReturn,
|
||||
theResponseBundleRequest.getRequestParameters())));
|
||||
}
|
||||
|
||||
if (theResponseBundleRequest.offset > 0) {
|
||||
int start = Math.max(0, theResponseBundleRequest.offset - theResponsePage.pageSize);
|
||||
retval.setPrev(RestfulServerUtils.createPagingLink(
|
||||
retval,
|
||||
theResponseBundleRequest.requestDetails,
|
||||
theResponsePage.searchId,
|
||||
start,
|
||||
theResponsePage.pageSize,
|
||||
theResponseBundleRequest.getRequestParameters()));
|
||||
}
|
||||
}
|
||||
|
||||
return retval;
|
||||
}
|
||||
|
||||
|
|
|
@@ -22,10 +22,10 @@ package ca.uhn.fhir.rest.server.tenant;
|
|||
import ca.uhn.fhir.i18n.HapiLocalizer;
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
|
||||
import ca.uhn.fhir.rest.server.RestfulServer;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.util.UrlPathTokenizer;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
@@ -51,12 +51,41 @@ public class UrlBaseTenantIdentificationStrategy implements ITenantIdentificatio
|
|||
@Override
|
||||
public void extractTenant(UrlPathTokenizer theUrlPathTokenizer, RequestDetails theRequestDetails) {
|
||||
String tenantId = null;
|
||||
if (theUrlPathTokenizer.hasMoreTokens()) {
|
||||
tenantId = defaultIfBlank(theUrlPathTokenizer.nextTokenUnescapedAndSanitized(), null);
|
||||
ourLog.trace("Found tenant ID {} in request string", tenantId);
|
||||
theRequestDetails.setTenantId(tenantId);
|
||||
boolean isSystemRequest = (theRequestDetails instanceof SystemRequestDetails);
|
||||
|
||||
// If we were given no partition for a system request, use DEFAULT:
|
||||
if (!theUrlPathTokenizer.hasMoreTokens()) {
|
||||
if (isSystemRequest) {
|
||||
tenantId = "DEFAULT";
|
||||
theRequestDetails.setTenantId(tenantId);
|
||||
ourLog.trace("No tenant ID found for system request; using DEFAULT.");
|
||||
}
|
||||
}
|
||||
|
||||
// We were given at least one URL token:
|
||||
else {
|
||||
|
||||
// peek() won't consume this token:
|
||||
tenantId = defaultIfBlank(theUrlPathTokenizer.peek(), null);
|
||||
|
||||
// If it's "metadata" or starts with "$", use DEFAULT partition and don't consume this token:
|
||||
if (tenantId != null && (tenantId.equals("metadata") || tenantId.startsWith("$"))) {
|
||||
tenantId = "DEFAULT";
|
||||
theRequestDetails.setTenantId(tenantId);
|
||||
ourLog.trace("No tenant ID found for metadata or system request; using DEFAULT.");
|
||||
}
|
||||
|
||||
// It isn't metadata or $, so assume that this first token is the partition name and consume it:
|
||||
else {
|
||||
tenantId = defaultIfBlank(theUrlPathTokenizer.nextTokenUnescapedAndSanitized(), null);
|
||||
if (tenantId != null) {
|
||||
theRequestDetails.setTenantId(tenantId);
|
||||
ourLog.trace("Found tenant ID {} in request string", tenantId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If we get to this point without a tenant, it's an invalid request:
|
||||
if (tenantId == null) {
|
||||
HapiLocalizer localizer =
|
||||
theRequestDetails.getServer().getFhirContext().getLocalizer();
|
||||
|
@@ -67,7 +96,10 @@ public class UrlBaseTenantIdentificationStrategy implements ITenantIdentificatio
|
|||
|
||||
@Override
|
||||
public String massageServerBaseUrl(String theFhirServerBase, RequestDetails theRequestDetails) {
|
||||
Validate.notNull(theRequestDetails.getTenantId(), "theTenantId is not populated on this request");
|
||||
return theFhirServerBase + '/' + theRequestDetails.getTenantId();
|
||||
String result = theFhirServerBase;
|
||||
if (theRequestDetails.getTenantId() != null) {
|
||||
result += "/" + theRequestDetails.getTenantId();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
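
Taken together with the extractTenant() change above, server-originated requests with no tenant in the path now resolve to the DEFAULT partition, and the base URL is only suffixed when a tenant is actually present. A small, hypothetical driver showing both behaviours (the class name and URL are ours, for illustration only):

```java
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.tenant.UrlBaseTenantIdentificationStrategy;
import ca.uhn.fhir.util.UrlPathTokenizer;

// Hypothetical example class, not part of this commit.
public class TenantFallbackExample {
	public static void main(String[] args) {
		UrlBaseTenantIdentificationStrategy strategy = new UrlBaseTenantIdentificationStrategy();
		SystemRequestDetails request = new SystemRequestDetails();

		// A system request whose path starts with "$" and names no partition falls back to DEFAULT.
		strategy.extractTenant(new UrlPathTokenizer("$partition-management-create-partition"), request);
		System.out.println(request.getTenantId()); // DEFAULT

		// massageServerBaseUrl() only appends the tenant when one is set...
		System.out.println(strategy.massageServerBaseUrl("http://localhost:8888", request));
		// http://localhost:8888/DEFAULT

		// ...and no longer throws when the request has no tenant at all.
		System.out.println(strategy.massageServerBaseUrl("http://localhost:8888", new SystemRequestDetails()));
		// http://localhost:8888
	}
}
```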
|
||||
|
|
|
@@ -435,8 +435,8 @@ class ResponseBundleBuilderTest {
|
|||
|
||||
private void setCanStoreSearchResults(boolean theCanStoreSearchResults) {
|
||||
when(myServer.canStoreSearchResults()).thenReturn(theCanStoreSearchResults);
|
||||
when(myServer.getPagingProvider()).thenReturn(myPagingProvider);
|
||||
if (theCanStoreSearchResults) {
|
||||
when(myServer.getPagingProvider()).thenReturn(myPagingProvider);
|
||||
if (myLimit == null) {
|
||||
when(myPagingProvider.getDefaultPageSize()).thenReturn(DEFAULT_PAGE_SIZE);
|
||||
} else {
|
||||
|
|
|
@@ -2,14 +2,11 @@ package ca.uhn.fhir.rest.server.interceptor;
|
|||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.interceptor.api.Hook;
|
||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.rest.annotation.Operation;
|
||||
import ca.uhn.fhir.rest.annotation.OperationParam;
|
||||
import ca.uhn.fhir.rest.annotation.RequiredParam;
|
||||
import ca.uhn.fhir.rest.annotation.Search;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.RequestTypeEnum;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.client.api.IGenericClient;
|
||||
import ca.uhn.fhir.rest.param.StringParam;
|
||||
|
@@ -53,7 +50,6 @@ import javax.servlet.ReadListener;
|
|||
import javax.servlet.ServletInputStream;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import java.io.EOFException;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.PrintWriter;
|
||||
|
@@ -64,18 +60,14 @@ import java.util.List;
|
|||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.Mockito.doThrow;
|
||||
import static org.mockito.Mockito.reset;
|
||||
import static org.mockito.Mockito.timeout;
|
||||
import static org.mockito.Mockito.times;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.verifyNoMoreInteractions;
|
||||
import static org.mockito.Mockito.when;
|
||||
import static org.mockito.Mockito.withSettings;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
public class ConsentInterceptorTest {
|
||||
|
|
|
@@ -0,0 +1,78 @@
|
|||
package ca.uhn.fhir.util;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.ValueSource;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
public class UrlPathTokenizerTest {
|
||||
|
||||
@Test
|
||||
void urlPathTokenizer_withValidPath_tokenizesCorrectly() {
|
||||
UrlPathTokenizer tokenizer = new UrlPathTokenizer("/root/subdir/subsubdir/file.html");
|
||||
assertTrue(tokenizer.hasMoreTokens());
|
||||
assertEquals(4, tokenizer.countTokens());
|
||||
assertEquals("root", tokenizer.nextTokenUnescapedAndSanitized());
|
||||
assertEquals("subdir", tokenizer.nextTokenUnescapedAndSanitized());
|
||||
assertEquals("subsubdir", tokenizer.nextTokenUnescapedAndSanitized());
|
||||
assertEquals("file.html", tokenizer.nextTokenUnescapedAndSanitized());
|
||||
assertFalse(tokenizer.hasMoreTokens());
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@ValueSource(strings = {
|
||||
"", // actually empty
|
||||
"///////", // effectively empty
|
||||
"// / / / / " // effectively empty with extraneous whitespace
|
||||
})
|
||||
void urlPathTokenizer_withEmptyPath_returnsEmpty(String thePath) {
|
||||
UrlPathTokenizer tokenizer = new UrlPathTokenizer(thePath);
|
||||
assertEquals(0, tokenizer.countTokens());
|
||||
}
|
||||
|
||||
@Test
|
||||
void urlPathTokenizer_withNullPath_returnsEmpty() {
|
||||
UrlPathTokenizer tokenizer = new UrlPathTokenizer(null);
|
||||
assertEquals(0, tokenizer.countTokens());
|
||||
}
|
||||
|
||||
@Test
|
||||
void urlPathTokenizer_withSinglePathElement_returnsSingleToken() {
|
||||
UrlPathTokenizer tokenizer = new UrlPathTokenizer("hello");
|
||||
assertTrue(tokenizer.hasMoreTokens());
|
||||
assertEquals("hello", tokenizer.nextTokenUnescapedAndSanitized());
|
||||
}
|
||||
|
||||
@Test
|
||||
void urlPathTokenizer_withEscapedPath_shouldUnescape() {
|
||||
UrlPathTokenizer tokenizer = new UrlPathTokenizer("Homer%20Simpson");
|
||||
assertTrue(tokenizer.hasMoreTokens());
|
||||
assertEquals("Homer Simpson", tokenizer.nextTokenUnescapedAndSanitized());
|
||||
|
||||
tokenizer = new UrlPathTokenizer("hack%2Fslash");
|
||||
assertTrue(tokenizer.hasMoreTokens());
|
||||
assertEquals("hack/slash", tokenizer.nextTokenUnescapedAndSanitized());
|
||||
}
|
||||
|
||||
@Test
|
||||
void urlPathTokenizer_peek_shouldNotConsumeTokens() {
|
||||
UrlPathTokenizer tokenizer = new UrlPathTokenizer("this/that");
|
||||
assertEquals(2, tokenizer.countTokens());
|
||||
tokenizer.peek();
|
||||
assertEquals(2, tokenizer.countTokens());
|
||||
}
|
||||
|
||||
@Test
|
||||
void urlPathTokenizer_withSuspiciousCharacters_sanitizesCorrectly() {
|
||||
UrlPathTokenizer tokenizer = new UrlPathTokenizer("<DROP TABLE USERS>");
|
||||
assertTrue(tokenizer.hasMoreTokens());
|
||||
assertEquals("<DROP TABLE USERS>", tokenizer.nextTokenUnescapedAndSanitized());
|
||||
|
||||
tokenizer = new UrlPathTokenizer("'\n\r\"");
|
||||
assertTrue(tokenizer.hasMoreTokens());
|
||||
assertEquals("' "", tokenizer.nextTokenUnescapedAndSanitized());
|
||||
}
|
||||
}
|
|
@@ -321,6 +321,11 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext
|
|||
throw new UnsupportedOperationException(Msg.code(234));
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T extends Resource> T fetchResourceRaw(Class<T> class_, String uri) {
|
||||
return fetchResource(class_, uri);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T extends org.hl7.fhir.r5.model.Resource> T fetchResource(Class<T> theClass, String theUri) {
|
||||
if (myValidationSupport == null || theUri == null) {
|
||||
|
|
|
@@ -0,0 +1,173 @@
|
|||
package ca.uhn.fhir.rest.server.tenant;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.i18n.HapiLocalizer;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
|
||||
import ca.uhn.fhir.rest.server.IRestfulServerDefaults;
|
||||
import ca.uhn.fhir.rest.server.RestfulServer;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.util.UrlPathTokenizer;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.mockito.Mockito.times;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
public class UrlBaseTenantIdentificationStrategyTest {
|
||||
|
||||
private final static String BASE_URL = "http://localhost:8888";
|
||||
|
||||
@Mock
|
||||
private RequestDetails myRequestDetails;
|
||||
private static SystemRequestDetails ourSystemRequestDetails;
|
||||
@Mock
|
||||
private IRestfulServerDefaults myRestfulServerDefaults;
|
||||
@Mock
|
||||
private FhirContext myFHIRContext;
|
||||
@Mock
|
||||
private HapiLocalizer myHapiLocalizer;
|
||||
|
||||
private static UrlBaseTenantIdentificationStrategy ourTenantStrategy;
|
||||
private UrlPathTokenizer myUrlTokenizer;
|
||||
|
||||
@BeforeAll
|
||||
static void setup() {
|
||||
ourSystemRequestDetails = new SystemRequestDetails();
|
||||
ourTenantStrategy = new UrlBaseTenantIdentificationStrategy();
|
||||
}
|
||||
|
||||
@Test
|
||||
void massageBaseUrl_givenBaseUrlAndTenant_shouldApplyTenant() {
|
||||
//given a tenant id of TENANT1
|
||||
when(myRequestDetails.getTenantId()).thenReturn("TENANT1");
|
||||
|
||||
//when we massage the server base url
|
||||
String actual = ourTenantStrategy.massageServerBaseUrl(BASE_URL, myRequestDetails);
|
||||
|
||||
//then we should see /TENANT1 in the url
|
||||
assertEquals(BASE_URL + "/TENANT1", actual);
|
||||
}
|
||||
|
||||
@Test
|
||||
void massageBaseUrl_givenBaseUrlAndNullTenant_shouldReturnBaseUrl() {
|
||||
//given a null tenant id
|
||||
when(myRequestDetails.getTenantId()).thenReturn(null);
|
||||
|
||||
//when we massage our base url
|
||||
String actual = ourTenantStrategy.massageServerBaseUrl(BASE_URL, myRequestDetails);
|
||||
|
||||
//then nothing should happen
|
||||
assertEquals(BASE_URL, actual);
|
||||
}
|
||||
|
||||
@Test
|
||||
void extractTenant_givenNormalRequestAndExplicitTenant_shouldUseTenant() {
|
||||
//given a Patient request on MYTENANT
|
||||
myUrlTokenizer = new UrlPathTokenizer("MYTENANT/Patient");
|
||||
|
||||
//when we extract the tenant identifier
|
||||
ourTenantStrategy.extractTenant(myUrlTokenizer, myRequestDetails);
|
||||
|
||||
//then we should see MYTENANT
|
||||
verify(myRequestDetails, times(1)).setTenantId("MYTENANT");
|
||||
}
|
||||
|
||||
@Test
|
||||
void extractTenant_givenSystemRequestWithNoTenant_shouldUseDefault() {
|
||||
//given any request that starts with $ and no given partition name
|
||||
myUrlTokenizer = new UrlPathTokenizer("$partition-management-create-partition");
|
||||
|
||||
//when we try to extract the tenant id
|
||||
ourTenantStrategy.extractTenant(myUrlTokenizer, ourSystemRequestDetails);
|
||||
|
||||
//then we should see that it defaulted to the DEFAULT partition
|
||||
assertEquals("DEFAULT", ourSystemRequestDetails.getTenantId());
|
||||
}
|
||||
|
||||
@Test
|
||||
void extractTenant_givenSystemRequestWithExplicitTenant_shouldUseTenant() {
|
||||
//given a request that starts with $ on a named partition
|
||||
myUrlTokenizer = new UrlPathTokenizer("MYTENANT/$partition-management-create-partition");
|
||||
|
||||
//when we extract the tenant from the request
|
||||
ourTenantStrategy.extractTenant(myUrlTokenizer, ourSystemRequestDetails);
|
||||
|
||||
//then we should see MYTENANT
|
||||
assertEquals("MYTENANT", ourSystemRequestDetails.getTenantId());
|
||||
}
|
||||
|
||||
@Test
|
||||
void extractTenant_givenMetadataRequestWithNoTenant_shouldUseDefault() {
|
||||
//given a metadata request with no specified partition name
|
||||
myUrlTokenizer = new UrlPathTokenizer("metadata");
|
||||
|
||||
//when we try to extract the tenant from the request
|
||||
ourTenantStrategy.extractTenant(myUrlTokenizer, myRequestDetails);
|
||||
|
||||
//then we should see that it defaulted to the DEFAULT partition
|
||||
verify(myRequestDetails, times(1)).setTenantId("DEFAULT");
|
||||
}
|
||||
|
||||
@Test
|
||||
void extractTenant_givenMetadataRequestWithExplicitTenant_shouldUseTenant() {
|
||||
//given a metadata request on a named partition
|
||||
myUrlTokenizer = new UrlPathTokenizer("MYTENANT/metadata");
|
||||
|
||||
//when we extract the tenant id
|
||||
ourTenantStrategy.extractTenant(myUrlTokenizer, myRequestDetails);
|
||||
|
||||
//then we should see MYTENANT
|
||||
verify(myRequestDetails, times(1)).setTenantId("MYTENANT");
|
||||
}
|
||||
|
||||
@Test
|
||||
void extractTenant_givenPatientRequestAndNoTenant_shouldInterpretPatientAsPartition() {
|
||||
//given a Patient request with no partition name specified
|
||||
myUrlTokenizer = new UrlPathTokenizer("Patient");
|
||||
|
||||
//when we try to extract the tenant from the request
|
||||
ourTenantStrategy.extractTenant(myUrlTokenizer, myRequestDetails);
|
||||
|
||||
//then we should see that it interpreted Patient as the partition name
|
||||
verify(myRequestDetails, times(1)).setTenantId("Patient");
|
||||
}
|
||||
|
||||
@Test
|
||||
void extractTenant_givenEmptyURLNoPartition_shouldThrowException() {
|
||||
//given an empty URL with no partition name
|
||||
when(myRequestDetails.getServer()).thenReturn(myRestfulServerDefaults);
|
||||
when(myRestfulServerDefaults.getFhirContext()).thenReturn(myFHIRContext);
|
||||
when(myFHIRContext.getLocalizer()).thenReturn(myHapiLocalizer);
|
||||
myUrlTokenizer = new UrlPathTokenizer("");
|
||||
|
||||
//when we try to extract the tenant from the request
|
||||
InvalidRequestException ire = assertThrows(InvalidRequestException.class, () -> {
|
||||
ourTenantStrategy.extractTenant(myUrlTokenizer, myRequestDetails);
|
||||
});
|
||||
|
||||
//then we should see an exception thrown with HAPI-0307 in it
|
||||
verify(myHapiLocalizer, times(1)).getMessage(RestfulServer.class, "rootRequest.multitenant");
|
||||
assertTrue(ire.getMessage().contains("HAPI-0307"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void extractTenant_givenSystemRequestWithEmptyUrl_shouldUseDefaultPartition() {
|
||||
//given a system request with a blank url (is this even a valid test case?)
|
||||
myUrlTokenizer = new UrlPathTokenizer("");
|
||||
|
||||
//when we try to extract the tenant id
|
||||
ourTenantStrategy.extractTenant(myUrlTokenizer, ourSystemRequestDetails);
|
||||
|
||||
//then we should see that it defaulted to the DEFAULT partition
|
||||
assertEquals("DEFAULT", ourSystemRequestDetails.getTenantId());
|
||||
}
|
||||
}
|
|
@@ -376,6 +376,11 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo
|
|||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T extends Resource> T fetchResourceRaw(Class<T> class_, String uri) {
|
||||
return fetchResource(class_, uri);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T extends Resource> T fetchResource(Class<T> class_, String uri) {
|
||||
|
||||
|
@@ -443,8 +448,10 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo
|
|||
}
|
||||
|
||||
@Override
|
||||
public List<StructureDefinition> fetchTypeDefinitions(String n) {
|
||||
throw new UnsupportedOperationException(Msg.code(2329));
|
||||
public List<StructureDefinition> fetchTypeDefinitions(String typeName) {
|
||||
List<StructureDefinition> allStructures = new ArrayList<>(allStructures());
|
||||
allStructures.removeIf(sd -> !sd.hasType() || !sd.getType().equals(typeName));
|
||||
return allStructures;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
pom.xml
@@ -897,7 +897,7 @@
|
|||
</licenses>
|
||||
|
||||
<properties>
|
||||
<fhir_core_version>6.0.15</fhir_core_version>
|
||||
<fhir_core_version>6.0.22</fhir_core_version>
|
||||
<spotless_version>2.37.0</spotless_version>
|
||||
<ucum_version>1.0.3</ucum_version>
|
||||
<surefire_jvm_args>-Dfile.encoding=UTF-8 -Xmx2048m</surefire_jvm_args>
|
||||