Merge remote-tracking branch 'origin/master' into do-20240206-core-bump-6-2-16

dotasek 2024-02-26 09:15:07 -05:00
commit e3fe1eaab2
145 changed files with 2742 additions and 467 deletions

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -627,6 +627,8 @@ public class BundleUtil {
//noinspection EnumSwitchStatementWhichMissesCases
switch (requestType) {
case PUT:
case DELETE:
case PATCH:
conditionalUrl = url != null && url.contains("?") ? url : null;
break;
case POST:

View File

@ -315,6 +315,7 @@ public class UrlUtil {
return theCtx.getResourceDefinition(resourceName);
}
@Nonnull
public static Map<String, String[]> parseQueryString(String theQueryString) {
HashMap<String, List<String>> map = new HashMap<>();
parseQueryString(theQueryString, map);

View File

@ -136,7 +136,6 @@ public enum VersionEnum {
V7_0_0,
V7_0_1,
V7_1_0,
V7_2_0;

View File

@ -69,4 +69,12 @@ public class BundleEntryMutator {
BaseRuntimeChildDefinition resourceChild = myEntryDefinition.getChildByName("resource");
resourceChild.getMutator().setValue(myEntry, theUpdatedResource);
}
public void setRequestIfNoneExist(FhirContext theFhirContext, String theIfNoneExist) {
BaseRuntimeChildDefinition requestUrlChildDef = myRequestChildContentsDef.getChildByName("ifNoneExist");
IPrimitiveType<?> url = ParametersUtil.createString(theFhirContext, theIfNoneExist);
for (IBase nextRequest : myRequestChildDef.getAccessor().getValues(myEntry)) {
requestUrlChildDef.getMutator().addValue(nextRequest, url);
}
}
}

View File

@ -20,6 +20,7 @@
package ca.uhn.fhir.util.bundle;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import org.hl7.fhir.instance.model.api.IBaseResource;
public class ModifiableBundleEntry {
@ -58,4 +59,16 @@ public class ModifiableBundleEntry {
public void setResource(IBaseResource theUpdatedResource) {
myBundleEntryMutator.setResource(theUpdatedResource);
}
public RequestTypeEnum getRequestMethod() {
return myBundleEntryParts.getRequestType();
}
public String getConditionalUrl() {
return myBundleEntryParts.getConditionalUrl();
}
public void setRequestIfNoneExist(FhirContext theFhirContext, String theIfNoneExist) {
myBundleEntryMutator.setRequestIfNoneExist(theFhirContext, theIfNoneExist);
}
}

View File

@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
@ -12,7 +12,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -55,13 +55,13 @@ import java.util.List;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@SuppressWarnings("unused")
/**
* Examples integrated into our documentation.
*/
@SuppressWarnings({"unused", "WriteOnlyObject", "UnnecessaryLocalVariable"})
public class AuthorizationInterceptors {
public class PatientResourceProvider implements IResourceProvider {
public static class PatientResourceProvider implements IResourceProvider {
@Override
public Class<? extends IBaseResource> getResourceType() {
@ -74,8 +74,8 @@ public class AuthorizationInterceptors {
}
}
@SuppressWarnings({"ConstantConditions", "InnerClassMayBeStatic"})
// START SNIPPET: patientAndAdmin
@SuppressWarnings("ConstantConditions")
public class PatientAndAdminAuthorizationInterceptor extends AuthorizationInterceptor {
@Override
@ -265,6 +265,7 @@ public class AuthorizationInterceptors {
}
@SuppressWarnings("InnerClassMayBeStatic")
// START SNIPPET: narrowing
public class MyPatientSearchNarrowingInterceptor extends SearchNarrowingInterceptor {
@ -300,6 +301,13 @@ public class AuthorizationInterceptors {
}
// END SNIPPET: narrowing
public void narrowingConditional() {
// START SNIPPET: narrowingConditional
SearchNarrowingInterceptor interceptor = new SearchNarrowingInterceptor();
interceptor.setNarrowConditionalUrls(true);
// END SNIPPET: narrowingConditional
}
@SuppressWarnings("SpellCheckingInspection")
public void rsNarrowing() {
RestfulServer restfulServer = new RestfulServer();
@ -330,6 +338,7 @@ public class AuthorizationInterceptors {
// END SNIPPET: rsnarrowing
}
@SuppressWarnings("InnerClassMayBeStatic")
// START SNIPPET: narrowingByCode
public class MyCodeSearchNarrowingInterceptor extends SearchNarrowingInterceptor {

View File

@ -0,0 +1,7 @@
---
type: add
issue: 5593
title: "Hibernate SQL log filtering capability was added.
See [Hibernate SQL Log Filtering](/hapi-fhir/docs/appendix/logging.html#hibernate-sql-log-filtering)."

View File

@ -0,0 +1,5 @@
type: fix
issue: 5654
title: "Fixed a MeasureReport measureScoring bug impacting any measures currently using denominator-exception population will
incorrectly calculate the score without following specification. This bug adds an extension to MeasureReport Groups to capture calculated denominator and
numerator to bring transparency to the measureScore calculation and act as a dataSource of measureScore instead of behind the scenes calculations."

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 5680
title: "Previously, after registering built-in interceptor `PatientIdPartitionInterceptor`, while performing
an async system bulk export, the `$poll-export-status` operation would fail with a `NullPointerException`. This has been fixed."
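As context for the fix above, a minimal sketch of the async system bulk export kickoff whose polling step previously failed. It mirrors the new `PatientIdPartitionInterceptorTest` cases added in this commit; the base URL is a placeholder.

```java
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class SystemBulkExportKickoff {
	public static void main(String[] args) throws Exception {
		try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
			// Kick off an async system-level bulk export; the server responds 202 Accepted
			// with a Content-Location header pointing at the $poll-export-status endpoint.
			HttpPost post = new HttpPost("http://localhost:8000/fhir/$export");
			post.addHeader("Prefer", "respond-async");
			try (CloseableHttpResponse response = httpClient.execute(post)) {
				System.out.println(response.getStatusLine());
			}
		}
	}
}
```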

View File

@ -0,0 +1,6 @@
---
type: fix
issue: 5700
title: "Previously (this release cycle), when you call `RemoteTerminologyServiceValidationSupport` method `lookupCode`
with a `CodeSystem` that has properties that are not `string` or `Coding`, the method would throw an exception.
It should instead accept any type and convert any unsupported type to `string`. This has been fixed."

View File

@ -0,0 +1,5 @@
---
type: add
issue: 5692
title: "The JPA WebSocket delivery mechanism now supports the `content` delivery mode.
Thanks to Artiom Darie for the contribution!"
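As an illustration of the new delivery mode, here is a minimal sketch modelled on the `WebsocketWithSubscriptionIdR5Test` added in this commit; it creates a websocket Subscription whose payload content is `id-only` instead of the default ping message. The client instance and topic URL are assumptions.

```java
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r5.model.Enumerations;
import org.hl7.fhir.r5.model.Subscription;

public class WebsocketContentModeExample {
	static String createIdOnlyWebsocketSubscription(IGenericClient theClient) {
		Subscription subscription = new Subscription();
		subscription.setStatus(Enumerations.SubscriptionStatusCodes.ACTIVE);
		// "id-only" or "full-resource" now control what is pushed over the websocket
		subscription.setContent(Subscription.SubscriptionPayloadContent.fromCode("id-only"));
		subscription.setTopic("Topic/123");
		subscription.getChannelType().setCode("websocket");
		return theClient.create().resource(subscription).execute().getId().getIdPart();
	}
}
```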

View File

@ -0,0 +1,8 @@
---
type: add
jira: SMILE-7971
issue: 5712
title: "The SearchNarrowingInterceptor can now optionally be configured to also apply
URL narrowing to conditional URLs used by conditional create/update/delete/patch
operations, both as raw HTTP transactions as well as within FHIR transaction
Bundles."

View File

@ -0,0 +1,4 @@
---
type: security
issue: 5717
title: "Fixed a potential XSS vulnerability in the HAPI FHIR Testpage Overlay module."

View File

@ -0,0 +1,7 @@
---
type: fix
issue: 5720
jira: SMILE-8091
title: "System-level and Type-level History operations on the JPA server (i.e. `_history`) could sometimes
contain duplicates or miss entries when a large number of matching resources on the server had identical
update timestamps. This has been corrected."

View File

@ -0,0 +1,6 @@
---
type: fix
issue: 5722
title: "An incorrect migration script caused a failure when upgrading to HAPI FHIR 7.0.0 on
PostgreSQL if the database was not in the `public` schema. Thanks to GitHub
user @pano-smals for the contribution!"

View File

@ -0,0 +1,4 @@
---
type: fix
issue: 5734
title: "A scheduled job to clean up the Search URL table used to enforce uniqueness of conditional create/update jobs was created as a local job and not a clustered job. This has been fixed."

View File

@ -0,0 +1,8 @@
---
- item:
type: "add"
title: "The version of a few dependencies have been bumped to the latest versions
(dependent HAPI modules listed in brackets):
<ul>
<li>Apache Commons Compress (Base): 1.21 -&gt; 1.26.0</li>
</ul>"

View File

@ -64,3 +64,48 @@ To enable detailed logging of client requests and responses (what URL is being r
To enable detailed logging of server requests and responses, an interceptor may be added to the server which logs each transaction. See [Logging Interceptor](/docs/interceptors/built_in_server_interceptors.html#logging_interceptor) for more information.
# Hibernate SQL Log Filtering
<div class="helpWarningCalloutBox">
Hibernate SQL debug logging can affect your system performance.
This filtering function reduces the amount of output generated by Hibernate SQL logging, but it runs `after` the logging code is executed, so the performance degradation caused by Hibernate SQL logging still applies while filtering is in use.
</div>
Hibernate logs SQL statements from a single class, which makes it hard to obtain SQL logs for a specific feature only, as the logging includes all background processes.
The Hibernate SQL log filtering feature allows you to filter out Hibernate SQL logging entries by adding blacklist filters to a `hibernate-sql-log-filters.txt` classpath file.
Note that the feature works by `filtering out` SQL logs, meaning that each filter you add reduces the SQL debug logging generated by any process running in the system, such as background processes or other user interactions.
Hibernate SQL log filtering and its filter-refreshing task activate when `org.hibernate.SQL` logging is set to `debug` level or higher, and deactivate when logging is set to `info` level or lower.
While inactive, the feature doesn't affect performance in any way and also shuts down the background filter-refreshing process. It can affect performance when active, so it is intended only as a debugging tool.
The filter lines must start with one of:
1. `stack: ` to filter log entries produced by code whose stack trace has a line starting with the filter string,
2. `sw: ` to filter log entries which start with the filter string, or
3. `frag: ` to filter log entries which contain the filter string
A sample file is provided with filter lines commented out. Once uncommented, these filter lines filter out the logging produced by most background processes.
## Hibernate SQL Log Filtering example
The sample use case is identifying the SQL queries that are run for a particular operation.
The steps would be:
1. Change your logging configuration entry `org.hibernate.SQL` to `DEBUG` (a programmatic sketch follows this example)
As soon as your logging system refreshes its configuration, you will see in the console a vast amount of SQL logging produced by your system's background tasks.
2. Uncomment the filters defined in your classpath file `hibernate-sql-log-filters.txt`.
As soon as the filtering feature refreshes (after a 5-second delay), the console SQL logging should stop.
3. Run the operation whose SQL queries you need to debug.
The SQL statements for your operation should now be logged.
Note: This example is ordered so you can observe the effect of the filters on the log output; however, swapping steps 1 and 2 works better because it avoids the initial flood of logging output.
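For step 1, a minimal programmatic sketch of raising the `org.hibernate.SQL` logger to `DEBUG`, assuming Logback is the SLF4J backend; most deployments would set this in `logback.xml` instead.

```java
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import org.slf4j.LoggerFactory;

public class HibernateSqlDebugToggle {
	static void enableHibernateSqlDebug() {
		// Raising this logger to DEBUG activates the SQL log filtering feature;
		// lowering it back to INFO deactivates it and stops the refresh task.
		Logger hibernateSqlLogger = (Logger) LoggerFactory.getLogger("org.hibernate.SQL");
		hibernateSqlLogger.setLevel(Level.DEBUG);
	}
}
```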

View File

@ -193,7 +193,7 @@ HAPI FHIR provides an interceptor that can be used to implement consent rules an
# Security: Search Narrowing
HAPI FHIR provides an interceptor that can be used to implement consent rules and directives. See [Consent Interceptor](/docs/security/consent_interceptor.html) for more information.
HAPI FHIR provides an interceptor that can automatically narrow the scope of searches performed by a client. See [Search Narrowing Interceptor](/docs/security/search_narrowing_interceptor.html) for more information.
# Security: Rejecting Unsupported HTTP Verbs

View File

@ -25,6 +25,24 @@ An example of this interceptor follows:
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/AuthorizationInterceptors.java|narrowing}}
```
# Narrowing Conditional URLs
By default, this interceptor will narrow URLs for FHIR search operations only. The
interceptor can also be configured to narrow URLs on conditional operations.
When this feature is enabled, request URLs are also narrowed for the following FHIR operations:
* Conditional Create (The `If-None-Exist` header is narrowed)
* Conditional Update (The request URL is narrowed if it is a conditional URL)
* Conditional Delete (The request URL is narrowed if it is a conditional URL)
* Conditional Patch (The request URL is narrowed if it is a conditional URL)
The following example shows how to enable conditional URL narrowing on the interceptor.
```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/AuthorizationInterceptors.java|narrowingConditional}}
```
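For context, a minimal client-side sketch (hypothetical server URL and identifiers) of a conditional create whose `If-None-Exist` URL would be subject to narrowing once the option above is enabled:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Patient;

public class ConditionalCreateExample {
	public static void main(String[] args) {
		IGenericClient client = FhirContext.forR4Cached().newRestfulGenericClient("http://localhost:8000/fhir");
		Patient patient = new Patient();
		patient.addIdentifier().setSystem("http://example.org/mrn").setValue("12345");
		// The conditional URL is sent in the If-None-Exist header, which the
		// SearchNarrowingInterceptor on the server can now narrow.
		client.create()
			.resource(patient)
			.conditionalByUrl("Patient?identifier=http://example.org/mrn|12345")
			.execute();
	}
}
```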
<a name="constraining-by-valueset-membership"/>
# Constraining by ValueSet Membership

View File

@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -117,6 +117,16 @@ public class JaxRsRequest extends RequestDetails {
return requestHeader == null ? Collections.<String>emptyList() : requestHeader;
}
@Override
public void addHeader(String theName, String theValue) {
throw new UnsupportedOperationException(Msg.code(2499) + "Headers can not be modified in JAX-RS");
}
@Override
public void setHeaders(String theName, List<String> theValue) {
throw new UnsupportedOperationException(Msg.code(2500) + "Headers can not be modified in JAX-RS");
}
@Override
public Object getAttribute(String theAttributeName) {
return myAttributes.get(theAttributeName);

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -0,0 +1,38 @@
package ca.uhn.fhir.jpa.logging;
import com.google.common.annotations.VisibleForTesting;
import java.util.LinkedList;
import java.util.List;
public abstract class BaseSqlLoggerFilterImpl implements ISqlLoggerFilter {
protected final List<String> myFilterDefinitions = new LinkedList<>();
@Override
public boolean evaluateFilterLine(String theFilterLine) {
boolean matched = theFilterLine.startsWith(getPrefix());
if (matched) {
myFilterDefinitions.add(
theFilterLine.substring(getPrefix().length()).trim());
}
return matched;
}
@Override
public void clearDefinitions() {
myFilterDefinitions.clear();
}
@Override
public Object getLockingObject() {
return myFilterDefinitions;
}
@VisibleForTesting
public void setFilterDefinitions(List<String> theFilterDefinitions) {
synchronized (myFilterDefinitions) {
myFilterDefinitions.clear();
myFilterDefinitions.addAll(theFilterDefinitions);
}
}
}

View File

@ -0,0 +1,19 @@
package ca.uhn.fhir.jpa.logging;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
import org.hibernate.service.spi.ServiceContributor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class FilteringSqlLoggerImplContributor implements ServiceContributor {
private static final Logger logger = LoggerFactory.getLogger(FilteringSqlLoggerImplContributor.class);
@Override
public void contribute(StandardServiceRegistryBuilder serviceRegistryBuilder) {
logger.info("Adding service: SqlStatementFilteringLogger");
serviceRegistryBuilder.addService(
SqlStatementLogger.class, new SqlStatementFilteringLogger(SqlLoggerFilteringUtil.getInstance()));
}
}

View File

@ -0,0 +1,17 @@
package ca.uhn.fhir.jpa.logging;
/**
* Contract for Filters used by the utility class SqlStatementFilteringLogger
*/
public interface ISqlLoggerFilter {
boolean match(String theStatement);
boolean evaluateFilterLine(String theFilterLine);
String getPrefix();
void clearDefinitions();
Object getLockingObject();
}

View File

@ -0,0 +1,175 @@
package ca.uhn.fhir.jpa.logging;
import ca.uhn.fhir.i18n.Msg;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.ClassPathResource;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
/**
* Utility for the Hibernate SQL log filtering feature
*/
public class SqlLoggerFilteringUtil {
private static final Logger ourLog = LoggerFactory.getLogger(SqlLoggerFilteringUtil.class);
@SuppressWarnings("FieldMayBeFinal") // so test can inject mocks
public static int FILTER_UPDATE_INTERVAL_SECS = 5;
public static final String FILTER_FILE_PATH = "hibernate-sql-log-filters.txt";
private static final AtomicInteger ourRefreshCount = new AtomicInteger();
private final Logger hibernateLogger = LoggerFactory.getLogger("org.hibernate.SQL");
private final List<ISqlLoggerFilter> mySqlLoggerFilters;
private CountDownLatch myRefreshDoneLatch;
private volatile ScheduledThreadPoolExecutor myRefreshExecutor;
private static final SqlLoggerFilteringUtil myInstance = new SqlLoggerFilteringUtil();
// singleton
public static SqlLoggerFilteringUtil getInstance() {
return myInstance;
}
private SqlLoggerFilteringUtil() {
ourRefreshCount.set(0);
mySqlLoggerFilters = List.of(
new SqlLoggerStartsWithFilter(), new SqlLoggerFragmentFilter(), new SqlLoggerStackTraceFilter());
}
public boolean allowLog(String theStatement) {
// Invoked when org.hibernate.SQL logger is DEBUG enabled
// Only initialize if method ever invoked, as it is usually not the case.
if (myRefreshExecutor == null || myRefreshExecutor.isShutdown()) {
startFilterRefreshExecutor();
}
boolean allowLog = isStatementAllowed(theStatement);
ourLog.trace("SQL log {}: {}", allowLog ? "allowed" : "filtered out", theStatement);
return allowLog;
}
private boolean isStatementAllowed(String theStatement) {
return mySqlLoggerFilters.stream().noneMatch(f -> f.match(theStatement));
}
private synchronized void startFilterRefreshExecutor() {
if (myRefreshExecutor != null && !myRefreshExecutor.isShutdown()) {
ourLog.debug(
"myRefreshExecutor terminated state: {}, terminating state: {}",
myRefreshExecutor.isTerminated(),
myRefreshExecutor.isTerminating());
return;
}
myRefreshDoneLatch = new CountDownLatch(1);
myRefreshExecutor = new ScheduledThreadPoolExecutor(1);
myRefreshExecutor.scheduleAtFixedRate(
new UpdateFiltersTask(), 0, FILTER_UPDATE_INTERVAL_SECS, TimeUnit.SECONDS);
ourLog.info("Starting SQL log filters refresh executor");
// wait for first refresh cycle to complete
try {
// reset to use in case executor is restarted
myRefreshDoneLatch.await();
} catch (InterruptedException ignored) {
ourLog.warn("Interrupted from sleep");
}
}
private synchronized void stopFilterRefreshExecutor() {
if (myRefreshExecutor == null || myRefreshExecutor.isShutdown()) {
return;
}
ourLog.info("Stopping SQL log filters refresh executor");
myRefreshExecutor.shutdown();
}
private class UpdateFiltersTask implements Runnable {
@Override
public void run() {
ourLog.debug("\n\n\t\t\tRefreshing hibernate SQL filters!\n");
try {
refreshFilters(FILTER_FILE_PATH);
} catch (Exception theE) {
ourLog.error("Hibernate SQL log filters not refreshed. Exception: {} \n{}", theE, theE.getStackTrace());
throw new RuntimeException(Msg.code(2478) + theE);
} finally {
myRefreshDoneLatch.countDown();
}
int count = ourRefreshCount.getAndIncrement();
ourLog.debug("SQL logging filters {}. Refresh count: {}", count == 0 ? "initialized" : "refreshed", count);
}
}
@VisibleForTesting
public void refreshFilters(String theFilterFilePath) throws IOException {
ourLog.debug("SQL log DEBUG enabled: {}", hibernateLogger.isDebugEnabled());
if (!hibernateLogger.isDebugEnabled()) {
// in case startFilterRefreshExecutor is waiting for refresh to finish
myRefreshDoneLatch.countDown();
stopFilterRefreshExecutor();
return;
}
ourLog.debug("Starting filters refresh");
File resource = new ClassPathResource(theFilterFilePath).getFile();
List<String> filterDefinitionLines = Files.readAllLines(resource.toPath());
for (ISqlLoggerFilter filter : mySqlLoggerFilters) {
synchronized (filter.getLockingObject()) {
filter.clearDefinitions();
filterDefinitionLines.stream()
.filter(l -> !l.startsWith("#"))
.filter(filterDef -> StringUtils.isNotBlank(filterDef.trim()))
.forEach(filterLine -> presentFilterDefinitionLineToFilters(filterLine, mySqlLoggerFilters));
}
}
ourLog.debug("Ended filter refresh");
}
private void presentFilterDefinitionLineToFilters(String theFilterLine, List<ISqlLoggerFilter> theFilterList) {
for (ISqlLoggerFilter filterRef : theFilterList) {
if (filterRef.evaluateFilterLine(theFilterLine)) {
// only one filter takes a filter line
return;
}
}
int spaceIdx = theFilterLine.indexOf(" ");
ourLog.warn(
"SQL log filtering line prefix not recognized: '{}'. Must be one of: '#', {}",
theFilterLine.substring(0, spaceIdx == -1 ? theFilterLine.length() : spaceIdx),
theFilterList.stream().map(ISqlLoggerFilter::getPrefix).collect(Collectors.joining("', '", "'", "'")));
}
@VisibleForTesting
public static int getRefreshCountForTests() {
return ourRefreshCount.get();
}
@VisibleForTesting
public static void setFilterUpdateIntervalSecs(int theFilterUpdateIntervalSecs) {
FILTER_UPDATE_INTERVAL_SECS = theFilterUpdateIntervalSecs;
}
@VisibleForTesting
public List<ISqlLoggerFilter> getSqlLoggerFilters() {
return mySqlLoggerFilters;
}
}

View File

@ -0,0 +1,21 @@
package ca.uhn.fhir.jpa.logging;
/**
* When filtering is active, filters Hibernate SQL log lines containing the defined fragment
*/
public class SqlLoggerFragmentFilter extends BaseSqlLoggerFilterImpl implements ISqlLoggerFilter {
public static final String PREFIX = "frag:";
@Override
public boolean match(String theStatement) {
synchronized (myFilterDefinitions) {
return myFilterDefinitions.stream().anyMatch(theStatement::contains);
}
}
@Override
public String getPrefix() {
return PREFIX;
}
}

View File

@ -0,0 +1,69 @@
package ca.uhn.fhir.jpa.logging;
import ca.uhn.fhir.i18n.Msg;
import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* When filtering is active, filters Hibernate SQL log lines whose generating code's stack trace
* contains a line which starts with the defined string
*/
public class SqlLoggerStackTraceFilter extends BaseSqlLoggerFilterImpl implements ISqlLoggerFilter {
private static final Logger ourLog = LoggerFactory.getLogger(SqlLoggerStackTraceFilter.class);
public static final String PREFIX = "stack:";
@Override
public boolean match(String theStatement) {
// safe copy to shorten synchronized time
LinkedList<String> filterDefinitionsSafeCopy;
synchronized (myFilterDefinitions) {
filterDefinitionsSafeCopy = new LinkedList<>(myFilterDefinitions);
}
Set<String> cdrClassesInStack =
getStackTraceStream().map(StackTraceElement::getClassName).collect(Collectors.toSet());
if (cdrClassesInStack.isEmpty()) {
ourLog.trace("No CDR or HAPI-FHIR class found in stack");
return false;
}
boolean matched;
try {
matched = cdrClassesInStack.stream().anyMatch(clName -> filterDefinitionsSafeCopy.stream()
.filter(Objects::nonNull)
.anyMatch(clName::startsWith));
} catch (Exception theE) {
ourLog.debug(
"myFilterDefinitions: {}",
filterDefinitionsSafeCopy.stream()
.map(String::valueOf)
.collect(Collectors.joining("\n", "\n", "")));
ourLog.debug(
"cdrClassesInStack: {}",
cdrClassesInStack.stream().map(String::valueOf).collect(Collectors.joining("\n", "\n", "")));
throw new RuntimeException(Msg.code(2479) + theE);
}
return matched;
}
@VisibleForTesting // Thread can't be mocked
public Stream<StackTraceElement> getStackTraceStream() {
return Arrays.stream(Thread.currentThread().getStackTrace());
}
@Override
public String getPrefix() {
return PREFIX;
}
}

View File

@ -0,0 +1,20 @@
package ca.uhn.fhir.jpa.logging;
/**
* When filtering is active, filters Hibernate SQL log lines starting with the defined string
*/
public class SqlLoggerStartsWithFilter extends BaseSqlLoggerFilterImpl {
private static final String PREFIX = "sw:";
@Override
public boolean match(String theStatement) {
synchronized (myFilterDefinitions) {
return myFilterDefinitions.stream().anyMatch(theStatement::startsWith);
}
}
@Override
public String getPrefix() {
return PREFIX;
}
}

View File

@ -0,0 +1,40 @@
package ca.uhn.fhir.jpa.logging;
import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
import org.hibernate.internal.CoreLogging;
import org.hibernate.service.Service;
import org.jboss.logging.Logger;
/**
* Logger set as a Hibernate service to allow filtering out SQL statements based on statement content
* instead of package, as Hibernate always logs from the same class, which prevents discrimination by package.
* Note that when the content includes a class and package name, it can be used for filtering.
* <p/>
* It self-activates when the "org.hibernate.SQL" logger is set to DEBUG.
* It deactivates fully (even the config-checking executor is shut down) when the "org.hibernate.SQL" logger is set lower than DEBUG.
* To use, simply add filtering statement lines to the 'sql-filters/hibernate-sql-log-filters.txt' classpath file
* starting with:
* <ul>
* <li>'sw:' to filter statements which start with the given string</li>
* <li>'frag:' to filter statements which contain the given fragment</li>
* <li>'stack:' to filter statements whose logging stack trace contains a line starting with the given string</li>
* </ul>
*/
public class SqlStatementFilteringLogger extends SqlStatementLogger implements Service {
private static final Logger LOG = CoreLogging.logger("org.hibernate.SQL");
private final SqlLoggerFilteringUtil myFilteringUtil;
public SqlStatementFilteringLogger(SqlLoggerFilteringUtil theFilteringUtil) {
super();
myFilteringUtil = theFilteringUtil;
}
@Override
public void logStatement(String statement) {
if (LOG.isDebugEnabled() && myFilteringUtil.allowLog(statement)) {
super.logStatement(statement);
}
}
}

View File

@ -0,0 +1 @@
ca.uhn.fhir.jpa.logging.FilteringSqlLoggerImplContributor

View File

@ -0,0 +1,19 @@
##############################################################################################
# Filters suppress a log statement according to prefix when:
# _ stack: there is an entry in the stack trace whose class name starts with the filter string
# _ sw: the log statement starts with the filter string
# _ frag: the log statement contains the filter string
#
# To add a filter extend BaseSqlLoggerFilterImpl.
#
##############################################################################################
# stack: ca.uhn.fhir.jpa.search.SearchUrlJobMaintenanceSvcImpl
# stack: ca.uhn.fhir.jpa.subscription.ResourceModifiedMessagePersistenceSvcImpl
# stack: ca.uhn.fhir.jpa.term.TermReindexingSvcImpl
# stack: ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl
# stack: ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl
# stack: ca.uhn.fhir.jpa.batch2.JpaJobPersistenceImpl
# stack: ca.uhn.fhir.jpa.term.TermReadSvcImpl
# stack: ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportJobSchedulingHelperImpl
# stack: ca.uhn.fhir.jpa.search.StaleSearchDeletingSvcImpl
# stack: ca.uhn.fhir.jpa.cache.ResourceChangeListenerCacheRefresherImpl

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -124,7 +124,20 @@ public class HistoryBuilder {
from.fetch("myProvenance", JoinType.LEFT);
criteriaQuery.orderBy(cb.desc(from.get("myUpdated")));
/*
* The sort on myUpdated is the important one for _history operations, but there are
* cases where multiple pages of results all have the exact same myUpdated value (e.g.
* if they were all ingested in a single FHIR transaction). So we put a secondary sort
* on the resource PID just to ensure that the sort is stable across queries.
*
* There are indexes supporting the myUpdated sort at each level (system/type/instance)
* but those indexes don't include myResourceId. I don't think that should be an issue
* since myUpdated should generally be unique anyhow. If this ever becomes an issue,
* we might consider adding the resource PID to indexes IDX_RESVER_DATE and
* IDX_RESVER_TYPE_DATE in the future.
* -JA 2024-04-21
*/
criteriaQuery.orderBy(cb.desc(from.get("myUpdated")), cb.desc(from.get("myResourceId")));
TypedQuery<ResourceHistoryTable> query = myEntityManager.createQuery(criteriaQuery);

View File

@ -183,7 +183,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
{
version.executeRawSql(
"20231212.1",
"CREATE INDEX CONCURRENTLY idx_sp_string_hash_nrm_pattern_ops ON public.hfj_spidx_string USING btree (hash_norm_prefix, sp_value_normalized varchar_pattern_ops, res_id, partition_id)")
"CREATE INDEX CONCURRENTLY idx_sp_string_hash_nrm_pattern_ops ON hfj_spidx_string USING btree (hash_norm_prefix, sp_value_normalized varchar_pattern_ops, res_id, partition_id)")
.setTransactional(false)
.onlyAppliesToPlatforms(DriverTypeEnum.POSTGRES_9_4)
.onlyIf(
@ -197,7 +197,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
"Index idx_sp_string_hash_nrm_pattern_ops already exists");
version.executeRawSql(
"20231212.2",
"CREATE UNIQUE INDEX CONCURRENTLY idx_sp_uri_hash_identity_pattern_ops ON public.hfj_spidx_uri USING btree (hash_identity, sp_uri varchar_pattern_ops, res_id, partition_id)")
"CREATE UNIQUE INDEX CONCURRENTLY idx_sp_uri_hash_identity_pattern_ops ON hfj_spidx_uri USING btree (hash_identity, sp_uri varchar_pattern_ops, res_id, partition_id)")
.setTransactional(false)
.onlyAppliesToPlatforms(DriverTypeEnum.POSTGRES_9_4)
.onlyIf(

View File

@ -35,7 +35,10 @@ import org.springframework.transaction.annotation.Transactional;
import java.util.Date;
/**
* This service ensures uniqueness of resources during create or create-on-update by storing the resource searchUrl.
* This service ensures uniqueness of resources during create or create-on-update
* by storing the resource searchUrl.
*
* @see SearchUrlJobMaintenanceSvcImpl which deletes stale entities
*/
@Transactional
@Service

View File

@ -61,7 +61,7 @@ public class SearchUrlJobMaintenanceSvcImpl implements ISearchUrlJobMaintenanceS
ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
jobDetail.setId(SearchUrlMaintenanceJob.class.getName());
jobDetail.setJobClass(SearchUrlMaintenanceJob.class);
theSchedulerService.scheduleLocalJob(10 * DateUtils.MILLIS_PER_MINUTE, jobDetail);
theSchedulerService.scheduleClusteredJob(10 * DateUtils.MILLIS_PER_MINUTE, jobDetail);
}
private Date calculateCutoffDate() {

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -29,6 +29,15 @@ import jakarta.persistence.TemporalType;
import java.util.Date;
/**
* This entity is used to enforce uniqueness on a given search URL being
* used as a conditional operation URL, e.g. a conditional create or a
* conditional update. When we perform a conditional operation that is
* creating a new resource, we store an entity with the conditional URL
* in this table. The URL is the PK of the table, so the database
* ensures that two concurrent threads don't accidentally create two
* resources with the same conditional URL.
*/
@Entity
@Table(
name = "HFJ_RES_SEARCH_URL",

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -41,9 +41,10 @@ import org.springframework.web.socket.WebSocketSession;
import org.springframework.web.socket.handler.TextWebSocketHandler;
import java.io.IOException;
import java.util.Optional;
public class SubscriptionWebsocketHandler extends TextWebSocketHandler implements WebSocketHandler {
private static Logger ourLog = LoggerFactory.getLogger(SubscriptionWebsocketHandler.class);
private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionWebsocketHandler.class);
@Autowired
protected WebsocketConnectionValidator myWebsocketConnectionValidator;
@ -51,6 +52,8 @@ public class SubscriptionWebsocketHandler extends TextWebSocketHandler implement
@Autowired
SubscriptionChannelRegistry mySubscriptionChannelRegistry;
private IState myState = new InitialState();
/**
* Constructor
*/
@ -58,8 +61,6 @@ public class SubscriptionWebsocketHandler extends TextWebSocketHandler implement
super();
}
private IState myState = new InitialState();
@Override
public void afterConnectionClosed(WebSocketSession theSession, CloseStatus theStatus) throws Exception {
super.afterConnectionClosed(theSession, theStatus);
@ -130,10 +131,17 @@ public class SubscriptionWebsocketHandler extends TextWebSocketHandler implement
subscriptionChannelWithHandlers.removeHandler(this);
}
private void deliver() {
/**
* Send the payload to the client
*
* @param payload The payload
*/
private void deliver(String payload) {
try {
String payload = "ping " + myActiveSubscription.getId();
// Log it
ourLog.info("Sending WebSocket message: {}", payload);
// Send message
mySession.sendMessage(new TextMessage(payload));
} catch (IOException e) {
handleFailure(e);
@ -145,14 +153,67 @@ public class SubscriptionWebsocketHandler extends TextWebSocketHandler implement
if (!(theMessage.getPayload() instanceof ResourceDeliveryMessage)) {
return;
}
try {
ResourceDeliveryMessage msg = (ResourceDeliveryMessage) theMessage.getPayload();
if (myActiveSubscription.getSubscription().equals(msg.getSubscription())) {
deliver();
}
handleSubscriptionPayload(msg);
} catch (Exception e) {
ourLog.error("Failure handling subscription payload", e);
throw new MessagingException(theMessage, Msg.code(6) + "Failure handling subscription payload", e);
handleException(theMessage, e);
}
}
/**
* Handle the subscription payload
*
* @param msg The message
*/
private void handleSubscriptionPayload(ResourceDeliveryMessage msg) {
// Check if the subscription exists and is the same as the active subscription
if (!myActiveSubscription.getSubscription().equals(msg.getSubscription())) {
return;
}
// Default payload
String defaultPayload = "ping " + myActiveSubscription.getId();
String payload = defaultPayload;
// Check if the subscription is a topic subscription
if (msg.getSubscription().isTopicSubscription()) {
// Get the payload by content
payload = getPayloadByContent(msg).orElse(defaultPayload);
}
// Deliver the payload
deliver(payload);
}
/**
* Handle the exception
*
* @param theMessage The message
* @param e The exception
*/
private void handleException(Message<?> theMessage, Exception e) {
ourLog.error("Failure handling subscription payload", e);
throw new MessagingException(theMessage, Msg.code(6) + "Failure handling subscription payload", e);
}
/**
* Get the payload based on the subscription content
*
* @param msg The message
* @return The payload
*/
private Optional<String> getPayloadByContent(ResourceDeliveryMessage msg) {
switch (msg.getSubscription().getContent()) {
case IDONLY:
return Optional.of(msg.getPayloadId());
case FULLRESOURCE:
return Optional.of(msg.getPayloadString());
case EMPTY:
case NULL:
default:
return Optional.empty();
}
}

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -1,6 +1,8 @@
package ca.uhn.fhir.jpa.interceptor;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@ -10,19 +12,28 @@ import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor;
import ca.uhn.fhir.jpa.util.SqlQuery;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
import ca.uhn.fhir.rest.server.provider.BulkDataExportProvider;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.util.BundleBuilder;
import ca.uhn.fhir.util.MultimapCollector;
import com.google.common.base.Charsets;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Multimap;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.commons.io.IOUtils;
import org.apache.http.Header;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.Enumerations;
@ -36,6 +47,7 @@ import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.SpyBean;
import java.io.IOException;
import java.util.List;
@ -49,8 +61,11 @@ import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.either;
import static org.hamcrest.Matchers.matchesPattern;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.verify;
public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Test {
public static final int ALTERNATE_DEFAULT_ID = -1;
@ -542,6 +557,70 @@ public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Tes
return (Patient)update.getResource();
}
@Test
public void testIdentifyForRead_serverOperation_returnsAllPartitions() {
ReadPartitionIdRequestDetails readRequestDetails = ReadPartitionIdRequestDetails.forServerOperation(ProviderConstants.OPERATION_EXPORT);
RequestPartitionId requestPartitionId = mySvc.identifyForRead(readRequestDetails, mySrd);
assertEquals(requestPartitionId, RequestPartitionId.allPartitions());
assertEquals(RestOperationTypeEnum.EXTENDED_OPERATION_SERVER, readRequestDetails.getRestOperationType());
}
@Test
public void testSystemBulkExport_withPatientIdPartitioningWithNoResourceType_usesNonPatientSpecificPartition() throws IOException {
HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + ProviderConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
try (CloseableHttpResponse postResponse = myServer.getHttpClient().execute(post)){
ourLog.info("Response: {}",postResponse);
assertEquals(202, postResponse.getStatusLine().getStatusCode());
assertEquals("Accepted", postResponse.getStatusLine().getReasonPhrase());
}
}
@Test
public void testSystemBulkExport_withPatientIdPartitioningWithResourceType_exportUsesNonPatientSpecificPartition() throws IOException {
HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + ProviderConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.addHeader(BulkDataExportProvider.PARAM_EXPORT_TYPE, "Patient");
post.addHeader(BulkDataExportProvider.PARAM_EXPORT_TYPE_FILTER, "Patient?");
try (CloseableHttpResponse postResponse = myServer.getHttpClient().execute(post)){
ourLog.info("Response: {}",postResponse);
assertEquals(202, postResponse.getStatusLine().getStatusCode());
assertEquals("Accepted", postResponse.getStatusLine().getReasonPhrase());
}
}
@Test
public void testSystemBulkExport_withPatientIdPartitioningWithResourceType_pollSuccessful() throws IOException {
final BulkExportJobParameters options = new BulkExportJobParameters();
options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + ProviderConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.addHeader(BulkDataExportProvider.PARAM_EXPORT_TYPE, "Patient"); // ignored when computing partition
post.addHeader(BulkDataExportProvider.PARAM_EXPORT_TYPE_FILTER, "Patient?");
String locationUrl;
try (CloseableHttpResponse postResponse = myServer.getHttpClient().execute(post)){
ourLog.info("Response: {}",postResponse);
assertEquals(202, postResponse.getStatusLine().getStatusCode());
assertEquals("Accepted", postResponse.getStatusLine().getReasonPhrase());
Header locationHeader = postResponse.getFirstHeader(Constants.HEADER_CONTENT_LOCATION);
assertNotNull(locationHeader);
locationUrl = locationHeader.getValue();
}
HttpGet get = new HttpGet(locationUrl);
try (CloseableHttpResponse postResponse = myServer.getHttpClient().execute(get)) {
String responseContent = IOUtils.toString(postResponse.getEntity().getContent(), Charsets.UTF_8);
ourLog.info("Response: {}", responseContent);
assertEquals(202, postResponse.getStatusLine().getStatusCode());
}
}
@Test
public void testSystemOperation_withNoResourceType_success() throws IOException {
HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + ProviderConstants.OPERATION_EXPORT);

View File

@ -567,12 +567,13 @@ public class SystemProviderR4Test extends BaseJpaR4Test {
MyAnonymousInterceptor1 interceptor1 = new MyAnonymousInterceptor1();
ourRestServer.getInterceptorService().registerAnonymousInterceptor(Pointcut.SERVER_INCOMING_REQUEST_POST_PROCESSED, interceptor1);
MySearchNarrowingInterceptor interceptor2 = new MySearchNarrowingInterceptor();
interceptor2.setNarrowConditionalUrls(true);
ourRestServer.getInterceptorService().registerInterceptor(interceptor2);
try {
myClient.transaction().withBundle(input).execute();
assertEquals(1, counter0.get());
assertEquals(1, counter1.get());
assertEquals(5, counter2.get());
assertEquals(1, counter2.get());
} finally {
ourRestServer.getInterceptorService().unregisterInterceptor(interceptor1);

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,9 +4,11 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.client.interceptor.CapturingInterceptor;
import ca.uhn.fhir.rest.server.exceptions.NotImplementedOperationException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.util.BundleBuilder;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
@ -26,7 +28,6 @@ import org.hl7.fhir.r5.model.DateTimeType;
import org.hl7.fhir.r5.model.MedicationRequest;
import org.hl7.fhir.r5.model.Observation;
import org.hl7.fhir.r5.model.Observation.ObservationComponentComponent;
import org.hl7.fhir.r5.model.OperationOutcome;
import org.hl7.fhir.r5.model.Organization;
import org.hl7.fhir.r5.model.Parameters;
import org.hl7.fhir.r5.model.Patient;
@ -34,14 +35,18 @@ import org.hl7.fhir.r5.model.Quantity;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.comparator.Comparators;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.leftPad;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
@ -278,7 +283,7 @@ public class ResourceProviderR5Test extends BaseResourceProviderR5Test {
@Test
public void testSearchWithCompositeSort() throws IOException {
IIdType pid0;
IIdType oid1;
IIdType oid2;
@ -294,76 +299,76 @@ public class ResourceProviderR5Test extends BaseResourceProviderR5Test {
Observation obs = new Observation();
obs.addIdentifier().setSystem("urn:system").setValue("FOO");
obs.getSubject().setReferenceElement(pid0);
ObservationComponentComponent comp = obs.addComponent();
CodeableConcept cc = new CodeableConcept();
cc.addCoding().setCode("2345-7").setSystem("http://loinc.org");
comp.setCode(cc);
cc.addCoding().setCode("2345-7").setSystem("http://loinc.org");
comp.setCode(cc);
comp.setValue(new Quantity().setValue(200));
oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
ourLog.debug("Observation: \n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(obs));
}
{
Observation obs = new Observation();
obs.addIdentifier().setSystem("urn:system").setValue("FOO");
obs.getSubject().setReferenceElement(pid0);
ObservationComponentComponent comp = obs.addComponent();
CodeableConcept cc = new CodeableConcept();
cc.addCoding().setCode("2345-7").setSystem("http://loinc.org");
comp.setCode(cc);
cc.addCoding().setCode("2345-7").setSystem("http://loinc.org");
comp.setCode(cc);
comp.setValue(new Quantity().setValue(300));
oid2 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
ourLog.debug("Observation: \n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(obs));
}
{
Observation obs = new Observation();
obs.addIdentifier().setSystem("urn:system").setValue("FOO");
obs.getSubject().setReferenceElement(pid0);
ObservationComponentComponent comp = obs.addComponent();
CodeableConcept cc = new CodeableConcept();
cc.addCoding().setCode("2345-7").setSystem("http://loinc.org");
comp.setCode(cc);
cc.addCoding().setCode("2345-7").setSystem("http://loinc.org");
comp.setCode(cc);
comp.setValue(new Quantity().setValue(150));
oid3 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
ourLog.debug("Observation: \n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(obs));
}
{
Observation obs = new Observation();
obs.addIdentifier().setSystem("urn:system").setValue("FOO");
obs.getSubject().setReferenceElement(pid0);
ObservationComponentComponent comp = obs.addComponent();
CodeableConcept cc = new CodeableConcept();
cc.addCoding().setCode("2345-7").setSystem("http://loinc.org");
comp.setCode(cc);
cc.addCoding().setCode("2345-7").setSystem("http://loinc.org");
comp.setCode(cc);
comp.setValue(new Quantity().setValue(250));
oid4 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
ourLog.debug("Observation: \n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(obs));
}
String uri = myServerBase + "/Observation?_sort=combo-code-value-quantity";
Bundle found;
HttpGet get = new HttpGet(uri);
try (CloseableHttpResponse resp = ourHttpClient.execute(get)) {
String output = IOUtils.toString(resp.getEntity().getContent(), Charsets.UTF_8);
found = myFhirCtx.newXmlParser().parseResource(Bundle.class, output);
}
ourLog.debug("Bundle: \n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(found));
List<IIdType> list = toUnqualifiedVersionlessIds(found);
assertEquals(4, found.getEntry().size());
assertEquals(oid3, list.get(0));
@ -496,6 +501,66 @@ public class ResourceProviderR5Test extends BaseResourceProviderR5Test {
myClient.transaction().withResources(carePlans).execute();
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testHistoryPaging(boolean theTypeLevel) {
// Setup
BundleBuilder bb = new BundleBuilder(myFhirContext);
List<String> expectedIdentifiers = new ArrayList<>();
for (int i = 0; i < 500; i++) {
Patient p = new Patient();
String identifier = leftPad(Integer.toString(i), 4, '0');
expectedIdentifiers.add(identifier);
p.addIdentifier().setValue(identifier);
bb.addTransactionCreateEntry(p);
}
ourLog.info("Starting transaction with {} entries...", expectedIdentifiers.size());
mySystemDao.transaction(mySrd, bb.getBundleTyped());
// Test
ourLog.info("Loading type history, expecting identifiers from {} to {}...", expectedIdentifiers.get(0), expectedIdentifiers.get(expectedIdentifiers.size() - 1));
List<String> actualIdentifiers = new ArrayList<>();
Bundle historyBundle;
if (theTypeLevel) {
historyBundle = myClient.history().onType(Patient.class).returnBundle(Bundle.class).execute();
} else {
historyBundle = myClient.history().onServer().returnBundle(Bundle.class).execute();
}
while (true) {
historyBundle
.getEntry()
.stream()
.map(t -> (Patient) t.getResource())
.map(t -> t.getIdentifierFirstRep().getValue())
.forEach(actualIdentifiers::add);
BundleEntryComponent firstEntry = historyBundle.getEntry().get(0);
BundleEntryComponent lastEntry = historyBundle.getEntry().get(historyBundle.getEntry().size() - 1);
ourLog.info("""
Loaded history page:
* First ID[ {} ] LastUpdated: {}
* Last ID[ {} ] LastUpdated: {}""",
((Patient) firstEntry.getResource()).getIdentifierFirstRep().getValue(),
firstEntry.getResource().getMeta().getLastUpdatedElement().getValueAsString(),
((Patient) lastEntry.getResource()).getIdentifierFirstRep().getValue(),
lastEntry.getResource().getMeta().getLastUpdatedElement().getValueAsString()
);
if (historyBundle.getLink(Constants.LINK_NEXT) != null) {
historyBundle = myClient.loadPage().next(historyBundle).execute();
} else {
break;
}
}
// Verify
actualIdentifiers.sort(Comparators.comparable());
assertEquals(expectedIdentifiers, actualIdentifiers);
}
private IIdType createOrganization(String methodName, String s) {
Organization o1 = new Organization();
o1.setName(methodName + s);
@ -543,7 +608,7 @@ public class ResourceProviderR5Test extends BaseResourceProviderR5Test {
protected List<IIdType> toUnqualifiedVersionlessIds(Bundle theFound) {
List<IIdType> retVal = new ArrayList<>();
for (BundleEntryComponent next : theFound.getEntry()) {
if (next.getResource()!= null) {
if (next.getResource() != null) {
retVal.add(next.getResource().getIdElement().toUnqualifiedVersionless());
}
}

View File

@ -0,0 +1,159 @@
package ca.uhn.fhir.jpa.subscription.websocket;
import ca.uhn.fhir.jpa.subscription.BaseSubscriptionsR5Test;
import ca.uhn.fhir.jpa.test.util.SubscriptionTestUtil;
import ca.uhn.fhir.jpa.util.WebsocketSubscriptionClient;
import ca.uhn.fhir.rest.api.MethodOutcome;
import org.hl7.fhir.r5.model.Enumerations;
import org.hl7.fhir.r5.model.Patient;
import org.hl7.fhir.r5.model.Subscription;
import org.hl7.fhir.r5.model.SubscriptionTopic;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;
import java.util.UUID;
import static org.awaitility.Awaitility.await;
/**
* Test {@link ca.uhn.fhir.jpa.subscription.match.deliver.websocket.SubscriptionWebsocketHandler} with different content types.
*/
public class WebsocketWithSubscriptionIdR5Test extends BaseSubscriptionsR5Test {
private static final Logger ourLog = org.slf4j.LoggerFactory.getLogger(WebsocketWithSubscriptionIdR5Test.class);
@RegisterExtension
private final WebsocketSubscriptionClient myWebsocketClientExtension =
new WebsocketSubscriptionClient(() -> myServer, () -> myStorageSettings);
@Autowired
private SubscriptionTestUtil mySubscriptionTestUtil;
@Override
@BeforeEach
public void before() {
// Register interceptor
mySubscriptionTestUtil.registerWebSocketInterceptor();
mySubscriptionTestUtil.registerSubscriptionLoggingInterceptor();
// Given a subscription topic
SubscriptionTopic subscriptionTopic = new SubscriptionTopic();
subscriptionTopic.setUrl("Topic/123");
subscriptionTopic.setStatus(Enumerations.PublicationStatus.ACTIVE);
SubscriptionTopic.SubscriptionTopicResourceTriggerComponent trigger = subscriptionTopic.addResourceTrigger();
trigger.setResource("Patient");
trigger.addSupportedInteraction(SubscriptionTopic.InteractionTrigger.CREATE);
myClient.create().resource(subscriptionTopic).execute();
}
@Override
@AfterEach
public void after() throws Exception {
// Unregister interceptor
mySubscriptionTestUtil.unregisterSubscriptionInterceptor();
myWebsocketClientExtension.afterEach(null);
}
@Test
public void testSubscriptionMessagePayloadContentIsEmpty() {
// Given a subscription
Subscription subscription = new Subscription();
subscription.setStatus(Enumerations.SubscriptionStatusCodes.ACTIVE);
subscription.setContent(Subscription.SubscriptionPayloadContent.fromCode("empty"));
subscription.setTopic("Topic/123");
subscription.getChannelType().setCode("websocket");
MethodOutcome methodOutcome = myClient.create().resource(subscription).execute();
String subscriptionId = methodOutcome.getId().getIdPart();
// When
myWebsocketClientExtension.bind(subscriptionId);
// And
// Trigger resource creation
Patient patient = new Patient();
patient.setActive(true);
myClient.create().resource(patient).execute();
// Then
List<String> messages = myWebsocketClientExtension.getMessages();
await().until(() -> !messages.isEmpty());
// Log it
ourLog.info("Messages: {}", messages);
// Verify that a ping message is returned
Assertions.assertTrue(messages.contains("ping " + subscriptionId));
}
@Test
public void testSubscriptionMessagePayloadContentIsIdOnly() {
// Given a subscription
Subscription subscription = new Subscription();
subscription.setStatus(Enumerations.SubscriptionStatusCodes.ACTIVE);
subscription.setContent(Subscription.SubscriptionPayloadContent.fromCode("id-only"));
subscription.setTopic("Topic/123");
subscription.getChannelType().setCode("websocket");
MethodOutcome methodOutcome = myClient.create().resource(subscription).execute();
String subscriptionId = methodOutcome.getId().getIdPart();
// When
myWebsocketClientExtension.bind(subscriptionId);
// And
// Trigger resource creation
Patient patient = new Patient();
patient.setActive(true);
myClient.create().resource(patient).execute();
// Then
List<String> messages = myWebsocketClientExtension.getMessages();
await().until(() -> messages.size() > 1);
// Log it
ourLog.info("Messages: {}", messages);
// Verify that a UUID is returned
Assertions.assertTrue(messages.contains("bound " + subscriptionId));
Assertions.assertNotNull(UUID.fromString(messages.get(1)));
}
@Test
public void testSubscriptionMessagePayloadContentIsFullResource() {
// Given a subscription
Subscription subscription = new Subscription();
subscription.setStatus(Enumerations.SubscriptionStatusCodes.ACTIVE);
subscription.setContent(Subscription.SubscriptionPayloadContent.fromCode("full-resource"));
subscription.setTopic("Topic/123");
subscription.getChannelType().setCode("websocket");
MethodOutcome methodOutcome = myClient.create().resource(subscription).execute();
String subscriptionId = methodOutcome.getId().getIdPart();
// When
myWebsocketClientExtension.bind(subscriptionId);
// And
// Trigger resource creation
Patient patient = new Patient();
patient.setActive(true);
myClient.create().resource(patient).execute();
// Then
List<String> messages = myWebsocketClientExtension.getMessages();
await().until(() -> messages.size() > 1);
// Log it
ourLog.info("Messages: {}", messages);
// Verify that a Bundle resource is returned
Assertions.assertTrue(messages.contains("bound " + subscriptionId));
Assertions.assertNotNull(myFhirContext.newJsonParser().parseResource(messages.get(1)));
}
}

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -0,0 +1 @@
# used by FilteringSqlLoggerTest

View File

@ -0,0 +1,339 @@
package ca.uhn.fhir.jpa.logging;
import ca.uhn.test.util.LogbackCaptureTestExtension;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.LoggerContext;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.mockito.InjectMocks;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.ClassPathResource;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.StandardOpenOption;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import static ca.uhn.fhir.jpa.logging.SqlLoggerFilteringUtil.FILTER_FILE_PATH;
import static ca.uhn.fhir.jpa.logging.SqlLoggerFilteringUtil.FILTER_UPDATE_INTERVAL_SECS;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasItems;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
/**
* This test includes tests for two classes, SqlStatementFilteringLogger and SqlLoggerFilteringUtil, because the latter
* uses threading in a way that would make the tests collide with each other if run in parallel, as could happen if
* they were placed in different test classes. They are separated by top-level nested class names.
*/
@ExtendWith(MockitoExtension.class)
public class SqlLoggerFilteringAndUtilTest {
@Nested
public class SqlStatementFilteringLoggerTests {
@Spy
private SqlLoggerFilteringUtil myFilteringUtil;
private SqlStatementFilteringLogger myTestedLogger;
private ch.qos.logback.classic.Logger myLogger;
@BeforeEach
void setUp() {
myTestedLogger = new SqlStatementFilteringLogger(myFilteringUtil);
LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
myLogger = loggerContext.getLogger("org.hibernate.SQL");
ch.qos.logback.classic.Logger myTestedclassLogger = loggerContext.getLogger("ca.cdr.api.logging.SqlLoggerFilteringUtil");
myTestedclassLogger.setLevel(Level.toLevel("trace"));
}
@Nested
public class ActivationTests {
@Test
void doesNotInvokeMustLogWhenLoggerNotDebug() {
myLogger.setLevel(Level.toLevel("info"));
myTestedLogger.logStatement("select * from Patients");
verify(myFilteringUtil, never()).allowLog(any());
}
@Test
void invokesMustLogWhenLoggerDebug() {
myLogger.setLevel(Level.toLevel("debug"));
myTestedLogger.logStatement("select * from Patients");
verify(myFilteringUtil).allowLog("select * from Patients");
}
}
@Nested
public class FileFiltersTests {
@RegisterExtension
public LogbackCaptureTestExtension myLogCapture = new LogbackCaptureTestExtension("org.hibernate.SQL");
@BeforeEach
void setUp() {
myLogger.setLevel(Level.toLevel("debug"));
SqlLoggerFilteringUtil.setFilterUpdateIntervalSecs(1);
}
@AfterEach
void tearDown() throws IOException {
clearFilterFile();
}
@Test
void testDynamicFiltersUpdate_forStartsWithFilters() throws IOException {
// starts with empty filter list
myTestedLogger.logStatement("1-must-log-this-statement");
myTestedLogger.logStatement("2-must-log-this-statement");
myTestedLogger.logStatement("3-must-log-this-statement");
assertEquals(3, myLogCapture.getLogEvents().size());
addLineToFilterFile("sw: 1-must-log");
waitForFiltersRefresh();
myLogCapture.clearEvents();
// log again
myTestedLogger.logStatement("1-must-log-this-statement");
myTestedLogger.logStatement("2-must-log-this-statement");
myTestedLogger.logStatement("3-must-log-this-statement");
assertThat(
myLogCapture.getLogEvents().stream().map(Object::toString).toList(),
hasItems(
containsString("2-must-log-this-statement"),
containsString("3-must-log-this-statement")));
addLineToFilterFile("sw: 3-must-log");
waitForFiltersRefresh();
myLogCapture.clearEvents();
// log again
myTestedLogger.logStatement("1-must-log-this-statement");
myTestedLogger.logStatement("2-must-log-this-statement");
myTestedLogger.logStatement("3-must-log-this-statement");
assertThat(
myLogCapture.getLogEvents().stream().map(Object::toString).toList(),
hasItems(containsString("2-must-log-this-statement")));
}
@Test
void testDynamicFiltersUpdate_forEqualsWithFilters() throws IOException {
// starts with empty filter list
myTestedLogger.logStatement("1-must-log-this-statement");
myTestedLogger.logStatement("2-must-log-this-statement");
myTestedLogger.logStatement("3-must-log-this-statement");
assertEquals(3, myLogCapture.getLogEvents().size());
addLineToFilterFile("eq: 1-must-log-this-statement");
waitForFiltersRefresh();
myLogCapture.clearEvents();
// log again
myTestedLogger.logStatement("1-must-log-this-statement");
myTestedLogger.logStatement("2-must-log-this-statement");
myTestedLogger.logStatement("3-must-log-this-statement");
assertThat(
myLogCapture.getLogEvents().stream().map(Object::toString).toList(),
hasItems(
containsString("2-must-log-this-statement"),
containsString("3-must-log-this-statement")));
addLineToFilterFile("sw: 3-must-log-this-statement");
waitForFiltersRefresh();
myLogCapture.clearEvents();
// log again
myTestedLogger.logStatement("1-must-log-this-statement");
myTestedLogger.logStatement("2-must-log-this-statement");
myTestedLogger.logStatement("3-must-log-this-statement");
assertThat(
myLogCapture.getLogEvents().stream().map(Object::toString).toList(),
hasItems(containsString("2-must-log-this-statement")));
}
}
private void waitForFiltersRefresh() {
int beforeRefreshCount = SqlLoggerFilteringUtil.getRefreshCountForTests();
await().atMost(Duration.of(SqlLoggerFilteringUtil.FILTER_UPDATE_INTERVAL_SECS + 1, ChronoUnit.SECONDS))
.until(() -> SqlLoggerFilteringUtil.getRefreshCountForTests() > beforeRefreshCount);
}
private void addLineToFilterFile(String theFilterLine) throws IOException {
File resource = new ClassPathResource(FILTER_FILE_PATH).getFile();
assertNotNull(resource);
Files.write(resource.toPath(), (theFilterLine + "\n").getBytes(), StandardOpenOption.APPEND);
}
private void clearFilterFile() throws IOException {
File resource = new ClassPathResource(FILTER_FILE_PATH).getFile();
assertNotNull(resource);
Files.write(resource.toPath(), new byte[0], StandardOpenOption.TRUNCATE_EXISTING);
}
}
@Nested
public class SqlLoggerFilteringUtilTests {
@InjectMocks
private SqlLoggerFilteringUtil myTestedUtil;
@Spy
private SqlLoggerFilteringUtil mySpiedUtil;
@RegisterExtension
public LogbackCaptureTestExtension myLogCapture = new LogbackCaptureTestExtension("org.hibernate.SQL");
private ch.qos.logback.classic.Logger myHibernateLogger;
@BeforeEach
void setUp() {
mySpiedUtil = spy(myTestedUtil);
SqlLoggerFilteringUtil.setFilterUpdateIntervalSecs(1);
LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
myHibernateLogger = loggerContext.getLogger(SqlLoggerFilteringUtil.class);
myHibernateLogger = loggerContext.getLogger("org.hibernate.SQL");
}
@Nested
public class MustLogTests {
@BeforeEach
void setUp() throws IOException {
lenient().doNothing().when(mySpiedUtil).refreshFilters(any());
myHibernateLogger.setLevel(Level.DEBUG);
}
@Test
void whenNoFilterMatches_mustReturnTrue() {
boolean result = mySpiedUtil.allowLog("sql statement 1 with params a , BBB and ccccc");
assertTrue(result);
}
@Test
void whenStartsWithFilterMatches_mustReturnFalse() {
setFilter("sw: sql statement 1");
boolean result = mySpiedUtil.allowLog("sql statement 1 with params a , BBB and ccccc");
assertFalse(result);
}
@Test
void whenFragmentFilterMatches_mustReturnFalse() {
setFilter("frag: with params a, BBB");
boolean result = mySpiedUtil.allowLog("sql statement 1 with params a, BBB and ccccc");
assertFalse(result);
}
private void setFilter(String theFilterDefinition) {
mySpiedUtil.getSqlLoggerFilters().forEach(f -> f.evaluateFilterLine(theFilterDefinition));
}
}
@Nested
public class ExecutorActivationTests {
@AfterEach
void tearDown() {
// shuts down executor if it was initialized
myHibernateLogger.setLevel(Level.INFO);
waitForRefreshCycle();
assertExecutorState(false);
}
@Test
void testRefreshIsNotExecutedUntilMustLogIsCalled() {
myHibernateLogger.setLevel(Level.INFO);
assertExecutorState(false);
}
@Test
void testRefreshIsExecutedWhenMustLogIsCalled() {
myHibernateLogger.setLevel(Level.DEBUG);
mySpiedUtil.allowLog("sql statement");
assertExecutorState(true);
}
@Test
void testRefreshIsStoppedWhenDebugLogIsStopped() {
// refresh executor is stopped until mustLog is invoked
assertExecutorState(false);
// refresh executor starts once mustLog is called with logger in DEBUG mode
myHibernateLogger.setLevel(Level.DEBUG);
mySpiedUtil.allowLog("sql statement");
assertExecutorState(true);
// refresh executor stops once mustLog is called when logger DEBUG mode is reset
myHibernateLogger.setLevel(Level.INFO);
// wait for refresh cycle, which should stop executor
waitForRefreshCycle();
// validate it stopped refreshing
assertExecutorState(false);
// until log is back to DEBUG
myHibernateLogger.setLevel(Level.DEBUG);
// executor not reactivated until debug log is called
mySpiedUtil.allowLog("sql statement");
waitForRefreshCycle();
// validate started refreshing again
assertExecutorState(true);
}
}
private void waitForRefreshCycle() {
try {
Thread.sleep(((long) FILTER_UPDATE_INTERVAL_SECS + 1) * 1_000);
} catch (InterruptedException ignored) {
// ignore
}
}
private void assertExecutorState(boolean isActive) {
int beforeRefreshCount = SqlLoggerFilteringUtil.getRefreshCountForTests();
if (isActive) {
await().atMost(Duration.of(FILTER_UPDATE_INTERVAL_SECS + 1, ChronoUnit.SECONDS))
.until(() -> beforeRefreshCount < SqlLoggerFilteringUtil.getRefreshCountForTests());
} else {
waitForRefreshCycle();
int newCount = SqlLoggerFilteringUtil.getRefreshCountForTests();
assertEquals(beforeRefreshCount, newCount);
}
}
}
}
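Based on the filter prefixes exercised above (sw:, eq: and frag:), a filter file read from FILTER_FILE_PATH might look like the sketch below. This is illustrative only: the SQL fragments are placeholders, and the prefix semantics (starts-with, equals, contains-fragment) are inferred from the test names rather than stated in this diff.
class SqlFilterFileExample {
	// One filter per non-comment line; statements matching any filter are suppressed
	// (allowLog(...) returns false for them).
	static final String EXAMPLE_FILTER_FILE = """
			# used by FilteringSqlLoggerTest
			sw: select * from HFJ_RESOURCE
			eq: select 1
			frag: FROM HFJ_RES_LINK
			""";
}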

View File

@ -0,0 +1,82 @@
package ca.uhn.fhir.jpa.logging;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.List;
import java.util.stream.Stream;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;
@ExtendWith(MockitoExtension.class)
class SqlLoggerStackTraceFilterTest {
private final SqlLoggerStackTraceFilter myTested = new SqlLoggerStackTraceFilter();
private SqlLoggerStackTraceFilter mySpiedTested;
@BeforeEach
void setUp() {
mySpiedTested = spy(myTested);
}
@Test
void noMatch() {
mySpiedTested.setFilterDefinitions(List.of(
"ca.cdr.clustermgr.svc.impl.DatabaseBackedHttpSessionStorageSvcImpl",
"ca.uhn.fhir.jpa.cache.CdrResourceChangeListenerCache",
"ca.cdr.clustermgr.svc.impl.ModuleStatusControllerSvcImpl",
"ca.cdr.clustermgr.svc.impl.StatsHeartbeatSvcImpl"
));
Stream<StackTraceElement> stackTraceStream = Stream.of(
stElement("ca.cdr.api.camel.ICamelRouteEndpointSvc"),
stElement("ca.cdr.api.transactionlog.ITransactionLogFetchingSvc"),
stElement("ca.cdr.cdaxv2.impl.CdaDocumentSvcImpl"),
stElement("ca.cdr.endpoint.cdshooks.svc.prefetch.CdsHooksDaoAuthorizationSvc"),
stElement("ca.cdr.endpoint.hl7v2.in.converter.Hl7V2InboundConverter")
);
doReturn(stackTraceStream).when(mySpiedTested).getStackTraceStream();
// execute
boolean matched = mySpiedTested.match("not-used");
assertFalse(matched);
}
@Test
void match() {
mySpiedTested.setFilterDefinitions(List.of(
"ca.cdr.clustermgr.svc.impl.DatabaseBackedHttpSessionStorageSvcImpl",
"ca.uhn.fhir.jpa.cache.CdrResourceChangeListenerCache",
"ca.cdr.clustermgr.svc.impl.ModuleStatusControllerSvcImpl",
"ca.cdr.clustermgr.svc.impl.StatsHeartbeatSvcImpl"
));
Stream<StackTraceElement> stackTraceStream = Stream.of(
stElement("ca.uhn.fhir.jpa.cache.CdrResourceChangeListenerCache"),
stElement("ca.cdr.api.camel.ICamelRouteEndpointSvc"),
stElement("ca.cdr.api.transactionlog.ITransactionLogFetchingSvc"),
stElement("ca.cdr.api.transactionlog.ITransactionLogFetchingSvc"),
stElement("ca.cdr.cdaxv2.impl.CdaDocumentSvcImpl"),
stElement("ca.cdr.endpoint.cdshooks.svc.prefetch.CdsHooksDaoAuthorizationSvc"),
stElement("ca.cdr.endpoint.hl7v2.in.converter.Hl7V2InboundConverter"),
stElement("ca.cdr.clustermgr.svc.impl.StatsHeartbeatSvcImpl") // <== match
);
doReturn(stackTraceStream).when(mySpiedTested).getStackTraceStream();
// execute
boolean matched = mySpiedTested.match("not-used");
assertTrue(matched);
}
private StackTraceElement stElement(String theClassName) {
return new StackTraceElement(theClassName, "", null, 0);
}
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -249,6 +249,24 @@ public abstract class RequestDetails {
public abstract List<String> getHeaders(String name);
/**
* Adds a new header
*
* @param theName The header name
* @param theValue The header value
* @since 7.2.0
*/
public abstract void addHeader(String theName, String theValue);
/**
* Replaces any existing header(s) with the given name using a List of new header values
*
* @param theName The header name
* @param theValue The list of header values
* @since 7.2.0
*/
public abstract void setHeaders(String theName, List<String> theValue);
public IIdType getId() {
return myId;
}
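A minimal usage sketch for the two mutators added above, not part of this change set: an interceptor that stamps headers onto the incoming request. The class name, pointcut choice and header names are illustrative assumptions; addHeader() appends to any existing values, while setHeaders() replaces them.
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.server.RequestDetails;

import java.util.List;

@Interceptor
public class HeaderStampingInterceptor {

	@Hook(Pointcut.SERVER_INCOMING_REQUEST_POST_PROCESSED)
	public void stampHeaders(RequestDetails theRequestDetails) {
		// Appends a value, keeping anything already present for this header
		theRequestDetails.addHeader("X-Request-Tag", "narrowed");

		// Replaces every existing value for the header with the supplied list
		theRequestDetails.setHeaders("Cache-Control", List.of("no-cache", "no-store"));
	}
}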

View File

@ -33,9 +33,9 @@ import ca.uhn.fhir.rest.server.IPagingProvider;
import ca.uhn.fhir.rest.server.IRestfulServerDefaults;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.MultimapBuilder;
import java.io.IOException;
import java.io.InputStream;
@ -124,13 +124,27 @@ public class SystemRequestDetails extends RequestDetails {
return headers.get(name);
}
@Override
public void addHeader(String theName, String theValue) {
if (myHeaders == null) {
myHeaders = ArrayListMultimap.create();
}
initHeaderMap();
myHeaders.put(theName, theValue);
}
@Override
public void setHeaders(String theName, List<String> theValues) {
initHeaderMap();
myHeaders.putAll(theName, theValues);
}
private void initHeaderMap() {
if (myHeaders == null) {
// Make sure we are case-insensitive on keys
myHeaders = MultimapBuilder.treeKeys(String.CASE_INSENSITIVE_ORDER)
.arrayListValues()
.build();
}
}
@Override
public Object getAttribute(String theAttributeName) {
return null;
@ -145,7 +159,7 @@ public class SystemRequestDetails extends RequestDetails {
}
@Override
public Reader getReader() throws IOException {
public Reader getReader() {
return null;
}
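The lazily initialized header map above keys headers case-insensitively via Guava; the following stand-alone sketch (illustrative only, no HAPI types involved) shows the behaviour that MultimapBuilder.treeKeys(String.CASE_INSENSITIVE_ORDER) provides:
import com.google.common.collect.ListMultimap;
import com.google.common.collect.MultimapBuilder;

public class CaseInsensitiveHeadersDemo {
	public static void main(String[] args) {
		ListMultimap<String, String> headers = MultimapBuilder.treeKeys(String.CASE_INSENSITIVE_ORDER)
				.arrayListValues()
				.build();
		headers.put("Content-Type", "application/fhir+json");
		headers.put("content-type", "application/fhir+xml");
		// Differently cased keys collapse onto one entry, and lookups ignore case
		System.out.println(headers.get("CONTENT-TYPE")); // [application/fhir+json, application/fhir+xml]
	}
}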

View File

@ -0,0 +1,25 @@
/*-
* #%L
* HAPI FHIR - Server Framework
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.rest.server.interceptor;
/**
* Request object for {@link ca.uhn.fhir.interceptor.api.Pointcut#STORAGE_CONDITIONAL_URL_PREPROCESS}
*/
public class ConditionalUrlRequest {}

View File

@ -0,0 +1,25 @@
/*-
* #%L
* HAPI FHIR - Server Framework
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.rest.server.interceptor;
/**
* Request object for {@link ca.uhn.fhir.interceptor.api.Pointcut#STORAGE_CONDITIONAL_URL_PREPROCESS}
*/
public class ConditionalUrlResponse {}

View File

@ -30,21 +30,21 @@ import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.server.exceptions.AuthenticationException;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import ca.uhn.fhir.rest.server.method.BaseMethodBinding;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletSubRequestDetails;
import ca.uhn.fhir.rest.server.util.ServletRequestUtil;
import ca.uhn.fhir.util.BundleUtil;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.util.ValidateUtil;
import ca.uhn.fhir.util.bundle.ModifiableBundleEntry;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.MultimapBuilder;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
@ -65,6 +65,8 @@ import java.util.Set;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
* This interceptor can be used to automatically narrow the scope of searches in order to
* automatically restrict the searches to specific compartments.
@ -88,12 +90,26 @@ import java.util.stream.Collectors;
*
* @see AuthorizationInterceptor
*/
@SuppressWarnings("JavadocLinkAsPlainText")
public class SearchNarrowingInterceptor {
public static final String POST_FILTERING_LIST_ATTRIBUTE_NAME =
SearchNarrowingInterceptor.class.getName() + "_POST_FILTERING_LIST";
private IValidationSupport myValidationSupport;
private int myPostFilterLargeValueSetThreshold = 500;
private boolean myNarrowConditionalUrls;
/**
* If set to {@literal true} (default is {@literal false}), conditional URLs such
* as the If-None-Exist header used for Conditional Create operations will
* also be narrowed.
*
* @param theNarrowConditionalUrls Should we narrow conditional URLs in requests
* @since 7.2.0
*/
public void setNarrowConditionalUrls(boolean theNarrowConditionalUrls) {
myNarrowConditionalUrls = theNarrowConditionalUrls;
}
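	// Configuration sketch only (not part of this change set): how a server might opt in to the
	// new conditional-URL narrowing. "MyPatientSearchNarrowingInterceptor" is a hypothetical
	// subclass (a matching sketch follows buildAuthorizedList() further down).
	private static void configureSearchNarrowingExample(ca.uhn.fhir.rest.server.RestfulServer theServer) {
		SearchNarrowingInterceptor narrowing = new MyPatientSearchNarrowingInterceptor();
		narrowing.setNarrowConditionalUrls(true); // new in 7.2.0; defaults to false
		theServer.registerInterceptor(narrowing);
	}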
/**
* Supplies a threshold over which any ValueSet-based rules will be applied by
@ -126,6 +142,68 @@ public class SearchNarrowingInterceptor {
return this;
}
/**
* This method handles narrowing for FHIR search/create/update/patch operations.
*
* @see #hookIncomingRequestPreHandled(ServletRequestDetails, HttpServletRequest, HttpServletResponse) This method narrows FHIR transaction bundles
*/
@SuppressWarnings("EnumSwitchStatementWhichMissesCases")
@Hook(Pointcut.SERVER_INCOMING_REQUEST_POST_PROCESSED)
public void hookIncomingRequestPostProcessed(
RequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse)
throws AuthenticationException {
// We don't support this operation type yet
RestOperationTypeEnum restOperationType = theRequestDetails.getRestOperationType();
Validate.isTrue(restOperationType != RestOperationTypeEnum.SEARCH_SYSTEM);
switch (restOperationType) {
case EXTENDED_OPERATION_INSTANCE:
case EXTENDED_OPERATION_TYPE: {
if ("$everything".equals(theRequestDetails.getOperation())) {
narrowEverythingOperation(theRequestDetails);
}
break;
}
case SEARCH_TYPE:
narrowTypeSearch(theRequestDetails);
break;
case CREATE:
narrowIfNoneExistHeader(theRequestDetails);
break;
case DELETE:
case UPDATE:
case PATCH:
narrowRequestUrl(theRequestDetails, restOperationType);
break;
}
}
/**
* This method narrows FHIR transaction operations (because this pointcut
* is called after the request body is parsed).
*
* @see #hookIncomingRequestPostProcessed(RequestDetails, HttpServletRequest, HttpServletResponse) This method narrows FHIR search/create/update/etc operations
*/
@SuppressWarnings("EnumSwitchStatementWhichMissesCases")
@Hook(Pointcut.SERVER_INCOMING_REQUEST_PRE_HANDLED)
public void hookIncomingRequestPreHandled(
ServletRequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse)
throws AuthenticationException {
if (theRequestDetails.getRestOperationType() != null) {
switch (theRequestDetails.getRestOperationType()) {
case TRANSACTION:
case BATCH:
IBaseBundle bundle = (IBaseBundle) theRequestDetails.getResource();
FhirContext ctx = theRequestDetails.getFhirContext();
BundleEntryUrlProcessor processor = new BundleEntryUrlProcessor(ctx, theRequestDetails);
BundleUtil.processEntries(ctx, bundle, processor);
break;
}
}
}
/**
* Subclasses should override this method to supply the set of compartments that
* the user making the request should actually have access to.
@ -143,54 +221,214 @@ public class SearchNarrowingInterceptor {
return null;
}
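	// Sketch only (not part of this change set): a subclass supplying the authorized compartments,
	// as the javadoc above describes. The hard-coded "Patient/123" is a placeholder for whatever
	// compartment(s) the authenticated user is actually entitled to see; addCompartments(...) is
	// assumed from the existing AuthorizedList builder API.
	public static class MyPatientSearchNarrowingInterceptor extends SearchNarrowingInterceptor {
		@Override
		protected AuthorizedList buildAuthorizedList(RequestDetails theRequestDetails) {
			// A real implementation would resolve the user from the request (e.g. an OAuth2 token)
			return new AuthorizedList().addCompartments("Patient/123");
		}
	}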
@Hook(Pointcut.SERVER_INCOMING_REQUEST_POST_PROCESSED)
public boolean hookIncomingRequestPostProcessed(
RequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse)
throws AuthenticationException {
// We don't support this operation type yet
Validate.isTrue(theRequestDetails.getRestOperationType() != RestOperationTypeEnum.SEARCH_SYSTEM);
/**
* For the $everything operation, we only do code narrowing, and in this case
* we don't actually make any changes to the request. All we do here is
* ensure that an attribute is added to the request, which is picked up later
* by {@link SearchNarrowingConsentService}.
*/
private void narrowEverythingOperation(RequestDetails theRequestDetails) {
AuthorizedList authorizedList = buildAuthorizedList(theRequestDetails);
if (authorizedList != null) {
buildParameterListForAuthorizedCodes(
theRequestDetails, theRequestDetails.getResourceName(), authorizedList);
}
}
private void narrowIfNoneExistHeader(RequestDetails theRequestDetails) {
if (myNarrowConditionalUrls) {
String ifNoneExist = theRequestDetails.getHeader(Constants.HEADER_IF_NONE_EXIST);
if (isNotBlank(ifNoneExist)) {
String newConditionalUrl = narrowConditionalUrlForCompartmentOnly(
theRequestDetails, ifNoneExist, true, theRequestDetails.getResourceName());
if (newConditionalUrl != null) {
theRequestDetails.setHeaders(Constants.HEADER_IF_NONE_EXIST, List.of(newConditionalUrl));
}
}
}
}
private void narrowRequestUrl(RequestDetails theRequestDetails, RestOperationTypeEnum theRestOperationType) {
if (myNarrowConditionalUrls) {
String conditionalUrl = theRequestDetails.getConditionalUrl(theRestOperationType);
if (isNotBlank(conditionalUrl)) {
String newConditionalUrl = narrowConditionalUrlForCompartmentOnly(
theRequestDetails, conditionalUrl, false, theRequestDetails.getResourceName());
if (newConditionalUrl != null) {
String newCompleteUrl = theRequestDetails
.getCompleteUrl()
.substring(
0,
theRequestDetails.getCompleteUrl().indexOf('?') + 1)
+ newConditionalUrl;
theRequestDetails.setCompleteUrl(newCompleteUrl);
}
}
}
}
/**
* Does not narrow codes
*/
@Nullable
private String narrowConditionalUrlForCompartmentOnly(
RequestDetails theRequestDetails,
@Nonnull String theConditionalUrl,
boolean theIncludeUpToQuestionMarkInResponse,
String theResourceName) {
AuthorizedList authorizedList = buildAuthorizedList(theRequestDetails);
return narrowConditionalUrl(
theRequestDetails,
theConditionalUrl,
theIncludeUpToQuestionMarkInResponse,
theResourceName,
false,
authorizedList);
}
@Nullable
private String narrowConditionalUrl(
RequestDetails theRequestDetails,
@Nonnull String theConditionalUrl,
boolean theIncludeUpToQuestionMarkInResponse,
String theResourceName,
boolean theNarrowCodes,
AuthorizedList theAuthorizedList) {
if (theAuthorizedList == null) {
return null;
}
ListMultimap<String, String> parametersToAdd =
buildParameterListForAuthorizedCompartment(theRequestDetails, theResourceName, theAuthorizedList);
if (theNarrowCodes) {
ListMultimap<String, String> parametersToAddForCodes =
buildParameterListForAuthorizedCodes(theRequestDetails, theResourceName, theAuthorizedList);
if (parametersToAdd == null) {
parametersToAdd = parametersToAddForCodes;
} else if (parametersToAddForCodes != null) {
parametersToAdd.putAll(parametersToAddForCodes);
}
}
String newConditionalUrl = null;
if (parametersToAdd != null) {
String query = theConditionalUrl;
int qMarkIndex = theConditionalUrl.indexOf('?');
if (qMarkIndex != -1) {
query = theConditionalUrl.substring(qMarkIndex + 1);
}
Map<String, String[]> inputParams = UrlUtil.parseQueryString(query);
Map<String, String[]> newParameters = applyCompartmentParameters(parametersToAdd, true, inputParams);
StringBuilder newUrl = new StringBuilder();
if (theIncludeUpToQuestionMarkInResponse) {
newUrl.append(qMarkIndex != -1 ? theConditionalUrl.substring(0, qMarkIndex + 1) : "?");
}
boolean first = true;
for (Map.Entry<String, String[]> nextEntry : newParameters.entrySet()) {
for (String nextValue : nextEntry.getValue()) {
if (isNotBlank(nextValue)) {
if (first) {
first = false;
} else {
newUrl.append("&");
}
newUrl.append(UrlUtil.escapeUrlParam(nextEntry.getKey()));
newUrl.append("=");
newUrl.append(UrlUtil.escapeUrlParam(nextValue));
}
}
}
newConditionalUrl = newUrl.toString();
}
return newConditionalUrl;
}
private void narrowTypeSearch(RequestDetails theRequestDetails) {
// N.B. do not add code above this for filtering; this should only ever occur on search.
if (shouldSkipNarrowing(theRequestDetails)) {
return true;
return;
}
AuthorizedList authorizedList = buildAuthorizedList(theRequestDetails);
if (authorizedList == null) {
return true;
return;
}
// Add rules to request so that the SearchNarrowingConsentService can pick them up
String resourceName = theRequestDetails.getResourceName();
// Narrow request URL for compartments
ListMultimap<String, String> parametersToAdd =
buildParameterListForAuthorizedCompartment(theRequestDetails, resourceName, authorizedList);
if (parametersToAdd != null) {
applyParametersToRequestDetails(theRequestDetails, parametersToAdd, true);
}
// Narrow request URL for codes - Add rules to request so that the SearchNarrowingConsentService can pick them
// up
ListMultimap<String, String> parameterToOrValues =
buildParameterListForAuthorizedCodes(theRequestDetails, resourceName, authorizedList);
if (parameterToOrValues != null) {
applyParametersToRequestDetails(theRequestDetails, parameterToOrValues, false);
}
}
@Nullable
private ListMultimap<String, String> buildParameterListForAuthorizedCodes(
RequestDetails theRequestDetails, String resourceName, AuthorizedList authorizedList) {
List<AllowedCodeInValueSet> postFilteringList = getPostFilteringList(theRequestDetails);
if (authorizedList.getAllowedCodeInValueSets() != null) {
postFilteringList.addAll(authorizedList.getAllowedCodeInValueSets());
}
List<AllowedCodeInValueSet> allowedCodeInValueSet = authorizedList.getAllowedCodeInValueSets();
ListMultimap<String, String> parameterToOrValues = null;
if (allowedCodeInValueSet != null) {
FhirContext context = theRequestDetails.getServer().getFhirContext();
RuntimeResourceDefinition resourceDef = context.getResourceDefinition(resourceName);
parameterToOrValues = processAllowedCodes(resourceDef, allowedCodeInValueSet);
}
return parameterToOrValues;
}
@Nullable
private ListMultimap<String, String> buildParameterListForAuthorizedCompartment(
RequestDetails theRequestDetails, String theResourceName, @Nullable AuthorizedList theAuthorizedList) {
if (theAuthorizedList == null) {
return null;
}
FhirContext ctx = theRequestDetails.getServer().getFhirContext();
RuntimeResourceDefinition resDef = ctx.getResourceDefinition(theRequestDetails.getResourceName());
RuntimeResourceDefinition resDef = ctx.getResourceDefinition(theResourceName);
/*
* Create a map of search parameter values that need to be added to the
* given request
*/
Collection<String> compartments = authorizedList.getAllowedCompartments();
Collection<String> compartments = theAuthorizedList.getAllowedCompartments();
ListMultimap<String, String> parametersToAdd = null;
if (compartments != null) {
Map<String, List<String>> parameterToOrValues =
processResourcesOrCompartments(theRequestDetails, resDef, compartments, true);
applyParametersToRequestDetails(theRequestDetails, parameterToOrValues, true);
}
Collection<String> resources = authorizedList.getAllowedInstances();
if (resources != null) {
Map<String, List<String>> parameterToOrValues =
processResourcesOrCompartments(theRequestDetails, resDef, resources, false);
applyParametersToRequestDetails(theRequestDetails, parameterToOrValues, true);
}
List<AllowedCodeInValueSet> allowedCodeInValueSet = authorizedList.getAllowedCodeInValueSets();
if (allowedCodeInValueSet != null) {
Map<String, List<String>> parameterToOrValues = processAllowedCodes(resDef, allowedCodeInValueSet);
applyParametersToRequestDetails(theRequestDetails, parameterToOrValues, false);
parametersToAdd =
processResourcesOrCompartments(theRequestDetails, resDef, compartments, true, theResourceName);
}
return true;
Collection<String> resources = theAuthorizedList.getAllowedInstances();
if (resources != null) {
ListMultimap<String, String> parameterToOrValues =
processResourcesOrCompartments(theRequestDetails, resDef, resources, false, theResourceName);
if (parametersToAdd == null) {
parametersToAdd = parameterToOrValues;
} else if (parameterToOrValues != null) {
parametersToAdd.putAll(parameterToOrValues);
}
}
return parametersToAdd;
}
/**
@ -201,102 +439,26 @@ public class SearchNarrowingInterceptor {
&& !"$everything".equalsIgnoreCase(theRequestDetails.getOperation());
}
@Hook(Pointcut.SERVER_INCOMING_REQUEST_PRE_HANDLED)
public void hookIncomingRequestPreHandled(
ServletRequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse)
throws AuthenticationException {
if (theRequestDetails.getRestOperationType() != RestOperationTypeEnum.TRANSACTION) {
return;
}
IBaseBundle bundle = (IBaseBundle) theRequestDetails.getResource();
FhirContext ctx = theRequestDetails.getFhirContext();
BundleEntryUrlProcessor processor =
new BundleEntryUrlProcessor(ctx, theRequestDetails, theRequest, theResponse);
BundleUtil.processEntries(ctx, bundle, processor);
}
private void applyParametersToRequestDetails(
RequestDetails theRequestDetails,
@Nullable Map<String, List<String>> theParameterToOrValues,
@Nullable ListMultimap<String, String> theParameterToOrValues,
boolean thePatientIdMode) {
Map<String, String[]> inputParameters = theRequestDetails.getParameters();
if (theParameterToOrValues != null) {
Map<String, String[]> newParameters = new HashMap<>(theRequestDetails.getParameters());
for (Map.Entry<String, List<String>> nextEntry : theParameterToOrValues.entrySet()) {
String nextParamName = nextEntry.getKey();
List<String> nextAllowedValues = nextEntry.getValue();
if (!newParameters.containsKey(nextParamName)) {
/*
* If we don't already have a parameter of the given type, add one
*/
String nextValuesJoined = ParameterUtil.escapeAndJoinOrList(nextAllowedValues);
String[] paramValues = {nextValuesJoined};
newParameters.put(nextParamName, paramValues);
} else {
/*
* If the client explicitly requested the given parameter already, we'll
* just update the request to have the intersection of the values that the client
* requested, and the values that the user is allowed to see
*/
String[] existingValues = newParameters.get(nextParamName);
if (thePatientIdMode) {
List<String> nextAllowedValueIds = nextAllowedValues.stream()
.map(t -> t.lastIndexOf("/") > -1 ? t.substring(t.lastIndexOf("/") + 1) : t)
.collect(Collectors.toList());
boolean restrictedExistingList = false;
for (int i = 0; i < existingValues.length; i++) {
String nextExistingValue = existingValues[i];
List<String> nextRequestedValues =
QualifiedParamList.splitQueryStringByCommasIgnoreEscape(null, nextExistingValue);
List<String> nextPermittedValues = ListUtils.union(
ListUtils.intersection(nextRequestedValues, nextAllowedValues),
ListUtils.intersection(nextRequestedValues, nextAllowedValueIds));
if (nextPermittedValues.size() > 0) {
restrictedExistingList = true;
existingValues[i] = ParameterUtil.escapeAndJoinOrList(nextPermittedValues);
}
}
/*
* If none of the values that were requested by the client overlap at all
* with the values that the user is allowed to see, the client shouldn't
* get *any* results back. We return an error code indicating that the
* caller is forbidden from accessing the resources they requested.
*/
if (!restrictedExistingList) {
throw new ForbiddenOperationException(Msg.code(2026) + "Value not permitted for parameter "
+ UrlUtil.escapeUrlParam(nextParamName));
}
} else {
int existingValuesCount = existingValues.length;
String[] newValues =
Arrays.copyOf(existingValues, existingValuesCount + nextAllowedValues.size());
for (int i = 0; i < nextAllowedValues.size(); i++) {
newValues[existingValuesCount + i] = nextAllowedValues.get(i);
}
newParameters.put(nextParamName, newValues);
}
}
}
Map<String, String[]> newParameters =
applyCompartmentParameters(theParameterToOrValues, thePatientIdMode, inputParameters);
theRequestDetails.setParameters(newParameters);
}
}
@Nullable
private Map<String, List<String>> processResourcesOrCompartments(
private ListMultimap<String, String> processResourcesOrCompartments(
RequestDetails theRequestDetails,
RuntimeResourceDefinition theResDef,
Collection<String> theResourcesOrCompartments,
boolean theAreCompartments) {
Map<String, List<String>> retVal = null;
boolean theAreCompartments,
String theResourceName) {
ListMultimap<String, String> retVal = null;
String lastCompartmentName = null;
String lastSearchParamName = null;
@ -315,7 +477,7 @@ public class SearchNarrowingInterceptor {
} else {
if (compartmentName.equalsIgnoreCase(theRequestDetails.getResourceName())) {
if (compartmentName.equalsIgnoreCase(theResourceName)) {
searchParamName = "_id";
@ -331,10 +493,9 @@ public class SearchNarrowingInterceptor {
if (searchParamName != null) {
if (retVal == null) {
retVal = new HashMap<>();
retVal = MultimapBuilder.hashKeys().arrayListValues().build();
}
List<String> orValues = retVal.computeIfAbsent(searchParamName, t -> new ArrayList<>());
orValues.add(nextCompartment);
retVal.put(searchParamName, nextCompartment);
}
}
@ -342,9 +503,9 @@ public class SearchNarrowingInterceptor {
}
@Nullable
private Map<String, List<String>> processAllowedCodes(
private ListMultimap<String, String> processAllowedCodes(
RuntimeResourceDefinition theResDef, List<AllowedCodeInValueSet> theAllowedCodeInValueSet) {
Map<String, List<String>> retVal = null;
ListMultimap<String, String> retVal = null;
for (AllowedCodeInValueSet next : theAllowedCodeInValueSet) {
String resourceName = next.getResourceName();
@ -371,9 +532,9 @@ public class SearchNarrowingInterceptor {
}
if (retVal == null) {
retVal = new HashMap<>();
retVal = MultimapBuilder.hashKeys().arrayListValues().build();
}
retVal.computeIfAbsent(paramName, k -> new ArrayList<>()).add(valueSetUrl);
retVal.put(paramName, valueSetUrl);
}
return retVal;
@ -408,7 +569,7 @@ public class SearchNarrowingInterceptor {
Set<String> queryParameters = theRequestDetails.getParameters().keySet();
List<RuntimeSearchParam> searchParams = theResDef.getSearchParamsForCompartmentName(compartmentName);
if (searchParams.size() > 0) {
if (!searchParams.isEmpty()) {
// Resources like Observation have several fields that add the resource to
// the compartment. In the case of Observation, it's subject, patient and performer.
@ -467,41 +628,75 @@ public class SearchNarrowingInterceptor {
}
}
private class BundleEntryUrlProcessor implements Consumer<ModifiableBundleEntry> {
private final FhirContext myFhirContext;
private final ServletRequestDetails myRequestDetails;
private final HttpServletRequest myRequest;
private final HttpServletResponse myResponse;
@Nonnull
private static Map<String, String[]> applyCompartmentParameters(
@Nonnull ListMultimap<String, String> theParameterToOrValues,
boolean thePatientIdMode,
Map<String, String[]> theInputParameters) {
Map<String, String[]> newParameters = new HashMap<>(theInputParameters);
for (String nextParamName : theParameterToOrValues.keySet()) {
List<String> nextAllowedValues = theParameterToOrValues.get(nextParamName);
public BundleEntryUrlProcessor(
FhirContext theFhirContext,
ServletRequestDetails theRequestDetails,
HttpServletRequest theRequest,
HttpServletResponse theResponse) {
myFhirContext = theFhirContext;
myRequestDetails = theRequestDetails;
myRequest = theRequest;
myResponse = theResponse;
}
@Override
public void accept(ModifiableBundleEntry theModifiableBundleEntry) {
ArrayListMultimap<String, String> paramValues = ArrayListMultimap.create();
String url = theModifiableBundleEntry.getRequestUrl();
ServletSubRequestDetails subServletRequestDetails =
ServletRequestUtil.getServletSubRequestDetails(myRequestDetails, url, paramValues);
BaseMethodBinding method =
subServletRequestDetails.getServer().determineResourceMethod(subServletRequestDetails, url);
RestOperationTypeEnum restOperationType = method.getRestOperationType();
subServletRequestDetails.setRestOperationType(restOperationType);
hookIncomingRequestPostProcessed(subServletRequestDetails, myRequest, myResponse);
theModifiableBundleEntry.setRequestUrl(
myFhirContext, ServletRequestUtil.extractUrl(subServletRequestDetails));
if (!newParameters.containsKey(nextParamName)) {
/*
* If we don't already have a parameter of the given type, add one
*/
String nextValuesJoined = ParameterUtil.escapeAndJoinOrList(nextAllowedValues);
String[] paramValues = {nextValuesJoined};
newParameters.put(nextParamName, paramValues);
} else {
/*
* If the client explicitly requested the given parameter already, we'll
* just update the request to have the intersection of the values that the client
* requested, and the values that the user is allowed to see
*/
String[] existingValues = newParameters.get(nextParamName);
if (thePatientIdMode) {
List<String> nextAllowedValueIds = nextAllowedValues.stream()
.map(t -> t.lastIndexOf("/") > -1 ? t.substring(t.lastIndexOf("/") + 1) : t)
.collect(Collectors.toList());
boolean restrictedExistingList = false;
for (int i = 0; i < existingValues.length; i++) {
String nextExistingValue = existingValues[i];
List<String> nextRequestedValues =
QualifiedParamList.splitQueryStringByCommasIgnoreEscape(null, nextExistingValue);
List<String> nextPermittedValues = ListUtils.union(
ListUtils.intersection(nextRequestedValues, nextAllowedValues),
ListUtils.intersection(nextRequestedValues, nextAllowedValueIds));
if (!nextPermittedValues.isEmpty()) {
restrictedExistingList = true;
existingValues[i] = ParameterUtil.escapeAndJoinOrList(nextPermittedValues);
}
}
/*
* If none of the values that were requested by the client overlap at all
* with the values that the user is allowed to see, the client shouldn't
* get *any* results back. We return an error code indicating that the
* caller is forbidden from accessing the resources they requested.
*/
if (!restrictedExistingList) {
throw new ForbiddenOperationException(Msg.code(2026) + "Value not permitted for parameter "
+ UrlUtil.escapeUrlParam(nextParamName));
}
} else {
int existingValuesCount = existingValues.length;
String[] newValues = Arrays.copyOf(existingValues, existingValuesCount + nextAllowedValues.size());
for (int i = 0; i < nextAllowedValues.size(); i++) {
newValues[existingValuesCount + i] = nextAllowedValues.get(i);
}
newParameters.put(nextParamName, newValues);
}
}
}
return newParameters;
}
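	// Worked example, illustrative only (not part of this change set): the patient-id-mode
	// intersection above, when the client requested _id=1,2,3 but the caller is only
	// authorized for compartment Patient/2.
	private static void intersectionWorkedExample() {
		List<String> requested = List.of("1", "2", "3");
		List<String> allowedCompartments = List.of("Patient/2");
		List<String> allowedIds = List.of("2"); // compartment values with the type prefix stripped
		List<String> permitted = ListUtils.union(
				ListUtils.intersection(requested, allowedCompartments),
				ListUtils.intersection(requested, allowedIds));
		// permitted == ["2"], so the request is narrowed to _id=2; had it been empty, the code
		// above would throw ForbiddenOperationException (Msg.code 2026) instead.
	}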
static List<AllowedCodeInValueSet> getPostFilteringList(RequestDetails theRequestDetails) {
@ -517,4 +712,82 @@ public class SearchNarrowingInterceptor {
static List<AllowedCodeInValueSet> getPostFilteringListOrNull(RequestDetails theRequestDetails) {
return (List<AllowedCodeInValueSet>) theRequestDetails.getAttribute(POST_FILTERING_LIST_ATTRIBUTE_NAME);
}
private class BundleEntryUrlProcessor implements Consumer<ModifiableBundleEntry> {
private final FhirContext myFhirContext;
private final ServletRequestDetails myRequestDetails;
private final AuthorizedList myAuthorizedList;
public BundleEntryUrlProcessor(FhirContext theFhirContext, ServletRequestDetails theRequestDetails) {
myFhirContext = theFhirContext;
myRequestDetails = theRequestDetails;
myAuthorizedList = buildAuthorizedList(theRequestDetails);
}
@SuppressWarnings("EnumSwitchStatementWhichMissesCases")
@Override
public void accept(ModifiableBundleEntry theModifiableBundleEntry) {
if (myAuthorizedList == null) {
return;
}
RequestTypeEnum method = theModifiableBundleEntry.getRequestMethod();
String requestUrl = theModifiableBundleEntry.getRequestUrl();
if (method != null && isNotBlank(requestUrl)) {
String resourceType = UrlUtil.parseUrl(requestUrl).getResourceType();
switch (method) {
case GET: {
String existingRequestUrl = theModifiableBundleEntry.getRequestUrl();
String newConditionalUrl = narrowConditionalUrl(
myRequestDetails, existingRequestUrl, false, resourceType, true, myAuthorizedList);
if (isNotBlank(newConditionalUrl)) {
newConditionalUrl = resourceType + "?" + newConditionalUrl;
theModifiableBundleEntry.setRequestUrl(myFhirContext, newConditionalUrl);
}
break;
}
case POST: {
if (myNarrowConditionalUrls) {
String existingConditionalUrl = theModifiableBundleEntry.getConditionalUrl();
if (isNotBlank(existingConditionalUrl)) {
String newConditionalUrl = narrowConditionalUrl(
myRequestDetails,
existingConditionalUrl,
true,
resourceType,
false,
myAuthorizedList);
if (isNotBlank(newConditionalUrl)) {
theModifiableBundleEntry.setRequestIfNoneExist(myFhirContext, newConditionalUrl);
}
}
}
break;
}
case PUT:
case DELETE:
case PATCH: {
if (myNarrowConditionalUrls) {
String existingConditionalUrl = theModifiableBundleEntry.getConditionalUrl();
if (isNotBlank(existingConditionalUrl)) {
String newConditionalUrl = narrowConditionalUrl(
myRequestDetails,
existingConditionalUrl,
true,
resourceType,
false,
myAuthorizedList);
if (isNotBlank(newConditionalUrl)) {
theModifiableBundleEntry.setRequestUrl(myFhirContext, newConditionalUrl);
}
}
}
break;
}
}
}
}
}
}

View File

@ -28,6 +28,8 @@ import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.RestfulServerUtils;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.MultimapBuilder;
import jakarta.annotation.Nonnull;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
@ -59,6 +61,7 @@ public class ServletRequestDetails extends RequestDetails {
private RestfulServer myServer;
private HttpServletRequest myServletRequest;
private HttpServletResponse myServletResponse;
private ListMultimap<String, String> myHeaders;
/**
* Constructor for testing only
@ -129,17 +132,63 @@ public class ServletRequestDetails extends RequestDetails {
@Override
public String getHeader(String name) {
// For efficiency, we only make a copy of the request headers if we need to
// modify them
if (myHeaders != null) {
List<String> values = myHeaders.get(name);
if (values.isEmpty()) {
return null;
} else {
return values.get(0);
}
}
return getServletRequest().getHeader(name);
}
@Override
public List<String> getHeaders(String name) {
// For efficiency, we only make a copy of the request headers if we need to
// modify them
if (myHeaders != null) {
return myHeaders.get(name);
}
Enumeration<String> headers = getServletRequest().getHeaders(name);
return headers == null
? Collections.emptyList()
: Collections.list(getServletRequest().getHeaders(name));
}
@Override
public void addHeader(String theName, String theValue) {
initHeaders();
myHeaders.put(theName, theValue);
}
@Override
public void setHeaders(String theName, List<String> theValue) {
initHeaders();
myHeaders.removeAll(theName);
myHeaders.putAll(theName, theValue);
}
private void initHeaders() {
if (myHeaders == null) {
// Make sure we are case-insensitive for header names
myHeaders = MultimapBuilder.treeKeys(String.CASE_INSENSITIVE_ORDER)
.arrayListValues()
.build();
Enumeration<String> headerNames = getServletRequest().getHeaderNames();
while (headerNames.hasMoreElements()) {
String nextName = headerNames.nextElement();
Enumeration<String> values = getServletRequest().getHeaders(nextName);
while (values.hasMoreElements()) {
myHeaders.put(nextName, values.nextElement());
}
}
}
}
@Override
public Object getAttribute(String theAttributeName) {
Validate.notBlank(theAttributeName, "theAttributeName must not be null or blank");

View File

@ -19,34 +19,38 @@
*/
package ca.uhn.fhir.rest.server.servlet;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.MultimapBuilder;
import jakarta.annotation.Nonnull;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class ServletSubRequestDetails extends ServletRequestDetails {
private final ServletRequestDetails myWrap;
private Map<String, List<String>> myHeaders = new HashMap<>();
/**
* Map with case-insensitive keys
*/
private final ListMultimap<String, String> myHeaders = MultimapBuilder.treeKeys(String.CASE_INSENSITIVE_ORDER)
.arrayListValues()
.build();
/**
* Constructor
*
* @param theRequestDetails The parent request details
*/
public ServletSubRequestDetails(ServletRequestDetails theRequestDetails) {
public ServletSubRequestDetails(@Nonnull ServletRequestDetails theRequestDetails) {
super(theRequestDetails.getInterceptorBroadcaster());
myWrap = theRequestDetails;
if (theRequestDetails != null) {
Map<String, List<String>> headers = theRequestDetails.getHeaders();
for (Map.Entry<String, List<String>> next : headers.entrySet()) {
myHeaders.put(next.getKey().toLowerCase(), next.getValue());
}
Map<String, List<String>> headers = theRequestDetails.getHeaders();
for (Map.Entry<String, List<String>> next : headers.entrySet()) {
myHeaders.putAll(next.getKey(), next.getValue());
}
}
@ -60,16 +64,15 @@ public class ServletSubRequestDetails extends ServletRequestDetails {
return myWrap.getServletResponse();
}
@Override
public void addHeader(String theName, String theValue) {
String lowerCase = theName.toLowerCase();
List<String> list = myHeaders.computeIfAbsent(lowerCase, k -> new ArrayList<>());
list.add(theValue);
myHeaders.put(theName, theValue);
}
@Override
public String getHeader(String theName) {
List<String> list = myHeaders.get(theName.toLowerCase());
if (list == null || list.isEmpty()) {
List<String> list = myHeaders.get(theName);
if (list.isEmpty()) {
return null;
}
return list.get(0);
@ -78,7 +81,7 @@ public class ServletSubRequestDetails extends ServletRequestDetails {
@Override
public List<String> getHeaders(String theName) {
List<String> list = myHeaders.get(theName.toLowerCase());
if (list == null || list.isEmpty()) {
if (list.isEmpty()) {
return null;
}
return list;

View File

@ -68,22 +68,4 @@ public class ServletRequestUtil {
theRequestDetails.getServer().populateRequestDetailsFromRequestPath(requestDetails, url);
return requestDetails;
}
public static String extractUrl(ServletRequestDetails theRequestDetails) {
StringBuilder b = new StringBuilder();
for (Map.Entry<String, String[]> next :
theRequestDetails.getParameters().entrySet()) {
for (String nextValue : next.getValue()) {
if (b.length() == 0) {
b.append('?');
} else {
b.append('&');
}
b.append(UrlUtil.escapeUrlParam(next.getKey()));
b.append('=');
b.append(UrlUtil.escapeUrlParam(nextValue));
}
}
return theRequestDetails.getRequestPath() + b.toString();
}
}

View File

@ -1,16 +1,32 @@
package ca.uhn.fhir.rest.server.servlet;
import ca.uhn.fhir.rest.api.Constants;
import org.apache.commons.collections4.iterators.IteratorEnumeration;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.Test;
import jakarta.servlet.http.HttpServletRequest;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.Enumeration;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
class ServletRequestDetailsTest {
@Mock
private HttpServletRequest myHttpServletRequest;
@Test
public void testRewriteHistoryHeader() {
ServletRequestDetails servletRequestDetails = new ServletRequestDetails();
@ -41,4 +57,24 @@ class ServletRequestDetailsTest {
assertFalse(servletRequestDetails.isRewriteHistory());
}
@Test
public void testAddHeader() {
ServletRequestDetails srd = new ServletRequestDetails();
srd.setServletRequest(myHttpServletRequest);
when(myHttpServletRequest.getHeaderNames()).thenReturn(new IteratorEnumeration<>(List.of("Foo").iterator()));
when(myHttpServletRequest.getHeaders(eq("Foo"))).thenReturn(new IteratorEnumeration<>(List.of("Bar", "Baz").iterator()));
srd.addHeader("Name", "Value");
srd.addHeader("Name", "Value2");
// Verify added headers (make sure we're case insensitive)
assertEquals("Value", srd.getHeader("NAME"));
assertThat(srd.getHeaders("name"), Matchers.contains("Value", "Value2"));
// Verify original headers (make sure we're case insensitive)
assertEquals("Bar", srd.getHeader("FOO"));
assertThat(srd.getHeaders("foo"), Matchers.contains("Bar", "Baz"));
}
}

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -21,7 +21,7 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-caching-api</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>hapi-deployable-pom</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.1.3-SNAPSHOT</version>
<version>7.1.5-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

Some files were not shown because too many files have changed in this diff.