diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 0d85e48bba4..5756e093c49 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -59,6 +59,8 @@ stages: module: hapi-fhir-jpaserver-elastic-test-utilities - name: hapi_fhir_jpaserver_ips module: hapi-fhir-jpaserver-ips + - name: hapi_fhir_jpaserver_hfql + module: hapi-fhir-jpaserver-hfql - name: hapi_fhir_jpaserver_mdm module: hapi-fhir-jpaserver-mdm - name: hapi_fhir_jpaserver_model diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml index 3221bdf9e59..b8c8d696619 100644 --- a/hapi-deployable-pom/pom.xml +++ b/hapi-deployable-pom/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml index 1b66411c122..13c3ac675f9 100644 --- a/hapi-fhir-android/pom.xml +++ b/hapi-fhir-android/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml index af145ce4837..ffda99b7c56 100644 --- a/hapi-fhir-base/pom.xml +++ b/hapi-fhir-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/FhirPathExecutionException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/FhirPathExecutionException.java index 65d2a889333..9aa36f2eb70 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/FhirPathExecutionException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/FhirPathExecutionException.java @@ -29,8 +29,8 @@ public class FhirPathExecutionException extends InternalErrorException { private static final long serialVersionUID = 1L; - public FhirPathExecutionException(Throwable theCause) { - super(theCause); + public FhirPathExecutionException(String theMessage, Throwable theCause) { + super(theMessage, 
theCause); } public FhirPathExecutionException(String theMessage) { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/IFhirPath.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/IFhirPath.java index 59d3920b1cc..67fc8ce5ff3 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/IFhirPath.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/IFhirPath.java @@ -37,20 +37,55 @@ public interface IFhirPath { */ List evaluate(IBase theInput, String thePath, Class theReturnType); + /** + * Apply the given FhirPath expression against the given input and return + * all results in a list. Unlike the {@link #evaluate(IBase, String, Class)} method which + * uses a String containing a FHIRPath expression, this method takes a parsed FHIRPath + * expression returned by the {@link #parse(String)} method. This has the advantage + * of avoiding re-parsing expressions if the same expression will be evaluated + * repeatedly. + * + * @param theInput The input object (generally a resource or datatype) + * @param theParsedExpression A parsed FHIRPath expression returned by {@link #parse(String)} + * @param theReturnType The type to return (in order to avoid casting) + * @since 6.8.0 + */ + List evaluate(IBase theInput, IParsedExpression theParsedExpression, Class theReturnType); + /** * Apply the given FhirPath expression against the given input and return * the first match (if any) * - * @param theInput The input object (generally a resource or datatype) - * @param thePath The fluent path expression + * @param theInput The input object (generally a resource or datatype) + * @param thePath The fluent path expression * @param theReturnType The type to return (in order to avoid casting) */ Optional evaluateFirst(IBase theInput, String thePath, Class theReturnType); /** - * Parses the expression and throws an exception if it can not parse correctly + * Apply the given FhirPath expression against the given input and return + * the first match (if any). 
Unlike the {@link #evaluateFirst(IBase, String, Class)} method which + * uses a String containing a FHIRPath expression, this method takes a parsed FHIRPath + * expression returned by the {@link #parse(String)} method. This has the advantage + * of avoiding re-parsing expressions if the same expression will be evaluated + * repeatedly. + * + * @param theInput The input object (generally a resource or datatype) + * @param theParsedExpression A parsed FHIRPath expression returned by {@link #parse(String)} + * @param theReturnType The type to return (in order to avoid casting) + * @since 6.8.0 */ - void parse(String theExpression) throws Exception; + Optional evaluateFirst( + IBase theInput, IParsedExpression theParsedExpression, Class theReturnType); + + /** + * Parses the expression and throws an exception if it can not parse correctly. + * Note that the return type from this method is intended to be a "black box". It can + * be passed back into the {@link #evaluate(IBase, IParsedExpression, Class)} + * method on any FHIRPath instance that comes from the same {@link ca.uhn.fhir.context.FhirContext} + * instance. Any other use will produce unspecified results. + */ + IParsedExpression parse(String theExpression) throws Exception; /** * This method can be used optionally to supply an evaluation context for the @@ -61,4 +96,23 @@ public interface IFhirPath { * @since 6.4.0 */ void setEvaluationContext(@Nonnull IFhirPathEvaluationContext theEvaluationContext); + + /** + * This interface is a marker interface representing a parsed FHIRPath expression. + * Instances of this class will be returned by {@link #parse(String)} and can be + * passed to {@link #evaluate(IBase, IParsedExpression, Class)} and + * {@link #evaluateFirst(IBase, IParsedExpression, Class)}. 
Using a pre-parsed + * FHIRPath expression can perform much faster in some situations where an + * identical expression will be evaluated many times against different targets, + * since the parsing step doesn't need to be repeated. + *

+ * Instances of this interface should be treated as a "black box". There are no + * methods that can be used to manipulate parsed FHIRPath expressions. + *

+ * + * @since 6.8.0 + */ + interface IParsedExpression { + // no methods + } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUntypedWithInput.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUntypedWithInput.java index d1293db973f..abec5ca2ef2 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUntypedWithInput.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUntypedWithInput.java @@ -40,6 +40,9 @@ public interface IOperationUntypedWithInput extends IClientExecutablePatient/NNN/$everything) which return a bundle instead of * a Parameters resource. + *

+ * Passing in {@literal Binary.class} allows any arbitrary response to be returned. Any payload at + * all will be read as raw bytes into a Binary resource. */ IOperationUntypedWithInput returnResourceType(Class theReturnType); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTypeUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTypeUtil.java index b9ef2662d33..031d2c63c31 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTypeUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTypeUtil.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR - Core Library + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.fhir.util; public final class FhirTypeUtil { diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml index f9a2db6aae1..90901b7a0fc 100644 --- a/hapi-fhir-bom/pom.xml +++ b/hapi-fhir-bom/pom.xml @@ -4,7 +4,7 @@ 4.0.0 ca.uhn.hapi.fhir hapi-fhir-bom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT pom HAPI FHIR BOM @@ -12,7 +12,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -84,6 +84,11 @@ hapi-fhir-jpaserver-ips ${project.version} + + ${project.groupId} + hapi-fhir-jpaserver-hfql + ${project.version} + ${project.groupId} hapi-fhir-jpaserver-mdm diff --git a/hapi-fhir-checkstyle/pom.xml b/hapi-fhir-checkstyle/pom.xml index 9ce73f15e78..a6c57313fff 100644 --- a/hapi-fhir-checkstyle/pom.xml +++ b/hapi-fhir-checkstyle/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml index c74f435b37b..b6140c04f43 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml index a99a671b074..c303962d636 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir-cli - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml index ae9231aa860..05af3145b01 100644 --- a/hapi-fhir-cli/pom.xml +++ b/hapi-fhir-cli/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml index fc7e460f746..734e86fed8e 100644 --- a/hapi-fhir-client-okhttp/pom.xml +++ 
b/hapi-fhir-client-okhttp/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml index 346417a3e88..c70aa8e228e 100644 --- a/hapi-fhir-client/pom.xml +++ b/hapi-fhir-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/GenericClient.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/GenericClient.java index 59aa5b066ec..97ab8f757f6 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/GenericClient.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/GenericClient.java @@ -136,6 +136,7 @@ import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBase; +import org.hl7.fhir.instance.model.api.IBaseBinary; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseConformance; import org.hl7.fhir.instance.model.api.IBaseDatatype; @@ -1422,7 +1423,11 @@ public class GenericClient extends BaseClient implements IGenericClient { if (myReturnResourceType != null) { ResourceResponseHandler handler; - handler = new ResourceResponseHandler(myReturnResourceType); + if (IBaseBinary.class.isAssignableFrom(myReturnResourceType)) { + handler = new ResourceOrBinaryResponseHandler(); + } else { + handler = new ResourceResponseHandler(myReturnResourceType); + } Object retVal = invoke(null, handler, invocation); return retVal; } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/HttpBasicAuthInterceptor.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/HttpBasicAuthInterceptor.java index 8b62c883546..3f5d07f83c3 100644 --- 
a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/HttpBasicAuthInterceptor.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/HttpBasicAuthInterceptor.java @@ -19,9 +19,6 @@ */ package ca.uhn.fhir.rest.client.impl; -import ca.uhn.fhir.rest.client.api.IBasicClient; -import ca.uhn.fhir.rest.client.api.IClientInterceptor; -import ca.uhn.fhir.rest.client.api.IGenericClient; import org.apache.http.HttpException; import org.apache.http.HttpRequest; import org.apache.http.HttpRequestInterceptor; @@ -35,9 +32,10 @@ import org.apache.http.protocol.HttpContext; import java.io.IOException; /** - * @deprecated Use {@link ca.uhn.fhir.rest.client.interceptor.BasicAuthInterceptor} instead. Note that BasicAuthInterceptor class is a HAPI client interceptor instead of being a commons-httpclient interceptor, so you register it to your client instance once it's created using {@link IGenericClient#registerInterceptor(IClientInterceptor)} or {@link IBasicClient#registerInterceptor(IClientInterceptor)} instead + * Apache HTTPClient interceptor which adds basic auth + * + * @see ca.uhn.fhir.rest.client.interceptor.BasicAuthInterceptor A HAPI FHIR interceptor that is generally easier to use */ -@Deprecated public class HttpBasicAuthInterceptor implements HttpRequestInterceptor { private String myUsername; diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml index 9a1b7253f50..720f2f90d66 100644 --- a/hapi-fhir-converter/pom.xml +++ b/hapi-fhir-converter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml index 2c0f93b3639..911c7eaea4e 100644 --- a/hapi-fhir-dist/pom.xml +++ b/hapi-fhir-dist/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../pom.xml @@ -233,6 +233,13 @@ provided + + + ca.uhn.hapi.fhir + hapi-fhir-jpaserver-hfql + ${project.version} + + ch.qos.logback 
logback-classic @@ -277,6 +284,7 @@ ${project.basedir}/src/assembly/hapi-fhir-standard-distribution.xml ${project.basedir}/src/assembly/hapi-fhir-android-distribution.xml ${project.basedir}/src/assembly/hapi-fhir-cli.xml + ${project.basedir}/src/assembly/hapi-fhir-hfql-jdbc-driver.xml hapi-fhir-${project.version} diff --git a/hapi-fhir-dist/src/assembly/hapi-fhir-android-distribution.xml b/hapi-fhir-dist/src/assembly/hapi-fhir-android-distribution.xml index ba42adafb18..61579ce0152 100644 --- a/hapi-fhir-dist/src/assembly/hapi-fhir-android-distribution.xml +++ b/hapi-fhir-dist/src/assembly/hapi-fhir-android-distribution.xml @@ -6,7 +6,6 @@ zip - tar.bz2 false diff --git a/hapi-fhir-dist/src/assembly/hapi-fhir-jpaserver-example.xml b/hapi-fhir-dist/src/assembly/hapi-fhir-hfql-jdbc-driver.xml similarity index 77% rename from hapi-fhir-dist/src/assembly/hapi-fhir-jpaserver-example.xml rename to hapi-fhir-dist/src/assembly/hapi-fhir-hfql-jdbc-driver.xml index facf0b4d47d..68d9f3f127f 100644 --- a/hapi-fhir-dist/src/assembly/hapi-fhir-jpaserver-example.xml +++ b/hapi-fhir-dist/src/assembly/hapi-fhir-hfql-jdbc-driver.xml @@ -1,7 +1,7 @@ - jpaserver-example + hfql-jdbc-driver zip @@ -11,11 +11,10 @@ - ${project.basedir}/../hapi-fhir-jpaserver-example + ${project.basedir}/../hapi-fhir-jpaserver-hfql/target/ / - pom.xml - src/** + hapi-fhir-hfql-jdbc-*.jar diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml index 1a0b971a168..ea61e279871 100644 --- a/hapi-fhir-docs/pom.xml +++ b/hapi-fhir-docs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5083-add-fhirpath-evaluate-parsed-method.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5083-add-fhirpath-evaluate-parsed-method.yaml new file mode 100644 index 00000000000..6f31ac1268f --- /dev/null +++ 
b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5083-add-fhirpath-evaluate-parsed-method.yaml @@ -0,0 +1,7 @@ +--- +type: add +issue: 5083 +title: "The IFhirPath evaluator interface now has an additional overload of the + `evaluate` method which takes in a parsed expression returned by the + `parse` method. This can be used to improve performance in cases where the same + expression is being used repeatedly." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5083-add-hfql.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5083-add-hfql.yaml new file mode 100644 index 00000000000..085d4e26127 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5083-add-hfql.yaml @@ -0,0 +1,5 @@ +--- +type: add +issue: 5083 +title: "A new SQL-like evaluator called the HAPI FHIR Query Language (HFQL) + has been added." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5115-add-hfql.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5115-add-hfql.yaml new file mode 100644 index 00000000000..9812c111463 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5115-add-hfql.yaml @@ -0,0 +1,5 @@ +--- +type: add +issue: 5115 +title: "A new experimental SQL-like query syntax called HFQL (HAPI FHIR Query Language) + has been added." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5115-allow-genericclient-arbitrary-binary-response.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5115-allow-genericclient-arbitrary-binary-response.yaml new file mode 100644 index 00000000000..6922ff3f915 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5115-allow-genericclient-arbitrary-binary-response.yaml @@ -0,0 +1,5 @@ +--- +type: add +issue: 5115 +title: "The Generic/Fluent client can now handle arbitrary (ie. 
non-FHIR) responses from $operation + invocation by specifying a response resource type of Binary." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/files.properties b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/files.properties index 4a073716ef5..41de46a1a1e 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/files.properties +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/files.properties @@ -85,6 +85,9 @@ page.server_jpa_partitioning.enabling_in_hapi_fhir=Enabling Partitioning in HAPI section.server_jpa_batch.title=JPA Server: Batch Processing page.server_jpa_batch.introduction=Batch Introduction +section.hfql.title=JPA Server: HFQL (SQL) Driver +page.hfql.hfql=HFQL Module + section.clinical_reasoning.title=Clinical Reasoning page.clinical_reasoning.overview=Clinical Reasoning Overview page.clinical_reasoning.cql=CQL diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/hfql/hfql.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/hfql/hfql.md new file mode 100644 index 00000000000..3a9e006d259 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/hfql/hfql.md @@ -0,0 +1,56 @@ +# FQL Driver: SQL For FHIR Repositories + +

+This is an experimental module. Use with caution. This API is likely to change. +
+ +The HAPI FHIR JPA server can optionally be configured to support SQL-like queries against the FHIR repository. This module is intended for analytical queries. It is not optimized for performance, and may take a long time to produce results. + +# Syntax + +This module uses a proprietary flavour of SQL that is specific to HAPI FHIR. It is similar to the [Firely Query Language](https://simplifier.net/docs/fql), although it also has differences. + +A simple example query is shown below: + +```sql +SELECT + name.family as family, + name.given as given, + birthDate, + identifier.where(system='http://hl7.org/fhir/sid/us-ssn').value as SSN +FROM + Patient +WHERE + active = true +``` + +See [SQL Syntax](https://smilecdr.com/docs/hfql/sql_syntax.html) for details on this syntax. + +# JDBC Driver + +When HFQL is enabled on the server, a JDBC-compatible driver is available. This can be used to query the FHIR server directly from a JDBC compliant database browser. + +This module has been tested with [DBeaver](https://dbeaver.io/), which is a free and excellent database browser. Other JDBC compatible database tools may also work. Note that not all JDBC API methods have been implemented in the driver, so other tools may use methods that have not yet been implemented. Please let us know in the [Google Group](https://groups.google.com/g/hapi-fhir) if you encounter issues or have suggestions. + +The JDBC driver can be downloaded from the [GitHub Releases site](https://github.com/hapifhir/hapi-fhir/releases). It can also be built from sources by executing the following command: + +```bash +mvn -DskipTests -P DIST clean install -pl :hapi-fhir-jpaserver-hfql -am +``` + +To import this driver into your database tool, import the JDBC JAR and use the following settings: + + + + + + + + + + + + + + +
SettingDescription
Class Nameca.uhn.fhir.jpa.fql.jdbc.JdbcDriver
URLjdbc:hapifhirql:[server_base_url]
UsernameIf provided, the username/password will be added as an HTTP Basic Authorization header on all requests to the server.
Password
diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml index f83f188f06c..755bcf1d5a5 100644 --- a/hapi-fhir-jacoco/pom.xml +++ b/hapi-fhir-jacoco/pom.xml @@ -11,7 +11,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -182,6 +182,11 @@ hapi-fhir-jpaserver-model ${project.version} + + ca.uhn.hapi.fhir + hapi-fhir-jpaserver-hfql + ${project.version} + ca.uhn.hapi.fhir hapi-fhir-jpaserver-ips diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml index 9ede27f5875..88331d45254 100644 --- a/hapi-fhir-jaxrsserver-base/pom.xml +++ b/hapi-fhir-jaxrsserver-base/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpa/pom.xml b/hapi-fhir-jpa/pom.xml index c618237c461..2720f72bc97 100644 --- a/hapi-fhir-jpa/pom.xml +++ b/hapi-fhir-jpa/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index 9411eb36d2d..213b215cc59 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java index c701d1392c3..5697302d49d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java @@ -2559,7 +2559,7 @@ public class QueryStack { mySearchParamRegistry.getActiveSearchParam(theResourceName, fullName); if (fullChainParam != null) { List swappedParamTypes = nextAnd.stream() - .map(t -> 
toParameterType(fullChainParam, null, t.getValueAsQueryToken(myFhirContext))) + .map(t -> newParameterInstance(fullChainParam, null, t.getValueAsQueryToken(myFhirContext))) .collect(Collectors.toList()); List> params = List.of(swappedParamTypes); Condition predicate = createPredicateSearchParameter( @@ -2660,15 +2660,15 @@ public class QueryStack { mySqlBuilder.addPredicate(predicate); } - public IQueryParameterType toParameterType( + public IQueryParameterType newParameterInstance( RuntimeSearchParam theParam, String theQualifier, String theValueAsQueryToken) { - IQueryParameterType qp = toParameterType(theParam); + IQueryParameterType qp = newParameterInstance(theParam); qp.setValueAsQueryToken(myFhirContext, theParam.getName(), theQualifier, theValueAsQueryToken); return qp; } - private IQueryParameterType toParameterType(RuntimeSearchParam theParam) { + private IQueryParameterType newParameterInstance(RuntimeSearchParam theParam) { IQueryParameterType qp; switch (theParam.getParamType()) { @@ -2694,8 +2694,8 @@ public class QueryStack { throw new InternalErrorException(Msg.code(1224) + "Parameter " + theParam.getName() + " has " + compositeOf.size() + " composite parts. 
Don't know how handlt this."); } - IQueryParameterType leftParam = toParameterType(compositeOf.get(0)); - IQueryParameterType rightParam = toParameterType(compositeOf.get(1)); + IQueryParameterType leftParam = newParameterInstance(compositeOf.get(0)); + IQueryParameterType rightParam = newParameterInstance(compositeOf.get(1)); qp = new CompositeParam<>(leftParam, rightParam); break; case URI: @@ -2876,7 +2876,7 @@ public class QueryStack { if (RestSearchParameterTypeEnum.REFERENCE.equals(nextSearchParam.getParamType())) { orValues.add(new ReferenceParam(nextQualifier, "", theTargetValue)); } else { - IQueryParameterType qp = toParameterType(nextSearchParam); + IQueryParameterType qp = newParameterInstance(nextSearchParam); qp.setValueAsQueryToken(myFhirContext, nextSearchParam.getName(), null, theTargetValue); orValues.add(qp); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java index 10c17cc2541..5952f398c56 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java @@ -692,7 +692,7 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im type.setValueAsQueryToken(getFhirContext(), theParamName, qualifier, resourceId); chainValue = type; } else { - chainValue = myQueryStack.toParameterType(param, qualifier, resourceId); + chainValue = myQueryStack.newParameterInstance(param, qualifier, resourceId); } return chainValue; diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml index 7a993dd23a0..a00afb5256a 100644 --- a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml +++ 
b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-hfql/pom.xml b/hapi-fhir-jpaserver-hfql/pom.xml new file mode 100644 index 00000000000..1446717ef32 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/pom.xml @@ -0,0 +1,68 @@ + + 4.0.0 + + ca.uhn.hapi.fhir + hapi-deployable-pom + 6.7.15-SNAPSHOT + ../hapi-deployable-pom/pom.xml + + + hapi-fhir-jpaserver-hfql + jar + HAPI FHIR JPA Server - HFQL Driver + + + + ca.uhn.hapi.fhir + hapi-fhir-jpaserver-base + ${project.version} + + + + + javax.servlet + javax.servlet-api + provided + + + + + ca.uhn.hapi.fhir + hapi-fhir-test-utilities + ${project.version} + test + + + + + + DIST + + + + org.apache.maven.plugins + maven-assembly-plugin + + + make-assembly + package + + single + + + + + + jar-with-dependencies + + false + hapi-fhir-hfql-jdbc-${project.version} + false + + + + + + + + diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/HfqlDataTypeEnum.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/HfqlDataTypeEnum.java new file mode 100644 index 00000000000..3bc711c6bf8 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/HfqlDataTypeEnum.java @@ -0,0 +1,46 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.fql.executor; + +import java.math.BigDecimal; +import java.sql.Date; +import java.sql.Types; + +public enum HfqlDataTypeEnum { + STRING(Types.VARCHAR, String.class), + JSON(Types.VARCHAR, String.class), + INTEGER(Types.INTEGER, Integer.class), + BOOLEAN(Types.BOOLEAN, Boolean.class), + DATE(Types.DATE, Date.class), + TIMESTAMP(Types.TIMESTAMP_WITH_TIMEZONE, Date.class), + LONGINT(Types.BIGINT, Long.class), + TIME(Types.TIME, String.class), + DECIMAL(Types.DECIMAL, BigDecimal.class); + + private final int mySqlType; + + HfqlDataTypeEnum(int theSqlType, Class theJavaType) { + mySqlType = theSqlType; + } + + public int getSqlType() { + return mySqlType; + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/HfqlExecutor.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/HfqlExecutor.java new file mode 100644 index 00000000000..4edb5ec95f3 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/HfqlExecutor.java @@ -0,0 +1,921 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.fql.executor; + +import ca.uhn.fhir.context.BaseRuntimeChildDefinition; +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.RuntimeResourceDefinition; +import ca.uhn.fhir.context.RuntimeSearchParam; +import ca.uhn.fhir.fhirpath.FhirPathExecutionException; +import ca.uhn.fhir.fhirpath.IFhirPath; +import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.fql.parser.HfqlFhirPathParser; +import ca.uhn.fhir.jpa.fql.parser.HfqlStatement; +import ca.uhn.fhir.jpa.fql.parser.HfqlStatementParser; +import ca.uhn.fhir.jpa.fql.util.HfqlConstants; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; +import ca.uhn.fhir.model.api.IQueryParameterAnd; +import ca.uhn.fhir.parser.DataFormatException; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.api.QualifiedParamList; +import ca.uhn.fhir.rest.api.server.IBundleProvider; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.param.DateOrListParam; +import ca.uhn.fhir.rest.param.DateParam; +import ca.uhn.fhir.rest.param.QualifierDetails; +import ca.uhn.fhir.rest.param.TokenOrListParam; +import ca.uhn.fhir.rest.server.IPagingProvider; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; +import ca.uhn.fhir.util.UrlUtil; +import com.google.common.collect.Lists; +import org.apache.commons.collections4.ListUtils; +import org.apache.commons.lang3.Validate; +import org.apache.commons.lang3.builder.HashCodeBuilder; +import org.hl7.fhir.instance.model.api.IBase; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IPrimitiveType; +import org.hl7.fhir.r4.model.DateTimeType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import 
org.springframework.beans.factory.annotation.Autowired; + +import java.math.BigDecimal; +import java.sql.Types; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.TreeSet; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; +import static org.apache.commons.lang3.StringUtils.isBlank; + +/** + * This class could be considered the main entrypoint into the HFQL executor. + * It receives a raw HFQL query, parses it, executes it, and returns a result set. + * Conceptually the {@link #executeInitialSearch(String, Integer, RequestDetails)} + * method can be thought of like the JPA DAO search method, and the + * {@link #executeContinuation(HfqlStatement, String, int, Integer, RequestDetails)} + * can be thought of like loading a subsequent page of the search results. + *

+ * Both of these methods return an {@link IHfqlExecutionResult}, which is essentially + * a result row iterator. + */ +public class HfqlExecutor implements IHfqlExecutor { + public static final int BATCH_SIZE = 1000; + public static final String[] EMPTY_STRING_ARRAY = new String[0]; + public static final Set NULL_GROUP_BY_KEY = Set.of(new GroupByKey(List.of())); + private static final Logger ourLog = LoggerFactory.getLogger(HfqlExecutor.class); + + @Autowired + private DaoRegistry myDaoRegistry; + + @Autowired + private FhirContext myFhirContext; + + @Autowired + private IPagingProvider myPagingProvider; + + @Autowired + private ISearchParamRegistry mySearchParamRegistry; + + /** + * Constructor + */ + public HfqlExecutor() { + super(); + } + + @Override + public IHfqlExecutionResult executeInitialSearch( + String theStatement, Integer theLimit, RequestDetails theRequestDetails) { + try { + return doExecuteInitialSearch(theStatement, theLimit, theRequestDetails); + } catch (Exception e) { + ourLog.warn("Failed to execute HFFQL statement", e); + return StaticHfqlExecutionResult.withError(defaultIfNull(e.getMessage(), "(no message)")); + } + } + + @Nonnull + private IHfqlExecutionResult doExecuteInitialSearch( + String theStatement, Integer theLimit, RequestDetails theRequestDetails) { + HfqlStatementParser parser = new HfqlStatementParser(myFhirContext, theStatement); + HfqlStatement statement = parser.parse(); + IFhirResourceDao dao = myDaoRegistry.getResourceDao(statement.getFromResourceName()); + if (dao == null) { + throw new DataFormatException( + Msg.code(2406) + "Unknown or unsupported FROM type: " + statement.getFromResourceName()); + } + + massageSelectColumnNames(statement); + populateSelectColumnDataTypes(statement); + + SearchParameterMap map = new SearchParameterMap(); + addHfqlWhereClausesToSearchParameterMap(statement, map); + + Integer limit = theLimit; + if (statement.hasOrderClause()) { + /* + * If we're ordering search results, we need to load 
all available data in order + * to sort it because we handle ordering in application code currently. A good + * future optimization would be to handle ordering in the database when possible, + * but we can't always do that because the query can specify an order on any + * arbitrary FHIRPath expression. + */ + limit = null; + } else if (statement.getLimit() != null) { + limit = limit == null ? statement.getLimit() : Math.min(limit, statement.getLimit()); + } + + HfqlExecutionContext executionContext = new HfqlExecutionContext(myFhirContext.newFhirPath()); + IBundleProvider outcome = dao.search(map, theRequestDetails); + Predicate whereClausePredicate = newWhereClausePredicate(executionContext, statement); + + IHfqlExecutionResult executionResult; + if (statement.hasCountClauses()) { + executionResult = executeCountClause(statement, executionContext, outcome, whereClausePredicate); + } else { + executionResult = new LocalSearchHfqlExecutionResult( + statement, outcome, executionContext, limit, 0, whereClausePredicate, myFhirContext); + } + + if (statement.hasOrderClause()) { + executionResult = createOrderedResult(statement, executionResult); + } + + return executionResult; + } + + private void addHfqlWhereClausesToSearchParameterMap(HfqlStatement statement, SearchParameterMap map) { + List searchClauses = statement.getWhereClauses(); + for (HfqlStatement.WhereClause nextSearchClause : searchClauses) { + if (nextSearchClause.getOperator() != HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH) { + continue; + } + + if (!"id".equals(nextSearchClause.getLeft())) { + throw new InvalidRequestException( + Msg.code(2412) + "search_match function can only be applied to the id element"); + } + + if (nextSearchClause.getRight().size() != 2) { + throw new InvalidRequestException(Msg.code(2413) + "search_match function requires 2 arguments"); + } + + List argumentStrings = nextSearchClause.getRightAsStrings(); + String paramName = argumentStrings.get(0); + String 
paramValueUnsplit = argumentStrings.get(1); + List paramValues = QualifiedParamList.splitQueryStringByCommasIgnoreEscape(null, paramValueUnsplit); + + if (paramName.equals(Constants.PARAM_ID)) { + map.add(Constants.PARAM_ID, new TokenOrListParam(null, paramValues.toArray(EMPTY_STRING_ARRAY))); + } else if (paramName.equals(Constants.PARAM_LASTUPDATED)) { + DateOrListParam param = new DateOrListParam(); + for (String nextValue : paramValues) { + param.addOr(new DateParam(nextValue)); + } + map.add(Constants.PARAM_LASTUPDATED, param); + } else if (paramName.startsWith("_")) { + throw newInvalidRequestExceptionUnknownSearchParameter(paramName); + } else { + QualifierDetails qualifiedParamName = QualifierDetails.extractQualifiersFromParameterName(paramName); + + RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam( + statement.getFromResourceName(), qualifiedParamName.getParamName()); + if (searchParam == null) { + throw newInvalidRequestExceptionUnknownSearchParameter(paramName); + } + + QualifiedParamList values = new QualifiedParamList(); + values.setQualifier(qualifiedParamName.getWholeQualifier()); + values.addAll(paramValues); + IQueryParameterAnd andParam = JpaParamUtil.parseQueryParams( + myFhirContext, searchParam.getParamType(), paramName, List.of(values)); + map.add(qualifiedParamName.getParamName(), andParam); + } + } + } + + private IHfqlExecutionResult createOrderedResult( + HfqlStatement theStatement, IHfqlExecutionResult theExecutionResult) { + List rows = new ArrayList<>(); + while (theExecutionResult.hasNext()) { + IHfqlExecutionResult.Row nextRow = theExecutionResult.getNextRow(); + rows.add(nextRow); + Validate.isTrue( + rows.size() <= HfqlConstants.ORDER_AND_GROUP_LIMIT, + "Can not ORDER BY result sets over %d results", + HfqlConstants.ORDER_AND_GROUP_LIMIT); + } + + List orderColumnIndexes = theStatement.getOrderByClauses().stream() + .map(t -> { + int index = theStatement.findSelectClauseIndex(t.getClause()); + if (index == 
-1) { + throw new InvalidRequestException( + Msg.code(2407) + "Invalid/unknown ORDER BY clause: " + t.getClause()); + } + return index; + }) + .collect(Collectors.toList()); + List orderAscending = theStatement.getOrderByClauses().stream() + .map(HfqlStatement.OrderByClause::isAscending) + .collect(Collectors.toList()); + + Comparator comparator = null; + for (int i = 0; i < orderColumnIndexes.size(); i++) { + int columnIndex = orderColumnIndexes.get(i); + HfqlDataTypeEnum dataType = theExecutionResult + .getStatement() + .getSelectClauses() + .get(columnIndex) + .getDataType(); + Comparator nextComparator = newRowComparator(columnIndex, dataType); + if (!orderAscending.get(i)) { + nextComparator = nextComparator.reversed(); + } + if (comparator == null) { + comparator = nextComparator; + } else { + comparator = comparator.thenComparing(nextComparator); + } + } + + rows.sort(comparator); + for (int i = 0; i < rows.size(); i++) { + rows.set(i, rows.get(i).toRowOffset(i)); + } + + List> rowData = + rows.stream().map(IHfqlExecutionResult.Row::getRowValues).collect(Collectors.toList()); + return new StaticHfqlExecutionResult(null, theStatement, rowData); + } + + @Override + public IHfqlExecutionResult executeContinuation( + HfqlStatement theStatement, + String theSearchId, + int theStartingOffset, + Integer theLimit, + RequestDetails theRequestDetails) { + IBundleProvider resultList = myPagingProvider.retrieveResultList(theRequestDetails, theSearchId); + HfqlExecutionContext executionContext = new HfqlExecutionContext(myFhirContext.newFhirPath()); + Predicate whereClausePredicate = newWhereClausePredicate(executionContext, theStatement); + return new LocalSearchHfqlExecutionResult( + theStatement, + resultList, + executionContext, + theLimit, + theStartingOffset, + whereClausePredicate, + myFhirContext); + } + + private IHfqlExecutionResult executeCountClause( + HfqlStatement theStatement, + HfqlExecutionContext theExecutionContext, + IBundleProvider theOutcome, + 
Predicate theWhereClausePredicate) { + + Set selectClauses = theStatement.getSelectClauses().stream() + .filter(t -> t.getOperator() == HfqlStatement.SelectClauseOperator.SELECT) + .map(HfqlStatement.SelectClause::getClause) + .collect(Collectors.toSet()); + for (String next : selectClauses) { + if (!theStatement.getGroupByClauses().contains(next)) { + throw newInvalidRequestCountWithSelectOnNonGroupedClause(next); + } + } + Set countClauses = theStatement.getSelectClauses().stream() + .filter(t -> t.getOperator() == HfqlStatement.SelectClauseOperator.COUNT) + .map(HfqlStatement.SelectClause::getClause) + .collect(Collectors.toSet()); + + Map> keyCounter = new HashMap<>(); + + int offset = 0; + int batchSize = 1000; + while (theOutcome.size() == null || theOutcome.sizeOrThrowNpe() > offset) { + List resources = theOutcome.getResources(offset, offset + batchSize); + + for (IBaseResource nextResource : resources) { + + if (nextResource != null && theWhereClausePredicate.test(nextResource)) { + + List> groupByClauseValues = new ArrayList<>(); + + for (String nextClause : theStatement.getGroupByClauses()) { + List nextClauseValues = + theExecutionContext.evaluate(nextResource, nextClause, IPrimitiveType.class).stream() + .map(IPrimitiveType::getValueAsString) + .collect(Collectors.toList()); + if (nextClauseValues.isEmpty()) { + nextClauseValues.add(null); + } + groupByClauseValues.add(nextClauseValues); + } + Set allKeys = createCrossProduct(groupByClauseValues); + + for (GroupByKey nextKey : allKeys) { + + Map counts = keyCounter.computeIfAbsent(nextKey, t -> new HashMap<>()); + if (keyCounter.size() >= HfqlConstants.ORDER_AND_GROUP_LIMIT) { + throw new InvalidRequestException(Msg.code(2402) + "Can not group on > " + + HfqlConstants.ORDER_AND_GROUP_LIMIT + " terms"); + } + for (String nextCountClause : countClauses) { + if (!nextCountClause.equals("*")) { + if (theExecutionContext + .evaluateFirst(nextResource, nextCountClause, IBase.class) + .isEmpty()) { + 
continue; + } + } + counts.computeIfAbsent(nextCountClause, k -> new AtomicInteger()) + .incrementAndGet(); + } + } + } + } + + offset += batchSize; + } + + List> rows = new ArrayList<>(); + for (Map.Entry> nextEntry : keyCounter.entrySet()) { + List nextRow = new ArrayList<>(); + + for (HfqlStatement.SelectClause nextSelectClause : theStatement.getSelectClauses()) { + if (nextSelectClause.getOperator() == HfqlStatement.SelectClauseOperator.SELECT) { + int groupByIndex = theStatement.getGroupByClauses().indexOf(nextSelectClause.getClause()); + nextRow.add(nextEntry.getKey().getNames().get(groupByIndex)); + } else { + AtomicInteger counter = nextEntry.getValue().get(nextSelectClause.getClause()); + if (counter != null) { + nextRow.add(counter.intValue()); + } else { + nextRow.add(0); + } + } + } + + rows.add(nextRow); + } + + return new StaticHfqlExecutionResult(null, theStatement, rows); + } + + private Set createCrossProduct(List> theGroupByClauseValues) { + if (theGroupByClauseValues.isEmpty()) { + return NULL_GROUP_BY_KEY; + } + Set retVal = new HashSet<>(); + List valueHolder = new ArrayList<>(); + createCrossProductRecurse(theGroupByClauseValues, retVal, valueHolder); + return retVal; + } + + private void createCrossProductRecurse( + List> theGroupByClauseValues, + Set theGroupsSetToPopulate, + List theCurrentValueChain) { + List nextOptions = theGroupByClauseValues.get(0); + for (String nextOption : nextOptions) { + theCurrentValueChain.add(nextOption); + + if (theGroupByClauseValues.size() == 1) { + theGroupsSetToPopulate.add(new GroupByKey(theCurrentValueChain)); + } else { + createCrossProductRecurse( + theGroupByClauseValues.subList(1, theGroupByClauseValues.size()), + theGroupsSetToPopulate, + theCurrentValueChain); + } + + theCurrentValueChain.remove(theCurrentValueChain.size() - 1); + } + } + + private Predicate newWhereClausePredicate( + HfqlExecutionContext theExecutionContext, HfqlStatement theStatement) { + return r -> { + for 
(HfqlStatement.WhereClause nextWhereClause : theStatement.getWhereClauses()) { + + boolean haveMatch; + try { + switch (nextWhereClause.getOperator()) { + case SEARCH_MATCH: + // These are handled earlier so we don't need to test here + haveMatch = true; + break; + case UNARY_BOOLEAN: { + haveMatch = evaluateWhereClauseUnaryBoolean(theExecutionContext, r, nextWhereClause); + break; + } + case EQUALS: + case IN: + default: { + haveMatch = evaluateWhereClauseBinaryEqualsOrIn(theExecutionContext, r, nextWhereClause); + break; + } + } + } catch (FhirPathExecutionException e) { + throw new InvalidRequestException(Msg.code(2403) + "Unable to evaluate FHIRPath expression \"" + + nextWhereClause.getLeft() + "\". Error: " + e.getMessage()); + } + + if (!haveMatch) { + return false; + } + } + + return true; + }; + } + + private void populateSelectColumnDataTypes(HfqlStatement statement) { + HfqlFhirPathParser fhirPathParser = new HfqlFhirPathParser(myFhirContext); + for (HfqlStatement.SelectClause nextSelectClause : statement.getSelectClauses()) { + HfqlDataTypeEnum nextType; + if (nextSelectClause.getOperator() == HfqlStatement.SelectClauseOperator.COUNT) { + nextType = HfqlDataTypeEnum.INTEGER; + } else { + String clause = nextSelectClause.getClause(); + if (clause.equals("meta.versionId")) { + // FHIR's versionId field is a string, but in HAPI FHIR JPA it can only ever be a long so we'll + // use that type + nextType = HfqlDataTypeEnum.LONGINT; + } else { + nextType = fhirPathParser.determineDatatypeForPath(statement.getFromResourceName(), clause); + nextType = defaultIfNull(nextType, HfqlDataTypeEnum.STRING); + } + } + nextSelectClause.setDataType(nextType); + } + } + + /** + * This method replaces a SELECT-ed column named "*" with a collection of + * available column names for the given resource type. 
+ */ + private void massageSelectColumnNames(HfqlStatement theHfqlStatement) { + + List selectClauses = theHfqlStatement.getSelectClauses(); + for (int i = 0; i < selectClauses.size(); i++) { + HfqlStatement.SelectClause selectClause = selectClauses.get(i); + if (selectClause.getOperator() == HfqlStatement.SelectClauseOperator.SELECT) { + if ("*".equals(selectClause.getClause())) { + resolveAndReplaceStarInSelectClauseAtIndex(theHfqlStatement, selectClauses, i); + } + } + } + } + + private void resolveAndReplaceStarInSelectClauseAtIndex( + HfqlStatement theHfqlStatement, List theSelectClauses, int theIndex) { + String resourceName = theHfqlStatement.getFromResourceName(); + TreeSet allLeafPaths = findLeafPaths(resourceName); + + theSelectClauses.remove(theIndex); + List reversedLeafPaths = new ArrayList<>(allLeafPaths); + reversedLeafPaths = Lists.reverse(reversedLeafPaths); + reversedLeafPaths.forEach(t -> theSelectClauses.add(theIndex, new HfqlStatement.SelectClause(t).setAlias(t))); + } + + @Nonnull + private TreeSet findLeafPaths(String theResourceName) { + TreeSet allLeafPaths = new TreeSet<>(); + RuntimeResourceDefinition def = myFhirContext.getResourceDefinition(theResourceName); + for (BaseRuntimeChildDefinition nextChild : def.getChildren()) { + for (String next : nextChild.getValidChildNames()) { + if (!"extension".equals(next) && !"modifierExtension".equals(next)) { + allLeafPaths.add(next); + } + } + } + return allLeafPaths; + } + + /** + * Columns to return, per {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} + *
    + *
  1. TABLE_CAT String {@code =>} table catalog (may be {@code null}) + *
  2. TABLE_SCHEM String {@code =>} table schema (may be {@code null}) + *
  3. TABLE_NAME String {@code =>} table name + *
  4. TABLE_TYPE String {@code =>} table type. Typical types are "TABLE", + * "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY", + * "LOCAL TEMPORARY", "ALIAS", "SYNONYM". + *
  5. REMARKS String {@code =>} explanatory comment on the table (may be {@code null}) + *
  6. TYPE_CAT String {@code =>} the types catalog (may be {@code null}) + *
  7. TYPE_SCHEM String {@code =>} the types schema (may be {@code null}) + *
  8. TYPE_NAME String {@code =>} type name (may be {@code null}) + *
  9. SELF_REFERENCING_COL_NAME String {@code =>} name of the designated + * "identifier" column of a typed table (may be {@code null}) + *
  10. REF_GENERATION String {@code =>} specifies how values in + * SELF_REFERENCING_COL_NAME are created. Values are + * "SYSTEM", "USER", "DERIVED". (may be {@code null}) + *
+ */ + @Override + public IHfqlExecutionResult introspectTables() { + List columns = List.of( + "TABLE_CAT", + "TABLE_SCHEM", + "TABLE_NAME", + "TABLE_TYPE", + "REMARKS", + "TYPE_CAT", + "TYPE_SCHEM", + "TYPE_NAME", + "SELF_REFERENCING_COL_NAME", + "REF_GENERATION"); + List dataTypes = List.of( + HfqlDataTypeEnum.STRING, + HfqlDataTypeEnum.STRING, + HfqlDataTypeEnum.STRING, + HfqlDataTypeEnum.STRING, + HfqlDataTypeEnum.STRING, + HfqlDataTypeEnum.STRING, + HfqlDataTypeEnum.STRING, + HfqlDataTypeEnum.STRING, + HfqlDataTypeEnum.STRING, + HfqlDataTypeEnum.STRING); + List> rows = new ArrayList<>(); + + TreeSet resourceTypes = new TreeSet<>(myFhirContext.getResourceTypes()); + for (String next : resourceTypes) { + rows.add(Lists.newArrayList(null, null, next, "TABLE", null, null, null, null, null, null)); + } + + return new StaticHfqlExecutionResult(null, columns, dataTypes, rows); + } + + /** + * Columns from {@link java.sql.DatabaseMetaData#getColumns(String, String, String, String)} + * + *
    + *
  1. TABLE_CAT String {@code =>} table catalog (may be {@code null}) + *
  2. TABLE_SCHEM String {@code =>} table schema (may be {@code null}) + *
  3. TABLE_NAME String {@code =>} table name + *
  4. COLUMN_NAME String {@code =>} column name + *
  5. DATA_TYPE int {@code =>} SQL type from java.sql.Types + *
  6. TYPE_NAME String {@code =>} Data source dependent type name, + * for a UDT the type name is fully qualified + *
  7. COLUMN_SIZE int {@code =>} column size. + *
  8. BUFFER_LENGTH is not used. + *
  9. DECIMAL_DIGITS int {@code =>} the number of fractional digits. Null is returned for data types where + * DECIMAL_DIGITS is not applicable. + *
  10. NUM_PREC_RADIX int {@code =>} Radix (typically either 10 or 2) + *
  11. NULLABLE int {@code =>} is NULL allowed. + *
      + *
    • columnNoNulls - might not allow {@code NULL} values + *
    • columnNullable - definitely allows {@code NULL} values + *
    • columnNullableUnknown - nullability unknown + *
    + *
  12. REMARKS String {@code =>} comment describing column (may be {@code null}) + *
  13. COLUMN_DEF String {@code =>} default value for the column, which should be interpreted as a string when the value is enclosed in single quotes (may be {@code null}) + *
  14. SQL_DATA_TYPE int {@code =>} unused + *
  15. SQL_DATETIME_SUB int {@code =>} unused + *
  16. CHAR_OCTET_LENGTH int {@code =>} for char types the + * maximum number of bytes in the column + *
  17. ORDINAL_POSITION int {@code =>} index of column in table + * (starting at 1) + *
  18. IS_NULLABLE String {@code =>} ISO rules are used to determine the nullability for a column. + *
      + *
    • YES --- if the column can include NULLs + *
    • NO --- if the column cannot include NULLs + *
    • empty string --- if the nullability for the + * column is unknown + *
    + *
  19. SCOPE_CATALOG String {@code =>} catalog of table that is the scope + * of a reference attribute ({@code null} if DATA_TYPE isn't REF) + *
  20. SCOPE_SCHEMA String {@code =>} schema of table that is the scope + * of a reference attribute ({@code null} if the DATA_TYPE isn't REF) + *
  21. SCOPE_TABLE String {@code =>} table name that this the scope + * of a reference attribute ({@code null} if the DATA_TYPE isn't REF) + *
  22. SOURCE_DATA_TYPE short {@code =>} source type of a distinct type or user-generated + * Ref type, SQL type from java.sql.Types ({@code null} if DATA_TYPE + * isn't DISTINCT or user-generated REF) + *
  23. IS_AUTOINCREMENT String {@code =>} Indicates whether this column is auto incremented + *
      + *
    • YES --- if the column is auto incremented + *
    • NO --- if the column is not auto incremented + *
    • empty string --- if it cannot be determined whether the column is auto incremented + *
    + *
  24. IS_GENERATEDCOLUMN String {@code =>} Indicates whether this is a generated column + *
      + *
    • YES --- if this a generated column + *
    • NO --- if this not a generated column + *
    • empty string --- if it cannot be determined whether this is a generated column + *
    + *
+ * + * @param theTableName The table name or null + * @param theColumnName The column name or null + */ + @Override + public IHfqlExecutionResult introspectColumns(@Nullable String theTableName, @Nullable String theColumnName) { + List columns = List.of( + "TABLE_CAT", + "TABLE_SCHEM", + "TABLE_NAME", + "COLUMN_NAME", + "DATA_TYPE", + "TYPE_NAME", + "COLUMN_SIZE", + "BUFFER_LENGTH", + "DECIMAL_DIGITS", + "NUM_PREC_RADIX", + "NULLABLE", + "REMARKS", + "COLUMN_DEF", + "SQL_DATA_TYPE", + "SQL_DATETIME_SUB", + "CHAR_OCTET_LENGTH", + "ORDINAL_POSITION", + "IS_NULLABLE", + "SCOPE_CATALOG", + "SCOPE_SCHEMA", + "SCOPE_TABLE", + "SOURCE_DATA_TYPE", + "IS_AUTOINCREMENT", + "IS_GENERATEDCOLUMN"); + List dataTypes = List.of( + HfqlDataTypeEnum.STRING, // TABLE_CAT + HfqlDataTypeEnum.STRING, // TABLE_SCHEM + HfqlDataTypeEnum.STRING, // TABLE_NAME + HfqlDataTypeEnum.STRING, // COLUMN_NAME + HfqlDataTypeEnum.INTEGER, // DATA_TYPE + HfqlDataTypeEnum.STRING, // TYPE_NAME + HfqlDataTypeEnum.INTEGER, // COLUMN_SIZE + HfqlDataTypeEnum.STRING, // BUFFER_LENGTH + HfqlDataTypeEnum.INTEGER, // DECIMAL_DIGITS + HfqlDataTypeEnum.INTEGER, // NUM_PREC_RADIX + HfqlDataTypeEnum.INTEGER, // NULLABLE + HfqlDataTypeEnum.STRING, // REMARKS + HfqlDataTypeEnum.STRING, // COLUMN_DEF + HfqlDataTypeEnum.INTEGER, // SQL_DATA_TYPE + HfqlDataTypeEnum.INTEGER, // SQL_DATETIME_SUB + HfqlDataTypeEnum.INTEGER, // CHAR_OCTET_LENGTH + HfqlDataTypeEnum.INTEGER, // ORDINAL_POSITION + HfqlDataTypeEnum.STRING, // IS_NULLABLE + HfqlDataTypeEnum.STRING, // SCOPE_CATALOG + HfqlDataTypeEnum.STRING, // SCOPE_SCHEMA + HfqlDataTypeEnum.STRING, // SCOPE_TABLE + HfqlDataTypeEnum.STRING, // SOURCE_DATA_TYPE + HfqlDataTypeEnum.STRING, // IS_AUTOINCREMENT + HfqlDataTypeEnum.STRING // IS_GENERATEDCOLUMN + ); + + List> rows = new ArrayList<>(); + for (String nextResourceType : new TreeSet<>(myFhirContext.getResourceTypes())) { + if (isBlank(theTableName) || theTableName.equals(nextResourceType)) { + TreeSet leafPaths = 
findLeafPaths(nextResourceType); + int position = 1; + for (String nextLeafPath : leafPaths) { + if (isBlank(theColumnName) || theColumnName.equals(nextLeafPath)) { + rows.add(Lists.newArrayList( + null, + null, + nextResourceType, + nextLeafPath, + Types.VARCHAR, + "string", + -1, + null, + null, + null, + 1, // nullable + null, + null, + null, + null, + null, + position++, + "YES", + null, + null, + null, + null, + "NO", + "NO")); + } + } + } + } + + return new StaticHfqlExecutionResult(null, columns, dataTypes, rows); + } + + @SuppressWarnings("unchecked") + static Comparator newRowComparator(int columnIndex, HfqlDataTypeEnum dataType) { + return Comparator.comparing(new RowValueExtractor(columnIndex, dataType)); + } + + private static boolean evaluateWhereClauseUnaryBoolean( + HfqlExecutionContext theExecutionContext, IBaseResource r, HfqlStatement.WhereClause theNextWhereClause) { + boolean haveMatch = false; + assert theNextWhereClause.getRight().isEmpty(); + List values = + theExecutionContext.evaluate(r, theNextWhereClause.getLeft(), IPrimitiveType.class); + for (IPrimitiveType nextValue : values) { + if (Boolean.TRUE.equals(nextValue.getValue())) { + haveMatch = true; + break; + } + } + return haveMatch; + } + + private static boolean evaluateWhereClauseBinaryEqualsOrIn( + HfqlExecutionContext theExecutionContext, IBaseResource r, HfqlStatement.WhereClause theNextWhereClause) { + boolean haveMatch = false; + List values = theExecutionContext.evaluate(r, theNextWhereClause.getLeft(), IBase.class); + for (IBase nextValue : values) { + for (String nextRight : theNextWhereClause.getRight()) { + String expression = "$this = " + nextRight; + IPrimitiveType outcome = theExecutionContext + .evaluateFirst(nextValue, expression, IPrimitiveType.class) + .orElseThrow(IllegalStateException::new); + Boolean value = (Boolean) outcome.getValue(); + haveMatch = value; + if (haveMatch) { + break; + } + } + if (haveMatch) { + break; + } + } + return haveMatch; + } + + 
@Nonnull + private static InvalidRequestException newInvalidRequestExceptionUnknownSearchParameter(String theParamName) { + return new InvalidRequestException( + "Unknown/unsupported search parameter: " + UrlUtil.sanitizeUrlPart(theParamName)); + } + + @Nonnull + private static InvalidRequestException newInvalidRequestCountWithSelectOnNonGroupedClause(String theClause) { + return new InvalidRequestException( + "Unable to select on non-grouped column in a count expression: " + UrlUtil.sanitizeUrlPart(theClause)); + } + + private static class RowValueExtractor implements Function { + private final int myColumnIndex; + private final HfqlDataTypeEnum myDataType; + + public RowValueExtractor(int theColumnIndex, HfqlDataTypeEnum theDataType) { + myColumnIndex = theColumnIndex; + myDataType = theDataType; + } + + @Override + public Comparable apply(IHfqlExecutionResult.Row theRow) { + Comparable retVal = (Comparable) theRow.getRowValues().get(myColumnIndex); + switch (myDataType) { + case STRING: + case TIME: + case JSON: + retVal = defaultIfNull(retVal, ""); + break; + case LONGINT: + case INTEGER: + if (retVal instanceof Number) { + return retVal; + } else if (retVal == null) { + retVal = Long.MIN_VALUE; + } else { + retVal = Long.parseLong((String) retVal); + } + break; + case BOOLEAN: + if (retVal == null) { + retVal = Boolean.FALSE; + } else { + retVal = Boolean.parseBoolean((String) retVal); + } + break; + case DATE: + case TIMESTAMP: + if (retVal != null) { + retVal = new DateTimeType((String) retVal).getValue(); + } + if (retVal == null) { + retVal = new Date(Long.MIN_VALUE); + } + break; + case DECIMAL: + if (retVal == null) { + retVal = BigDecimal.valueOf(Long.MIN_VALUE); + } else { + retVal = new BigDecimal((String) retVal); + } + break; + } + return retVal; + } + } + + private static class GroupByKey { + private final int myHashCode; + private List myNames; + + /** + * @param theNames A copy of the list will be stored + */ + public GroupByKey(List theNames) { 
+ myNames = new ArrayList<>(theNames); + + HashCodeBuilder hashCodeBuilder = new HashCodeBuilder(); + myNames.forEach(hashCodeBuilder::append); + myHashCode = hashCodeBuilder.toHashCode(); + } + + @Override + public boolean equals(Object theO) { + boolean retVal = false; + if (theO instanceof GroupByKey) { + List otherNames = ((GroupByKey) theO).myNames; + retVal = ListUtils.isEqualList(myNames, otherNames); + } + return retVal; + } + + @Override + public int hashCode() { + return myHashCode; + } + + public List getNames() { + return myNames; + } + } + + public static class HfqlExecutionContext { + + private final Map myFhirPathExpressionMap = new HashMap<>(); + private final IFhirPath myFhirPath; + + public HfqlExecutionContext(IFhirPath theFhirPath) { + myFhirPath = theFhirPath; + } + + public List evaluate(IBase theInput, String thePath, Class theReturnType) { + IFhirPath.IParsedExpression parsedExpression = getParsedExpression(thePath); + return myFhirPath.evaluate(theInput, parsedExpression, theReturnType); + } + + Optional evaluateFirst(IBase theInput, String thePath, Class theReturnType) { + IFhirPath.IParsedExpression parsedExpression = getParsedExpression(thePath); + return myFhirPath.evaluateFirst(theInput, parsedExpression, theReturnType); + } + + private IFhirPath.IParsedExpression getParsedExpression(String thePath) { + IFhirPath.IParsedExpression parsedExpression = myFhirPathExpressionMap.get(thePath); + if (parsedExpression == null) { + try { + parsedExpression = myFhirPath.parse(thePath); + } catch (Exception e) { + throw new InvalidRequestException(Msg.code(2404) + e.getMessage(), e); + } + myFhirPathExpressionMap.put(thePath, parsedExpression); + } + return parsedExpression; + } + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/IHfqlExecutionResult.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/IHfqlExecutionResult.java new file mode 100644 index 00000000000..eb08ce191c5 --- 
/dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/IHfqlExecutionResult.java @@ -0,0 +1,89 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.fql.executor; + +import ca.uhn.fhir.jpa.fql.parser.HfqlStatement; + +import java.util.List; + +/** + * This interface represents a ResultSet returned by the HFQL query layer in + * {@link IHfqlExecutor}. Think of it as roughly equivalent to the JDBC + * {@link java.sql.ResultSet} except that it's the internal version of that. + *

+ * There are several implementations of this interface: + *

    + *
  • + * {@link LocalSearchHfqlExecutionResult} - Implementation backed by a database search. + * This is used inside the HAPI FHIR server that is handling HFQL queries. + *
  • + *
  • + * {@link StaticHfqlExecutionResult} - Static implementation with fixed results. This is + * usually used to represent errors and failed queries inside the HAPI FHIR server. + *
  • + *
  • + * {@link ca.uhn.fhir.jpa.fql.jdbc.RemoteHfqlExecutionResult} - This is used inside the + * JDBC driver (ie. remote from the HAPI FHIR server) and holds results that have + * been received over the wire. + *
  • + *
+ *

+ */ +public interface IHfqlExecutionResult { + + int ROW_OFFSET_ERROR = -1; + + boolean hasNext(); + + Row getNextRow(); + + boolean isClosed(); + + void close(); + + String getSearchId(); + + int getLimit(); + + HfqlStatement getStatement(); + + class Row { + + private final List myRowValues; + private final int myRowOffset; + + public Row(int theRowOffset, List theRowValues) { + myRowOffset = theRowOffset; + myRowValues = theRowValues; + } + + public int getRowOffset() { + return myRowOffset; + } + + public List getRowValues() { + return myRowValues; + } + + public Row toRowOffset(int theRowOffset) { + return new Row(theRowOffset, myRowValues); + } + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/IHfqlExecutor.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/IHfqlExecutor.java new file mode 100644 index 00000000000..348ccdb1ecf --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/IHfqlExecutor.java @@ -0,0 +1,70 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.fql.executor; + +import ca.uhn.fhir.jpa.fql.parser.HfqlStatement; +import ca.uhn.fhir.rest.api.server.RequestDetails; + +import javax.annotation.Nullable; + +public interface IHfqlExecutor { + + /** + * Execute a FQL query and return the first page of data + * + * @param theStatement The FQL statement to execute + * @param theLimit The maximum number of records to retrieve + * @param theRequestDetails The request details associated with the request + * @return Returns a {@link IHfqlExecutionResult result object}. Note that the returned object is not thread safe. + */ + IHfqlExecutionResult executeInitialSearch(String theStatement, Integer theLimit, RequestDetails theRequestDetails); + + /** + * Load a subsequent page of data from a search initiated by a call to {@link #executeInitialSearch(String, Integer, RequestDetails)}. + * + * @param theStatement The parsed statement from the initial search. Available through a call to {@link IHfqlExecutionResult#getStatement()}. + * @param theSearchId The search ID from the initial search. Available through a call to {@link IHfqlExecutionResult#getSearchId()}. + * @param theLimit The maximum number of results to return (across all pages) + * @param theRequestDetails The request details associated with the request + * @param theStartingOffset The row offset count for the first result to return. This should be set to one higher than the last value returned by {@link IHfqlExecutionResult.Row#getRowOffset()}. + */ + IHfqlExecutionResult executeContinuation( + HfqlStatement theStatement, + String theSearchId, + int theStartingOffset, + Integer theLimit, + RequestDetails theRequestDetails); + + /** + * Provides a list of "tables", which are actually resource types, in order to + * support the JCBC {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} + * query. 
+ */ + IHfqlExecutionResult introspectTables(); + + /** + * Provides a list of "columns", which are actually selected valid FHIRPath expressions + * that can be selected on a resource + * + * @param theTableName The table name or null + * @param theColumnName The column name or null + */ + IHfqlExecutionResult introspectColumns(@Nullable String theTableName, @Nullable String theColumnName); +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/LocalSearchHfqlExecutionResult.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/LocalSearchHfqlExecutionResult.java new file mode 100644 index 00000000000..41a2e77f624 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/LocalSearchHfqlExecutionResult.java @@ -0,0 +1,230 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.fql.executor; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.fql.parser.HfqlStatement; +import ca.uhn.fhir.parser.IParser; +import ca.uhn.fhir.rest.api.server.IBundleProvider; +import org.apache.commons.lang3.Validate; +import org.hl7.fhir.instance.model.api.IBase; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.instance.model.api.IPrimitiveType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.function.Predicate; + +/** + * @see IHfqlExecutionResult for information about the purpose of this class + */ +public class LocalSearchHfqlExecutionResult implements IHfqlExecutionResult { + private static final Logger ourLog = LoggerFactory.getLogger(LocalSearchHfqlExecutionResult.class); + + private final IBundleProvider mySearchResult; + private final HfqlExecutor.HfqlExecutionContext myExecutionContext; + private final Integer myLimit; + private final HfqlStatement myStatement; + private final Predicate myWhereClausePredicate; + private final IParser myParser; + private int myTotalRowsFetched = 0; + private int myNextSearchResultRow; + private int myNextBatchRow = 0; + private List myNextBatch; + private IBaseResource myNextResource; + private boolean myExhausted = false; + private int myNextResourceSearchRow; + private Row myErrorRow; + + public LocalSearchHfqlExecutionResult( + HfqlStatement theStatement, + IBundleProvider theSearchResult, + HfqlExecutor.HfqlExecutionContext theExecutionContext, + Integer theLimit, + int theInitialOffset, + Predicate theWhereClausePredicate, + FhirContext theFhirContext) { + myStatement = theStatement; + mySearchResult = theSearchResult; + myExecutionContext = theExecutionContext; + myLimit = theLimit; + myNextSearchResultRow = theInitialOffset; + myWhereClausePredicate = 
theWhereClausePredicate; + myParser = theFhirContext.newJsonParser(); + } + + @Override + public boolean hasNext() { + fetchNextResource(); + return myNextResource != null; + } + + private void fetchNextResource() { + if (myNextResource != null) { + return; + } + try { + while (myNextResource == null && !myExhausted) { + if (myNextBatch == null) { + int from = myNextSearchResultRow; + int to = myNextSearchResultRow + HfqlExecutor.BATCH_SIZE; + myNextBatch = mySearchResult.getResources(from, to); + ourLog.info( + "HFQL fetching resources {}-{} - Total {} fetched, {} retained and limit {}", + from, + to, + myNextSearchResultRow, + myTotalRowsFetched, + myLimit); + myNextBatchRow = 0; + myNextSearchResultRow += HfqlExecutor.BATCH_SIZE; + } + if (myNextBatch.isEmpty()) { + myExhausted = true; + } else if (myNextBatch.size() > myNextBatchRow) { + myNextResource = myNextBatch.get(myNextBatchRow); + myNextResourceSearchRow = (myNextSearchResultRow - HfqlExecutor.BATCH_SIZE) + myNextBatchRow; + myNextBatchRow++; + } else { + myNextBatch = null; + } + + if (myNextResource != null && !myWhereClausePredicate.test(myNextResource)) { + myNextResource = null; + } + } + + if (myNextResource != null) { + myTotalRowsFetched++; + if (myLimit != null && myTotalRowsFetched >= myLimit) { + myExhausted = true; + } + } + } catch (Exception e) { + createAndStoreErrorRow(e.getMessage()); + } + } + + @Override + public Row getNextRow() { + fetchNextResource(); + if (myErrorRow != null) { + Row errorRow = myErrorRow; + myErrorRow = null; + return errorRow; + } + + Validate.isTrue(myNextResource != null, "No more results"); + + List values = new ArrayList<>(); + for (int columnIndex = 0; columnIndex < myStatement.getSelectClauses().size(); columnIndex++) { + HfqlStatement.SelectClause nextColumn = + myStatement.getSelectClauses().get(columnIndex); + String clause = nextColumn.getClause(); + HfqlDataTypeEnum columnDataType = nextColumn.getDataType(); + List columnValues; + try { + columnValues 
= myExecutionContext.evaluate(myNextResource, clause, IBase.class); + } catch (Exception e) { + String errorMessage = + "Failed to evaluate FHIRPath expression \"" + clause + "\". Error: " + e.getMessage(); + return createAndStoreErrorRow(errorMessage); + } + String value = null; + if (columnDataType == HfqlDataTypeEnum.JSON) { + StringBuilder b = new StringBuilder(); + b.append("["); + for (Iterator valueIter = columnValues.iterator(); valueIter.hasNext(); ) { + IBase next = valueIter.next(); + if (next instanceof IPrimitiveType) { + b.append('"'); + String encodedValue = encodeValue(next); + encodedValue = encodedValue.replace("\\", "\\\\").replace("\"", "\\\""); + b.append(encodedValue); + b.append('"'); + } else { + b.append(encodeValue(next)); + } + if (valueIter.hasNext()) { + b.append(", "); + } + } + b.append("]"); + value = b.toString(); + } else { + if (!columnValues.isEmpty()) { + IBase firstColumnValue = columnValues.get(0); + value = encodeValue(firstColumnValue); + } + } + + values.add(value); + } + + myNextResource = null; + return new Row(myNextResourceSearchRow, values); + } + + private String encodeValue(IBase firstColumnValue) { + String value = null; + if (firstColumnValue instanceof IIdType) { + value = ((IIdType) firstColumnValue).getIdPart(); + } else if (firstColumnValue != null) { + value = myParser.encodeToString(firstColumnValue); + } + return value; + } + + private Row createAndStoreErrorRow(String errorMessage) { + myExhausted = true; + myNextResource = null; + myErrorRow = new Row(IHfqlExecutionResult.ROW_OFFSET_ERROR, List.of(errorMessage)); + return myErrorRow; + } + + @Override + public boolean isClosed() { + return false; + } + + @Override + public void close() { + // ignore + } + + @Override + public String getSearchId() { + return mySearchResult.getUuid(); + } + + @Override + public int getLimit() { + return myLimit != null ? 
myLimit : -1; + } + + @Override + public HfqlStatement getStatement() { + return myStatement; + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/StaticHfqlExecutionResult.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/StaticHfqlExecutionResult.java new file mode 100644 index 00000000000..bc8237acb5a --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/executor/StaticHfqlExecutionResult.java @@ -0,0 +1,124 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.fql.executor; + +import ca.uhn.fhir.jpa.fql.parser.HfqlStatement; + +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import javax.annotation.Nullable; + +/** + * @see IHfqlExecutionResult for information about the purpose of this class + */ +public class StaticHfqlExecutionResult implements IHfqlExecutionResult { + private final String mySearchId; + private final Iterator> myRowsIterator; + private int myNextRowOffset; + private HfqlStatement myStatement; + + /** + * Constructor for an empty result + * + * @param theSearchId The search ID associated with this result + */ + public StaticHfqlExecutionResult(@Nullable String theSearchId) { + this(theSearchId, new HfqlStatement(), Collections.emptyList()); + } + + /** + * Constructor for an empty result + * + * @param theSearchId The search ID associated with this result + */ + public StaticHfqlExecutionResult( + @Nullable String theSearchId, + List theColumnNames, + List theDataTypes, + List> theRows) { + this(theSearchId, toStatement(theColumnNames, theDataTypes), theRows); + } + + private static HfqlStatement toStatement(List theColumnNames, List theDataTypes) { + assert theColumnNames.size() == theDataTypes.size(); + + HfqlStatement retVal = new HfqlStatement(); + for (int i = 0; i < theColumnNames.size(); i++) { + retVal.addSelectClause(theColumnNames.get(i)) + .setAlias(theColumnNames.get(i)) + .setDataType(theDataTypes.get(i)); + } + return retVal; + } + + /** + * Constructor + */ + public StaticHfqlExecutionResult( + @Nullable String theSearchId, HfqlStatement theStatement, List> theRows) { + mySearchId = theSearchId; + myStatement = theStatement; + myRowsIterator = theRows.iterator(); + myNextRowOffset = 0; + } + + @Override + public boolean hasNext() { + return myRowsIterator.hasNext(); + } + + @Override + public Row getNextRow() { + return new Row(myNextRowOffset++, myRowsIterator.next()); + } + + @Override + public boolean isClosed() { 
+ return false; + } + + @Override + public void close() { + // ignore + } + + @Override + public String getSearchId() { + return mySearchId; + } + + @Override + public int getLimit() { + return 0; + } + + @Override + public HfqlStatement getStatement() { + return myStatement; + } + + public static IHfqlExecutionResult withError(String theErrorMessage) { + StaticHfqlExecutionResult retVal = new StaticHfqlExecutionResult( + null, List.of("Error"), List.of(HfqlDataTypeEnum.STRING), List.of(List.of(theErrorMessage))); + retVal.myNextRowOffset = IHfqlExecutionResult.ROW_OFFSET_ERROR; + return retVal; + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/HfqlRestClient.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/HfqlRestClient.java new file mode 100644 index 00000000000..a9df005161c --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/HfqlRestClient.java @@ -0,0 +1,83 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.fql.jdbc; + +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult; +import ca.uhn.fhir.rest.client.impl.HttpBasicAuthInterceptor; +import ca.uhn.fhir.util.IoUtil; +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.lang3.Validate; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; +import org.hl7.fhir.r4.model.Parameters; + +import java.sql.SQLException; +import java.util.concurrent.TimeUnit; + +import static ca.uhn.fhir.jpa.fql.util.HfqlConstants.DEFAULT_FETCH_SIZE; +import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; +import static org.apache.commons.lang3.StringUtils.isNotBlank; + +/** + * This is the HTTP/REST client used by the JDBC driver to talk to the FHIR server. + * We don't use the HAPI FHIR REST client even though we're talking to a HAPI FHIR + * REST server because the operation we're calling returns CSV data instead of + * FHIR data. Instead, we just use the Apache HTTPClient. + *

+ * Ideally in the future I'd like to explore using JDK primitives instead of even + * using the Apache client or HAPI FHIR in order to reduce the dependencies required + * in the JDBC driver, but that can be a problem for the future. + */ +public class HfqlRestClient { + public static final CSVFormat CSV_FORMAT = CSVFormat.DEFAULT.withRecordSeparator('\n'); + private final String myBaseUrl; + private final CloseableHttpClient myClient; + + public HfqlRestClient(String theBaseUrl, String theUsername, String thePassword) { + myBaseUrl = theBaseUrl; + + PoolingHttpClientConnectionManager connectionManager = + new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS); + connectionManager.setMaxTotal(99); + connectionManager.setDefaultMaxPerRoute(99); + HttpClientBuilder httpClientBuilder = HttpClientBuilder.create() + .setConnectionManager(connectionManager) + .setMaxConnPerRoute(99); + if (isNotBlank(theUsername) && isNotBlank(thePassword)) { + httpClientBuilder.addInterceptorLast(new HttpBasicAuthInterceptor(theUsername, thePassword)); + } + myClient = httpClientBuilder.build(); + } + + public IHfqlExecutionResult execute( + Parameters theRequestParameters, boolean theSupportsContinuations, Integer theFetchSize) + throws SQLException { + Integer fetchSize = theFetchSize; + fetchSize = defaultIfNull(fetchSize, DEFAULT_FETCH_SIZE); + Validate.isTrue(fetchSize > 0, "theFetchSize must be a positive integer, got: %s", fetchSize); + return new RemoteHfqlExecutionResult( + theRequestParameters, myBaseUrl, myClient, fetchSize, theSupportsContinuations); + } + + public void close() { + IoUtil.closeQuietly(myClient); + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcConnection.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcConnection.java new file mode 100644 index 00000000000..79627b4450f --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcConnection.java @@ -0,0 
+1,353 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.fql.jdbc; + +import ca.uhn.fhir.i18n.Msg; + +import java.sql.Array; +import java.sql.Blob; +import java.sql.CallableStatement; +import java.sql.Clob; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.NClob; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.SQLWarning; +import java.sql.SQLXML; +import java.sql.Savepoint; +import java.sql.Statement; +import java.sql.Struct; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.Executor; +import javax.annotation.Nonnull; + +class JdbcConnection implements Connection { + private final String myServerUrl; + private boolean myClosed; + private HfqlRestClient myClient; + private String myUsername; + private String myPassword; + + public JdbcConnection(String theServerUrl) { + myServerUrl = theServerUrl; + } + + @Override + public Statement createStatement() { + return new JdbcStatement(this); + } + + @Override + public PreparedStatement prepareStatement(String sql) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public CallableStatement prepareCall(String sql) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); 
+ } + + @Override + public String nativeSQL(String sql) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean getAutoCommit() { + return false; + } + + @Override + public void setAutoCommit(boolean autoCommit) { + // nothing + } + + @Override + public void commit() { + // nothing + } + + @Override + public void rollback() { + // nothing + } + + @Override + public void close() { + myClosed = true; + } + + @Override + public boolean isClosed() { + return myClosed; + } + + @Override + public DatabaseMetaData getMetaData() { + return new JdbcDatabaseMetadata(this, getClient()); + } + + @Override + public boolean isReadOnly() { + return true; + } + + @Override + public void setReadOnly(boolean readOnly) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getCatalog() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public void setCatalog(String catalog) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getTransactionIsolation() { + return Connection.TRANSACTION_READ_COMMITTED; + } + + @Override + public void setTransactionIsolation(int level) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public SQLWarning getWarnings() { + return null; + } + + @Override + public void clearWarnings() { + // nothing + } + + @Override + public Statement createStatement(int resultSetType, int resultSetConcurrency) { + return createStatement(); + } + + @Override + public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) + throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public Map> 
getTypeMap() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public void setTypeMap(Map> map) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getHoldability() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public void setHoldability(int holdability) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public Savepoint setSavepoint() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public Savepoint setSavepoint(String name) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public void rollback(Savepoint savepoint) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public void releaseSavepoint(Savepoint savepoint) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) { + return createStatement(resultSetType, resultSetConcurrency); + } + + @Override + public PreparedStatement prepareStatement( + String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public CallableStatement prepareCall( + String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public 
PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public Clob createClob() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public Blob createBlob() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public NClob createNClob() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public SQLXML createSQLXML() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean isValid(int timeout) { + return true; + } + + @Override + public void setClientInfo(String name, String value) { + // ignore + } + + @Override + public String getClientInfo(String name) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public Properties getClientInfo() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public void setClientInfo(Properties properties) { + // ignore + } + + @Override + public Array createArrayOf(String typeName, Object[] elements) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public Struct createStruct(String typeName, Object[] attributes) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getSchema() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public void setSchema(String schema) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public void abort(Executor executor) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int 
getNetworkTimeout() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public T unwrap(Class theInterface) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean isWrapperFor(Class theInterface) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + public HfqlRestClient getClient() { + if (myClient == null) { + myClient = new HfqlRestClient(myServerUrl, myUsername, myPassword); + } + return myClient; + } + + public void setUsername(String theUsername) { + myUsername = theUsername; + } + + public void setPassword(String thePassword) { + myPassword = thePassword; + } + + @Nonnull + static SQLException newSqlExceptionForUnsupportedOperation() { + return new SQLException(Msg.code(2394) + "This JDBC method is not yet supported by the HFQL JDBC Driver"); + } + + @Nonnull + static SQLFeatureNotSupportedException newSqlExceptionForFeatureNotSupported() { + return new SQLFeatureNotSupportedException( + Msg.code(2398) + "This JDBC method is not yet supported by the HFQL JDBC Driver"); + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcDatabaseMetadata.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcDatabaseMetadata.java new file mode 100644 index 00000000000..95f1d30e028 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcDatabaseMetadata.java @@ -0,0 +1,956 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.fql.jdbc; + +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult; +import ca.uhn.fhir.jpa.fql.util.HfqlConstants; +import ca.uhn.fhir.util.VersionUtil; +import org.hl7.fhir.r4.model.CodeType; +import org.hl7.fhir.r4.model.Parameters; + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.RowIdLifetime; +import java.sql.SQLException; + +import static ca.uhn.fhir.jpa.fql.jdbc.JdbcConnection.newSqlExceptionForUnsupportedOperation; + +public class JdbcDatabaseMetadata implements DatabaseMetaData { + private final Connection myConnection; + private final HfqlRestClient myRestClient; + + public JdbcDatabaseMetadata(Connection theConnection, HfqlRestClient theRestClient) { + myConnection = theConnection; + myRestClient = theRestClient; + } + + @Override + public boolean allProceduresAreCallable() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean allTablesAreSelectable() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getURL() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getUserName() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean isReadOnly() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean nullsAreSortedHigh() throws SQLException { + throw 
newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean nullsAreSortedLow() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean nullsAreSortedAtStart() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean nullsAreSortedAtEnd() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getDatabaseProductName() throws SQLException { + return "HAPI FHIR"; + } + + @Override + public String getDatabaseProductVersion() throws SQLException { + return VersionUtil.getVersion(); + } + + @Override + public String getDriverName() throws SQLException { + return "HAPI FHIR FQL JDBC"; + } + + @Override + public String getDriverVersion() throws SQLException { + return VersionUtil.getVersion(); + } + + @Override + public int getDriverMajorVersion() { + return 1; + } + + @Override + public int getDriverMinorVersion() { + return 1; + } + + @Override + public boolean usesLocalFiles() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean usesLocalFilePerTable() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsMixedCaseIdentifiers() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean storesUpperCaseIdentifiers() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean storesLowerCaseIdentifiers() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean storesMixedCaseIdentifiers() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean 
storesUpperCaseQuotedIdentifiers() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean storesLowerCaseQuotedIdentifiers() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean storesMixedCaseQuotedIdentifiers() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getIdentifierQuoteString() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getSQLKeywords() throws SQLException { + return ""; + } + + @Override + public String getNumericFunctions() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getStringFunctions() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getSystemFunctions() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getTimeDateFunctions() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getSearchStringEscape() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getExtraNameCharacters() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsAlterTableWithAddColumn() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsAlterTableWithDropColumn() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsColumnAliasing() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean nullPlusNonNullIsNull() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsConvert() throws 
SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsConvert(int fromType, int toType) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsTableCorrelationNames() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsDifferentTableCorrelationNames() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsExpressionsInOrderBy() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsOrderByUnrelated() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsGroupBy() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsGroupByUnrelated() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsGroupByBeyondSelect() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsLikeEscapeClause() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsMultipleResultSets() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsMultipleTransactions() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsNonNullableColumns() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsMinimumSQLGrammar() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsCoreSQLGrammar() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + 
public boolean supportsExtendedSQLGrammar() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsANSI92EntryLevelSQL() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsANSI92IntermediateSQL() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsANSI92FullSQL() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsIntegrityEnhancementFacility() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsOuterJoins() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsFullOuterJoins() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsLimitedOuterJoins() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getSchemaTerm() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getProcedureTerm() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getCatalogTerm() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean isCatalogAtStart() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getCatalogSeparator() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsSchemasInDataManipulation() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsSchemasInProcedureCalls() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public 
boolean supportsSchemasInTableDefinitions() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsSchemasInIndexDefinitions() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsSchemasInPrivilegeDefinitions() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsCatalogsInDataManipulation() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsCatalogsInProcedureCalls() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsCatalogsInTableDefinitions() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsCatalogsInIndexDefinitions() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsCatalogsInPrivilegeDefinitions() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsPositionedDelete() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsPositionedUpdate() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsSelectForUpdate() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsStoredProcedures() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsSubqueriesInComparisons() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsSubqueriesInExists() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean 
supportsSubqueriesInIns() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsSubqueriesInQuantifieds() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsCorrelatedSubqueries() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsUnion() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsUnionAll() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsOpenCursorsAcrossCommit() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsOpenCursorsAcrossRollback() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsOpenStatementsAcrossCommit() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsOpenStatementsAcrossRollback() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxBinaryLiteralLength() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxCharLiteralLength() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxColumnNameLength() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxColumnsInGroupBy() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxColumnsInIndex() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxColumnsInOrderBy() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public 
int getMaxColumnsInSelect() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxColumnsInTable() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxConnections() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxCursorNameLength() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxIndexLength() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxSchemaNameLength() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxProcedureNameLength() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxCatalogNameLength() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxRowSize() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean doesMaxRowSizeIncludeBlobs() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxStatementLength() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxStatements() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxTableNameLength() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxTablesInSelect() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getMaxUserNameLength() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getDefaultTransactionIsolation() throws SQLException { + throw 
newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsTransactions() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsTransactionIsolationLevel(int level) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsDataDefinitionAndDataManipulationTransactions() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsDataManipulationTransactionsOnly() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean dataDefinitionCausesTransactionCommit() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean dataDefinitionIgnoredInTransactions() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getProcedures(String catalog, String schemaPattern, String procedureNamePattern) + throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getProcedureColumns( + String catalog, String schemaPattern, String procedureNamePattern, String columnNamePattern) + throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getTables(String catalog, String schemaPattern, String tableNamePattern, String[] types) + throws SQLException { + Parameters input = new Parameters(); + input.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_INTROSPECT_TABLES)); + IHfqlExecutionResult outcome = myRestClient.execute(input, false, null); + return new JdbcResultSet(outcome); + } + + @Override + public ResultSet getSchemas() throws SQLException { + // Empty result set + return new JdbcResultSet(); + } + + @Override + public ResultSet getCatalogs() throws SQLException { + // Empty result set + return new 
JdbcResultSet(); + } + + @Override + public ResultSet getTableTypes() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) + throws SQLException { + Parameters input = new Parameters(); + input.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_INTROSPECT_COLUMNS)); + IHfqlExecutionResult outcome = myRestClient.execute(input, false, null); + return new JdbcResultSet(outcome); + } + + @Override + public ResultSet getColumnPrivileges(String catalog, String schema, String table, String columnNamePattern) + throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getTablePrivileges(String catalog, String schemaPattern, String tableNamePattern) + throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getBestRowIdentifier(String catalog, String schema, String table, int scope, boolean nullable) + throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getVersionColumns(String catalog, String schema, String table) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getPrimaryKeys(String catalog, String schema, String table) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getImportedKeys(String catalog, String schema, String table) throws SQLException { + return new JdbcResultSet(); + } + + @Override + public ResultSet getExportedKeys(String catalog, String schema, String table) throws SQLException { + return new JdbcResultSet(); + } + + @Override + public ResultSet getCrossReference( + String parentCatalog, + String parentSchema, + String parentTable, + String foreignCatalog, + String foreignSchema, + 
String foreignTable) + throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getTypeInfo() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getIndexInfo(String catalog, String schema, String table, boolean unique, boolean approximate) + throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsResultSetType(int type) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsResultSetConcurrency(int type, int concurrency) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean ownUpdatesAreVisible(int type) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean ownDeletesAreVisible(int type) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean ownInsertsAreVisible(int type) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean othersUpdatesAreVisible(int type) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean othersDeletesAreVisible(int type) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean othersInsertsAreVisible(int type) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean updatesAreDetected(int type) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean deletesAreDetected(int type) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean insertsAreDetected(int type) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public 
boolean supportsBatchUpdates() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getUDTs(String catalog, String schemaPattern, String typeNamePattern, int[] types) + throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public Connection getConnection() throws SQLException { + return myConnection; + } + + @Override + public boolean supportsSavepoints() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsNamedParameters() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsMultipleOpenResults() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsGetGeneratedKeys() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getSuperTypes(String catalog, String schemaPattern, String typeNamePattern) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getSuperTables(String catalog, String schemaPattern, String tableNamePattern) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getAttributes( + String catalog, String schemaPattern, String typeNamePattern, String attributeNamePattern) + throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsResultSetHoldability(int holdability) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getResultSetHoldability() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getDatabaseMajorVersion() throws SQLException { + return Integer.parseInt(VersionUtil.getVersion().split("\\.")[0]); + } + + @Override + public int 
getDatabaseMinorVersion() throws SQLException { + return Integer.parseInt(VersionUtil.getVersion().split("\\.")[1]); + } + + @Override + public int getJDBCMajorVersion() throws SQLException { + return 4; // JDBC spec version implemented by this driver (4.2), not the HAPI FHIR product version + } + + @Override + public int getJDBCMinorVersion() throws SQLException { + return 2; // JDBC spec version implemented by this driver (4.2), not the HAPI FHIR product version + } + + @Override + public int getSQLStateType() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean locatorsUpdateCopy() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsStatementPooling() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public RowIdLifetime getRowIdLifetime() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getSchemas(String catalog, String schemaPattern) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean supportsStoredFunctionsUsingCallSyntax() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean autoCommitFailureClosesAllResultSets() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getClientInfoProperties() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getFunctions(String catalog, String schemaPattern, String functionNamePattern) + throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getFunctionColumns( + String catalog, String schemaPattern, String functionNamePattern, String columnNamePattern) + throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getPseudoColumns( + String catalog, 
String schemaPattern, String tableNamePattern, String columnNamePattern) + throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean generatedKeyAlwaysReturned() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public T unwrap(Class theInterface) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean isWrapperFor(Class theInterface) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcDriver.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcDriver.java new file mode 100644 index 00000000000..e4a0e8767e9 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcDriver.java @@ -0,0 +1,117 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.fql.jdbc; + +import java.io.PrintStream; +import java.sql.*; +import java.util.Properties; +import java.util.logging.Logger; + +/** + * This is the JDBC driver class for the HFQL driver. It is intended to be + * imported into a JDBC-compliant database tool, and implements the basic + * functionality required to introspect the "database" and execute queries. + *

+ * Connections returned by this driver are only semi-stateful. In a normal + * JDBC driver, each connection represents an open and persistent TCP + * connection to the server with shared state between the client and the + * server, but in this driver we keep most of the state in the client. When + * a query is executed it is translated into a FHIR search (with further + * processing on the search results happening in + * {@link ca.uhn.fhir.jpa.fql.executor.HfqlExecutor}). + */ +public class JdbcDriver implements Driver { + private static final JdbcDriver INSTANCE = new JdbcDriver(); + public static final String URL_PREFIX = "jdbc:hapifhirql:"; + private static boolean ourRegistered; + + static { + load(); + } + + @Override + public Connection connect(String theUrl, Properties theProperties) throws SQLException { + String serverUrl = theUrl.substring(URL_PREFIX.length()); + + JdbcConnection connection = new JdbcConnection(serverUrl); + connection.setUsername(theProperties.getProperty("user", null)); + connection.setPassword(theProperties.getProperty("password", null)); + return connection; + } + + @Override + public boolean acceptsURL(String theUrl) { + return theUrl.startsWith(URL_PREFIX); + } + + @Override + public DriverPropertyInfo[] getPropertyInfo(String theUrl, Properties theInfo) { + return new DriverPropertyInfo[0]; + } + + @Override + public int getMajorVersion() { + return 1; + } + + @Override + public int getMinorVersion() { + return 0; + } + + @Override + public boolean jdbcCompliant() { + return false; + } + + @Override + public Logger getParentLogger() { + return Logger.getLogger(getClass().getPackageName()); + } + + public static synchronized Driver load() { + try { + if (!ourRegistered) { + ourRegistered = true; + DriverManager.registerDriver(INSTANCE); + } + } catch (SQLException e) { + logException(e); + } + + return INSTANCE; + } + + private static void logException(SQLException e) { + PrintStream out = System.out; + e.printStackTrace(out); + } 
+ + public static synchronized void unload() { + try { + if (ourRegistered) { + ourRegistered = false; + DriverManager.deregisterDriver(INSTANCE); + } + } catch (SQLException e) { + logException(e); + } + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcResultSet.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcResultSet.java new file mode 100644 index 00000000000..f33bea58ed8 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcResultSet.java @@ -0,0 +1,1273 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.fql.jdbc; + +import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult; +import ca.uhn.fhir.jpa.fql.executor.StaticHfqlExecutionResult; +import ca.uhn.fhir.jpa.fql.parser.HfqlStatement; +import org.apache.commons.lang3.StringUtils; + +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLWarning; +import java.sql.SQLXML; +import java.sql.Statement; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Calendar; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static ca.uhn.fhir.jpa.fql.jdbc.JdbcConnection.newSqlExceptionForFeatureNotSupported; +import static ca.uhn.fhir.jpa.fql.jdbc.JdbcConnection.newSqlExceptionForUnsupportedOperation; +import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; + +/** + * JDBC ResultSet for HFQL which is backed by a {@link IHfqlExecutionResult} instance + * (typically a {@link RemoteHfqlExecutionResult} that is streaming results from the + * server). 
+ */ +class JdbcResultSet implements ResultSet { + + /** + * @see https://en.wikipedia.org/wiki/SQLSTATE + */ + public static final String SQL_STATE_CODE_0F001_LOCATOR_EXCEPTION = "0F001"; + + private final IHfqlExecutionResult myResult; + private final Statement myStatement; + private List myNextRow; + private final JdbcResultSetMetadata myMetadata; + private final Map myColumnNameToIndex; + private int myRowCount; + private Object myLastValue; + + /** + * Empty Constructor + */ + public JdbcResultSet() { + this(new StaticHfqlExecutionResult(null)); + } + + /** + * Constructor + */ + public JdbcResultSet(IHfqlExecutionResult theResult) { + this(theResult, null); + } + + /** + * Constructor + */ + public JdbcResultSet(IHfqlExecutionResult theResult, Statement theStatement) { + myStatement = theStatement; + myResult = theResult; + myMetadata = new JdbcResultSetMetadata(); + myColumnNameToIndex = new HashMap<>(); + List selectClauses = myResult.getStatement().getSelectClauses(); + for (int i = 0; i < selectClauses.size(); i++) { + myColumnNameToIndex.put(selectClauses.get(i).getAlias(), i + 1); + } + } + + @Override + public boolean next() throws SQLException { + if (myResult.hasNext()) { + IHfqlExecutionResult.Row nextRow = myResult.getNextRow(); + if (nextRow.getRowOffset() == IHfqlExecutionResult.ROW_OFFSET_ERROR) { + String errorMessage = nextRow.getRowValues().get(0).toString(); + throw new SQLException(Msg.code(2395) + errorMessage, SQL_STATE_CODE_0F001_LOCATOR_EXCEPTION, -1); + } + + myNextRow = nextRow.getRowValues(); + myRowCount++; + return true; + } + return false; + } + + @Override + public void close() throws SQLException { + myResult.close(); + } + + @Override + public boolean wasNull() { + return myLastValue == null; + } + + private void validateColumnIndex(int columnIndex) throws SQLException { + if (columnIndex <= 0) { + throw new SQLException(Msg.code(2396) + "Invalid column index: " + columnIndex); + } + if (columnIndex > 
myResult.getStatement().getSelectClauses().size()) { + throw new SQLException(Msg.code(2397) + "Invalid column index: " + columnIndex); + } + } + + @Override + public String getString(int columnIndex) throws SQLException { + validateColumnIndex(columnIndex); + String retVal = (String) myNextRow.get(columnIndex - 1); + myLastValue = retVal; + return retVal; + } + + @Override + public int getInt(int columnIndex) throws SQLException { + validateColumnIndex(columnIndex); + Integer retVal = (Integer) myNextRow.get(columnIndex - 1); + myLastValue = retVal; + retVal = defaultIfNull(retVal, 0); + return retVal; + } + + @Override + public boolean getBoolean(int columnIndex) throws SQLException { + validateColumnIndex(columnIndex); + Boolean retVal = (Boolean) myNextRow.get(columnIndex - 1); + myLastValue = retVal; + retVal = defaultIfNull(retVal, Boolean.FALSE); + return retVal; + } + + @Override + public byte getByte(int columnIndex) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public short getShort(int columnIndex) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public long getLong(int columnIndex) throws SQLException { + validateColumnIndex(columnIndex); + Long retVal = (Long) myNextRow.get(columnIndex - 1); + myLastValue = retVal; + retVal = defaultIfNull(retVal, 0L); + return retVal; + } + + @Override + public float getFloat(int columnIndex) throws SQLException { + BigDecimal retVal = getBigDecimal(columnIndex); + return retVal != null ? retVal.floatValue() : 0f; + } + + @Override + public double getDouble(int columnIndex) throws SQLException { + BigDecimal retVal = getBigDecimal(columnIndex); + return retVal != null ? 
retVal.doubleValue() : 0d; + } + + @Override + public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { + return getBigDecimal(columnIndex); + } + + @Override + public byte[] getBytes(int columnIndex) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public Date getDate(int columnIndex) throws SQLException { + validateColumnIndex(columnIndex); + Object retVal = myNextRow.get(columnIndex - 1); + if (retVal != null) { + retVal = new Date(((java.util.Date) retVal).getTime()); + } + myLastValue = retVal; + return (Date) retVal; + } + + @Override + public Time getTime(int columnIndex) throws SQLException { + validateColumnIndex(columnIndex); + Object retVal = myNextRow.get(columnIndex - 1); + if (retVal != null) { + String time = (String) retVal; + if (StringUtils.countMatches(time, ':') == 1) { + time = time + ":00"; + } + int pointIdx = time.indexOf('.'); + if (pointIdx != -1) { + time = time.substring(0, pointIdx); + } + retVal = Time.valueOf(time); + } + myLastValue = retVal; + return (Time) retVal; + } + + @Override + public Timestamp getTimestamp(int columnIndex) throws SQLException { + validateColumnIndex(columnIndex); + Object retVal = myNextRow.get(columnIndex - 1); + if (retVal != null) { + retVal = new Timestamp(((java.util.Date) retVal).getTime()); + } + myLastValue = retVal; + return (Timestamp) retVal; + } + + @Override + public InputStream getAsciiStream(int columnIndex) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public InputStream getUnicodeStream(int columnIndex) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public InputStream getBinaryStream(int columnIndex) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public String getString(String columnLabel) throws SQLException { + return getString(findColumn(columnLabel)); + } + + @Override + public int 
getInt(String columnLabel) throws SQLException { + return getInt(findColumn(columnLabel)); + } + + @Override + public boolean getBoolean(String columnLabel) throws SQLException { + return getBoolean(findColumn(columnLabel)); + } + + @Override + public byte getByte(String columnLabel) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public short getShort(String columnLabel) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public long getLong(String columnLabel) throws SQLException { + return getLong(findColumn(columnLabel)); + } + + @Override + public float getFloat(String columnLabel) throws SQLException { + return getFloat(findColumn(columnLabel)); + } + + @Override + public double getDouble(String columnLabel) throws SQLException { + return getDouble(findColumn(columnLabel)); + } + + @Override + public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException { + return getBigDecimal(findColumn(columnLabel)); + } + + @Override + public byte[] getBytes(String columnLabel) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public Date getDate(String columnLabel) throws SQLException { + return getDate(findColumn(columnLabel)); + } + + @Override + public Time getTime(String columnLabel) throws SQLException { + return getTime(findColumn(columnLabel)); + } + + @Override + public Timestamp getTimestamp(String columnLabel) throws SQLException { + return getTimestamp(findColumn(columnLabel)); + } + + @Override + public InputStream getAsciiStream(String columnLabel) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public InputStream getUnicodeStream(String columnLabel) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public InputStream getBinaryStream(String columnLabel) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } 
+ + @Override + public SQLWarning getWarnings() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public void clearWarnings() { + // ignored + } + + @Override + public String getCursorName() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSetMetaData getMetaData() { + return myMetadata; + } + + @Override + public Object getObject(int columnIndex) throws SQLException { + switch (myResult.getStatement().getSelectClauses().get(columnIndex - 1).getDataType()) { + case INTEGER: + return getInt(columnIndex); + case BOOLEAN: + return getBoolean(columnIndex); + case DATE: + return getDate(columnIndex); + case TIMESTAMP: + return getTimestamp(columnIndex); + case LONGINT: + return getLong(columnIndex); + case TIME: + return getTime(columnIndex); + case DECIMAL: + return getBigDecimal(columnIndex); + case STRING: + case JSON: + default: + return getString(columnIndex); + } + } + + @Override + public Object getObject(String columnLabel) throws SQLException { + return getObject(findColumn(columnLabel)); + } + + @Override + public int findColumn(String columnLabel) throws SQLException { + Integer retVal = myColumnNameToIndex.get(columnLabel); + if (retVal != null) { + return retVal; + } + throw new SQLException( + Msg.code(2416) + "Unknown column: " + columnLabel + ". 
Valid columns: " + myColumnNameToIndex.keySet()); + } + + @Override + public Reader getCharacterStream(int columnIndex) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public Reader getCharacterStream(String columnLabel) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public BigDecimal getBigDecimal(int columnIndex) throws SQLException { + validateColumnIndex(columnIndex); + BigDecimal retVal = (BigDecimal) myNextRow.get(columnIndex - 1); + myLastValue = retVal; + return retVal; + } + + @Override + public BigDecimal getBigDecimal(String columnLabel) throws SQLException { + return getBigDecimal(findColumn(columnLabel)); + } + + @Override + public boolean isBeforeFirst() { + return myNextRow == null; + } + + @Override + public boolean isAfterLast() { + return !myResult.hasNext(); + } + + @Override + public boolean isFirst() throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public boolean isLast() throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void beforeFirst() throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void afterLast() throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public boolean first() throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public boolean last() throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public int getRow() { + return myRowCount; + } + + @Override + public boolean absolute(int row) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public boolean relative(int rows) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public boolean previous() throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } 
+ + @Override + public int getFetchDirection() { + return ResultSet.FETCH_FORWARD; + } + + @Override + public void setFetchDirection(int direction) { + // ignored + } + + @Override + public int getFetchSize() { + return 0; + } + + @Override + public void setFetchSize(int rows) { + // ignored + } + + @Override + public int getType() throws SQLException { + return ResultSet.TYPE_FORWARD_ONLY; + } + + @Override + public int getConcurrency() { + return ResultSet.CONCUR_READ_ONLY; + } + + @Override + public boolean rowUpdated() { + return false; + } + + @Override + public boolean rowInserted() { + return false; + } + + @Override + public boolean rowDeleted() { + return false; + } + + @Override + public void updateNull(int columnIndex) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBoolean(int columnIndex, boolean x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateByte(int columnIndex, byte x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateShort(int columnIndex, short x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateInt(int columnIndex, int x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateLong(int columnIndex, long x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateFloat(int columnIndex, float x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateDouble(int columnIndex, double x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateString(int 
columnIndex, String x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBytes(int columnIndex, byte[] x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateDate(int columnIndex, Date x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateTime(int columnIndex, Time x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateObject(int columnIndex, Object x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateNull(String columnLabel) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBoolean(String columnLabel, boolean x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateByte(String columnLabel, byte x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateShort(String columnLabel, short x) throws SQLException { + throw 
newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateInt(String columnLabel, int x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateLong(String columnLabel, long x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateFloat(String columnLabel, float x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateDouble(String columnLabel, double x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateString(String columnLabel, String x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBytes(String columnLabel, byte[] x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateDate(String columnLabel, Date x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateTime(String columnLabel, Time x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x, int length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader, int length) throws SQLException { + throw 
newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateObject(String columnLabel, Object x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void insertRow() throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateRow() throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void deleteRow() throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void refreshRow() throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void cancelRowUpdates() throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void moveToInsertRow() throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void moveToCurrentRow() throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Statement getStatement() throws SQLException { + return myStatement; + } + + @Override + public Object getObject(int columnIndex, Map> map) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Ref getRef(int columnIndex) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Blob getBlob(int columnIndex) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Clob getClob(int columnIndex) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Array getArray(int columnIndex) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Object getObject(String columnLabel, Map> 
map) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Ref getRef(String columnLabel) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Blob getBlob(String columnLabel) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Clob getClob(String columnLabel) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Array getArray(String columnLabel) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Date getDate(int columnIndex, Calendar cal) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Date getDate(String columnLabel, Calendar cal) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Time getTime(int columnIndex, Calendar cal) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Time getTime(String columnLabel, Calendar cal) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public URL getURL(int columnIndex) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public URL getURL(String columnLabel) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateRef(int columnIndex, Ref x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateRef(String columnLabel, Ref x) throws SQLException { + throw 
newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBlob(int columnIndex, Blob x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBlob(String columnLabel, Blob x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateClob(int columnIndex, Clob x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateClob(String columnLabel, Clob x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateArray(int columnIndex, Array x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateArray(String columnLabel, Array x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public RowId getRowId(int columnIndex) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public RowId getRowId(String columnLabel) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateRowId(int columnIndex, RowId x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateRowId(String columnLabel, RowId x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public int getHoldability() { + return ResultSet.CLOSE_CURSORS_AT_COMMIT; + } + + @Override + public boolean isClosed() { + return myResult.isClosed(); + } + + @Override + public void updateNString(int columnIndex, String nString) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateNString(String columnLabel, String nString) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateNClob(int columnIndex, NClob nClob) throws 
SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateNClob(String columnLabel, NClob nClob) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public NClob getNClob(int columnIndex) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public NClob getNClob(String columnLabel) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public SQLXML getSQLXML(int columnIndex) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public SQLXML getSQLXML(String columnLabel) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public String getNString(int columnIndex) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public String getNString(String columnLabel) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Reader getNCharacterStream(int columnIndex) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public Reader getNCharacterStream(String columnLabel) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateAsciiStream(int 
columnIndex, InputStream x, long length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateClob(int columnIndex, Reader reader, long length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateClob(String columnLabel, Reader reader, long length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateNCharacterStream(int 
columnIndex, Reader x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateClob(int columnIndex, Reader reader) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateClob(String columnLabel, Reader reader) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateNClob(int columnIndex, Reader reader) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public void updateNClob(String 
columnLabel, Reader reader) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public T getObject(int columnIndex, Class type) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public T getObject(String columnLabel, Class type) throws SQLException { + throw newSqlExceptionForFeatureNotSupported(); + } + + @Override + public T unwrap(Class theInterface) { + return null; + } + + @Override + public boolean isWrapperFor(Class theInterface) { + return false; + } + + private class JdbcResultSetMetadata implements ResultSetMetaData { + @Override + public int getColumnCount() { + return myColumnNameToIndex.size(); + } + + @Override + public boolean isAutoIncrement(int column) { + return false; + } + + @Override + public boolean isCaseSensitive(int column) { + return false; + } + + @Override + public boolean isSearchable(int column) { + return false; + } + + @Override + public boolean isCurrency(int column) { + return false; + } + + @Override + public int isNullable(int column) { + return columnNullableUnknown; + } + + @Override + public boolean isSigned(int column) { + return false; + } + + @Override + public int getColumnDisplaySize(int column) { + return 0; + } + + @Override + public String getColumnLabel(int column) { + return myResult.getStatement().getSelectClauses().get(column - 1).getAlias(); + } + + @Override + public String getColumnName(int column) { + return getColumnLabel(column); + } + + @Override + public String getSchemaName(int column) { + return null; + } + + @Override + public int getPrecision(int column) { + return 0; + } + + @Override + public int getScale(int column) { + return 0; + } + + @Override + public String getTableName(int column) { + return null; + } + + @Override + public String getCatalogName(int column) { + return null; + } + + @Override + public int getColumnType(int column) { + return myResult.getStatement() + .getSelectClauses() + .get(column - 1) + 
.getDataType() + .getSqlType(); + } + + @Override + public String getColumnTypeName(int column) { + return myResult.getStatement() + .getSelectClauses() + .get(column - 1) + .getDataType() + .name(); + } + + @Override + public boolean isReadOnly(int column) { + return true; + } + + @Override + public boolean isWritable(int column) { + return false; + } + + @Override + public boolean isDefinitelyWritable(int column) { + return false; + } + + @Override + public String getColumnClassName(int column) { + return String.class.getName(); + } + + @Override + public T unwrap(Class theInterface) { + return null; + } + + @Override + public boolean isWrapperFor(Class theInterface) { + return false; + } + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcStatement.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcStatement.java new file mode 100644 index 00000000000..2905055983b --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcStatement.java @@ -0,0 +1,276 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.fql.jdbc; + +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult; +import ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider; +import ca.uhn.fhir.jpa.fql.util.HfqlConstants; +import org.hl7.fhir.r4.model.Parameters; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLWarning; +import java.sql.Statement; + +import static ca.uhn.fhir.jpa.fql.jdbc.JdbcConnection.newSqlExceptionForUnsupportedOperation; + +class JdbcStatement implements Statement { + private final JdbcConnection myConnection; + private int myMaxRows; + private int myFetchSize = HfqlConstants.DEFAULT_FETCH_SIZE; + private JdbcResultSet myResultSet; + + public JdbcStatement(JdbcConnection theConnection) { + myConnection = theConnection; + } + + @Override + public ResultSet executeQuery(String sql) throws SQLException { + execute(sql); + return getResultSet(); + } + + @Override + public int executeUpdate(String sql) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public void close() { + // ignored + } + + @Override + public int getMaxFieldSize() { + return 0; + } + + @Override + public void setMaxFieldSize(int max) { + // ignored + } + + @Override + public int getMaxRows() { + return myMaxRows; + } + + @Override + public void setMaxRows(int theMaxRows) { + myMaxRows = theMaxRows; + } + + @Override + public void setEscapeProcessing(boolean enable) { + // ignored + } + + @Override + public int getQueryTimeout() { + return 0; + } + + @Override + public void setQueryTimeout(int seconds) { + // ignored + } + + @Override + public void cancel() { + // ignored + } + + @Override + public SQLWarning getWarnings() { + return null; + } + + @Override + public void clearWarnings() { + // ignored + } + + @Override + public void setCursorName(String name) { + // ignored + } + + @Override + public boolean execute(String sql) throws SQLException { + Integer limit = null; + if 
(getMaxRows() > 0) { + limit = getMaxRows(); + } + + int fetchSize = myFetchSize; + + Parameters input = HfqlRestProvider.newQueryRequestParameters(sql, limit, fetchSize); + IHfqlExecutionResult result = myConnection.getClient().execute(input, true, getFetchSize()); + + myResultSet = new JdbcResultSet(result, this); + return true; + } + + @Override + public ResultSet getResultSet() { + return myResultSet; + } + + @Override + public int getUpdateCount() { + return 0; + } + + @Override + public boolean getMoreResults() { + return false; + } + + @Override + public int getFetchDirection() { + return ResultSet.FETCH_FORWARD; + } + + @Override + public void setFetchDirection(int direction) { + // ignored + } + + @Override + public int getFetchSize() { + return myFetchSize; + } + + @Override + public void setFetchSize(int theFetchSize) { + myFetchSize = theFetchSize; + } + + @Override + public int getResultSetConcurrency() { + return ResultSet.CONCUR_READ_ONLY; + } + + @Override + public int getResultSetType() { + return ResultSet.TYPE_FORWARD_ONLY; + } + + @Override + public void addBatch(String sql) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public void clearBatch() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int[] executeBatch() { + return new int[0]; + } + + @Override + public Connection getConnection() { + return myConnection; + } + + @Override + public boolean getMoreResults(int current) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public ResultSet getGeneratedKeys() throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int executeUpdate(String sql, int[] columnIndexes) throws SQLException { + throw 
newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int executeUpdate(String sql, String[] columnNames) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean execute(String sql, int[] columnIndexes) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public boolean execute(String sql, String[] columnNames) throws SQLException { + throw newSqlExceptionForUnsupportedOperation(); + } + + @Override + public int getResultSetHoldability() { + return ResultSet.CLOSE_CURSORS_AT_COMMIT; + } + + @Override + public boolean isClosed() { + return false; + } + + @Override + public boolean isPoolable() { + return false; + } + + @Override + public void setPoolable(boolean thePoolable) { + // ignored + } + + @Override + public void closeOnCompletion() { + // ignored + } + + @Override + public boolean isCloseOnCompletion() { + return false; + } + + @Override + public T unwrap(Class theInterface) { + return null; + } + + @Override + public boolean isWrapperFor(Class theInterface) { + return false; + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/RemoteHfqlExecutionResult.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/RemoteHfqlExecutionResult.java new file mode 100644 index 00000000000..5b2c65b31b7 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/RemoteHfqlExecutionResult.java @@ -0,0 +1,302 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.fql.jdbc; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult; +import ca.uhn.fhir.jpa.fql.parser.HfqlStatement; +import ca.uhn.fhir.jpa.fql.util.HfqlConstants; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.client.apache.ResourceEntity; +import ca.uhn.fhir.rest.client.api.IGenericClient; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import ca.uhn.fhir.util.IoUtil; +import ca.uhn.fhir.util.JsonUtil; +import ca.uhn.fhir.util.ValidateUtil; +import org.apache.commons.csv.CSVParser; +import org.apache.commons.csv.CSVRecord; +import org.apache.commons.lang3.Validate; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.impl.client.CloseableHttpClient; +import org.hl7.fhir.instance.model.api.IPrimitiveType; +import org.hl7.fhir.r4.model.Binary; +import org.hl7.fhir.r4.model.CodeType; +import org.hl7.fhir.r4.model.DateTimeType; +import org.hl7.fhir.r4.model.DateType; +import org.hl7.fhir.r4.model.DecimalType; +import org.hl7.fhir.r4.model.IntegerType; +import org.hl7.fhir.r4.model.Parameters; +import org.hl7.fhir.r4.model.StringType; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Iterator; 
+import java.util.List; +import javax.servlet.http.HttpServletResponse; + +import static ca.uhn.fhir.jpa.fql.util.HfqlConstants.PROTOCOL_VERSION; +import static org.apache.commons.lang3.StringUtils.defaultIfBlank; +import static org.apache.commons.lang3.StringUtils.isNotBlank; + +/** + * This implementation of {@link IHfqlExecutionResult} is intended to be used within + * a remote client (ie a JDBC driver). It executes a call to a FHIR server, executing + * the {@link ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider#executeFql(IPrimitiveType, IPrimitiveType, IPrimitiveType, IPrimitiveType, IPrimitiveType, IPrimitiveType, IPrimitiveType, IPrimitiveType, IPrimitiveType, RequestDetails, HttpServletResponse)} + * operation, parses the response and returns it. + * + * @see IHfqlExecutionResult for more information about the purpose of this class + */ +public class RemoteHfqlExecutionResult implements IHfqlExecutionResult { + private final boolean mySupportsContinuations; + private final String myBaseUrl; + private final CloseableHttpClient myClient; + private final int myFetchSize; + private String mySearchId; + private int myLimit; + private InputStreamReader myReader; + private Iterator myIterator; + private int myCurrentFetchCount; + private CloseableHttpResponse myRequest; + private int myLastRowNumber; + private boolean myExhausted; + private HfqlStatement myStatement; + + public RemoteHfqlExecutionResult( + Parameters theRequestParameters, + String theBaseUrl, + CloseableHttpClient theClient, + int theFetchSize, + boolean theSupportsContinuations) + throws SQLException { + myBaseUrl = theBaseUrl; + myClient = theClient; + myFetchSize = theFetchSize; + mySupportsContinuations = theSupportsContinuations; + + HttpPost post = new HttpPost(myBaseUrl + "/" + HfqlConstants.HFQL_EXECUTE); + post.setEntity(new ResourceEntity(FhirContext.forR4Cached(), theRequestParameters)); + try { + myRequest = myClient.execute(post); + validateResponse(); + myReader = new 
InputStreamReader(myRequest.getEntity().getContent(), StandardCharsets.UTF_8); + CSVParser csvParser = new CSVParser(myReader, HfqlRestClient.CSV_FORMAT); + myIterator = csvParser.iterator(); + readHeaderRows(true); + } catch (IOException e) { + throw new SQLException(Msg.code(2400) + e.getMessage(), e); + } + } + + public RemoteHfqlExecutionResult(Parameters theRequestParameters, IGenericClient theClient) throws IOException { + myBaseUrl = null; + myClient = null; + myFetchSize = 100; + mySupportsContinuations = false; + Binary response = theClient + .operation() + .onServer() + .named(HfqlConstants.HFQL_EXECUTE) + .withParameters(theRequestParameters) + .returnResourceType(Binary.class) + .execute(); + String contentType = defaultIfBlank(response.getContentType(), ""); + if (contentType.contains(";")) { + contentType = contentType.substring(0, contentType.indexOf(';')); + } + contentType = contentType.trim(); + Validate.isTrue(Constants.CT_TEXT_CSV.equals(contentType), "Unexpected content-type: %s", contentType); + + myReader = new InputStreamReader(new ByteArrayInputStream(response.getContent()), StandardCharsets.UTF_8); + CSVParser csvParser = new CSVParser(myReader, HfqlRestClient.CSV_FORMAT); + myIterator = csvParser.iterator(); + readHeaderRows(true); + } + + private void validateResponse() { + Validate.isTrue( + myRequest.getStatusLine().getStatusCode() == 200, + "Server returned wrong status: %d", + myRequest.getStatusLine().getStatusCode()); + } + + private void readHeaderRows(boolean theFirstPage) { + // Protocol version + CSVRecord protocolVersionRow = myIterator.next(); + String protocolVersion = protocolVersionRow.get(0); + ValidateUtil.isTrueOrThrowInvalidRequest( + PROTOCOL_VERSION.equals(protocolVersion), + "Wrong protocol version, expected %s but got %s", + PROTOCOL_VERSION, + protocolVersion); + + // Search ID, Limit, Parsed Statement + CSVRecord searchIdRow = myIterator.next(); + mySearchId = searchIdRow.get(0); + myLimit = 
Integer.parseInt(searchIdRow.get(1)); + String statementJsonString = searchIdRow.get(2); + if (theFirstPage && isNotBlank(statementJsonString)) { + myStatement = JsonUtil.deserialize(statementJsonString, HfqlStatement.class); + } + myCurrentFetchCount = 0; + } + + @Override + public boolean hasNext() { + if (myExhausted) { + return false; + } + + boolean hasNext = myIterator.hasNext(); + if (!hasNext && myCurrentFetchCount < myFetchSize) { + myExhausted = true; + close(); + } else if (!hasNext) { + close(); + if (mySupportsContinuations) { + hasNext = executeContinuationSearch(); + } + } + + return hasNext; + } + + @Override + public Row getNextRow() { + Validate.isTrue(!myExhausted, "Search is exhausted. This is a bug."); + + List columnValues = new ArrayList<>(); + boolean first = true; + CSVRecord nextRecord = myIterator.next(); + myCurrentFetchCount++; + + for (String next : nextRecord) { + if (first) { + first = false; + myLastRowNumber = Integer.parseInt(next); + continue; + } + columnValues.add(next); + } + + for (int i = 0; i < columnValues.size(); i++) { + String existingValue = (String) columnValues.get(i); + if (isNotBlank(existingValue)) { + Object newValue = null; + switch (myStatement.getSelectClauses().get(i).getDataType()) { + case STRING: + case JSON: + // No action + break; + case TIME: + // No action (we represent times as strings internally) + break; + case INTEGER: + newValue = Integer.parseInt(existingValue); + break; + case BOOLEAN: + newValue = Boolean.parseBoolean(existingValue); + break; + case DATE: + DateType dateType = new DateType(); + dateType.setValueAsString(existingValue); + newValue = dateType.getValue(); + break; + case TIMESTAMP: + DateTimeType dateTimeType = new DateTimeType(); + dateTimeType.setValueAsString(existingValue); + newValue = dateTimeType.getValue(); + break; + case LONGINT: + newValue = Long.parseLong(existingValue); + break; + case DECIMAL: + newValue = new DecimalType(existingValue).getValue(); + break; + } + if 
(newValue != null) { + columnValues.set(i, newValue); + } + } else { + columnValues.set(i, null); + } + } + + return new Row(myLastRowNumber, columnValues); + } + + private boolean executeContinuationSearch() { + boolean hasNext; + HttpPost post = new HttpPost(myBaseUrl + "/" + HfqlConstants.HFQL_EXECUTE); + Parameters input = new Parameters(); + input.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_SEARCH_CONTINUATION)); + input.addParameter(HfqlConstants.PARAM_CONTINUATION, new StringType(mySearchId)); + input.addParameter(HfqlConstants.PARAM_OFFSET, new IntegerType(myLastRowNumber + 1)); + input.addParameter(HfqlConstants.PARAM_LIMIT, new IntegerType(myLimit)); + input.addParameter(HfqlConstants.PARAM_FETCH_SIZE, new IntegerType(myFetchSize)); + input.addParameter(HfqlConstants.PARAM_STATEMENT, new StringType(JsonUtil.serialize(myStatement, false))); + post.setEntity(new ResourceEntity(FhirContext.forR4Cached(), input)); + try { + myRequest = myClient.execute(post); + validateResponse(); + myReader = new InputStreamReader(myRequest.getEntity().getContent(), StandardCharsets.UTF_8); + CSVParser csvParser = new CSVParser(myReader, HfqlRestClient.CSV_FORMAT); + myIterator = csvParser.iterator(); + readHeaderRows(false); + } catch (IOException e) { + throw new InternalErrorException(Msg.code(2399) + e.getMessage(), e); + } + hasNext = myIterator.hasNext(); + return hasNext; + } + + @Override + public boolean isClosed() { + return myRequest == null; + } + + @Override + public void close() { + IoUtil.closeQuietly(myReader); + IoUtil.closeQuietly(myRequest); + myRequest = null; + } + + @Override + public String getSearchId() { + return mySearchId; + } + + @Override + public int getLimit() { + return myLimit; + } + + @Override + public HfqlStatement getStatement() { + return myStatement; + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlFhirPathParser.java 
b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlFhirPathParser.java new file mode 100644 index 00000000000..821fda283d2 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlFhirPathParser.java @@ -0,0 +1,218 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.fql.parser; + +import ca.uhn.fhir.context.*; +import ca.uhn.fhir.jpa.fql.executor.HfqlDataTypeEnum; +import org.apache.commons.text.WordUtils; + +import java.util.Map; +import javax.annotation.Nullable; + +import static java.util.Map.entry; + +public class HfqlFhirPathParser { + + private static final Map FHIR_DATATYPE_TO_FQL_DATATYPE; + + static { + FHIR_DATATYPE_TO_FQL_DATATYPE = Map.ofEntries( + entry("base64Binary", HfqlDataTypeEnum.STRING), + entry("boolean", HfqlDataTypeEnum.BOOLEAN), + entry("canonical", HfqlDataTypeEnum.STRING), + entry("code", HfqlDataTypeEnum.STRING), + entry("date", HfqlDataTypeEnum.DATE), + entry("dateTime", HfqlDataTypeEnum.TIMESTAMP), + entry("decimal", HfqlDataTypeEnum.DECIMAL), + entry("id", HfqlDataTypeEnum.STRING), + entry("instant", HfqlDataTypeEnum.TIMESTAMP), + entry("integer", HfqlDataTypeEnum.INTEGER), + entry("integer64", HfqlDataTypeEnum.LONGINT), + entry("markdown", HfqlDataTypeEnum.STRING), + entry("oid", HfqlDataTypeEnum.STRING), + entry("positiveInt", 
HfqlDataTypeEnum.INTEGER), + entry("string", HfqlDataTypeEnum.STRING), + entry("time", HfqlDataTypeEnum.TIME), + entry("unsignedInt", HfqlDataTypeEnum.INTEGER), + entry("uri", HfqlDataTypeEnum.STRING), + entry("url", HfqlDataTypeEnum.STRING), + entry("uuid", HfqlDataTypeEnum.STRING), + entry("xhtml", HfqlDataTypeEnum.STRING)); + } + + private final FhirContext myFhirContext; + + /** + * Constructor + */ + public HfqlFhirPathParser(FhirContext theFhirContext) { + myFhirContext = theFhirContext; + } + + /** + * Given a FHIRPath expression (and a resource type that it applies to), this + * method tries to determine the {@link HfqlDataTypeEnum HFQL Data Type} that the + * values will be when the expression is resolved. This is not nearly foolproof, + * so it is a best effort determination. If the type is ambiguous or can't be determined, + * this method will return {@link HfqlDataTypeEnum#STRING}. + */ + public HfqlDataTypeEnum determineDatatypeForPath(String theResourceType, String theFhirPath) { + + BaseRuntimeElementCompositeDefinition currentElementDefinition = + myFhirContext.getResourceDefinition(theResourceType); + RuntimePrimitiveDatatypeDefinition leafDefinition = null; + + HfqlLexer lexer = new HfqlLexer(theFhirPath); + boolean firstToken = true; + boolean potentiallyRepeatableAtCurrentPath = false; + while (lexer.hasNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART)) { + HfqlLexerToken nextToken = lexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART); + String nextTokenString = nextToken.getToken(); + + // If the first token is the resource type, we can ignore that + if (firstToken) { + firstToken = false; + if (nextTokenString.equals(theResourceType)) { + continue; + } + } + + if (".".equals(nextTokenString)) { + continue; + } + + /* + * If there's a round bracket then this is a function name and not an + * element name. In this case we'll just move on to the next element. 
+ * We're making the naive assumption here that the function is a filtering + * function such as in "Patient.identifier.where(system='http://foo').value" + * so that we can just skip the filter function and continue to navigate + * the element names as though the filter wasn't there. This is probably + * not going to hold true always, but it should be good enough for our + * basic type guessing. + * + * One specific case though that we deal with is the functions that take + * a collection and reduce it to a single element. In that case we assume + * we can't have a collection. + */ + if (nextTokenString.contains("(")) { + String keyword = nextToken.asKeyword(); + switch (keyword) { + case "FIRST()": + case "LAST()": + potentiallyRepeatableAtCurrentPath = false; + break; + case "TOINTEGER()": + if (!lexer.hasNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART)) { + return HfqlDataTypeEnum.INTEGER; + } + break; + } + continue; + } + + /* + * If the element has an offset operator (e.g. "name[3]") then + * ignore it since we only care about the element name part. 
+ */ + boolean hasArrayIndex = false; + int leftSquareBracketIndex = nextTokenString.indexOf('['); + if (leftSquareBracketIndex != -1 && nextTokenString.endsWith("]")) { + nextTokenString = nextTokenString.substring(0, leftSquareBracketIndex); + hasArrayIndex = true; + } + + BaseRuntimeChildDefinition childDefForNode = currentElementDefinition.getChildByName(nextTokenString); + if (childDefForNode == null) { + childDefForNode = currentElementDefinition.getChildByName(nextTokenString + "[x]"); + if (childDefForNode != null) { + if (lexer.peekNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART) + .getToken() + .equals(".")) { + lexer.consumeNextToken(); + } + if (lexer.hasNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART)) { + String token = lexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART) + .getToken(); + if (token.startsWith("ofType(") && token.endsWith(")")) { + String type = token.substring(7, token.length() - 1); + nextTokenString = nextTokenString + WordUtils.capitalize(type); + } + } + } + } + + if (childDefForNode != null) { + + if (childDefForNode.getMax() != 1 && !hasArrayIndex) { + potentiallyRepeatableAtCurrentPath = true; + } + + if (childDefForNode.getValidChildNames().contains(nextTokenString)) { + BaseRuntimeElementDefinition elementDefForNode = childDefForNode.getChildByName(nextTokenString); + if (elementDefForNode != null) { + if (elementDefForNode instanceof BaseRuntimeElementCompositeDefinition) { + currentElementDefinition = (BaseRuntimeElementCompositeDefinition) elementDefForNode; + continue; + } else if (elementDefForNode instanceof RuntimePrimitiveDatatypeDefinition) { + leafDefinition = (RuntimePrimitiveDatatypeDefinition) elementDefForNode; + continue; + } + } + } + } + + break; + } + + if (potentiallyRepeatableAtCurrentPath) { + return HfqlDataTypeEnum.JSON; + } + + if (leafDefinition != null) { + String typeName = leafDefinition.getName(); + return getHfqlDataTypeForFhirType(typeName); + } + + return null; + } + + static 
HfqlDataTypeEnum getHfqlDataTypeForFhirType(String theTypeName) { + return FHIR_DATATYPE_TO_FQL_DATATYPE.get(theTypeName); + } + + @Nullable + private static String getNextFhirPathPartTokenOrNull(HfqlLexer lexer) { + String finalToken = null; + if (lexer.hasNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART)) { + finalToken = lexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART) + .getToken(); + } + + if (".".equals(finalToken)) { + if (lexer.hasNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART)) { + finalToken = lexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART) + .getToken(); + } + } + + return finalToken; + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlLexer.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlLexer.java new file mode 100644 index 00000000000..b8f4b850a53 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlLexer.java @@ -0,0 +1,255 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.fql.parser; + +import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.parser.DataFormatException; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import org.apache.commons.lang3.Validate; + +import java.util.ArrayList; +import java.util.List; +import javax.annotation.Nonnull; + +import static java.lang.Character.isWhitespace; + +/** + * Just a simple lexer used to parse HFQL queries and FHIRPath expressions. The lexer + * returns a stream of tokens and can use different lexing rules depending on the + * {@link HfqlLexerOptions} passed in. + */ +class HfqlLexer { + + private final char[] myInput; + private final StringBuilder myBuffer = new StringBuilder(); + private int myPosition = 0; + private int myLine = 0; + private int myColumn = 0; + private int myParenDepth = 0; + private LexerState myState = LexerState.INITIAL; + private String myNextToken; + private int myNextTokenLine; + private int myNextTokenColumn; + private int myNextTokenStartPosition; + private HfqlLexerOptions myNextTokenOptions; + + public HfqlLexer(String theInput) { + myInput = theInput.toCharArray(); + } + + /** + * Returns null when no tokens remain + */ + @Nonnull + public HfqlLexerToken getNextToken() { + return getNextToken(HfqlLexerOptions.HFQL_TOKEN); + } + + /** + * Returns null when no tokens remain + */ + @Nonnull + public HfqlLexerToken getNextToken(@Nonnull HfqlLexerOptions theOptions) { + lexNextToken(theOptions); + Validate.notBlank(myNextToken, "No next token is available"); + HfqlLexerToken token = new HfqlLexerToken(myNextToken, myNextTokenLine, myNextTokenColumn); + myNextToken = null; + return token; + } + + private void lexNextToken(@Nonnull HfqlLexerOptions theOptions) { + if (myNextToken != null) { + if (theOptions == myNextTokenOptions) { + // Already have a token, no action needed + return; + } else { + // Rewind because the options have changed + myNextToken = null; + myPosition = myNextTokenStartPosition; + } + } + + 
while (true) { + if (myPosition == myInput.length) { + if (myBuffer.length() > 0) { + if (myState == LexerState.IN_SINGLE_QUOTED_STRING || myParenDepth > 0) { + throw new InvalidRequestException( + Msg.code(2401) + "Unexpected end of string at position " + describePosition()); + } + setNextToken(theOptions, myBuffer.toString()); + } + return; + } + + char nextChar = myInput[myPosition]; + + handleNextChar(theOptions, nextChar); + + if (myNextToken != null) { + return; + } + + myPosition++; + if (nextChar == '\n') { + myLine++; + myColumn = 0; + } else if (nextChar != '\r') { + myColumn++; + } + } + } + + private void setNextToken(@Nonnull HfqlLexerOptions theOptions, String theNextToken) { + myNextTokenOptions = theOptions; + myNextToken = theNextToken; + myBuffer.setLength(0); + myState = LexerState.INITIAL; + } + + private void handleNextChar(@Nonnull HfqlLexerOptions theOptions, final char theNextChar) { + + if (theOptions.isSlurpParens()) { + if (theNextChar == '(') { + myParenDepth++; + } else if (theNextChar == ')') { + myParenDepth--; + } + } + + switch (myState) { + case INITIAL: { + if (isWhitespace(theNextChar)) { + return; + } + + if (theNextChar == '\'') { + myNextTokenLine = myLine; + myNextTokenColumn = myColumn; + myState = LexerState.IN_SINGLE_QUOTED_STRING; + myBuffer.append(theNextChar); + return; + } + + if (theOptions.getSingleCharTokenCharacters().contains(theNextChar)) { + myNextTokenStartPosition = myPosition; + setNextToken(theOptions, Character.toString(theNextChar)); + myPosition++; + return; + } + + if (theOptions.getMultiCharTokenCharacters().contains(theNextChar)) { + myNextTokenStartPosition = myPosition; + myNextTokenOptions = theOptions; + myNextTokenLine = myLine; + myNextTokenColumn = myColumn; + myState = LexerState.IN_TOKEN; + myBuffer.append(theNextChar); + return; + } + + break; + } + + case IN_TOKEN: { + if (theOptions.getMultiCharTokenCharacters().contains(theNextChar)) { + myBuffer.append(theNextChar); + return; + } + + if 
(myParenDepth > 0) { + myBuffer.append(theNextChar); + return; + } + + setNextToken(theOptions, myBuffer.toString()); + return; + } + + case IN_SINGLE_QUOTED_STRING: { + if (theNextChar == '\'') { + myBuffer.append(theNextChar); + myPosition++; + setNextToken(theOptions, myBuffer.toString()); + return; + } + if (theNextChar == '\\') { + if (myPosition < myInput.length - 1) { + char followingChar = myInput[myPosition + 1]; + if (followingChar == '\'') { + myBuffer.append(followingChar); + myPosition++; + return; + } + } + } + myBuffer.append(theNextChar); + return; + } + } + + throw new DataFormatException(Msg.code(2405) + "Unexpected character at position " + describePosition() + ": '" + + theNextChar + "' (" + (int) theNextChar + ")"); + } + + private String describePosition() { + return "[line " + myLine + ", column " + myColumn + "]"; + } + + public List allTokens() { + return allTokens(HfqlLexerOptions.HFQL_TOKEN); + } + + public List allTokens(@Nonnull HfqlLexerOptions theOptions) { + ArrayList retVal = new ArrayList<>(); + while (hasNextToken(theOptions)) { + retVal.add(getNextToken(theOptions).toString()); + } + return retVal; + } + + public boolean hasNextToken(@Nonnull HfqlLexerOptions theOptions) { + lexNextToken(theOptions); + return myNextToken != null; + } + + /** + * This method should only be called if there is a token already available + * (meaning that {@link #hasNextToken(HfqlLexerOptions) + * has been called). 
+ */ + public void consumeNextToken() { + Validate.isTrue(myNextToken != null); + myNextToken = null; + } + + public HfqlLexerToken peekNextToken(HfqlLexerOptions theOptions) { + lexNextToken(theOptions); + if (myNextToken == null) { + return null; + } + return new HfqlLexerToken(myNextToken, myNextTokenLine, myNextTokenColumn); + } + + private enum LexerState { + INITIAL, + IN_SINGLE_QUOTED_STRING, + IN_TOKEN + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlLexerOptions.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlLexerOptions.java new file mode 100644 index 00000000000..f724d521c7c --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlLexerOptions.java @@ -0,0 +1,116 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.fql.parser; + +import java.util.Set; + +public enum HfqlLexerOptions { + + /** + * Standard HFQL tokenization rules for when we're not expecting anything + * more specialized. 
+ */ + HFQL_TOKEN( + Set.of( + 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', + 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', + 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '0', '1', '2', '3', '4', '5', '6', '7', + '8', '9', '.', '[', ']', '_'), + Set.of(',', '=', '(', ')', '|', ':', '*'), + false), + + /** + * A FHIR search parameter name. + */ + SEARCH_PARAMETER_NAME( + Set.of( + 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', + 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', + 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '0', '1', '2', '3', '4', '5', '6', '7', + '8', '9', '_', ':', '.', '-'), + Set.of(), + false), + + /** + * A complete FHIRPath expression. + */ + FHIRPATH_EXPRESSION( + Set.of( + 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', + 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', + 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '0', '1', '2', '3', '4', '5', '6', '7', + '8', '9', '.', '[', ']', '_', '(', ')', '!', '~', '<', '>', '+', '-'), + Set.of(',', '|', ':', '*', '='), + true), + + /** + * Returns individual dot-parts of a FHIRPath expression as individual tokens, and also returns + * dots as separate tokens. 
+ */ + FHIRPATH_EXPRESSION_PART( + Set.of( + 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', + 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', + 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '0', '1', '2', '3', '4', '5', '6', '7', + '8', '9', '[', ']', '_', '(', ')', '+', '-'), + Set.of(',', '=', '|', ':', '*', '.'), + true); + + private final Set myMultiCharTokenCharacters; + private final boolean mySlurpParens; + private final Set mySingleCharTokenCharacters; + + HfqlLexerOptions( + Set theMultiCharTokenCharacters, + Set theSingleCharTokenCharacters, + boolean theSlurpParens) { + myMultiCharTokenCharacters = theMultiCharTokenCharacters; + mySingleCharTokenCharacters = theSingleCharTokenCharacters; + mySlurpParens = theSlurpParens; + + if (mySlurpParens) { + assert myMultiCharTokenCharacters.contains('('); + assert !mySingleCharTokenCharacters.contains('('); + } + } + + /** + * These characters are treated as a single character token if they are found + */ + public Set getSingleCharTokenCharacters() { + return mySingleCharTokenCharacters; + } + + /** + * These characters are valid as a part of a multi-character token + */ + public Set getMultiCharTokenCharacters() { + return myMultiCharTokenCharacters; + } + + /** + * If we encounter a ( character in the token, should we grab everything until we find a + * matching ) character, regardless of which characters and whitespace are found between + * the parens? 
+ */ + public boolean isSlurpParens() { + return mySlurpParens; + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlLexerToken.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlLexerToken.java new file mode 100644 index 00000000000..2458f773d84 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlLexerToken.java @@ -0,0 +1,85 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.fql.parser; + +import org.apache.commons.lang3.StringUtils; + +import java.util.Locale; +import javax.annotation.Nonnull; + +class HfqlLexerToken { + + @Nonnull + public final String myToken; + + private final int myLine; + private final int myColumn; + + HfqlLexerToken(@Nonnull String theToken, int theLine, int theColumn) { + myToken = theToken; + myLine = theLine; + myColumn = theColumn; + } + + @Nonnull + String getToken() { + return myToken; + } + + int getLine() { + return myLine; + } + + int getColumn() { + return myColumn; + } + + /** + * Returns the token as a normalized keyword string. Normalization + * returns a capitalized version of the token. 
+ */ + @Nonnull + public String asKeyword() { + return myToken.toUpperCase(Locale.US); + } + + @Nonnull + public String asString() { + return myToken; + } + + @Nonnull + public String describePosition() { + return "[line=" + getLine() + ", column=" + getColumn() + "]"; + } + + public boolean isQuotedString() { + return StringUtils.startsWith(myToken, "'") && StringUtils.endsWith(myToken, "'"); + } + + @Override + public String toString() { + return myToken; + } + + public Integer asInteger() throws NumberFormatException { + return Integer.parseInt(getToken()); + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlStatement.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlStatement.java new file mode 100644 index 00000000000..a6419017c3a --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlStatement.java @@ -0,0 +1,325 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.fql.parser; + +import ca.uhn.fhir.jpa.fql.executor.HfqlDataTypeEnum; +import ca.uhn.fhir.model.api.IModelJson; +import ca.uhn.fhir.util.ValidateUtil; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +/** + * This class represents a parsed HFQL expression tree. It is useful for + * passing over the wire, but it should not be considered a stable model (in + * other words, don't persist these things long-term). + */ +public class HfqlStatement implements IModelJson { + + @JsonProperty("select") + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List mySelectClauses = new ArrayList<>(); + + @JsonProperty("where") + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List myWhereClauses = new ArrayList<>(); + + @JsonProperty("groupBy") + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List myGroupByClauses = new ArrayList<>(); + + @JsonProperty("orderBy") + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List myOrderByClauses = new ArrayList<>(); + + @JsonProperty("fromResourceName") + private String myFromResourceName; + + @JsonProperty("limit") + private Integer myLimit; + + public List getSelectClauses() { + return mySelectClauses; + } + + public String getFromResourceName() { + return myFromResourceName; + } + + public void setFromResourceName(String theFromResourceName) { + myFromResourceName = theFromResourceName; + } + + @Nonnull + public SelectClause addSelectClause(@Nonnull String theClause) { + SelectClauseOperator operator = SelectClauseOperator.SELECT; + return addSelectClause(theClause, operator); + } + + @Nonnull + public SelectClause addSelectClause(@Nonnull String theClause, @Nonnull SelectClauseOperator operator) { + SelectClause clause = new SelectClause(); + 
clause.setClause(theClause); + clause.setOperator(operator); + mySelectClauses.add(clause); + return clause; + } + + public WhereClause addWhereClause() { + WhereClause clause = new WhereClause(); + myWhereClauses.add(clause); + return clause; + } + + public void addWhereClause(String theLeft, WhereClauseOperatorEnum theOperator) { + WhereClause whereClause = addWhereClause(); + whereClause.setLeft(theLeft); + whereClause.setOperator(theOperator); + } + + public List getWhereClauses() { + return myWhereClauses; + } + + @Nullable + public Integer getLimit() { + return myLimit; + } + + public void setLimit(Integer theLimit) { + myLimit = theLimit; + } + + public void addGroupByClause(String theGroupByClause) { + ValidateUtil.isNotBlankOrThrowIllegalArgument(theGroupByClause, "theGroupByClause must not be null or blank"); + getGroupByClauses().add(theGroupByClause); + } + + public List getGroupByClauses() { + if (myGroupByClauses == null) { + myGroupByClauses = new ArrayList<>(); + } + return myGroupByClauses; + } + + public boolean hasCountClauses() { + return getSelectClauses().stream().anyMatch(t -> t.getOperator() == SelectClauseOperator.COUNT); + } + + public OrderByClause addOrderByClause(String theClause, boolean theAscending) { + ValidateUtil.isNotBlankOrThrowIllegalArgument(theClause, "theClause must not be null or blank"); + OrderByClause clause = new OrderByClause(); + clause.setClause(theClause); + clause.setAscending(theAscending); + getOrderByClauses().add(clause); + return clause; + } + + public List getOrderByClauses() { + if (myOrderByClauses == null) { + myOrderByClauses = new ArrayList<>(); + } + return myOrderByClauses; + } + + public int findSelectClauseIndex(String theClause) { + for (int i = 0; i < getSelectClauses().size(); i++) { + if (theClause.equals(getSelectClauses().get(i).getClause()) + || theClause.equals(getSelectClauses().get(i).getAlias())) { + return i; + } + } + return -1; + } + + public boolean hasOrderClause() { + return
!getOrderByClauses().isEmpty(); + } + + public List toSelectedColumnAliases() { + return mySelectClauses.stream().map(SelectClause::getAlias).collect(Collectors.toList()); + } + + public List toSelectedColumnDataTypes() { + return mySelectClauses.stream().map(SelectClause::getDataType).collect(Collectors.toList()); + } + + public SelectClause addSelectClauseAndAlias(String theSelectClause) { + return addSelectClause(theSelectClause).setAlias(theSelectClause); + } + + public enum WhereClauseOperatorEnum { + EQUALS, + IN, + UNARY_BOOLEAN, + SEARCH_MATCH + } + + public enum SelectClauseOperator { + SELECT, + COUNT + } + + public static class OrderByClause implements IModelJson { + + @JsonProperty("clause") + private String myClause; + + @JsonProperty("ascending") + private boolean myAscending; + + public String getClause() { + return myClause; + } + + public void setClause(String theClause) { + myClause = theClause; + } + + public boolean isAscending() { + return myAscending; + } + + public void setAscending(boolean theAscending) { + myAscending = theAscending; + } + } + + public static class SelectClause implements IModelJson { + @JsonProperty("clause") + private String myClause; + + @JsonProperty("alias") + private String myAlias; + + @JsonProperty("operator") + private SelectClauseOperator myOperator; + + @JsonProperty("dataType") + private HfqlDataTypeEnum myDataType; + + /** + * Constructor + */ + public SelectClause() { + // nothing + } + + /** + * Constructor + * + * @param theClause The clause (will be used as both the clause and the alias) + */ + public SelectClause(String theClause) { + setOperator(SelectClauseOperator.SELECT); + setClause(theClause); + } + + public HfqlDataTypeEnum getDataType() { + return myDataType; + } + + public SelectClause setDataType(HfqlDataTypeEnum theDataType) { + myDataType = theDataType; + return this; + } + + public SelectClauseOperator getOperator() { + return myOperator; + } + + public void setOperator(SelectClauseOperator 
theOperator) { + myOperator = theOperator; + } + + public String getAlias() { + return myAlias; + } + + public SelectClause setAlias(String theAlias) { + myAlias = theAlias; + return this; + } + + public String getClause() { + return myClause; + } + + public void setClause(String theClause) { + myClause = theClause; + } + } + + public static class WhereClause implements IModelJson { + + @JsonProperty("left") + private String myLeft; + + @JsonProperty("operator") + private WhereClauseOperatorEnum myOperator; + + @JsonProperty("right") + private List myRight = new ArrayList<>(); + + public WhereClauseOperatorEnum getOperator() { + return myOperator; + } + + public void setOperator(WhereClauseOperatorEnum theOperator) { + myOperator = theOperator; + } + + public String getLeft() { + return myLeft; + } + + public void setLeft(String theLeft) { + myLeft = theLeft; + } + + public List getRight() { + return myRight; + } + + public void addRight(String theRight) { + myRight.add(theRight); + } + + /** + * Returns the {@link #getRight() right} values as raw strings. That + * means that any surrounding quote marks are stripped. + */ + public List getRightAsStrings() { + List retVal = new ArrayList<>(); + for (String next : getRight()) { + if (next.startsWith("'")) { + next = next.substring(1, next.length() - 1); + } + retVal.add(next); + } + return retVal; + } + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlStatementParser.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlStatementParser.java new file mode 100644 index 00000000000..0c6ede1489a --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/parser/HfqlStatementParser.java @@ -0,0 +1,588 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. 
+ * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.fql.parser; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.parser.DataFormatException; +import ca.uhn.fhir.util.UrlUtil; +import org.apache.commons.lang3.Validate; + +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.isNotBlank; + +public class HfqlStatementParser { + + public static final String KEYWORD_AND = "AND"; + public static final String KEYWORD_WHERE = "WHERE"; + public static final String KEYWORD_SELECT = "SELECT"; + public static final String KEYWORD_FROM = "FROM"; + public static final String KEYWORD_LIMIT = "LIMIT"; + public static final String KEYWORD_GROUP = "GROUP"; + public static final String KEYWORD_ORDER = "ORDER"; + public static final String KEYWORD_TRUE = "TRUE"; + public static final String KEYWORD_FALSE = "FALSE"; + private static final Set DIRECTIVE_KEYWORDS = + Set.of(KEYWORD_FROM, KEYWORD_GROUP, KEYWORD_LIMIT, KEYWORD_ORDER, KEYWORD_WHERE, KEYWORD_SELECT); + private final HfqlLexer myLexer; + private final FhirContext myFhirContext; + private BaseState myState; + + private HfqlStatement myStatement; + + public HfqlStatementParser(FhirContext theFhirContext, String 
theInput) { + myFhirContext = theFhirContext; + myLexer = new HfqlLexer(theInput); + myState = new InitialState(); + } + + /** + * This method may only be called once for a given instance + */ + public HfqlStatement parse() { + Validate.isTrue(myStatement == null, "Already completed parsing"); + myStatement = new HfqlStatement(); + + while (myLexer.hasNextToken(myState.getLexerOptions())) { + HfqlLexerToken nextToken = myLexer.getNextToken(myState.getLexerOptions()); + myState.consume(nextToken); + } + + if (isBlank(myStatement.getFromResourceName())) { + throw newExceptionUnexpectedTokenExpectToken(null, KEYWORD_FROM); + } + + if (myStatement.getSelectClauses().isEmpty()) { + throw newExceptionUnexpectedTokenExpectToken(null, KEYWORD_SELECT); + } + + Set existingAliases = new HashSet<>(); + for (HfqlStatement.SelectClause next : myStatement.getSelectClauses()) { + if (isNotBlank(next.getAlias())) { + if (!existingAliases.add(next.getAlias())) { + throw new DataFormatException(Msg.code(2414) + "Duplicate SELECT column alias: " + + UrlUtil.sanitizeUrlPart(next.getAlias())); + } + } + } + for (HfqlStatement.SelectClause next : myStatement.getSelectClauses()) { + if (isBlank(next.getAlias())) { + String candidateAlias = next.getClause(); + int nextSuffix = 2; + while (existingAliases.contains(candidateAlias)) { + candidateAlias = next.getClause() + nextSuffix; + nextSuffix++; + } + existingAliases.add(candidateAlias); + next.setAlias(candidateAlias); + } + } + + return myStatement; + } + + @Nonnull + private HfqlLexerToken getNextTokenRequired(@Nonnull HfqlLexerOptions theOptions) { + if (!myLexer.hasNextToken(theOptions)) { + throw newExceptionUnexpectedToken(null); + } + return myLexer.getNextToken(theOptions); + } + + @Nonnull + private static DataFormatException newExceptionUnexpectedToken(@Nullable HfqlLexerToken theToken) { + return newExceptionUnexpectedTokenExpectDescription(theToken, null); + } + + @Nonnull + private static DataFormatException 
newExceptionUnexpectedTokenExpectToken( + @Nullable HfqlLexerToken theToken, @Nonnull String theExpectedToken) { + return newExceptionUnexpectedTokenExpectDescription(theToken, "\"" + theExpectedToken + "\""); + } + + @Nonnull + private static DataFormatException newExceptionUnexpectedTokenExpectDescription( + @Nullable HfqlLexerToken theToken, @Nullable String theExpectedDescription) { + StringBuilder b = new StringBuilder(); + b.append("Unexpected "); + if (theToken != null) { + b.append("token"); + } else { + b.append("end of stream"); + } + if (theExpectedDescription != null) { + b.append(" (expected "); + b.append(theExpectedDescription); + b.append(")"); + } + if (theToken != null) { + b.append(" at position "); + b.append(theToken.describePosition()); + b.append(": "); + b.append(theToken.getToken()); + } + String message = b.toString(); + return new DataFormatException(message); + } + + @Nonnull + private static DataFormatException newExceptionUnknownResourceType(HfqlLexerToken theToken, String resourceType) { + return new DataFormatException("Invalid FROM statement. 
Unknown resource type '" + resourceType + + "' at position: " + theToken.describePosition()); + } + + private static void validateNotPresent(List theClauses, HfqlLexerToken theKeyword) { + if (!theClauses.isEmpty()) { + throw newExceptionUnexpectedToken(theKeyword); + } + } + + private static void validateNotPresent(Object theValue, HfqlLexerToken theKeyword) { + if (theValue != null) { + throw newExceptionUnexpectedToken(theKeyword); + } + } + + /** + * No tokens consumed yet + */ + public class InitialState extends BaseRootState { + // nothing + } + + /** + * Have consumed a 'from' token but not a resource type yet + */ + public class StateFromStart extends BaseState { + @Override + void consume(HfqlLexerToken theToken) { + String resourceType = theToken.asString(); + if (!myFhirContext.getResourceTypes().contains(resourceType)) { + throw newExceptionUnknownResourceType(theToken, resourceType); + } + myStatement.setFromResourceName(resourceType); + myState = new StateFromAfter(); + } + } + + /** + * Have consumed a 'from' token and a resource type + */ + public class StateFromAfter extends BaseRootState { + // nothing + } + + /** + * We're in the select statement + */ + public class StateInSelect extends BaseState { + @Nonnull + @Override + public HfqlLexerOptions getLexerOptions() { + return HfqlLexerOptions.FHIRPATH_EXPRESSION; + } + + @Override + void consume(HfqlLexerToken theToken) { + String asKeyword = theToken.asKeyword(); + HfqlStatement.SelectClause clause; + if (asKeyword.startsWith("COUNT(") && asKeyword.endsWith(")")) { + String countClause = theToken.asString().substring("COUNT(".length(), asKeyword.length() - 1); + clause = myStatement.addSelectClause(countClause, HfqlStatement.SelectClauseOperator.COUNT); + clause.setAlias(theToken.getToken()); + } else { + String string = theToken.asString(); + clause = myStatement.addSelectClause(string); + } + myState = new StateInSelectAfterClause(clause); + } + } + + private class StateInSelectAfterClause 
extends StateSelectAfterClauseFinal { + public StateInSelectAfterClause(HfqlStatement.SelectClause theSelectClause) { + super(theSelectClause); + } + + @Override + void consume(HfqlLexerToken theToken) { + if (theToken.getToken().equals(":")) { + HfqlLexerToken nextToken = getNextTokenRequired(HfqlLexerOptions.FHIRPATH_EXPRESSION); + String clause = nextToken.asString(); + String alias = mySelectClause.getClause(); + mySelectClause.setAlias(alias); + mySelectClause.setClause(clause); + myState = new StateSelectAfterClauseFinal(mySelectClause); + } else if (theToken.asKeyword().equals("AS")) { + HfqlLexerToken nextToken = getNextTokenRequired(HfqlLexerOptions.HFQL_TOKEN); + String alias = nextToken.asString(); + mySelectClause.setAlias(alias); + myState = new StateSelectAfterClauseFinal(mySelectClause); + } else { + super.consume(theToken); + } + } + } + + private class StateSelectAfterClauseFinal extends BaseRootState { + protected final HfqlStatement.SelectClause mySelectClause; + + private StateSelectAfterClauseFinal(HfqlStatement.SelectClause theSelectClause) { + mySelectClause = theSelectClause; + } + + @Nonnull + @Override + public HfqlLexerOptions getLexerOptions() { + return HfqlLexerOptions.FHIRPATH_EXPRESSION; + } + + @Override + void consume(HfqlLexerToken theToken) { + if (theToken.getToken().equals(",")) { + myState = new StateInSelect(); + } else if (!DIRECTIVE_KEYWORDS.contains(theToken.asKeyword())) { + String newClause = mySelectClause.getClause() + " " + theToken.getToken(); + mySelectClause.setClause(newClause); + } else { + super.consume(theToken); + } + } + } + + private class StateInWhereInitial extends BaseState { + @Nonnull + @Override + public HfqlLexerOptions getLexerOptions() { + return HfqlLexerOptions.FHIRPATH_EXPRESSION; + } + + @Override + void consume(HfqlLexerToken theToken) { + HfqlStatement.WhereClause whereClause = myStatement.addWhereClause(); + String token = theToken.getToken(); + whereClause.setLeft(token); + 
whereClause.setOperator(HfqlStatement.WhereClauseOperatorEnum.UNARY_BOOLEAN); + myState = new StateInWhereAfterLeft(whereClause); + } + } + + private class StateInWhereAfterLeft extends BaseRootState { + private final HfqlStatement.WhereClause myWhereClause; + + public StateInWhereAfterLeft(HfqlStatement.WhereClause theWhereClause) { + myWhereClause = theWhereClause; + } + + @Nonnull + @Override + public HfqlLexerOptions getLexerOptions() { + return HfqlLexerOptions.FHIRPATH_EXPRESSION; + } + + @Override + void consume(HfqlLexerToken theToken) { + if ("=".equals(theToken.getToken())) { + myWhereClause.setOperator(HfqlStatement.WhereClauseOperatorEnum.EQUALS); + myState = new StateInWhereAfterOperatorEquals(myWhereClause); + } else if ("IN".equals(theToken.asKeyword())) { + HfqlLexerToken nextToken = getNextTokenRequired(HfqlLexerOptions.HFQL_TOKEN); + switch (nextToken.asKeyword()) { + case "(": + myWhereClause.setOperator(HfqlStatement.WhereClauseOperatorEnum.IN); + myState = new StateInWhereAfterOperatorIn(myWhereClause); + return; + case "SEARCH_MATCH": + myWhereClause.setOperator(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH); + HfqlLexerToken argumentsToken = getNextTokenRequired(HfqlLexerOptions.HFQL_TOKEN); + String token = argumentsToken.getToken(); + if (!token.equals("(")) { + throw newExceptionUnexpectedTokenExpectToken(theToken, "("); + } + myState = new StateInWhereSearchMatch(myWhereClause); + return; + } + throw newExceptionUnexpectedTokenExpectToken(theToken, "("); + } else { + myWhereClause.setOperator(HfqlStatement.WhereClauseOperatorEnum.UNARY_BOOLEAN); + + HfqlLexerToken nextToken = theToken; + if (!KEYWORD_AND.equals(nextToken.asKeyword()) && !DIRECTIVE_KEYWORDS.contains(nextToken.asKeyword())) { + StringBuilder expression = new StringBuilder(myWhereClause.getLeft()); + while (true) { + expression.append(' ').append(nextToken.getToken()); + + if (myLexer.hasNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION)) { + nextToken = 
myLexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION); + String nextTokenAsKeyword = nextToken.asKeyword(); + if (KEYWORD_AND.equals(nextTokenAsKeyword) + || DIRECTIVE_KEYWORDS.contains(nextTokenAsKeyword)) { + break; + } + } else { + nextToken = null; + break; + } + } + + myWhereClause.setLeft(expression.toString()); + } + + if (nextToken != null) { + super.consume(nextToken); + } + } + } + } + + private class StateInWhereAfterOperatorEquals extends BaseState { + private final HfqlStatement.WhereClause myWhereClause; + + public StateInWhereAfterOperatorEquals(HfqlStatement.WhereClause theWhereClause) { + myWhereClause = theWhereClause; + } + + @Override + void consume(HfqlLexerToken theToken) { + String token = theToken.getToken(); + String keyword = theToken.asKeyword(); + if (KEYWORD_TRUE.equals(keyword) || KEYWORD_FALSE.equals(keyword)) { + token = keyword.toLowerCase(Locale.US); + } else if (!theToken.isQuotedString()) { + throw newExceptionUnexpectedTokenExpectDescription(theToken, "quoted string"); + } + myWhereClause.addRight(token); + myState = new StateAfterWhere(); + } + } + + private class StateInWhereAfterOperatorIn extends BaseState { + private final HfqlStatement.WhereClause myWhereClause; + + public StateInWhereAfterOperatorIn(HfqlStatement.WhereClause theWhereClause) { + myWhereClause = theWhereClause; + } + + @Override + void consume(HfqlLexerToken theToken) { + myWhereClause.addRight(theToken.getToken()); + + if (myLexer.peekNextToken(getLexerOptions()) != null) { + if (myLexer.peekNextToken(getLexerOptions()).getToken().equals("|")) { + myLexer.consumeNextToken(); + return; + } else if (myLexer.peekNextToken(getLexerOptions()).getToken().equals(",")) { + myLexer.consumeNextToken(); + return; + } else if (myLexer.peekNextToken(getLexerOptions()).getToken().equals(")")) { + myLexer.consumeNextToken(); + myState = new StateAfterWhere(); + return; + } + } + + throw newExceptionUnexpectedToken(myLexer.peekNextToken(getLexerOptions())); + } + } + 
+ private class StateInWhereSearchMatch extends BaseState { + + private final HfqlStatement.WhereClause myWhereClause; + + public StateInWhereSearchMatch(HfqlStatement.WhereClause theWhereClause) { + myWhereClause = theWhereClause; + } + + @Override + void consume(HfqlLexerToken theToken) { + if (")".equals(theToken.getToken())) { + myState = new StateAfterWhere(); + } else { + myWhereClause.addRight(theToken.getToken()); + HfqlLexerToken nextToken = getNextTokenRequired(getLexerOptions()); + if (")".equals(nextToken.getToken())) { + myState = new StateAfterWhere(); + } else if (!",".equals(nextToken.getToken())) { + throw newExceptionUnexpectedTokenExpectToken(nextToken, ","); + } + } + } + } + + private class StateAfterWhere extends BaseRootState { + + @Override + void consume(HfqlLexerToken theToken) { + String keyword = theToken.asKeyword(); + if (keyword.equals(KEYWORD_AND)) { + myState = new StateInWhereInitial(); + } else { + super.consume(theToken); + } + } + } + + private class LimitState extends BaseState { + @Override + void consume(HfqlLexerToken theToken) { + try { + myStatement.setLimit(theToken.asInteger()); + } catch (NumberFormatException e) { + throw newExceptionUnexpectedTokenExpectDescription(theToken, "integer value"); + } + } + } + + private abstract class BaseRootState extends BaseState { + + @Override + void consume(HfqlLexerToken theToken) { + String keyword = theToken.asKeyword(); + switch (keyword) { + /* + * Update DIRECTIVE_KEYWORDS if you add new + * keywords here! 
+ */ + case KEYWORD_WHERE: + validateNotPresent(myStatement.getWhereClauses(), theToken); + myState = new StateInWhereInitial(); + break; + case KEYWORD_SELECT: + validateNotPresent(myStatement.getSelectClauses(), theToken); + myState = new StateInSelect(); + break; + case KEYWORD_FROM: + validateNotPresent(myStatement.getFromResourceName(), theToken); + myState = new StateFromStart(); + break; + case KEYWORD_LIMIT: + validateNotPresent(myStatement.getLimit(), theToken); + myState = new LimitState(); + break; + case KEYWORD_GROUP: + validateNotPresent(myStatement.getGroupByClauses(), theToken); + myState = new StateGroup(); + break; + case KEYWORD_ORDER: + validateNotPresent(myStatement.getOrderByClauses(), theToken); + myState = new OrderState(); + break; + default: + if (myStatement.getWhereClauses().isEmpty()) { + throw newExceptionUnexpectedTokenExpectToken(theToken, KEYWORD_SELECT); + } else { + throw newExceptionUnexpectedToken(theToken); + } + } + } + } + + private class StateGroup extends BaseState { + @Override + void consume(HfqlLexerToken theToken) { + if (!"BY".equals(theToken.asKeyword())) { + throw newExceptionUnexpectedTokenExpectToken(theToken, "BY"); + } + myState = new StateGroupBy(); + } + } + + private class StateGroupBy extends BaseState { + @Override + void consume(HfqlLexerToken theToken) { + myStatement.addGroupByClause(theToken.asString()); + + if (myLexer.hasNextToken(HfqlLexerOptions.HFQL_TOKEN) + && "," + .equals(myLexer.peekNextToken(HfqlLexerOptions.HFQL_TOKEN) + .getToken())) { + myLexer.consumeNextToken(); + } else { + myState = new StateAfterGroupBy(); + } + } + } + + private class StateAfterGroupBy extends BaseRootState { + // nothing + } + + private class OrderState extends BaseState { + @Override + void consume(HfqlLexerToken theToken) { + if (!"BY".equals(theToken.asKeyword())) { + throw newExceptionUnexpectedTokenExpectToken(theToken, "BY"); + } + + myState = new OrderByState(); + } + } + + private class OrderByState extends 
BaseState { + + @Nonnull + @Override + public HfqlLexerOptions getLexerOptions() { + return HfqlLexerOptions.FHIRPATH_EXPRESSION; + } + + @Override + void consume(HfqlLexerToken theToken) { + HfqlStatement.OrderByClause clause = myStatement.addOrderByClause(theToken.getToken(), true); + myState = new OrderByAfterState(clause); + } + } + + private class OrderByAfterState extends BaseRootState { + private final HfqlStatement.OrderByClause myClause; + + public OrderByAfterState(HfqlStatement.OrderByClause theClause) { + myClause = theClause; + } + + @Override + void consume(HfqlLexerToken theToken) { + if ("ASC".equals(theToken.asKeyword())) { + myClause.setAscending(true); + } else if ("DESC".equals(theToken.asKeyword())) { + myClause.setAscending(false); + } else if (",".equals(theToken.getToken())) { + myState = new OrderByState(); + } else { + super.consume(theToken); + } + } + } + + private abstract static class BaseState { + abstract void consume(HfqlLexerToken theToken); + + @Nonnull + public HfqlLexerOptions getLexerOptions() { + return HfqlLexerOptions.HFQL_TOKEN; + } + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/provider/HfqlRestProvider.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/provider/HfqlRestProvider.java new file mode 100644 index 00000000000..0728b670e94 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/provider/HfqlRestProvider.java @@ -0,0 +1,226 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.fql.provider; + +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult; +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutor; +import ca.uhn.fhir.jpa.fql.parser.HfqlStatement; +import ca.uhn.fhir.jpa.fql.util.HfqlConstants; +import ca.uhn.fhir.rest.annotation.Operation; +import ca.uhn.fhir.rest.annotation.OperationParam; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.util.DatatypeUtil; +import ca.uhn.fhir.util.JsonUtil; +import ca.uhn.fhir.util.ValidateUtil; +import ca.uhn.fhir.util.VersionUtil; +import org.apache.commons.csv.CSVPrinter; +import org.hl7.fhir.instance.model.api.IPrimitiveType; +import org.hl7.fhir.r4.model.CodeType; +import org.hl7.fhir.r4.model.IntegerType; +import org.hl7.fhir.r4.model.Parameters; +import org.hl7.fhir.r4.model.StringType; +import org.springframework.beans.factory.annotation.Autowired; + +import java.io.IOException; +import java.io.OutputStreamWriter; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.servlet.ServletOutputStream; +import javax.servlet.http.HttpServletResponse; + +import static ca.uhn.fhir.jpa.fql.jdbc.HfqlRestClient.CSV_FORMAT; +import static ca.uhn.fhir.rest.api.Constants.CHARSET_UTF8_CTSUFFIX; +import static ca.uhn.fhir.rest.api.Constants.CT_TEXT_CSV; +import static ca.uhn.fhir.util.DatatypeUtil.toStringValue; + +public class HfqlRestProvider { + + @Autowired + private IHfqlExecutor myHfqlExecutor; + + /** + * Constructor + */ + public HfqlRestProvider() { + this(null); + } + + /** + * Constructor + */ 
+ public HfqlRestProvider(IHfqlExecutor theHfqlExecutor) { + myHfqlExecutor = theHfqlExecutor; + } + + public IHfqlExecutor getHfqlExecutor() { + return myHfqlExecutor; + } + + public void setHfqlExecutor(IHfqlExecutor theHfqlExecutor) { + myHfqlExecutor = theHfqlExecutor; + } + + /** + * This function implements the $hfql-execute operation, which is + * the FHIR operation that the HFQL JDBC client uses to talk to the server. All + * communication between the client and the server goes through this operation. The + * response is not FHIR however: Responses from this operation are in CSV format using + * a custom CSV format that is understood by the client. See + * {@link #streamResponseCsv(HttpServletResponse, int, IHfqlExecutionResult, boolean, HfqlStatement)} + * to see how that format works. + */ + @Operation(name = HfqlConstants.HFQL_EXECUTE, manualResponse = true) + public void executeFql( + @OperationParam(name = HfqlConstants.PARAM_ACTION, typeName = "code", min = 0, max = 1) + IPrimitiveType theAction, + @OperationParam(name = HfqlConstants.PARAM_QUERY, typeName = "string", min = 0, max = 1) + IPrimitiveType theQuery, + @OperationParam(name = HfqlConstants.PARAM_STATEMENT, typeName = "string", min = 0, max = 1) + IPrimitiveType theStatement, + @OperationParam(name = HfqlConstants.PARAM_CONTINUATION, typeName = "string", min = 0, max = 1) + IPrimitiveType theContinuation, + @OperationParam(name = HfqlConstants.PARAM_LIMIT, typeName = "integer", min = 0, max = 1) + IPrimitiveType theLimit, + @OperationParam(name = HfqlConstants.PARAM_OFFSET, typeName = "integer", min = 0, max = 1) + IPrimitiveType theOffset, + @OperationParam(name = HfqlConstants.PARAM_FETCH_SIZE, typeName = "integer", min = 0, max = 1) + IPrimitiveType theFetchSize, + @OperationParam(name = HfqlConstants.PARAM_INTROSPECT_TABLE_NAME, typeName = "string", min = 0, max = 1) + IPrimitiveType theIntrospectTableName, + @OperationParam(name = HfqlConstants.PARAM_INTROSPECT_COLUMN_NAME, typeName = 
"string", min = 0, max = 1) + IPrimitiveType theIntrospectColumnName, + RequestDetails theRequestDetails, + HttpServletResponse theServletResponse) + throws IOException { + String action = toStringValue(theAction); + + int fetchSize = parseFetchSize(theFetchSize); + Integer limit = parseLimit(theLimit); + switch (action) { + case HfqlConstants.PARAM_ACTION_SEARCH: { + String query = toStringValue(theQuery); + IHfqlExecutionResult outcome = getHfqlExecutor().executeInitialSearch(query, limit, theRequestDetails); + streamResponseCsv(theServletResponse, fetchSize, outcome, true, outcome.getStatement()); + break; + } + case HfqlConstants.PARAM_ACTION_SEARCH_CONTINUATION: { + String continuation = toStringValue(theContinuation); + ValidateUtil.isTrueOrThrowInvalidRequest( + theOffset != null && theOffset.hasValue(), "No offset supplied"); + int startingOffset = theOffset.getValue(); + + String statement = DatatypeUtil.toStringValue(theStatement); + ValidateUtil.isNotBlankOrThrowIllegalArgument(statement, "No statement provided"); + HfqlStatement statementJson = JsonUtil.deserialize(statement, HfqlStatement.class); + + IHfqlExecutionResult outcome = myHfqlExecutor.executeContinuation( + statementJson, continuation, startingOffset, limit, theRequestDetails); + streamResponseCsv(theServletResponse, fetchSize, outcome, false, outcome.getStatement()); + break; + } + case HfqlConstants.PARAM_ACTION_INTROSPECT_TABLES: { + IHfqlExecutionResult outcome = myHfqlExecutor.introspectTables(); + streamResponseCsv(theServletResponse, fetchSize, outcome, true, outcome.getStatement()); + break; + } + case HfqlConstants.PARAM_ACTION_INTROSPECT_COLUMNS: { + String tableName = toStringValue(theIntrospectTableName); + String columnName = toStringValue(theIntrospectColumnName); + IHfqlExecutionResult outcome = myHfqlExecutor.introspectColumns(tableName, columnName); + streamResponseCsv(theServletResponse, fetchSize, outcome, true, outcome.getStatement()); + break; + } + } + } + + @Nullable + 
private static Integer parseLimit(IPrimitiveType theLimit) { + Integer limit = null; + if (theLimit != null) { + limit = theLimit.getValue(); + } + return limit; + } + + private static int parseFetchSize(IPrimitiveType theFetchSize) { + int fetchSize = 1000; + if (theFetchSize != null && theFetchSize.getValue() != null) { + fetchSize = theFetchSize.getValue(); + } + if (fetchSize == 0) { + fetchSize = HfqlConstants.MAX_FETCH_SIZE; + } + ValidateUtil.isTrueOrThrowInvalidRequest( + fetchSize >= HfqlConstants.MIN_FETCH_SIZE && fetchSize <= HfqlConstants.MAX_FETCH_SIZE, + "Fetch size must be between %d and %d", + HfqlConstants.MIN_FETCH_SIZE, + HfqlConstants.MAX_FETCH_SIZE); + return fetchSize; + } + + private static void streamResponseCsv( + HttpServletResponse theServletResponse, + int theFetchSize, + IHfqlExecutionResult theResult, + boolean theInitialPage, + HfqlStatement theStatement) + throws IOException { + theServletResponse.setStatus(200); + theServletResponse.setContentType(CT_TEXT_CSV + CHARSET_UTF8_CTSUFFIX); + try (ServletOutputStream outputStream = theServletResponse.getOutputStream()) { + Appendable out = new OutputStreamWriter(outputStream); + CSVPrinter csvWriter = new CSVPrinter(out, CSV_FORMAT); + csvWriter.printRecords(); + + // Protocol version + csvWriter.printRecord(HfqlConstants.PROTOCOL_VERSION, "HAPI FHIR " + VersionUtil.getVersion()); + + // Search ID, Limit, Parsed FQL Statement + String searchId = theResult.getSearchId(); + String parsedFqlStatement = ""; + if (theInitialPage && theStatement != null) { + parsedFqlStatement = JsonUtil.serialize(theStatement, false); + } + csvWriter.printRecord(searchId, theResult.getLimit(), parsedFqlStatement); + + // Print the rows + int recordCount = 0; + while (recordCount++ < theFetchSize && theResult.hasNext()) { + IHfqlExecutionResult.Row nextRow = theResult.getNextRow(); + csvWriter.print(nextRow.getRowOffset()); + csvWriter.printRecord(nextRow.getRowValues()); + } + + csvWriter.close(true); + } + } 
+ + @Nonnull + public static Parameters newQueryRequestParameters(String sql, Integer limit, int fetchSize) { + Parameters input = new Parameters(); + input.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_SEARCH)); + input.addParameter(HfqlConstants.PARAM_QUERY, new StringType(sql)); + if (limit != null) { + input.addParameter(HfqlConstants.PARAM_LIMIT, new IntegerType(limit)); + } + input.addParameter(HfqlConstants.PARAM_FETCH_SIZE, new IntegerType(fetchSize)); + return input; + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/provider/HfqlRestProviderCtxConfig.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/provider/HfqlRestProviderCtxConfig.java new file mode 100644 index 00000000000..228f76fd8fe --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/provider/HfqlRestProviderCtxConfig.java @@ -0,0 +1,42 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.fql.provider; + +import ca.uhn.fhir.jpa.fql.executor.HfqlExecutor; +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutor; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Lazy; + +@Configuration +public class HfqlRestProviderCtxConfig { + + @Bean + @Lazy + public IHfqlExecutor fqlExecutor() { + return new HfqlExecutor(); + } + + @Bean + @Lazy + public HfqlRestProvider fqlRestProvider() { + return new HfqlRestProvider(); + } +} diff --git a/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/util/HfqlConstants.java b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/util/HfqlConstants.java new file mode 100644 index 00000000000..63f319b8815 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/util/HfqlConstants.java @@ -0,0 +1,48 @@ +/*- + * #%L + * HAPI FHIR JPA Server - HFQL Driver + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.fql.util; + +public class HfqlConstants { + + public static final String HFQL_EXECUTE = "$hfql-execute"; + public static final String PARAM_QUERY = "query"; + public static final String PARAM_STATEMENT = "statement"; + public static final String PARAM_CONTINUATION = "continuation"; + public static final String PARAM_LIMIT = "limit"; + public static final String PARAM_OFFSET = "offset"; + public static final String PARAM_FETCH_SIZE = "fetchSize"; + public static final String PROTOCOL_VERSION = "1"; + public static final String PARAM_ACTION = "action"; + public static final String PARAM_ACTION_SEARCH = "search"; + public static final String PARAM_ACTION_SEARCH_CONTINUATION = "searchContinuation"; + public static final String PARAM_ACTION_INTROSPECT_TABLES = "introspectTables"; + public static final String PARAM_ACTION_INTROSPECT_COLUMNS = "introspectColumns"; + public static final int MIN_FETCH_SIZE = 1; + public static final int DEFAULT_FETCH_SIZE = 1000; + public static final int MAX_FETCH_SIZE = 10000; + public static final String PARAM_INTROSPECT_TABLE_NAME = "introspectTableName"; + public static final String PARAM_INTROSPECT_COLUMN_NAME = "introspectColumnName"; + /** + * This is the maximum number of results that can be sorted or grouped on + */ + public static final int ORDER_AND_GROUP_LIMIT = 10000; + + private HfqlConstants() {} +} diff --git a/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/executor/HfqlExecutorTest.java b/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/executor/HfqlExecutorTest.java new file mode 100644 index 00000000000..36645b4c3ef --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/executor/HfqlExecutorTest.java @@ -0,0 +1,1335 @@ +package ca.uhn.fhir.jpa.fql.executor; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; +import 
ca.uhn.fhir.jpa.fql.parser.HfqlStatement; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.fhir.rest.param.DateParam; +import ca.uhn.fhir.rest.param.ParamPrefixEnum; +import ca.uhn.fhir.rest.param.QuantityParam; +import ca.uhn.fhir.rest.param.StringParam; +import ca.uhn.fhir.rest.param.TokenParam; +import ca.uhn.fhir.rest.server.IPagingProvider; +import ca.uhn.fhir.rest.server.SimpleBundleProvider; +import ca.uhn.fhir.rest.server.util.FhirContextSearchParamRegistry; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; +import com.google.common.collect.Lists; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.r4.model.DateType; +import org.hl7.fhir.r4.model.Observation; +import org.hl7.fhir.r4.model.Patient; +import org.hl7.fhir.r4.model.Quantity; +import org.hl7.fhir.r4.model.StringType; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Spy; +import org.mockito.junit.jupiter.MockitoExtension; + +import javax.annotation.Nonnull; +import java.sql.Types; +import java.util.ArrayList; +import java.util.List; + +import static ca.uhn.fhir.jpa.fql.util.HfqlConstants.ORDER_AND_GROUP_LIMIT; +import static org.hamcrest.CoreMatchers.hasItem; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.hasItems; +import static org.hamcrest.Matchers.not; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +public class HfqlExecutorTest { + + private final RequestDetails mySrd = new SystemRequestDetails(); + @Spy + private FhirContext myCtx = FhirContext.forR4Cached(); + @Mock + private DaoRegistry myDaoRegistry; + @Mock + private IPagingProvider myPagingProvider; + @Spy + private ISearchParamRegistry mySearchParamRegistry = new FhirContextSearchParamRegistry(myCtx); + @InjectMocks + private HfqlExecutor myHfqlExecutor = new HfqlExecutor(); + @Captor + private ArgumentCaptor mySearchParameterMapCaptor; + + @Test + public void testContinuation() { + // Setup + HfqlStatement statement = new HfqlStatement(); + statement.setFromResourceName("Patient"); + statement.addSelectClause("name[0].given[1]").setAlias("name[0].given[1]").setDataType(HfqlDataTypeEnum.STRING); + statement.addSelectClause("name[0].family").setAlias("name[0].family").setDataType(HfqlDataTypeEnum.STRING); + statement.addWhereClause("name.family = 'Simpson'", HfqlStatement.WhereClauseOperatorEnum.UNARY_BOOLEAN); + + String searchId = "the-search-id"; + when(myPagingProvider.retrieveResultList(any(), eq(searchId))).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + + // Test + IHfqlExecutionResult result = myHfqlExecutor.executeContinuation(statement, searchId, 3, 100, mySrd); + + // Verify + assertThat(result.getStatement().toSelectedColumnAliases(), contains( + "name[0].given[1]", "name[0].family" + )); + assertTrue(result.hasNext()); + IHfqlExecutionResult.Row nextRow = result.getNextRow(); + assertEquals(3, nextRow.getRowOffset()); 
+ assertThat(nextRow.getRowValues(), contains("Marie", "Simpson")); + assertTrue(result.hasNext()); + nextRow = result.getNextRow(); + assertEquals(4, nextRow.getRowOffset()); + assertThat(nextRow.getRowValues(), contains("Evelyn", "Simpson")); + assertFalse(result.hasNext()); + + } + + @Test + public void testSelect_OrderBy_ManyValues() { + + // Setup + + IFhirResourceDao patientDao = initDao(Patient.class); + List patients = new ArrayList<>(); + for (int i = 0; i < 5000; i++) { + Patient patient = new Patient(); + patient.getMeta().setVersionId(Integer.toString(i)); + patient.addName().setFamily("PT" + i); + patients.add(patient); + } + when(patientDao.search(any(), any())).thenReturn(new SimpleBundleProvider(patients)); + String statement = """ + FROM Patient + SELECT + meta.versionId.toInteger() AS versionId, + name[0].family AS family + ORDER BY versionId DESC + """; + + // Test + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + + // Verify + IHfqlExecutionResult.Row nextRow; + assertThat(result.getStatement().toSelectedColumnAliases(), contains( + "versionId", "family" + )); + for (int i = 4999; i >= 0; i--) { + assertTrue(result.hasNext()); + nextRow = result.getNextRow(); + assertThat(nextRow.getRowValues().toString(), nextRow.getRowValues(), contains(String.valueOf(i), "PT" + i)); + } + } + + + @Test + public void testSelect_OrderBy_SparseValues_Date() { + + // Setup + + IFhirResourceDao patientDao = initDao(Patient.class); + List patients = new ArrayList<>(); + Patient patient; + + patient = new Patient(); + patient.setId("PT0"); + patient.setBirthDateElement(new DateType("2023-01-01")); + patients.add(patient); + + patient = new Patient(); + patient.setId("PT1"); + patient.setBirthDateElement(new DateType("2022-01-01")); + patients.add(patient); + + patient = new Patient(); + patient.setId("PT2"); + patient.getBirthDateElement().addExtension("http://foo", new StringType("123")); + patients.add(patient); + + 
when(patientDao.search(any(), any())).thenReturn(new SimpleBundleProvider(patients)); + String statement = """ + FROM Patient + SELECT id, birthDate + ORDER BY birthDate DESC + """; + + // Test + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + + // Verify + IHfqlExecutionResult.Row nextRow; + assertTrue(result.hasNext()); + nextRow = result.getNextRow(); + assertThat(nextRow.getRowValues().toString(), nextRow.getRowValues(), contains("PT0", "2023-01-01")); + assertTrue(result.hasNext()); + nextRow = result.getNextRow(); + assertThat(nextRow.getRowValues().toString(), nextRow.getRowValues(), contains("PT1", "2022-01-01")); + assertTrue(result.hasNext()); + nextRow = result.getNextRow(); + assertThat(nextRow.getRowValues().toString(), nextRow.getRowValues(), contains("PT2", "")); + assertFalse(result.hasNext()); + } + + + + @Test + public void testFromSelect() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + + String statement = """ + from Patient + where name.family = 'Simpson' + select name[0].given[1], name[0].family, name, name.given + """; + + IHfqlExecutionResult.Row nextRow; + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases(), contains( + "name[0].given[1]", "name[0].family", "name", "name.given" + )); + assertThat(result.getStatement().toSelectedColumnDataTypes(), contains( + HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.JSON, HfqlDataTypeEnum.JSON + )); + assertTrue(result.hasNext()); + nextRow = result.getNextRow(); + assertEquals(0, nextRow.getRowOffset()); + assertThat(nextRow.getRowValues(), contains( + "Jay", + "Simpson", + "[{\"family\":\"Simpson\",\"given\":[\"Homer\",\"Jay\"]}]", + "[\"Homer\", \"Jay\"]" + )); + assertTrue(result.hasNext()); + nextRow = result.getNextRow(); + 
assertEquals(2, nextRow.getRowOffset()); + assertThat(nextRow.getRowValues(), contains( + "El Barto", + "Simpson", + "[{\"family\":\"Simpson\",\"given\":[\"Bart\",\"El Barto\"]}]", + "[\"Bart\", \"El Barto\"]" + )); + assertTrue(result.hasNext()); + + verify(patientDao, times(1)).search(mySearchParameterMapCaptor.capture(), any()); + // Default count + assertNull(mySearchParameterMapCaptor.getValue().getCount()); + } + + @Test + public void testSelect_InvalidSelectClause() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + String statement = """ + select foo() + from Patient + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + IHfqlExecutionResult.Row row = result.getNextRow(); + assertEquals(IHfqlExecutionResult.ROW_OFFSET_ERROR, row.getRowOffset()); + assertEquals("Failed to evaluate FHIRPath expression \"foo()\". Error: HAPI-2404: Error in ?? at 1, 1: The name foo is not a valid function name", row.getRowValues().get(0)); + assertFalse(result.hasNext()); + } + + @Test + public void testSelect_InvalidHavingClause() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + String statement = """ + select name + from Patient + where meta.versionId > 1 + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + IHfqlExecutionResult.Row row = result.getNextRow(); + assertEquals(IHfqlExecutionResult.ROW_OFFSET_ERROR, row.getRowOffset()); + assertEquals(Msg.code(2403) + "Unable to evaluate FHIRPath expression \"meta.versionId > 1\". 
Error: HAPI-0255: Error evaluating FHIRPath expression: Unable to compare values of type id and integer (@char 3)", row.getRowValues().get(0)); + assertFalse(result.hasNext()); + } + + @Test + public void testFromSelectStar() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + + String statement = """ + select * + from Patient + where name.family = 'Simpson' + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases().toString(), result.getStatement().toSelectedColumnAliases(), hasItems( + "id", "active", "address", "birthDate" + )); + assertThat(result.getStatement().toSelectedColumnAliases().toString(), result.getStatement().toSelectedColumnAliases(), not(hasItem( + "extension" + ))); + } + + @Test + public void testSelect_Limit() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlandersWithSomeDuplicates()); + + String statement = """ + select name[0].given[0] + from Patient + limit 5 + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertTrue(result.hasNext()); + assertThat(result.getNextRow().getRowValues(), contains("Homer")); + assertTrue(result.hasNext()); + assertThat(result.getNextRow().getRowValues(), contains("Homer")); + assertTrue(result.hasNext()); + assertThat(result.getNextRow().getRowValues(), contains("Ned")); + assertTrue(result.hasNext()); + assertThat(result.getNextRow().getRowValues(), contains("Ned")); + assertTrue(result.hasNext()); + assertThat(result.getNextRow().getRowValues(), contains("Bart")); + assertFalse(result.hasNext()); + } + + @Test + public void testFromSelectNonPrimitivePath() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), 
any())).thenReturn(new SimpleBundleProvider( + createPatientHomerSimpson(), + createPatientNedFlanders() + )); + + String statement = """ + select name + from Patient + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases().toString(), result.getStatement().toSelectedColumnAliases(), hasItems( + "name" + )); + assertThat(result.getStatement().toSelectedColumnDataTypes().toString(), result.getStatement().toSelectedColumnDataTypes(), hasItems( + HfqlDataTypeEnum.JSON + )); + + List> rowValues = readAllRowValues(result); + assertThat(rowValues.toString(), rowValues, containsInAnyOrder( + Lists.newArrayList("[{\"family\":\"Simpson\",\"given\":[\"Homer\",\"Jay\"]}]"), + Lists.newArrayList("[{\"family\":\"Flanders\",\"given\":[\"Ned\"]}]") + )); + } + + @Test + public void testFromSelectCount() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlandersWithSomeDuplicates()); + String statement = """ + from Patient + select name.family, name.given, count(*) + group by name.family, name.given + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases().toString(), result.getStatement().toSelectedColumnAliases(), hasItems( + "name.family", "name.given", "count(*)" + )); + assertThat(result.getStatement().toSelectedColumnDataTypes().toString(), result.getStatement().toSelectedColumnDataTypes(), hasItems( + // TODO: It'd make more sense if we used STRING instead of JSON here + HfqlDataTypeEnum.JSON, HfqlDataTypeEnum.JSON, HfqlDataTypeEnum.INTEGER + )); + + List> rowValues = readAllRowValues(result); + assertThat(rowValues.toString(), rowValues, containsInAnyOrder( + Lists.newArrayList("Flanders", "Ned", 2), + Lists.newArrayList("Simpson", "Jay", 2), + Lists.newArrayList("Simpson", 
"Marie", 1), + Lists.newArrayList("Simpson", "Evelyn", 1), + Lists.newArrayList("Simpson", "Homer", 2), + Lists.newArrayList("Simpson", "Lisa", 1), + Lists.newArrayList("Simpson", "Bart", 1), + Lists.newArrayList("Simpson", "El Barto", 1), + Lists.newArrayList("Simpson", "Maggie", 1) + )); + } + + @Test + public void testFromSelectCount_TooMany() { + IFhirResourceDao patientDao = initDao(Patient.class); + List patients = new ArrayList<>(); + for (int i = 0; i < ORDER_AND_GROUP_LIMIT + 10; i++) { + Patient patient = new Patient(); + patient.addName().setFamily("PT" + i); + patients.add(patient); + } + when(patientDao.search(any(), any())).thenReturn(new SimpleBundleProvider(patients)); + String statement = """ + from Patient + select name.family, count(*) + group by name.family + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertErrorMessage(result, Msg.code(2402) + "Can not group on > 10000 terms"); + } + + @Test + public void testFromSelectCount_NoGroup() { + IFhirResourceDao patientDao = initDao(Patient.class); + + // Only 0+1 have a family name + Patient pt0 = new Patient(); + pt0.addName().setFamily("Simpson"); + Patient pt1 = new Patient(); + pt1.addName().setFamily("Smithers"); + Patient pt2 = new Patient(); + pt2.addName().addGiven("Blah"); + + when(patientDao.search(any(), any())).thenReturn(new SimpleBundleProvider(pt0, pt1, pt2)); + String statement = """ + select count(*), count(name.family) + from Patient + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases().toString(), result.getStatement().toSelectedColumnAliases(), hasItems( + "count(*)", "count(name.family)" + )); + assertThat(result.getStatement().toSelectedColumnDataTypes().toString(), result.getStatement().toSelectedColumnDataTypes(), hasItems( + HfqlDataTypeEnum.INTEGER, HfqlDataTypeEnum.INTEGER + )); + + List> rowValues = 
readAllRowValues(result); + assertThat(rowValues.toString(), rowValues, contains( + Lists.newArrayList(3, 2) + )); + } + + @Test + public void testFromSelectCountOrderBy() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlandersWithSomeDuplicates()); + String statement = """ + from Patient + select name[0].family, name[0].given, count(*) + group by name[0].family, name[0].given + order by count(*) desc, name[0].family asc, name[0].given asc + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases().toString(), result.getStatement().toSelectedColumnAliases(), hasItems( + "name[0].family", "name[0].given", "count(*)" + )); + assertThat(result.getStatement().toSelectedColumnDataTypes().toString(), result.getStatement().toSelectedColumnDataTypes(), hasItems( + HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.INTEGER + )); + + List> rowValues = readAllRowValues(result); + assertThat(rowValues.toString(), rowValues, contains( + Lists.newArrayList("Flanders", "Ned", 2), + Lists.newArrayList("Simpson", "Homer", 2), + Lists.newArrayList("Simpson", "Jay", 2), + Lists.newArrayList("Simpson", "Bart", 1), + Lists.newArrayList("Simpson", "El Barto", 1), + Lists.newArrayList("Simpson", "Evelyn", 1), + Lists.newArrayList("Simpson", "Lisa", 1), + Lists.newArrayList("Simpson", "Maggie", 1), + Lists.newArrayList("Simpson", "Marie", 1) + )); + } + + @Test + public void testFromSelectCountOrderBy_WithNulls() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(new SimpleBundleProvider( + createPatientHomerSimpson(), + createPatientLisaSimpson(), + new Patient() + )); + String statement = """ + from Patient + select name[0].family, name[0].given[0] + order by name[0].family desc, name[0].given[0] desc + """; + + 
IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases().toString(), result.getStatement().toSelectedColumnAliases(), hasItems( + "name[0].family", "name[0].given[0]" + )); + assertThat(result.getStatement().toSelectedColumnDataTypes().toString(), result.getStatement().toSelectedColumnDataTypes(), hasItems( + HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.STRING + )); + + List> rowValues = readAllRowValues(result); + assertThat(rowValues.toString(), rowValues, contains( + Lists.newArrayList("Simpson", "Lisa"), + Lists.newArrayList("Simpson", "Homer"), + Lists.newArrayList(null, null) + )); + } + + @Test + public void testFromSelectCountOrderBy_DateWithNulls() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(new SimpleBundleProvider( + createPatientHomerSimpson().setBirthDateElement(new DateType("1950-01-01")), + createPatientLisaSimpson().setBirthDateElement(new DateType("1990-01-01")), + new Patient() + )); + String statement = """ + from Patient + select name[0].family, name[0].given[0], birthDate + order by birthDate desc + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases().toString(), result.getStatement().toSelectedColumnAliases(), hasItems( + "name[0].family", "name[0].given[0]", "birthDate" + )); + assertThat(result.getStatement().toSelectedColumnDataTypes().toString(), result.getStatement().toSelectedColumnDataTypes(), hasItems( + HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.DATE + )); + + List> rowValues = readAllRowValues(result); + assertThat(rowValues.toString(), rowValues, contains( + Lists.newArrayList("Simpson", "Lisa", "1990-01-01"), + Lists.newArrayList("Simpson", "Homer", "1950-01-01"), + Lists.newArrayList(null, null, null) + )); + } + + @Test + public void 
testFromSelectCountOrderBy_BooleanWithNulls() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(new SimpleBundleProvider( + createPatientHomerSimpson().setActive(true), + createPatientLisaSimpson().setActive(false), + createPatientNedFlanders().setActive(true) + )); + String statement = """ + from Patient + select name[0].family, name[0].given[0], active + order by active asc, name[0].given[0] asc + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases().toString(), result.getStatement().toSelectedColumnAliases(), hasItems( + "name[0].family", "name[0].given[0]", "active" + )); + assertThat(result.getStatement().toSelectedColumnDataTypes().toString(), result.getStatement().toSelectedColumnDataTypes(), hasItems( + HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.BOOLEAN + )); + + List> rowValues = readAllRowValues(result); + assertThat(rowValues.toString(), rowValues, contains( + Lists.newArrayList("Simpson", "Lisa", "false"), + Lists.newArrayList("Simpson", "Homer", "true"), + Lists.newArrayList("Flanders", "Ned", "true") + )); + } + + @Test + public void testFromSelectCount_NullValues() { + IFhirResourceDao patientDao = initDao(Patient.class); + + when(patientDao.search(any(), any())).thenReturn(createProviderWithSparseNames()); + + String statement = """ + from Patient + select name[0].family, name[0].given[0], count(*), count(name[0].family) + group by name[0].family, name[0].given[0] + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases().toString(), result.getStatement().toSelectedColumnAliases(), hasItems( + "name[0].family", "name[0].given[0]", "count(*)", "count(name[0].family)" + )); + assertThat(result.getStatement().toSelectedColumnDataTypes().toString(), 
result.getStatement().toSelectedColumnDataTypes(), hasItems( + HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.INTEGER, HfqlDataTypeEnum.INTEGER + )); + + List> rowValues = readAllRowValues(result); + assertThat(rowValues.toString(), rowValues, containsInAnyOrder( + Lists.newArrayList(null, "Homer", 1, 0), + Lists.newArrayList("Simpson", "Homer", 1, 1), + Lists.newArrayList("Simpson", null, 1, 1), + Lists.newArrayList(null, null, 1, 0) + )); + } + + @Test + public void testFromSelectCount_NullValues_NoGroup() { + IFhirResourceDao patientDao = initDao(Patient.class); + + when(patientDao.search(any(), any())).thenReturn(createProviderWithSparseNames()); + + String statement = """ + from Patient + select count(*), count(name.family) + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases().toString(), result.getStatement().toSelectedColumnAliases(), hasItems( + "count(*)", "count(name.family)" + )); + assertThat(result.getStatement().toSelectedColumnDataTypes().toString(), result.getStatement().toSelectedColumnDataTypes(), hasItems( + HfqlDataTypeEnum.INTEGER, HfqlDataTypeEnum.INTEGER + )); + + List> rowValues = readAllRowValues(result); + assertThat(rowValues.toString(), rowValues, containsInAnyOrder( + Lists.newArrayList(4, 2) + )); + } + + @Test + public void testFromSelectComplexFhirPath() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + + String statement = """ + from Patient + where name.family = 'Simpson' + select name[0].given[0], identifier.where(system = 'http://system' ).first().value + """; + + IHfqlExecutionResult.Row nextRow; + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases().toString(), 
result.getStatement().toSelectedColumnAliases(), hasItems( + "name[0].given[0]", "identifier.where(system = 'http://system' ).first().value" + )); + nextRow = result.getNextRow(); + + assertEquals("Homer", nextRow.getRowValues().get(0)); + assertEquals("value0", nextRow.getRowValues().get(1)); + } + + @Test + public void testFromSelectComplexFhirPath2() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + + String statement = """ + from Patient + where identifier.where(system = 'http://system' ).value = 'value0' + select name[0].given[0], identifier[0].value + """; + + IHfqlExecutionResult.Row nextRow; + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases().toString(), result.getStatement().toSelectedColumnAliases(), hasItems( + "name[0].given[0]", "identifier[0].value" + )); + nextRow = result.getNextRow(); + + assertEquals("Homer", nextRow.getRowValues().get(0)); + assertEquals("value0", nextRow.getRowValues().get(1)); + assertFalse(result.hasNext()); + } + + /** + * This should work but the FHIRPath evaluator doesn't seem to be + * doing the right thing + */ + @Test + @Disabled + public void testFromSelectComplexFhirPath3() { + IFhirResourceDao patientDao = initDao(Patient.class); + + Patient p = new Patient(); + p.addIdentifier().setSystem("http://foo").setValue("123"); + + when(patientDao.search(any(), any())).thenReturn(new SimpleBundleProvider(p)); + + String statement = """ + SELECT + COL1: identifier[0].system + '|' + identifier[0].value, + identifier[0].system + '|' + identifier[0].value AS COL2, + identifier[0].system + '|' + identifier[0].value + FROM + Patient + """; + + IHfqlExecutionResult.Row nextRow; + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + 
assertThat(result.getStatement().toSelectedColumnAliases().toString(), result.getStatement().toSelectedColumnAliases(), hasItems( + "COL1", "COL2", "identifier[0].system + '|' + identifier[0].value" + )); + nextRow = result.getNextRow(); + assertThat(nextRow.getRowValues().toString(), nextRow.getRowValues(), contains( + "" + )); + assertFalse(result.hasNext()); + } + + @Test + public void testFromHavingComplexFhirPath_StringContains() { + IFhirResourceDao observationDao = initDao(Observation.class); + + Observation obs1 = createCardiologyNoteObservation("Observation/1", "Patient is running a lot"); + Observation obs2 = createCardiologyNoteObservation("Observation/2", "Patient is eating a lot"); + Observation obs3 = createCardiologyNoteObservation("Observation/3", "Patient is running a little"); + Observation obs4 = createCardiologyNoteObservation("Observation/4", "Patient is walking a lot"); + + when(observationDao.search(any(), any())).thenReturn(new SimpleBundleProvider(obs1, obs2, obs3, obs4)); + + String statement = """ + SELECT id + FROM Observation + WHERE + id in search_match('code', 'http://loinc.org|34752-6') + AND + value.ofType(string).lower().contains('running') + """; + + IHfqlExecutionResult.Row nextRow; + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases().toString(), result.getStatement().toSelectedColumnAliases(), hasItems( + "id" + )); + assertThat(result.getStatement().toSelectedColumnDataTypes().toString(), result.getStatement().toSelectedColumnDataTypes(), hasItems( + HfqlDataTypeEnum.STRING + )); + + nextRow = result.getNextRow(); + assertThat(nextRow.getRowValues().toString(), nextRow.getRowValues(), contains( + "1" + )); + nextRow = result.getNextRow(); + assertThat(nextRow.getRowValues().toString(), nextRow.getRowValues(), contains( + "3" + )); + assertFalse(result.hasNext()); + + verify(observationDao, 
times(1)).search(mySearchParameterMapCaptor.capture(), any()); + SearchParameterMap map = mySearchParameterMapCaptor.getValue(); + assertEquals(1, map.size()); + assertEquals("http://loinc.org|34752-6", map.get("code").get(0).get(0).getValueAsQueryToken(myCtx)); + } + + @Test + public void testFromWhereComplexFhirPath_Cast() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + String statement = """ + select name[0].given[0] + from Patient + where meta.versionId.toInteger() > 1 + """; + + IHfqlExecutionResult.Row row; + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + + assertTrue(result.hasNext()); + row = result.getNextRow(); + assertThat(row.getRowValues().toString(), row.getRowValues(), contains("Homer")); + + assertTrue(result.hasNext()); + row = result.getNextRow(); + assertThat(row.getRowValues().toString(), row.getRowValues(), contains("Bart")); + + assertFalse(result.hasNext()); + } + + + + @Test + public void testSelectComplexFhirPath_StringConcat() { + IFhirResourceDao patientDao = initDao(Patient.class); + + when(patientDao.search(any(), any())).thenReturn(new SimpleBundleProvider(createPatientHomerSimpson())); + + String statement = """ + SELECT FullName: Patient.name.first().given.first() + ' ' + Patient.name.first().family + FROM Patient + """; + + IHfqlExecutionResult.Row nextRow; + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases().toString(), result.getStatement().toSelectedColumnAliases(), hasItems( + "FullName" + )); + assertThat(result.getStatement().toSelectedColumnDataTypes().toString(), result.getStatement().toSelectedColumnDataTypes(), hasItems( + HfqlDataTypeEnum.STRING + )); + nextRow = result.getNextRow(); + assertThat(nextRow.getRowValues().toString(), nextRow.getRowValues(), contains( + 
"Homer Simpson" + )); + assertFalse(result.hasNext()); + } + + @Test + public void testHaving_ComplexFhirPath_Numeric() { + IFhirResourceDao observationDao = initDao(Observation.class); + + Observation obs1 = createWeightObservationWithKilos("Observation/1", 10L); + Observation obs2 = createWeightObservationWithKilos("Observation/2", 100L); + Observation obs3 = createWeightObservationWithKilos("Observation/3", 101L); + Observation obs4 = createWeightObservationWithKilos("Observation/4", 102L); + + when(observationDao.search(any(), any())).thenReturn(new SimpleBundleProvider(obs1, obs2, obs3, obs4)); + + String statement = """ + select + id, + value.ofType(Quantity).value, + value.ofType(Quantity).system, + value.ofType(Quantity).code + from Observation + where + value.ofType(Quantity).value > 100 + """; + + IHfqlExecutionResult.Row nextRow; + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases().toString(), result.getStatement().toSelectedColumnAliases(), hasItems( + "id", "value.ofType(Quantity).value", "value.ofType(Quantity).system", "value.ofType(Quantity).code" + )); + assertThat(result.getStatement().toSelectedColumnDataTypes().toString(), result.getStatement().toSelectedColumnDataTypes(), hasItems( + HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.DECIMAL, HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.STRING + )); + + nextRow = result.getNextRow(); + assertThat(nextRow.getRowValues().toString(), nextRow.getRowValues(), contains( + "3", "101", "http://unitsofmeasure.org", "kg" + )); + } + + @Test + public void testFromHavingSelectIn() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + + String statement = """ + from Patient + where name.given in ('Foo' | 'Bart') + select Given:name[0].given[1], Family:name[0].family[0] + """; + + IHfqlExecutionResult result = 
myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases(), contains( + "Given", "Family" + )); + assertTrue(result.hasNext()); + IHfqlExecutionResult.Row nextRow = result.getNextRow(); + assertEquals(2, nextRow.getRowOffset()); + assertThat(nextRow.getRowValues(), contains("El Barto", "Simpson")); + assertFalse(result.hasNext()); + + } + + @Test + public void testFromHavingSelectEquals() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + + String statement = """ + from Patient + where name.given = 'Homer' + select Given:name[0].given[1], Family:name[0].family + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertThat(result.getStatement().toSelectedColumnAliases(), contains( + "Given", "Family" + )); + assertTrue(result.hasNext()); + IHfqlExecutionResult.Row row = result.getNextRow(); + assertEquals(0, row.getRowOffset()); + assertThat(row.getRowValues(), contains("Jay", "Simpson")); + assertFalse(result.hasNext()); + + } + + @Test + public void testIntrospectTables() { + IHfqlExecutionResult tables = myHfqlExecutor.introspectTables(); + assertEquals("TABLE_NAME", tables.getStatement().toSelectedColumnAliases().get(2)); + assertTrue(tables.hasNext()); + assertEquals("Account", tables.getNextRow().getRowValues().get(2)); + } + + @Test + public void testIntrospectColumns_NoSelector() { + IHfqlExecutionResult tables = myHfqlExecutor.introspectColumns(null, null); + assertEquals("TABLE_NAME", tables.getStatement().toSelectedColumnAliases().get(2), tables.getStatement().toSelectedColumnAliases().toString()); + assertEquals("COLUMN_NAME", tables.getStatement().toSelectedColumnAliases().get(3), tables.getStatement().toSelectedColumnAliases().toString()); + assertEquals("DATA_TYPE", tables.getStatement().toSelectedColumnAliases().get(4), 
tables.getStatement().toSelectedColumnAliases().toString()); + assertTrue(tables.hasNext()); + assertEquals("Account", tables.getNextRow().getRowValues().get(2)); + assertEquals("coverage", tables.getNextRow().getRowValues().get(3)); + assertEquals(Types.VARCHAR, tables.getNextRow().getRowValues().get(4)); + } + + @Test + public void testIntrospectColumns_TableSelector() { + IHfqlExecutionResult tables = myHfqlExecutor.introspectColumns("Patient", null); + assertEquals("TABLE_NAME", tables.getStatement().toSelectedColumnAliases().get(2), tables.getStatement().toSelectedColumnAliases().toString()); + assertEquals("COLUMN_NAME", tables.getStatement().toSelectedColumnAliases().get(3), tables.getStatement().toSelectedColumnAliases().toString()); + assertEquals("DATA_TYPE", tables.getStatement().toSelectedColumnAliases().get(4), tables.getStatement().toSelectedColumnAliases().toString()); + assertTrue(tables.hasNext()); + assertEquals("Patient", tables.getNextRow().getRowValues().get(2)); + assertEquals("address", tables.getNextRow().getRowValues().get(3)); + assertEquals(Types.VARCHAR, tables.getNextRow().getRowValues().get(4)); + } + + @ValueSource(strings = { + "_blah", "foo" + }) + @ParameterizedTest + public void testWhere_Error_UnknownParam(String theParamName) { + initDao(Patient.class); + + String statement = "from Patient " + + "where id in search_match('" + theParamName + "', 'abc') " + + "select name.given"; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertErrorMessage(result, "Unknown/unsupported search parameter: " + theParamName); + } + + private static void assertErrorMessage(IHfqlExecutionResult result, String expected) { + assertTrue(result.hasNext()); + IHfqlExecutionResult.Row nextRow = result.getNextRow(); + assertEquals(IHfqlExecutionResult.ROW_OFFSET_ERROR, nextRow.getRowOffset()); + assertThat(nextRow.getRowValues(), contains(expected)); + } + + @Test + public void testWhere_Id_In_CommaList() { 
+ IFhirResourceDao patientDao = initDao(Observation.class); + Observation resource = new Observation(); + resource.getMeta().setVersionId("5"); + resource.setId("Observation/123"); + resource.setValue(new Quantity(null, 500.1, "http://unitsofmeasure.org", "kg", "kg")); + when(patientDao.search(any(), any())).thenReturn(new SimpleBundleProvider(resource)); + + String statement = """ + select + id, meta.versionId, value.ofType(Quantity).value + from + Observation + where + id in search_match('_id', '123,Patient/456') + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + + assertThat(result.getStatement().toSelectedColumnAliases(), contains("id", "meta.versionId", "value.ofType(Quantity).value")); + assertThat(result.getStatement().toSelectedColumnDataTypes(), contains(HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.LONGINT, HfqlDataTypeEnum.DECIMAL)); + assertTrue(result.hasNext()); + List nextRow = result.getNextRow().getRowValues(); + assertEquals("123", nextRow.get(0)); + assertEquals("5", nextRow.get(1)); + assertEquals("500.1", nextRow.get(2)); + + verify(patientDao, times(1)).search(mySearchParameterMapCaptor.capture(), any()); + SearchParameterMap map = mySearchParameterMapCaptor.getValue(); + assertEquals(1, map.get("_id").size()); + assertEquals(2, map.get("_id").get(0).size()); + assertNull(((TokenParam) map.get("_id").get(0).get(0)).getSystem()); + assertEquals("123", ((TokenParam) map.get("_id").get(0).get(0)).getValue()); + assertNull(((TokenParam) map.get("_id").get(0).get(1)).getSystem()); + assertEquals("Patient/456", ((TokenParam) map.get("_id").get(0).get(1)).getValue()); + } + + @Test + public void testSearch_QualifiedSelect() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + + String statement = """ + from Patient + select Patient.name[0].given[0] + """; + + IHfqlExecutionResult outcome = 
myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + assertTrue(outcome.hasNext()); + assertEquals("Homer", outcome.getNextRow().getRowValues().get(0)); + + } + + @Test + public void testSelect_RepeatingElement_NeedsEscaping() { + IFhirResourceDao patientDao = initDao(Patient.class); + Patient patient = new Patient(); + patient.addName().addGiven("1\"2").addGiven("1\\,2"); + when(patientDao.search(any(), any())).thenReturn(new SimpleBundleProvider(patient)); + + + String statement = """ + SELECT + name.given + FROM + Patient + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + + assertThat(result.getStatement().toSelectedColumnAliases(), contains("name.given")); + assertThat(result.getStatement().toSelectedColumnDataTypes(), contains(HfqlDataTypeEnum.JSON)); + assertTrue(result.hasNext()); + List nextRow = result.getNextRow().getRowValues(); + assertEquals("[\"1\\\"2\", \"1\\\\,2\"]", nextRow.get(0)); + + } + + @Test + public void testSearch_UnknownSelector() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + + + String statement = """ + select + name[0].given[0], foo + from + Patient + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + + assertThat(result.getStatement().toSelectedColumnAliases(), contains("name[0].given[0]", "foo")); + assertThat(result.getStatement().toSelectedColumnDataTypes(), contains(HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.STRING)); + assertTrue(result.hasNext()); + List nextRow = result.getNextRow().getRowValues(); + assertEquals("Homer", nextRow.get(0)); + assertNull(nextRow.get(1)); + } + + @Test + public void testWhere_LastUpdated_In() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + + String statement = """ + from 
Patient + where id in search_match('_lastUpdated', 'lt2021,gt2023') + select name.given + """; + + myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + + verify(patientDao, times(1)).search(mySearchParameterMapCaptor.capture(), any()); + SearchParameterMap map = mySearchParameterMapCaptor.getValue(); + assertEquals(1, map.get("_lastUpdated").size()); + assertEquals(2, map.get("_lastUpdated").get(0).size()); + assertEquals(ParamPrefixEnum.LESSTHAN, ((DateParam) map.get("_lastUpdated").get(0).get(0)).getPrefix()); + assertEquals("2021", ((DateParam) map.get("_lastUpdated").get(0).get(0)).getValueAsString()); + assertEquals(ParamPrefixEnum.GREATERTHAN, ((DateParam) map.get("_lastUpdated").get(0).get(1)).getPrefix()); + assertEquals("2023", ((DateParam) map.get("_lastUpdated").get(0).get(1)).getValueAsString()); + } + + @Test + public void testWhere_Boolean() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + + String statement = """ + from Patient + where id in search_match('active', 'true') + select name.given + """; + + myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + + verify(patientDao, times(1)).search(mySearchParameterMapCaptor.capture(), any()); + SearchParameterMap map = mySearchParameterMapCaptor.getValue(); + assertEquals(1, map.get("active").size()); + assertEquals(1, map.get("active").get(0).size()); + assertNull(((TokenParam) map.get("active").get(0).get(0)).getSystem()); + assertEquals("true", ((TokenParam) map.get("active").get(0).get(0)).getValue()); + } + + @Test + public void testWhere_Quantity() { + IFhirResourceDao observationDao = initDao(Observation.class); + when(observationDao.search(any(), any())).thenReturn(new SimpleBundleProvider()); + + String statement = """ + from Observation + where id in search_match('value-quantity', 'lt500|http://unitsofmeasure.org|kg') + select id + """; + + 
myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + + verify(observationDao, times(1)).search(mySearchParameterMapCaptor.capture(), any()); + SearchParameterMap map = mySearchParameterMapCaptor.getValue(); + assertEquals(1, map.get("value-quantity").size()); + assertEquals(1, map.get("value-quantity").get(0).size()); + assertEquals("500", ((QuantityParam) map.get("value-quantity").get(0).get(0)).getValue().toString()); + assertEquals(ParamPrefixEnum.LESSTHAN, ((QuantityParam) map.get("value-quantity").get(0).get(0)).getPrefix()); + assertEquals("http://unitsofmeasure.org", ((QuantityParam) map.get("value-quantity").get(0).get(0)).getSystem()); + assertEquals("kg", ((QuantityParam) map.get("value-quantity").get(0).get(0)).getUnits()); + } + + @Test + public void testWhere_String() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + + String statement = """ + from Patient + where id in search_match('name', 'abc') + select name.given + """; + + myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + + verify(patientDao, times(1)).search(mySearchParameterMapCaptor.capture(), any()); + SearchParameterMap map = mySearchParameterMapCaptor.getValue(); + assertEquals(1, map.get("name").size()); + assertEquals(1, map.get("name").get(0).size()); + assertEquals("abc", ((StringParam) map.get("name").get(0).get(0)).getValue()); + } + + @Test + public void testWhere_String_Exact() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + + String statement = """ + select name.given + from Patient + where id in search_match('name:exact', 'abc') + """; + + myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + + verify(patientDao, times(1)).search(mySearchParameterMapCaptor.capture(), any()); + SearchParameterMap map = 
mySearchParameterMapCaptor.getValue(); + assertEquals(1, map.get("name").size()); + assertEquals(1, map.get("name").get(0).size()); + assertEquals("abc", ((StringParam) map.get("name").get(0).get(0)).getValue()); + assertTrue(((StringParam) map.get("name").get(0).get(0)).isExact()); + } + + @Test + public void testWhere_String_AndOr() { + IFhirResourceDao patientDao = initDao(Patient.class); + when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders()); + + String statement = """ + from Patient + where + id in search_match('name', 'A,B\\,B') + and + id in search_match('name', 'C,D') + select name.given + """; + + myHfqlExecutor.executeInitialSearch(statement, null, mySrd); + + verify(patientDao, times(1)).search(mySearchParameterMapCaptor.capture(), any()); + SearchParameterMap map = mySearchParameterMapCaptor.getValue(); + assertEquals(2, map.get("name").size()); + assertEquals(2, map.get("name").get(0).size()); + assertEquals("A", ((StringParam) map.get("name").get(0).get(0)).getValue()); + assertEquals("B,B", ((StringParam) map.get("name").get(0).get(1)).getValue()); + assertEquals("C", ((StringParam) map.get("name").get(1).get(0)).getValue()); + assertEquals("D", ((StringParam) map.get("name").get(1).get(1)).getValue()); + } + + @Test + public void testError_InvalidFromType() { + String input = """ + from Foo + select Foo.blah + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(input, null, mySrd); + assertErrorMessage(result, "Invalid FROM statement. 
Unknown resource type 'Foo' at position: [line=0, column=5]"); + } + + @Test + public void testError_NonGroupedSelectInCountClause() { + initDao(Patient.class); + + String input = """ + from Patient + select count(*), name.family + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(input, null, mySrd); + assertErrorMessage(result, "Unable to select on non-grouped column in a count expression: name.family"); + } + + @Test + public void testError_SearchMatchOnNonId() { + initDao(Patient.class); + + String input = """ + select name.family + from Patient + where name in search_match('identifier', '1|1') + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(input, null, mySrd); + assertErrorMessage(result, "HAPI-2412: search_match function can only be applied to the id element"); + } + + @Test + public void testError_SearchMatchNotEnoughArguments() { + initDao(Patient.class); + + String input = """ + select name.family + from Patient + where id in search_match('identifier') + """; + + IHfqlExecutionResult result = myHfqlExecutor.executeInitialSearch(input, null, mySrd); + assertErrorMessage(result, "HAPI-2413: search_match function requires 2 arguments"); + } + + @SuppressWarnings("unchecked") + private IFhirResourceDao initDao(Class theType) { + IFhirResourceDao retVal = mock(IFhirResourceDao.class); + String type = myCtx.getResourceType(theType); + when(myDaoRegistry.getResourceDao(type)).thenReturn(retVal); + return retVal; + } + + @Nonnull + private static List> readAllRowValues(IHfqlExecutionResult result) { + List> rowValues = new ArrayList<>(); + while (result.hasNext()) { + rowValues.add(new ArrayList<>(result.getNextRow().getRowValues())); + } + return rowValues; + } + + @Nonnull + private static Observation createCardiologyNoteObservation(String id, String noteText) { + Observation obs = new Observation(); + obs.setId(id); + obs.getCode().addCoding() + .setSystem("http://loinc.org") + .setCode("34752-6"); + 
obs.setValue(new StringType(noteText)); + return obs; + } + + @Nonnull + private static Observation createWeightObservationWithKilos(String obsId, long kg) { + Observation obs = new Observation(); + obs.setId(obsId); + obs.getCode().addCoding() + .setSystem("http://loinc.org") + .setCode("29463-7"); + obs.setValue(new Quantity(null, kg, "http://unitsofmeasure.org", "kg", "kg")); + return obs; + } + + @Nonnull + private static SimpleBundleProvider createProviderWithSparseNames() { + Patient patientNoValues = new Patient(); + patientNoValues.setActive(true); + Patient patientFamilyNameOnly = new Patient(); + patientFamilyNameOnly.addName().setFamily("Simpson"); + Patient patientGivenNameOnly = new Patient(); + patientGivenNameOnly.addName().addGiven("Homer"); + Patient patientBothNames = new Patient(); + patientBothNames.addName().setFamily("Simpson").addGiven("Homer"); + return new SimpleBundleProvider(List.of( + patientNoValues, patientFamilyNameOnly, patientGivenNameOnly, patientBothNames)); + } + + @Nonnull + private static SimpleBundleProvider createProviderWithSomeSimpsonsAndFlanders() { + return new SimpleBundleProvider( + createPatientHomerSimpson(), + createPatientNedFlanders(), + createPatientBartSimpson(), + createPatientLisaSimpson(), + createPatientMaggieSimpson() + ); + } + + @Nonnull + private static SimpleBundleProvider createProviderWithSomeSimpsonsAndFlandersWithSomeDuplicates() { + return new SimpleBundleProvider( + createPatientHomerSimpson(), + createPatientHomerSimpson(), + createPatientNedFlanders(), + createPatientNedFlanders(), + createPatientBartSimpson(), + createPatientLisaSimpson(), + createPatientMaggieSimpson()); + } + + @Nonnull + private static Patient createPatientMaggieSimpson() { + Patient maggie = new Patient(); + maggie.addName().setFamily("Simpson").addGiven("Maggie").addGiven("Evelyn"); + maggie.addIdentifier().setSystem("http://system").setValue("value4"); + return maggie; + } + + @Nonnull + private static Patient 
createPatientLisaSimpson() { + Patient lisa = new Patient(); + lisa.getMeta().setVersionId("1"); + lisa.addName().setFamily("Simpson").addGiven("Lisa").addGiven("Marie"); + lisa.addIdentifier().setSystem("http://system").setValue("value3"); + return lisa; + } + + @Nonnull + private static Patient createPatientBartSimpson() { + Patient bart = new Patient(); + bart.getMeta().setVersionId("3"); + bart.addName().setFamily("Simpson").addGiven("Bart").addGiven("El Barto"); + bart.addIdentifier().setSystem("http://system").setValue("value2"); + return bart; + } + + @Nonnull + private static Patient createPatientNedFlanders() { + Patient nedFlanders = new Patient(); + nedFlanders.getMeta().setVersionId("1"); + nedFlanders.addName().setFamily("Flanders").addGiven("Ned"); + nedFlanders.addIdentifier().setSystem("http://system").setValue("value1"); + return nedFlanders; + } + + @Nonnull + private static Patient createPatientHomerSimpson() { + Patient homer = new Patient(); + homer.getMeta().setVersionId("2"); + homer.addName().setFamily("Simpson").addGiven("Homer").addGiven("Jay"); + homer.addIdentifier().setSystem("http://system").setValue("value0"); + homer.setBirthDateElement(new DateType("1950-01-01")); + return homer; + } + +} diff --git a/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/jdbc/HfqlRestClientTest.java b/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/jdbc/HfqlRestClientTest.java new file mode 100644 index 00000000000..044717fee96 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/jdbc/HfqlRestClientTest.java @@ -0,0 +1,184 @@ +package ca.uhn.fhir.jpa.fql.jdbc; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.interceptor.api.Hook; +import ca.uhn.fhir.interceptor.api.Interceptor; +import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.jpa.fql.executor.HfqlDataTypeEnum; +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult; +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutor; +import 
ca.uhn.fhir.jpa.fql.executor.StaticHfqlExecutionResult; +import ca.uhn.fhir.jpa.fql.parser.HfqlStatement; +import ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider; +import ca.uhn.fhir.jpa.fql.util.HfqlConstants; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; +import ca.uhn.fhir.test.utilities.server.RestfulServerExtension; +import org.hl7.fhir.r4.model.CodeType; +import org.hl7.fhir.r4.model.IntegerType; +import org.hl7.fhir.r4.model.Parameters; +import org.hl7.fhir.r4.model.StringType; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.util.Base64Utils; + +import javax.annotation.Nonnull; +import java.nio.charset.StandardCharsets; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +public class HfqlRestClientTest { + private static final FhirContext ourCtx = FhirContext.forR4Cached(); + private static final String USERNAME = "some-username"; + private static final String PASSWORD = 
"some-password"; + private static final HeaderCaptureInterceptor ourHeaderCaptureInterceptor = new HeaderCaptureInterceptor(); + @Mock + private IHfqlExecutor myFqlExecutor; + @Mock + private IHfqlExecutionResult myMockFqlResult0; + @Mock + private IHfqlExecutionResult myMockFqlResult1; + @InjectMocks + private static final HfqlRestProvider ourProvider = new HfqlRestProvider(); + @RegisterExtension + public static final RestfulServerExtension ourServer = new RestfulServerExtension(ourCtx) + .registerProvider(ourProvider) + .registerInterceptor(ourHeaderCaptureInterceptor); + @Captor + private ArgumentCaptor myStatementCaptor; + @Captor + private ArgumentCaptor myRequestDetailsCaptor; + @Captor + private ArgumentCaptor myLimitCaptor; + private HfqlRestClient myClient; + + @BeforeEach + public void beforeEach() { + ourHeaderCaptureInterceptor.clear(); + myClient = new HfqlRestClient(ourServer.getBaseUrl(), USERNAME, PASSWORD); + } + + @AfterEach + public void afterEach() { + myClient.close(); + } + + + @Test + public void testExecuteSearchAndContinuation() throws SQLException { + String sql = "from Patient select name.family, name.given where name.family = 'Simpson'"; + String searchId = "my-search-id"; + HfqlStatement statement = createFakeStatement(); + when(myMockFqlResult0.getStatement()).thenReturn(statement); + when(myMockFqlResult0.hasNext()).thenReturn(true, true, true); + when(myMockFqlResult0.getNextRow()).thenReturn( + new IHfqlExecutionResult.Row(0, List.of("Simpson", "Homer")), + new IHfqlExecutionResult.Row(3, List.of("Simpson", "Marge")), + // Fetch size is 2 so this one shouldn't get returned in the first pass + new IHfqlExecutionResult.Row(5, List.of("Simpson", "Maggie")) + ); + when(myMockFqlResult0.getSearchId()).thenReturn(searchId); + when(myMockFqlResult0.getLimit()).thenReturn(123); + when(myFqlExecutor.executeInitialSearch(eq(sql), any(), any())).thenReturn(myMockFqlResult0); + + when(myMockFqlResult1.getStatement()).thenReturn(statement); + 
when(myMockFqlResult1.hasNext()).thenReturn(true, true, false); + when(myMockFqlResult1.getNextRow()).thenReturn( + new IHfqlExecutionResult.Row(5, List.of("Simpson", "Maggie")), + new IHfqlExecutionResult.Row(7, List.of("Simpson", "Lisa")) + ); + when(myMockFqlResult1.getSearchId()).thenReturn(searchId); + when(myMockFqlResult1.getLimit()).thenReturn(123); + when(myFqlExecutor.executeContinuation(any(), eq(searchId), eq(4), eq(123), any())).thenReturn(myMockFqlResult1); + when(myFqlExecutor.executeContinuation(any(), eq(searchId), eq(8), eq(123), any())).thenReturn(new StaticHfqlExecutionResult(searchId)); + + Parameters input = new Parameters(); + input.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_SEARCH)); + input.addParameter(HfqlConstants.PARAM_QUERY, new StringType(sql)); + input.addParameter(HfqlConstants.PARAM_LIMIT, new IntegerType(123)); + input.addParameter(HfqlConstants.PARAM_FETCH_SIZE, new IntegerType(2)); + + IHfqlExecutionResult result = myClient.execute(input, true, 2); + IHfqlExecutionResult.Row nextRow; + assertTrue(result.hasNext()); + nextRow = result.getNextRow(); + assertEquals(0, nextRow.getRowOffset()); + assertThat(nextRow.getRowValues(), contains("Simpson", "Homer")); + assertTrue(result.hasNext()); + nextRow = result.getNextRow(); + assertEquals(3, nextRow.getRowOffset()); + assertThat(nextRow.getRowValues(), contains("Simpson", "Marge")); + assertTrue(result.hasNext()); + nextRow = result.getNextRow(); + assertEquals(5, nextRow.getRowOffset()); + assertThat(nextRow.getRowValues(), contains("Simpson", "Maggie")); + assertTrue(result.hasNext()); + nextRow = result.getNextRow(); + assertEquals(7, nextRow.getRowOffset()); + assertThat(nextRow.getRowValues(), contains("Simpson", "Lisa")); + assertFalse(result.hasNext()); + + verify(myFqlExecutor, times(1)).executeInitialSearch(myStatementCaptor.capture(), myLimitCaptor.capture(), myRequestDetailsCaptor.capture()); + assertEquals(sql, 
myStatementCaptor.getValue()); + String expectedAuthHeader = Constants.HEADER_AUTHORIZATION_VALPREFIX_BASIC + Base64Utils.encodeToString((USERNAME + ":" + PASSWORD).getBytes(StandardCharsets.UTF_8)); + + + String actual = ourHeaderCaptureInterceptor.getCapturedHeaders().get(0).get(Constants.HEADER_AUTHORIZATION).get(0); + assertEquals(expectedAuthHeader, actual); + assertEquals(123, myLimitCaptor.getValue().intValue()); + } + + @Nonnull + public static HfqlStatement createFakeStatement() { + HfqlStatement statement = new HfqlStatement(); + statement.setFromResourceName("Patient"); + statement.addSelectClause("name[0].family").setAlias("name[0].family").setDataType(HfqlDataTypeEnum.STRING); + statement.addSelectClause("name[0].given[0]").setAlias("name[0].given[0]").setDataType(HfqlDataTypeEnum.STRING); + + return statement; + } + + @Interceptor + public static class HeaderCaptureInterceptor { + + private final List>> myCapturedHeaders = new ArrayList<>(); + + @Hook(Pointcut.SERVER_INCOMING_REQUEST_PRE_HANDLED) + public void capture(ServletRequestDetails theServletRequestDetails) { + myCapturedHeaders.add(theServletRequestDetails.getHeaders()); + } + + public void clear() { + myCapturedHeaders.clear(); + } + + public List>> getCapturedHeaders() { + return myCapturedHeaders; + } + + } + +} diff --git a/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcDriverTest.java b/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcDriverTest.java new file mode 100644 index 00000000000..7e643fcd171 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcDriverTest.java @@ -0,0 +1,327 @@ +package ca.uhn.fhir.jpa.fql.jdbc; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.jpa.fql.executor.HfqlDataTypeEnum; +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult; +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutor; +import ca.uhn.fhir.jpa.fql.parser.HfqlStatement; +import 
ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.test.utilities.server.RestfulServerExtension; +import com.google.common.collect.Lists; +import org.apache.commons.dbcp2.BasicDataSource; +import org.hl7.fhir.r4.model.DateTimeType; +import org.hl7.fhir.r4.model.DateType; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.jdbc.UncategorizedSQLException; +import org.springframework.jdbc.core.ColumnMapRowMapper; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.util.Base64Utils; + +import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.List; +import java.util.Map; + +import static ca.uhn.fhir.jpa.fql.jdbc.HfqlRestClientTest.createFakeStatement; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@SuppressWarnings({"SqlDialectInspection", "SqlNoDataSourceInspection"}) +@ExtendWith(MockitoExtension.class) +public class JdbcDriverTest { + public static final String 
SOME_USERNAME = "some-username"; + public static final String SOME_PASSWORD = "some-password"; + private static final FhirContext ourCtx = FhirContext.forR4Cached(); + private static final HfqlRestClientTest.HeaderCaptureInterceptor ourHeaderCaptureInterceptor = new HfqlRestClientTest.HeaderCaptureInterceptor(); + @Mock + private IHfqlExecutor myFqlExecutor; + @Mock + private IHfqlExecutionResult myMockFqlResult; + @InjectMocks + private HfqlRestProvider myProvider = new HfqlRestProvider(); + @RegisterExtension + public RestfulServerExtension myServer = new RestfulServerExtension(ourCtx) + .registerProvider(myProvider) + .registerInterceptor(ourHeaderCaptureInterceptor); + + private BasicDataSource myDs; + + @BeforeEach + public void beforeEach() throws SQLException { + JdbcDriver.load(); + + myDs = new BasicDataSource(); + myDs.setUrl(JdbcDriver.URL_PREFIX + myServer.getBaseUrl()); + myDs.setUsername(SOME_USERNAME); + myDs.setPassword(SOME_PASSWORD); + myDs.start(); + + ourHeaderCaptureInterceptor.clear(); + } + + @AfterEach + public void afterEach() throws SQLException { + myDs.close(); + + JdbcDriver.unload(); + } + + @Test + public void testExecuteStatement() { + HfqlStatement statement = createFakeStatement(); + when(myFqlExecutor.executeInitialSearch(any(), any(), any())).thenReturn(myMockFqlResult); + when(myMockFqlResult.getStatement()).thenReturn(statement); + when(myMockFqlResult.hasNext()).thenReturn(true, true, false); + when(myMockFqlResult.getNextRow()).thenReturn( + new IHfqlExecutionResult.Row(0, List.of("Simpson", "Homer")), + new IHfqlExecutionResult.Row(3, List.of("Simpson", "Marge")) + ); + when(myMockFqlResult.getSearchId()).thenReturn("my-search-id"); + when(myMockFqlResult.getLimit()).thenReturn(999); + + String input = """ + from Patient + select name.family, name.given + """; + JdbcTemplate jdbcTemplate = new JdbcTemplate(myDs); + List> outcome = jdbcTemplate.query(input, new ColumnMapRowMapper()); + assertEquals(2, outcome.size()); + + 
String expectedAuthHeader = Constants.HEADER_AUTHORIZATION_VALPREFIX_BASIC + Base64Utils.encodeToString((SOME_USERNAME + ":" + SOME_PASSWORD).getBytes(StandardCharsets.UTF_8)); + String actual = ourHeaderCaptureInterceptor.getCapturedHeaders().get(0).get(Constants.HEADER_AUTHORIZATION).get(0); + assertEquals(expectedAuthHeader, actual); + } + + @Test + public void testExecuteStatement_ReturnsError() { + String errorMessage = "this is an error!"; + + HfqlStatement statement = createFakeStatement(); + when(myFqlExecutor.executeInitialSearch(any(), any(), any())).thenReturn(myMockFqlResult); + when(myMockFqlResult.getStatement()).thenReturn(statement); + when(myMockFqlResult.hasNext()).thenReturn(true, false); + when(myMockFqlResult.getNextRow()).thenReturn( + new IHfqlExecutionResult.Row(IHfqlExecutionResult.ROW_OFFSET_ERROR, List.of(errorMessage)) + ); + when(myMockFqlResult.getSearchId()).thenReturn("my-search-id"); + when(myMockFqlResult.getLimit()).thenReturn(999); + + String input = """ + from Patient + select name.family, name.given + """; + JdbcTemplate jdbcTemplate = new JdbcTemplate(myDs); + + try { + jdbcTemplate.query(input, new ColumnMapRowMapper()); + fail(); + } catch (UncategorizedSQLException e) { + assertEquals(SQLException.class, e.getCause().getClass()); + assertEquals(Msg.code(2395) + "this is an error!", e.getCause().getMessage()); + } + } + + @Test + public void testDataTypes() throws SQLException { + // Setup + HfqlStatement hfqlStatement = new HfqlStatement(); + hfqlStatement.setFromResourceName("Patient"); + hfqlStatement.addSelectClauseAndAlias("col.string").setDataType(HfqlDataTypeEnum.STRING); + hfqlStatement.addSelectClauseAndAlias("col.date").setDataType(HfqlDataTypeEnum.DATE); + hfqlStatement.addSelectClauseAndAlias("col.boolean").setDataType(HfqlDataTypeEnum.BOOLEAN); + hfqlStatement.addSelectClauseAndAlias("col.time").setDataType(HfqlDataTypeEnum.TIME); + 
hfqlStatement.addSelectClauseAndAlias("col.decimal").setDataType(HfqlDataTypeEnum.DECIMAL); + hfqlStatement.addSelectClauseAndAlias("col.integer").setDataType(HfqlDataTypeEnum.INTEGER); + hfqlStatement.addSelectClauseAndAlias("col.longint").setDataType(HfqlDataTypeEnum.LONGINT); + hfqlStatement.addSelectClauseAndAlias("col.timestamp").setDataType(HfqlDataTypeEnum.TIMESTAMP); + when(myMockFqlResult.getStatement()).thenReturn(hfqlStatement); + + when(myFqlExecutor.executeInitialSearch(any(), any(), any())).thenReturn(myMockFqlResult); + when(myMockFqlResult.hasNext()).thenReturn(true, false); + when(myMockFqlResult.getNextRow()).thenReturn( + new IHfqlExecutionResult.Row(0, List.of("a-string", "2023-02-02", "true", "12:23:22", "100.123", "123", "987", "2023-02-12T10:01:02.234Z")) + ); + when(myMockFqlResult.getSearchId()).thenReturn("my-search-id"); + when(myMockFqlResult.getLimit()).thenReturn(999); + + String input = """ + select col.string, col.date, col.boolean, col.time, col.decimal, col.integer, col.longint, col.timestamp + from Patient + """; + + // Test + Connection connection = myDs.getConnection(); + Statement statement = connection.createStatement(); + assertTrue(statement.execute(input)); + ResultSet resultSet = statement.getResultSet(); + + // Verify + assertTrue(resultSet.next()); + assertEquals("a-string", resultSet.getString("col.string")); + assertEquals(new DateType("2023-02-02").getValue(), resultSet.getDate("col.date")); + assertEquals(true, resultSet.getBoolean("col.boolean")); + assertEquals("12:23:22", resultSet.getTime("col.time").toString()); + assertEquals(new BigDecimal("100.123"), resultSet.getBigDecimal("col.decimal")); + assertEquals(new BigDecimal("100.123"), resultSet.getBigDecimal("col.decimal", 100)); + assertEquals(100.123f, resultSet.getFloat("col.decimal")); + assertEquals(100.123d, resultSet.getDouble("col.decimal")); + assertEquals(123, resultSet.getInt("col.integer")); + assertEquals(987L, resultSet.getLong("col.longint")); + 
assertEquals(new Timestamp(new DateTimeType("2023-02-12T10:01:02.234Z").getValue().getTime()), resultSet.getTimestamp("col.timestamp")); + + // Using getObject + assertEquals("a-string", resultSet.getObject("col.string")); + assertEquals(new DateType("2023-02-02").getValue(), resultSet.getObject("col.date")); + assertEquals(true, resultSet.getObject("col.boolean")); + assertEquals("12:23:22", resultSet.getObject("col.time").toString()); + assertEquals(new BigDecimal("100.123"), resultSet.getObject("col.decimal")); + assertEquals(123, resultSet.getObject("col.integer")); + assertEquals(987L, resultSet.getObject("col.longint")); + assertEquals(new Timestamp(new DateTimeType("2023-02-12T10:01:02.234Z").getValue().getTime()), resultSet.getObject("col.timestamp")); + + assertThrows(SQLException.class, () -> resultSet.getString(0)); + assertThrows(SQLException.class, () -> resultSet.getString(999)); + assertThrows(SQLException.class, () -> resultSet.getString("foo")); + } + + @Test + public void testDatatypes_TimestampPrecision() throws SQLException { + // Setup + when(myFqlExecutor.executeInitialSearch(any(), any(), any())).thenReturn(myMockFqlResult); + HfqlStatement fakeStatement = createFakeStatement(); + fakeStatement.getSelectClauses().clear(); + fakeStatement.addSelectClause("col.time").setAlias("col.time").setDataType(HfqlDataTypeEnum.TIME); + when(myMockFqlResult.getStatement()).thenReturn(fakeStatement); + when(myMockFqlResult.hasNext()).thenReturn(true, true, true, true, true, false); + when(myMockFqlResult.getNextRow()).thenReturn( + new IHfqlExecutionResult.Row(0, List.of("12:23")), + new IHfqlExecutionResult.Row(1, List.of("12:23:10")), + new IHfqlExecutionResult.Row(2, List.of("12:23:11.0")), + new IHfqlExecutionResult.Row(3, List.of("12:23:12.12")), + new IHfqlExecutionResult.Row(4, List.of("12:23:13.123")) + ); + when(myMockFqlResult.getSearchId()).thenReturn("my-search-id"); + when(myMockFqlResult.getLimit()).thenReturn(999); + + String input = "select 
col.time from Patient"; + + // Test + Connection connection = myDs.getConnection(); + Statement statement = connection.createStatement(); + assertTrue(statement.execute(input)); + ResultSet resultSet = statement.getResultSet(); + + // Verify + assertTrue(resultSet.next()); + assertEquals("12:23:00", resultSet.getTime("col.time").toString()); + assertTrue(resultSet.next()); + assertEquals("12:23:10", resultSet.getTime("col.time").toString()); + assertTrue(resultSet.next()); + assertEquals("12:23:11", resultSet.getTime("col.time").toString()); + assertTrue(resultSet.next()); + assertEquals("12:23:12", resultSet.getTime("col.time").toString()); + assertTrue(resultSet.next()); + assertEquals("12:23:13", resultSet.getTime("col.time").toString()); + assertFalse(resultSet.next()); + + verify(myFqlExecutor, times(1)).executeInitialSearch(any(), any(), any()); + verify(myFqlExecutor, times(0)).executeContinuation(any(), any(), anyInt(), any(), any()); + } + + + @Test + public void testIntrospectTables() throws SQLException { + when(myFqlExecutor.introspectTables()).thenReturn(myMockFqlResult); + HfqlStatement statement = new HfqlStatement(); + statement.addSelectClause("TABLE_NAME").setAlias("TABLE_NAME").setDataType(HfqlDataTypeEnum.STRING); + when(myMockFqlResult.getStatement()).thenReturn(statement); + when(myMockFqlResult.hasNext()).thenReturn(true, false); + when(myMockFqlResult.getNextRow()).thenReturn(new IHfqlExecutionResult.Row(0, List.of("Account"))); + + Connection connection = myDs.getConnection(); + DatabaseMetaData metadata = connection.getMetaData(); + ResultSet tables = metadata.getTables(null, null, null, null); + assertTrue(tables.isBeforeFirst()); + assertTrue(tables.next()); + assertFalse(tables.isBeforeFirst()); + assertEquals("Account", tables.getString(1)); + assertEquals("Account", tables.getString("TABLE_NAME")); + } + + + @Test + public void testIntrospectColumns() throws SQLException { + when(myFqlExecutor.introspectColumns(any(), 
any())).thenReturn(myMockFqlResult); + HfqlStatement statement = new HfqlStatement(); + statement.addSelectClauseAndAlias("COLUMN_NAME").setDataType(HfqlDataTypeEnum.STRING); + statement.addSelectClauseAndAlias("DATA_TYPE").setDataType(HfqlDataTypeEnum.INTEGER); + when(myMockFqlResult.getStatement()).thenReturn(statement); + when(myMockFqlResult.hasNext()).thenReturn(true, true, false); + when(myMockFqlResult.getNextRow()).thenReturn( + new IHfqlExecutionResult.Row(0, Lists.newArrayList("foo", Types.VARCHAR)), + new IHfqlExecutionResult.Row(1, Lists.newArrayList("bar", null)) + ); + + Connection connection = myDs.getConnection(); + DatabaseMetaData metadata = connection.getMetaData(); + ResultSet tables = metadata.getColumns(null, null, null, null); + + // Row 1 + assertTrue(tables.next()); + assertEquals("foo", tables.getString(1)); + assertEquals("foo", tables.getString("COLUMN_NAME")); + assertFalse(tables.wasNull()); + assertEquals(Types.VARCHAR, tables.getInt(2)); + assertEquals(Types.VARCHAR, tables.getInt("DATA_TYPE")); + assertFalse(tables.wasNull()); + // Row 2 + assertTrue(tables.next()); + assertEquals("bar", tables.getString(1)); + assertEquals("bar", tables.getString("COLUMN_NAME")); + assertEquals(0, tables.getInt(2)); + assertEquals(0, tables.getInt("DATA_TYPE")); + assertTrue(tables.wasNull()); + // No more rows + assertFalse(tables.next()); + // Invalid columns + assertThrows(SQLException.class, () -> tables.getString(0)); + assertThrows(SQLException.class, () -> tables.getString(999)); + assertThrows(SQLException.class, () -> tables.getString("foo")); + + } + + @Test + public void testMetadata_ImportedAndExportedKeys() throws SQLException { + Connection connection = myDs.getConnection(); + DatabaseMetaData metadata = connection.getMetaData(); + + assertFalse(metadata.getImportedKeys(null, null, null).next()); + assertFalse(metadata.getExportedKeys(null, null, null).next()); + } + + +} diff --git 
a/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/parser/HfqlFhirPathParserTest.java b/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/parser/HfqlFhirPathParserTest.java new file mode 100644 index 00000000000..74bd9501a19 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/parser/HfqlFhirPathParserTest.java @@ -0,0 +1,60 @@ +package ca.uhn.fhir.jpa.fql.parser; + +import ca.uhn.fhir.context.BaseRuntimeElementDefinition; +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.RuntimePrimitiveDatatypeDefinition; +import ca.uhn.fhir.jpa.fql.executor.HfqlDataTypeEnum; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +public class HfqlFhirPathParserTest { + + @ParameterizedTest + @CsvSource(value = { + // Good + "Patient , Patient.name.family , JSON", + "Patient , Patient.name[0].family , STRING", + "Patient , Patient.name.family[0] , JSON", + "Patient , Patient.name[0].family[0] , STRING", + "Patient , Patient.name.given.getValue().is(System.string) , JSON", + "Patient , Patient.name.given.getValue().is(System.string).first() , STRING", + "Patient , Patient.identifier.where(system='foo').system , JSON", + "Patient , Patient.identifier.where(system='foo').first().system , STRING", + "Observation , Observation.value.ofType(Quantity).value , DECIMAL", + "Patient , name.family , JSON", + "Patient , name[0].family[0] , STRING", + "Patient , name.given.getValue().is(System.string) , JSON", + "Patient , identifier.where(system='foo').system , JSON", + "Patient , identifier[0].where(system='foo').system , STRING", + "Observation , value.ofType(Quantity).value , DECIMAL", + "Patient , Patient.meta.versionId.toInteger() , INTEGER", + "Patient , Patient.identifier , JSON", + // Bad + "Patient , foo , ", 
+ }) + void testDetermineDatatypeForPath(String theResourceType, String theFhirPath, HfqlDataTypeEnum theExpectedType) { + HfqlFhirPathParser svc = new HfqlFhirPathParser(FhirContext.forR4Cached()); + HfqlDataTypeEnum actual = svc.determineDatatypeForPath(theResourceType, theFhirPath); + assertEquals(theExpectedType, actual); + } + + + @Test + void testAllFhirDataTypesHaveMappings() { + FhirContext ctx = FhirContext.forR5Cached(); + int foundCount = 0; + for (BaseRuntimeElementDefinition next : ctx.getElementDefinitions()) { + if (next instanceof RuntimePrimitiveDatatypeDefinition) { + assertNotNull(HfqlFhirPathParser.getHfqlDataTypeForFhirType(next.getName()), () -> "No mapping for type: " + next.getName()); + foundCount++; + } + } + assertEquals(21, foundCount); + } + + +} diff --git a/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/parser/HfqlLexerTest.java b/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/parser/HfqlLexerTest.java new file mode 100644 index 00000000000..c094bc545ed --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/parser/HfqlLexerTest.java @@ -0,0 +1,166 @@ +package ca.uhn.fhir.jpa.fql.parser; + +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; + +import java.util.List; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +public class HfqlLexerTest { + + @Test + void testSimpleStatement() { + String input = """ + from Patient + select + name.given[0], + name.family + """; + List allTokens = new HfqlLexer(input).allTokens(); + assertThat(allTokens, contains( + "from", "Patient", "select", "name.given[0]", ",", 
"name.family" + )); + } + + @Test + void testSelectStar() { + String input = """ + from Patient + select + * + """; + List allTokens = new HfqlLexer(input).allTokens(); + assertThat(allTokens, contains( + "from", "Patient", "select", "*" + )); + } + + @Test + void testQuotedString() { + String input = """ + from + Patient + where + name.given = 'Foo \\' Chalmers' + select + name.given[0],\s + name.family + """; + List allTokens = new HfqlLexer(input).allTokens(); + assertThat(allTokens, contains( + "from", "Patient", "where", + "name.given", "=", "'Foo ' Chalmers'", + "select", "name.given[0]", + ",", "name.family" + )); + + } + + @Test + void testSearchParamWithQualifiers() { + String input = """ + from + Patient + search + _has:Observation:subject:device.identifier='1234-5' + select + name.family + """; + HfqlLexer hfqlLexer = new HfqlLexer(input); + + assertEquals("from", hfqlLexer.getNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken()); + assertEquals("Patient", hfqlLexer.getNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken()); + assertEquals("search", hfqlLexer.getNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken()); + assertEquals("_has:Observation:subject:device.identifier", hfqlLexer.getNextToken(HfqlLexerOptions.SEARCH_PARAMETER_NAME).getToken()); + assertEquals("=", hfqlLexer.getNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken()); + assertEquals("'1234-5'", hfqlLexer.getNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken()); + assertEquals("select", hfqlLexer.getNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken()); + assertEquals("name.family", hfqlLexer.getNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken()); + + } + + @Test + void testInList() { + String input = """ + from StructureDefinition + where url in ('foo' | 'bar') + select + Name: name, + URL: url + """; + List allTokens = new HfqlLexer(input).allTokens(); + assertThat(allTokens, contains( + "from", "StructureDefinition", "where", + "url", "in", "(", "'foo'", "|", "'bar'", ")", + "select", + "Name", ":", 
"name", ",", + "URL", ":", "url" + )); + } + + @Test + void testFhirPathSelector() { + String input = """ + from Patient + select + ( Observation.value.ofType ( Quantity ) ).unit, + name.family.length() + """; + HfqlLexer lexer = new HfqlLexer(input); + assertEquals("from", lexer.getNextToken().getToken()); + assertEquals("Patient", lexer.getNextToken().getToken()); + assertEquals("select", lexer.getNextToken().getToken()); + assertEquals("( Observation.value.ofType ( Quantity ) ).unit", lexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION).getToken()); + assertEquals(",", lexer.getNextToken().getToken()); + assertEquals("name.family.length()", lexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION).getToken()); + } + + + @Test + void testOptionChangeIsRespected() { + // Setup + String input = """ + from Patient + select + ( Observation.value.ofType ( Quantity ) ).unit, + name.family.length() + """; + HfqlLexer lexer = new HfqlLexer(input); + assertEquals("from", lexer.getNextToken().getToken()); + assertEquals("Patient", lexer.getNextToken().getToken()); + assertEquals("select", lexer.getNextToken().getToken()); + + // Test + Verify + assertEquals("(", lexer.peekNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken()); + assertEquals("( Observation.value.ofType ( Quantity ) ).unit", lexer.peekNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION).getToken()); + assertEquals("(", lexer.peekNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken()); + assertEquals("( Observation.value.ofType ( Quantity ) ).unit", lexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION).getToken()); + } + + @ParameterizedTest + @CsvSource({ + "token1 token2 'token3, HFQL_TOKEN", + "foo.bar(blah, FHIRPATH_EXPRESSION", + "foo.bar((blah.baz), FHIRPATH_EXPRESSION", + }) + void testIncompleteFragment_String(String theInput, HfqlLexerOptions theOptions) { + HfqlLexer lexer = new HfqlLexer(theInput); + try { + while (lexer.hasNextToken(theOptions)) { + lexer.consumeNextToken(); + } + fail(); + } catch 
(InvalidRequestException e) { + assertThat(e.getMessage(), containsString("Unexpected end of string")); + } + } + + +} diff --git a/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/parser/HfqlStatementParserTest.java b/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/parser/HfqlStatementParserTest.java new file mode 100644 index 00000000000..abea2b22c67 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/parser/HfqlStatementParserTest.java @@ -0,0 +1,615 @@ +package ca.uhn.fhir.jpa.fql.parser; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.parser.DataFormatException; +import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; + +import java.util.stream.Collectors; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.empty; +import static org.junit.jupiter.api.Assertions.*; + +@TestMethodOrder(MethodOrderer.MethodName.class) +@SuppressWarnings("SqlDialectInspection") +public class HfqlStatementParserTest { + + private static final FhirContext ourCtx = FhirContext.forR4Cached(); + + @Test + public void testCountAndGroup() { + String input = """ + select Count(*), name.given, name.family + from Patient + group by name.given, name.family + """; + + HfqlStatement statement = parse(input); + assertEquals("Patient", statement.getFromResourceName()); + assertEquals(3, statement.getSelectClauses().size()); + assertEquals("*", statement.getSelectClauses().get(0).getClause()); + assertEquals(HfqlStatement.SelectClauseOperator.COUNT, statement.getSelectClauses().get(0).getOperator()); + assertEquals("Count(*)", statement.getSelectClauses().get(0).getAlias()); + assertEquals("name.given", statement.getSelectClauses().get(1).getClause()); + assertEquals(HfqlStatement.SelectClauseOperator.SELECT, statement.getSelectClauses().get(1).getOperator()); + 
assertEquals("name.family", statement.getSelectClauses().get(2).getClause()); + assertEquals(HfqlStatement.SelectClauseOperator.SELECT, statement.getSelectClauses().get(2).getOperator()); + assertEquals(2, statement.getGroupByClauses().size()); + assertThat(statement.getGroupByClauses(), contains("name.given", "name.family")); + + } + + + @Test + public void testFromSelect() { + String input = """ + from Patient + select + name.given[0], + name.family + """; + + HfqlStatement statement = parse(input); + assertEquals("Patient", statement.getFromResourceName()); + assertEquals(2, statement.getSelectClauses().size()); + assertEquals("name.given[0]", statement.getSelectClauses().get(0).getClause()); + assertEquals("name.given[0]", statement.getSelectClauses().get(0).getAlias()); + assertEquals(HfqlStatement.SelectClauseOperator.SELECT, statement.getSelectClauses().get(0).getOperator()); + assertEquals("name.family", statement.getSelectClauses().get(1).getClause()); + assertEquals("name.family", statement.getSelectClauses().get(1).getAlias()); + } + + @Test + public void testSelect_SearchMatchWithEscapedCommaInArgument() { + String input = """ + select name.given + from Patient + where + id in search_match('name', 'A,B\\,B') + """; + + HfqlStatement statement = parse(input); + assertEquals("Patient", statement.getFromResourceName()); + assertEquals(1, statement.getSelectClauses().size()); + assertEquals("name.given", statement.getSelectClauses().get(0).getClause()); + assertEquals("name.given", statement.getSelectClauses().get(0).getAlias()); + assertEquals(HfqlStatement.SelectClauseOperator.SELECT, statement.getSelectClauses().get(0).getOperator()); + + assertEquals(1, statement.getWhereClauses().size()); + assertEquals("id", statement.getWhereClauses().get(0).getLeft()); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(0).getOperator()); + assertThat(statement.getWhereClauses().get(0).getRight(), contains("'name'", 
"'A,B\\,B'")); + + } + + @Test + public void testSelect_ValueWithPrefix() { + String input = """ + SELECT id + FROM Observation + WHERE + id in search_match('value-quantity', 'lt500') AND + Patient.meta.versionId = '2' AND + value.ofType(string).lower().contains('running') + ORDER BY id DESC + """; + + HfqlStatement statement = parse(input); + assertEquals("Observation", statement.getFromResourceName()); + assertEquals(3, statement.getWhereClauses().size()); + + assertEquals("id", statement.getWhereClauses().get(0).getLeft()); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(0).getOperator()); + assertThat(statement.getWhereClauses().get(0).getRight(), contains("'value-quantity'", "'lt500'")); + + assertEquals("Patient.meta.versionId", statement.getWhereClauses().get(1).getLeft()); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.EQUALS, statement.getWhereClauses().get(1).getOperator()); + assertThat(statement.getWhereClauses().get(1).getRight(), contains("'2'")); + + assertEquals("value.ofType(string).lower().contains('running')", statement.getWhereClauses().get(2).getLeft()); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.UNARY_BOOLEAN, statement.getWhereClauses().get(2).getOperator()); + assertThat(statement.getWhereClauses().get(2).getRight(), empty()); + + assertEquals(1, statement.getOrderByClauses().size()); + assertEquals("id", statement.getOrderByClauses().get(0).getClause()); + assertFalse(statement.getOrderByClauses().get(0).isAscending()); + } + + @Test + public void testWhere_UnaryBooleanAsLastStatement() { + String input = """ + SELECT id + FROM Observation + WHERE + id in search_match('code', 'http://loinc.org|34752-6') + AND + value.ofType(string).lower().contains('running') + """; + + HfqlStatement statement = parse(input); + assertEquals("Observation", statement.getFromResourceName()); + assertEquals(2, statement.getWhereClauses().size()); + + assertEquals("id", 
statement.getWhereClauses().get(0).getLeft()); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(0).getOperator()); + assertThat(statement.getWhereClauses().get(0).getRight(), contains("'code'", "'http://loinc.org|34752-6'")); + + assertEquals("value.ofType(string).lower().contains('running')", statement.getWhereClauses().get(1).getLeft()); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.UNARY_BOOLEAN, statement.getWhereClauses().get(1).getOperator()); + assertThat(statement.getWhereClauses().get(1).getRight(), empty()); + } + + + @Test + public void testSelectFrom() { + String input = """ + select + name.given[0], + name.family + from Patient + """; + + HfqlStatement statement = parse(input); + assertEquals("Patient", statement.getFromResourceName()); + assertEquals(2, statement.getSelectClauses().size()); + assertEquals("name.given[0]", statement.getSelectClauses().get(0).getClause()); + assertEquals("name.given[0]", statement.getSelectClauses().get(0).getAlias()); + assertEquals("name.family", statement.getSelectClauses().get(1).getClause()); + assertEquals("name.family", statement.getSelectClauses().get(1).getAlias()); + } + + @Test + public void testSelectComplexFhirPath_StringConcat() { + String input = """ + SELECT FullName: Patient.name.given + ' ' + Patient.name.family + FROM Patient + """; + + HfqlStatement statement = parse(input); + assertEquals("Patient", statement.getFromResourceName()); + assertEquals(1, statement.getSelectClauses().size()); + assertEquals("Patient.name.given + ' ' + Patient.name.family", statement.getSelectClauses().get(0).getClause()); + assertEquals("FullName", statement.getSelectClauses().get(0).getAlias()); + + } + + @Test + public void testSelectComplexFhirPath_StringConcat2() { + String input = """ + SELECT + COL1: identifier[0].system + '|' + identifier[0].value, + identifier[0].system + '|' + identifier[0].value AS COL2, + identifier[0].system + '|' + identifier[0].value 
+ FROM + Patient + """; + + HfqlStatement statement = parse(input); + assertEquals("Patient", statement.getFromResourceName()); + assertEquals(3, statement.getSelectClauses().size()); + assertEquals("identifier[0].system + '|' + identifier[0].value", statement.getSelectClauses().get(0).getClause()); + assertEquals("COL1", statement.getSelectClauses().get(0).getAlias()); + assertEquals("identifier[0].system + '|' + identifier[0].value", statement.getSelectClauses().get(1).getClause()); + assertEquals("COL2", statement.getSelectClauses().get(1).getAlias()); + assertEquals("identifier[0].system + '|' + identifier[0].value", statement.getSelectClauses().get(2).getClause()); + assertEquals("identifier[0].system + '|' + identifier[0].value", statement.getSelectClauses().get(2).getAlias()); + + } + + @Test + public void testSelectDuplicateColumnsWithNoAlias() { + String input = """ + SELECT + name, name, name + FROM + Patient + """; + + HfqlStatement statement = parse(input); + assertEquals("Patient", statement.getFromResourceName()); + assertEquals(3, statement.getSelectClauses().size()); + assertEquals("name", statement.getSelectClauses().get(0).getClause()); + assertEquals("name", statement.getSelectClauses().get(0).getAlias()); + assertEquals("name", statement.getSelectClauses().get(1).getClause()); + assertEquals("name2", statement.getSelectClauses().get(1).getAlias()); + assertEquals("name", statement.getSelectClauses().get(2).getClause()); + assertEquals("name3", statement.getSelectClauses().get(2).getAlias()); + + } + + @Test + public void testSelectAs() { + String input = """ + SELECT Patient.name.given + ' ' + Patient.name.family as FullName + FROM Patient + """; + + HfqlStatement statement = parse(input); + assertEquals("Patient", statement.getFromResourceName()); + assertEquals(1, statement.getSelectClauses().size()); + assertEquals("Patient.name.given + ' ' + Patient.name.family", statement.getSelectClauses().get(0).getClause()); + assertEquals("FullName", 
statement.getSelectClauses().get(0).getAlias()); + + } + + @Test + public void testSelectWhere_GreaterThan() { + String input = """ + select id + from Observation + where + value.ofType(Quantity).value > 100 + """; + + HfqlStatement statement = parse(input); + assertEquals(1, statement.getWhereClauses().size()); + assertEquals("value.ofType(Quantity).value > 100", statement.getWhereClauses().get(0).getLeft()); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.UNARY_BOOLEAN, statement.getWhereClauses().get(0).getOperator()); + assertEquals(0, statement.getWhereClauses().get(0).getRight().size()); + } + + @Test + public void testSelectOrderBy() { + String input = """ + select id, name.family + from Observation + order by name.family, count(*) + """; + + HfqlStatement statement = parse(input); + assertThat(statement.getSelectClauses().stream().map(t -> t.getAlias()).collect(Collectors.toList()), contains( + "id", "name.family" + )); + assertEquals(2, statement.getOrderByClauses().size()); + assertEquals("name.family", statement.getOrderByClauses().get(0).getClause()); + assertTrue(statement.getOrderByClauses().get(0).isAscending()); + assertEquals("count(*)", statement.getOrderByClauses().get(1).getClause()); + assertTrue(statement.getOrderByClauses().get(1).isAscending()); + } + + @Test + public void testSelectOrderBy_Directional() { + String input = """ + select id, name.family + from Observation + order by name.family DESC, id ASC + """; + + HfqlStatement statement = parse(input); + assertThat(statement.getSelectClauses().stream().map(t -> t.getAlias()).collect(Collectors.toList()), contains( + "id", "name.family" + )); + assertEquals(2, statement.getOrderByClauses().size()); + assertEquals("name.family", statement.getOrderByClauses().get(0).getClause()); + assertFalse(statement.getOrderByClauses().get(0).isAscending()); + assertEquals("id", statement.getOrderByClauses().get(1).getClause()); + assertTrue(statement.getOrderByClauses().get(1).isAscending()); + } + 
+ private HfqlStatement parse(String theInput) { + return new HfqlStatementParser(ourCtx, theInput).parse(); + } + + @Test + public void testFromWhereSelect() { + String input = """ + from + Patient + where + name.given = 'Foo \\' Chalmers' and + name.family = 'blah' + select + name.given[0], + name.family + """; + + HfqlStatement statement = parse(input); + assertEquals("Patient", statement.getFromResourceName()); + assertEquals(2, statement.getSelectClauses().size()); + assertEquals("name.given[0]", statement.getSelectClauses().get(0).getClause()); + assertEquals("name.family", statement.getSelectClauses().get(1).getClause()); + assertEquals(2, statement.getWhereClauses().size()); + assertEquals("name.given", statement.getWhereClauses().get(0).getLeft()); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.EQUALS, statement.getWhereClauses().get(0).getOperator()); + assertThat(statement.getWhereClauses().get(0).getRight(), contains("'Foo ' Chalmers'")); + assertEquals("name.family", statement.getWhereClauses().get(1).getLeft()); + assertThat(statement.getWhereClauses().get(1).getRight(), contains("'blah'")); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.EQUALS, statement.getWhereClauses().get(1).getOperator()); + } + + @Test + public void testFromSearchWhereSelect() { + String input = """ + from + Observation + where + subject.name in ('foo' | 'bar') + and + id in search_match('_id', '123') + and + status = 'final' + select + id + """; + + HfqlStatement statement = parse(input); + assertEquals("Observation", statement.getFromResourceName()); + assertEquals(1, statement.getSelectClauses().size()); + assertEquals("id", statement.getSelectClauses().get(0).getClause()); + assertEquals(3, statement.getWhereClauses().size()); + assertEquals("subject.name", statement.getWhereClauses().get(0).getLeft()); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.IN, statement.getWhereClauses().get(0).getOperator()); + 
assertThat(statement.getWhereClauses().get(0).getRight(), contains("'foo'", "'bar'")); + assertEquals("id", statement.getWhereClauses().get(1).getLeft()); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(1).getOperator()); + assertThat(statement.getWhereClauses().get(1).getRight(), contains("'_id'", "'123'")); + assertEquals("status", statement.getWhereClauses().get(2).getLeft()); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.EQUALS, statement.getWhereClauses().get(2).getOperator()); + assertThat(statement.getWhereClauses().get(2).getRight(), contains("'final'")); + + } + + @Test + public void testFromWhereSelect_RichSearchExpression() { + String input = """ + from + Observation + where + id in search_match('_has:Observation:subject:device.identifier', '1234-5') + select + id + """; + + HfqlStatement statement = parse(input); + assertEquals("Observation", statement.getFromResourceName()); + assertEquals(1, statement.getSelectClauses().size()); + assertEquals("id", statement.getSelectClauses().get(0).getClause()); + assertEquals(1, statement.getWhereClauses().size()); + assertEquals("id", statement.getWhereClauses().get(0).getLeft()); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(0).getOperator()); + assertThat(statement.getWhereClauses().get(0).getRight(), contains("'_has:Observation:subject:device.identifier'", "'1234-5'")); + + } + + @Test + public void testFromSearchWhereSelectLimit() { + String input = """ + from + Observation + where + id in search_match('subject.name', 'foo', 'bar') + and + id in search_match('_id', '123') + and + id in search_match('status', 'final') + select + id + limit 123 + """; + + HfqlStatement statement = parse(input); + assertEquals("Observation", statement.getFromResourceName()); + assertEquals(1, statement.getSelectClauses().size()); + assertEquals("id", statement.getSelectClauses().get(0).getClause()); + assertEquals(3, 
statement.getWhereClauses().size()); + assertEquals("id", statement.getWhereClauses().get(0).getLeft()); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(0).getOperator()); + assertThat(statement.getWhereClauses().get(0).getRight(), contains("'subject.name'", "'foo'", "'bar'")); + assertEquals("id", statement.getWhereClauses().get(1).getLeft()); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(1).getOperator()); + assertThat(statement.getWhereClauses().get(1).getRight(), contains("'_id'", "'123'")); + assertEquals("id", statement.getWhereClauses().get(2).getLeft()); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(2).getOperator()); + assertThat(statement.getWhereClauses().get(2).getRight(), contains("'status'", "'final'")); + assertEquals(123, statement.getLimit()); + } + + @Test + public void testNamedSelectClauseWithFhirPath() { + String input = """ + select + Weight: value.ofType(Quantity).value, + Unit: value.ofType(Quantity).unit + from Observation + """; + + HfqlStatement statement = parse(input); + assertEquals("Observation", statement.getFromResourceName()); + assertEquals(2, statement.getSelectClauses().size()); + assertEquals("value.ofType(Quantity).value", statement.getSelectClauses().get(0).getClause()); + assertEquals("Weight", statement.getSelectClauses().get(0).getAlias()); + assertEquals("value.ofType(Quantity).unit", statement.getSelectClauses().get(1).getClause()); + assertEquals("Unit", statement.getSelectClauses().get(1).getAlias()); + + } + + + @Test + public void testFromWhereSelect_InClauseAndNamedSelects() { + // One select with spaces, one without + String input = """ + from + StructureDefinition + where + id in search_match('url', 'foo', 'bar') + select + Name : name, + URL:url + """; + HfqlStatement statement = parse(input); + assertEquals("StructureDefinition", 
statement.getFromResourceName()); + assertEquals(2, statement.getSelectClauses().size()); + assertEquals("name", statement.getSelectClauses().get(0).getClause()); + assertEquals("Name", statement.getSelectClauses().get(0).getAlias()); + assertEquals("url", statement.getSelectClauses().get(1).getClause()); + assertEquals("URL", statement.getSelectClauses().get(1).getAlias()); + assertEquals(1, statement.getWhereClauses().size()); + assertEquals("id", statement.getWhereClauses().get(0).getLeft()); + assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(0).getOperator()); + assertThat(statement.getWhereClauses().get(0).getRight(), contains( + "'url'", "'foo'", "'bar'" + )); + + } + + @Test + public void testError_InvalidStart() { + String input = """ + blah"""; + DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input)); + assertEquals("Unexpected token (expected \"SELECT\") at position [line=0, column=0]: blah", ex.getMessage()); + } + + @Test + public void testError_DuplicateSelectAliases() { + String input = """ + SELECT id as id, name as id + FROM Patient + """; + DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input)); + assertEquals("HAPI-2414: Duplicate SELECT column alias: id", ex.getMessage()); + } + + @Test + public void testError_InvalidOrder() { + String input = """ + select id + from Patient + order foo + """; + DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input)); + assertEquals("Unexpected token (expected \"BY\") at position [line=2, column=6]: foo", ex.getMessage()); + } + + @Test + public void testError_InvalidFrom() { + String input = """ + from Blah"""; + DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input)); + assertEquals("Invalid FROM statement. 
Unknown resource type 'Blah' at position: [line=0, column=5]", ex.getMessage()); + } + + @Test + public void testError_InvalidLimit() { + String input = """ + from Patient + select name.given + limit foo + """; + DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input)); + assertEquals("Unexpected token (expected integer value) at position [line=2, column=6]: foo", ex.getMessage()); + } + + @Test + public void testError_InvalidSelect_EqualsParens() { + String input = """ + from + Patient + where + name.given = ('Foo') + """; + DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input)); + assertEquals("Unexpected token (expected quoted string) at position [line=3, column=3]: (", ex.getMessage()); + } + + @Test + public void testError_InvalidSelect_InWithoutParens() { + String input = """ + from + Patient + where + name.given in 'Foo' + """; + DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input)); + assertEquals("Unexpected token (expected \"(\") at position [line=3, column=14]: in", ex.getMessage()); + } + + @Test + public void testError_InvalidSelect_InWithoutPipe() { + String input = """ + from + Patient + where + name.given in ('foo' 'bar') + """; + DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input)); + assertEquals("Unexpected token at position [line=3, column=22]: 'bar'", ex.getMessage()); + } + + @Test + public void testError_InvalidSelect_InWithoutContent() { + String input = """ + from + Patient + where + name.given in + """; + DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input)); + assertEquals("Unexpected end of stream", ex.getMessage()); + } + + @Test + public void testError_InvalidSelect_InWithoutEnd() { + String input = """ + from + Patient + where + name.given in ('foo' | 'bar' + """; + DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input)); + assertEquals("Unexpected end of 
stream", ex.getMessage()); + } + + @Test + public void testError_MultipleWhere() { + String input = """ + from + Patient + where + _id = '123' + where + name.family = 'Foo' + select + name.given[0], + name.family + """; + + DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input)); + assertEquals("Unexpected token at position [line=4, column=0]: where", ex.getMessage()); + } + + @Test + public void testError_MultipleFrom() { + String input = """ + from + Patient + select + name.given[0], + name.family + from + Patient + """; + + DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input)); + assertEquals("Unexpected token at position [line=5, column=0]: from", ex.getMessage()); + } + + @Test + public void testError_NoText() { + String input = " \n "; + DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input)); + assertEquals("Unexpected end of stream (expected \"FROM\")", ex.getMessage()); + } + + @Test + public void testError_MissingSelect() { + String input = """ + from Patient where"""; + DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input)); + assertEquals("Unexpected end of stream (expected \"SELECT\")", ex.getMessage()); + } + +} diff --git a/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/provider/HfqlRestProviderTest.java b/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/provider/HfqlRestProviderTest.java new file mode 100644 index 00000000000..524e0d6910a --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/test/java/ca/uhn/fhir/jpa/fql/provider/HfqlRestProviderTest.java @@ -0,0 +1,239 @@ +package ca.uhn.fhir.jpa.fql.provider; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.fql.executor.HfqlDataTypeEnum; +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult; +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutor; +import ca.uhn.fhir.jpa.fql.parser.HfqlStatement; +import ca.uhn.fhir.jpa.fql.util.HfqlConstants; 
+import ca.uhn.fhir.rest.client.apache.ResourceEntity; +import ca.uhn.fhir.test.utilities.HttpClientExtension; +import ca.uhn.fhir.test.utilities.server.RestfulServerExtension; +import ca.uhn.fhir.util.JsonUtil; +import ca.uhn.fhir.util.VersionUtil; +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; +import org.hl7.fhir.r4.model.CodeType; +import org.hl7.fhir.r4.model.IntegerType; +import org.hl7.fhir.r4.model.Parameters; +import org.hl7.fhir.r4.model.StringType; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.List; + +import static ca.uhn.fhir.jpa.fql.jdbc.HfqlRestClientTest.createFakeStatement; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.startsWith; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.notNull; +import static org.mockito.Mockito.anyInt; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.isNull; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +public class HfqlRestProviderTest { + + private static final FhirContext ourCtx = FhirContext.forR4Cached(); + private static final Logger ourLog = LoggerFactory.getLogger(HfqlRestProviderTest.class); + @RegisterExtension + public HttpClientExtension myHttpClient = new HttpClientExtension(); + @Mock + private 
IHfqlExecutor myFqlExecutor; + @Mock + private IHfqlExecutionResult myMockFqlResult; + @InjectMocks + private HfqlRestProvider myProvider = new HfqlRestProvider(); + @RegisterExtension + public RestfulServerExtension myServer = new RestfulServerExtension(ourCtx) + .registerProvider(myProvider); + @Captor + private ArgumentCaptor myStatementCaptor; + @Captor + private ArgumentCaptor myLimitCaptor; + @Captor + private ArgumentCaptor myOffsetCaptor; + + @Test + public void testExecuteInitialSearch() throws IOException { + // Setup + HfqlStatement statement = createFakeStatement(); + when(myFqlExecutor.executeInitialSearch(any(), any(), any())).thenReturn(myMockFqlResult); + when(myMockFqlResult.getStatement()).thenReturn(statement); + when(myMockFqlResult.hasNext()).thenReturn(true, true, false); + when(myMockFqlResult.getNextRow()).thenReturn( + new IHfqlExecutionResult.Row(0, List.of("Simpson", "Homer")), + new IHfqlExecutionResult.Row(3, List.of("Simpson", "Marge")) + ); + when(myMockFqlResult.getSearchId()).thenReturn("my-search-id"); + when(myMockFqlResult.getLimit()).thenReturn(999); + + String select = "from Patient select foo"; + Parameters request = new Parameters(); + request.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_SEARCH)); + request.addParameter(HfqlConstants.PARAM_QUERY, new StringType(select)); + request.addParameter(HfqlConstants.PARAM_LIMIT, new IntegerType(100)); + HttpPost fetch = new HttpPost(myServer.getBaseUrl() + "/" + HfqlConstants.HFQL_EXECUTE); + fetch.setEntity(new ResourceEntity(ourCtx, request)); + + // Test + try (CloseableHttpResponse response = myHttpClient.execute(fetch)) { + + // Verify + String outcome = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); + String expected = """ + 1,HAPI FHIR THE-VERSION + 
my-search-id,999,"{""select"":[{""clause"":""name[0].family"",""alias"":""name[0].family"",""operator"":""SELECT"",""dataType"":""STRING""},{""clause"":""name[0].given[0]"",""alias"":""name[0].given[0]"",""operator"":""SELECT"",""dataType"":""STRING""}],""fromResourceName"":""Patient""}" + 0,Simpson,Homer + 3,Simpson,Marge + """.replace("THE-VERSION", VersionUtil.getVersion()); + assertEquals(expected.trim(), outcome.trim()); + assertEquals(200, response.getStatusLine().getStatusCode()); + assertThat(response.getEntity().getContentType().getValue(), startsWith("text/csv;")); + + verify(myFqlExecutor, times(1)).executeInitialSearch(myStatementCaptor.capture(), myLimitCaptor.capture(), notNull()); + assertEquals(select, myStatementCaptor.getValue()); + assertEquals(100, myLimitCaptor.getValue()); + } + } + + @Test + public void testExecuteContinuation() throws IOException { + // Setup + when(myFqlExecutor.executeContinuation(any(), any(), anyInt(), isNull(), any())).thenReturn(myMockFqlResult); + when(myMockFqlResult.hasNext()).thenReturn(true, true, false); + when(myMockFqlResult.getNextRow()).thenReturn( + new IHfqlExecutionResult.Row(4, List.of("Simpson", "Homer")), + new IHfqlExecutionResult.Row(6, List.of("Simpson", "Marge")) + ); + when(myMockFqlResult.getSearchId()).thenReturn("my-search-id"); + when(myMockFqlResult.getLimit()).thenReturn(-1); + + String continuation = "the-continuation-id"; + Parameters request = new Parameters(); + request.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_SEARCH_CONTINUATION)); + request.addParameter(HfqlConstants.PARAM_CONTINUATION, new StringType(continuation)); + request.addParameter(HfqlConstants.PARAM_STATEMENT, new StringType(JsonUtil.serialize(createFakeStatement()))); + request.addParameter(HfqlConstants.PARAM_OFFSET, new IntegerType(99)); + ourLog.info("Request: {}", ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(request)); + HttpPost fetch = new 
HttpPost(myServer.getBaseUrl() + "/" + HfqlConstants.HFQL_EXECUTE); + fetch.setEntity(new ResourceEntity(ourCtx, request)); + + // Test + try (CloseableHttpResponse response = myHttpClient.execute(fetch)) { + + // Verify + String outcome = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); + String expected = """ + 1,HAPI FHIR THE-VERSION + my-search-id,-1, + 4,Simpson,Homer + 6,Simpson,Marge + """.replace("THE-VERSION", VersionUtil.getVersion()); + assertEquals(expected.trim(), outcome.trim()); + assertEquals(200, response.getStatusLine().getStatusCode()); + assertThat(response.getEntity().getContentType().getValue(), startsWith("text/csv;")); + + verify(myFqlExecutor, times(1)).executeContinuation(any(), myStatementCaptor.capture(), myOffsetCaptor.capture(), myLimitCaptor.capture(), notNull()); + assertEquals(continuation, myStatementCaptor.getValue()); + assertEquals(null, myLimitCaptor.getValue()); + assertEquals(99, myOffsetCaptor.getValue()); + } + } + + + @Test + public void testIntrospectTables() throws IOException { + // Setup + when(myFqlExecutor.introspectTables()).thenReturn(myMockFqlResult); + when(myMockFqlResult.hasNext()).thenReturn(true, true, false); + HfqlStatement statement = new HfqlStatement(); + statement.addSelectClauseAndAlias("TABLE_NAME").setDataType(HfqlDataTypeEnum.STRING); + when(myMockFqlResult.getStatement()).thenReturn(statement); + when(myMockFqlResult.getNextRow()).thenReturn( + new IHfqlExecutionResult.Row(0, List.of("Account")), + new IHfqlExecutionResult.Row(6, List.of("Patient")) + ); + when(myMockFqlResult.getSearchId()).thenReturn(null); + when(myMockFqlResult.getLimit()).thenReturn(-1); + + Parameters request = new Parameters(); + request.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_INTROSPECT_TABLES)); + HttpPost fetch = new HttpPost(myServer.getBaseUrl() + "/" + HfqlConstants.HFQL_EXECUTE); + fetch.setEntity(new ResourceEntity(ourCtx, request)); + + // Test + 
try (CloseableHttpResponse response = myHttpClient.execute(fetch)) { + + // Verify + String outcome = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); + String expected = """ + 1,HAPI FHIR THE-VERSION + ,-1,"{""select"":[{""clause"":""TABLE_NAME"",""alias"":""TABLE_NAME"",""operator"":""SELECT"",""dataType"":""STRING""}]}" + 0,Account + 6,Patient + """.replace("THE-VERSION", VersionUtil.getVersion()); + assertEquals(expected.trim(), outcome.trim()); + assertEquals(200, response.getStatusLine().getStatusCode()); + assertThat(response.getEntity().getContentType().getValue(), startsWith("text/csv;")); + } + + } + + + @Test + public void testIntrospectColumns() throws IOException { + // Setup + when(myFqlExecutor.introspectColumns(eq("FOO"), eq("BAR"))).thenReturn(myMockFqlResult); + when(myMockFqlResult.hasNext()).thenReturn(true, true, false); + HfqlStatement statement = new HfqlStatement(); + statement.addSelectClauseAndAlias("COLUMN_NAME").setDataType(HfqlDataTypeEnum.STRING); + when(myMockFqlResult.getStatement()).thenReturn(statement); + when(myMockFqlResult.getNextRow()).thenReturn( + new IHfqlExecutionResult.Row(0, List.of("FOO")), + new IHfqlExecutionResult.Row(6, List.of("BAR")) + ); + when(myMockFqlResult.getSearchId()).thenReturn(null); + when(myMockFqlResult.getLimit()).thenReturn(-1); + + Parameters request = new Parameters(); + request.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_INTROSPECT_COLUMNS)); + request.addParameter(HfqlConstants.PARAM_INTROSPECT_TABLE_NAME, new StringType("FOO")); + request.addParameter(HfqlConstants.PARAM_INTROSPECT_COLUMN_NAME, new StringType("BAR")); + HttpPost fetch = new HttpPost(myServer.getBaseUrl() + "/" + HfqlConstants.HFQL_EXECUTE); + fetch.setEntity(new ResourceEntity(ourCtx, request)); + + // Test + try (CloseableHttpResponse response = myHttpClient.execute(fetch)) { + + // Verify + String outcome = IOUtils.toString(response.getEntity().getContent(), 
StandardCharsets.UTF_8); + String expected = """ + 1,HAPI FHIR THE-VERSION + ,-1,"{""select"":[{""clause"":""COLUMN_NAME"",""alias"":""COLUMN_NAME"",""operator"":""SELECT"",""dataType"":""STRING""}]}" + 0,FOO + 6,BAR + """.replace("THE-VERSION", VersionUtil.getVersion()); + assertEquals(expected.trim(), outcome.trim()); + assertEquals(200, response.getStatusLine().getStatusCode()); + assertThat(response.getEntity().getContentType().getValue(), startsWith("text/csv;")); + } + + } + +} diff --git a/hapi-fhir-jpaserver-hfql/src/test/resources/logback-test.xml b/hapi-fhir-jpaserver-hfql/src/test/resources/logback-test.xml new file mode 100644 index 00000000000..75165f58cb8 --- /dev/null +++ b/hapi-fhir-jpaserver-hfql/src/test/resources/logback-test.xml @@ -0,0 +1,14 @@ + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + diff --git a/hapi-fhir-jpaserver-ips/pom.xml b/hapi-fhir-jpaserver-ips/pom.xml index 9484259e0e9..f7b267941e3 100644 --- a/hapi-fhir-jpaserver-ips/pom.xml +++ b/hapi-fhir-jpaserver-ips/pom.xml @@ -3,7 +3,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml index 9682f1abbbc..7943e77cd7b 100644 --- a/hapi-fhir-jpaserver-mdm/pom.xml +++ b/hapi-fhir-jpaserver-mdm/pom.xml @@ -6,8 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT - + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/BlockRuleEvaluationSvcImpl.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/BlockRuleEvaluationSvcImpl.java index 7f64250f0cf..ec118655786 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/BlockRuleEvaluationSvcImpl.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/BlockRuleEvaluationSvcImpl.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Server - Master Data 
Management + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ package ca.uhn.fhir.jpa.mdm.svc; import ca.uhn.fhir.context.FhirContext; diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml index f05df031b67..a88f39da26c 100644 --- a/hapi-fhir-jpaserver-model/pom.xml +++ b/hapi-fhir-jpaserver-model/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml index 4fd7e36c54a..75ac4f88f68 100755 --- a/hapi-fhir-jpaserver-searchparam/pom.xml +++ b/hapi-fhir-jpaserver-searchparam/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml index f08d38f0382..b684a83cc26 100644 --- a/hapi-fhir-jpaserver-subscription/pom.xml +++ b/hapi-fhir-jpaserver-subscription/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu2/pom.xml b/hapi-fhir-jpaserver-test-dstu2/pom.xml index 1ecf01c6005..6d2f5a4d4f2 100644 --- a/hapi-fhir-jpaserver-test-dstu2/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu2/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir 
hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu3/pom.xml b/hapi-fhir-jpaserver-test-dstu3/pom.xml index 97bd4216cae..77ee2d9a1dd 100644 --- a/hapi-fhir-jpaserver-test-dstu3/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu3/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4/pom.xml b/hapi-fhir-jpaserver-test-r4/pom.xml index 836fa0ce53d..96536fd13dd 100644 --- a/hapi-fhir-jpaserver-test-r4/pom.xml +++ b/hapi-fhir-jpaserver-test-r4/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4b/pom.xml b/hapi-fhir-jpaserver-test-r4b/pom.xml index 160e611d247..9516686f62e 100644 --- a/hapi-fhir-jpaserver-test-r4b/pom.xml +++ b/hapi-fhir-jpaserver-test-r4b/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml index fdcd284b49d..310f81afbd4 100644 --- a/hapi-fhir-jpaserver-test-r5/pom.xml +++ b/hapi-fhir-jpaserver-test-r5/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/BaseResourceProviderR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/BaseResourceProviderR5Test.java index beceaadf5c3..7411c213e5e 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/BaseResourceProviderR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/BaseResourceProviderR5Test.java @@ -5,6 +5,7 @@ import ca.uhn.fhir.batch2.jobs.reindex.ReindexProvider; import 
ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider; import ca.uhn.fhir.jpa.dao.r5.BaseJpaR5Test; +import ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider; import ca.uhn.fhir.jpa.graphql.GraphQLProvider; import ca.uhn.fhir.jpa.provider.DiffProvider; import ca.uhn.fhir.jpa.provider.JpaCapabilityStatementProvider; @@ -71,6 +72,7 @@ public abstract class BaseResourceProviderR5Test extends BaseJpaR5Test { s.registerProvider(myAppCtx.getBean(SubscriptionTriggeringProvider.class)); s.registerProvider(myAppCtx.getBean(TerminologyUploaderProvider.class)); s.registerProvider(myAppCtx.getBean(ValueSetOperationProvider.class)); + s.registerProvider(myAppCtx.getBean(HfqlRestProvider.class)); s.setPagingProvider(myAppCtx.getBean(DatabaseBackedPagingProvider.class)); diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5FqlTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5FqlTest.java new file mode 100644 index 00000000000..53c76c0fd34 --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5FqlTest.java @@ -0,0 +1,55 @@ +package ca.uhn.fhir.jpa.provider.r5; + +import ca.uhn.fhir.jpa.fql.util.HfqlConstants; +import ca.uhn.fhir.rest.client.apache.ResourceEntity; +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; +import org.hl7.fhir.r5.model.IntegerType; +import org.hl7.fhir.r5.model.Parameters; +import org.hl7.fhir.r5.model.StringType; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class ResourceProviderR5FqlTest extends BaseResourceProviderR5Test { + + 
@Test + public void testFqlQuery() throws IOException { + + // Setup + for (int i = 0; i < 20; i++) { + createPatient(withActiveTrue(), withIdentifier("foo", "bar"), withFamily("Simpson" + i), withGiven("Homer")); + } + + String select = """ + select name[0].family, name[0].given[0] + from Patient + where id in search_match('identifier', 'foo|bar') + """; + Parameters request = new Parameters(); + request.addParameter(HfqlConstants.PARAM_ACTION, new StringType(HfqlConstants.PARAM_ACTION_SEARCH)); + request.addParameter(HfqlConstants.PARAM_QUERY, new StringType(select)); + request.addParameter(HfqlConstants.PARAM_LIMIT, new IntegerType(100)); + request.addParameter(HfqlConstants.PARAM_FETCH_SIZE, new IntegerType(5)); + HttpPost fetch = new HttpPost(myServer.getBaseUrl() + "/" + HfqlConstants.HFQL_EXECUTE); + fetch.setEntity(new ResourceEntity(myFhirContext, request)); + + // Test + try (CloseableHttpResponse response = ourHttpClient.execute(fetch)) { + + // Verify + assertEquals(200, response.getStatusLine().getStatusCode()); + String outcome = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); + assertThat(outcome, containsString("0,Simpson0,Homer")); + assertThat(outcome, containsString("1,Simpson1,Homer")); + } + + } + +} diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml index ae13c9c4701..072e6028bda 100644 --- a/hapi-fhir-jpaserver-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -58,6 +58,11 @@ ${project.version} test + + ca.uhn.hapi.fhir + hapi-fhir-jpaserver-hfql + ${project.version} + org.hl7.fhir.testcases diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java index cb8d9ed8d74..f4d46f5d342 
100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java @@ -27,6 +27,7 @@ import ca.uhn.fhir.jpa.binstore.MemoryBinaryStorageSvcImpl; import ca.uhn.fhir.jpa.config.HapiJpaConfig; import ca.uhn.fhir.jpa.config.r5.JpaR5Config; import ca.uhn.fhir.jpa.config.util.HapiEntityManagerFactoryUtil; +import ca.uhn.fhir.jpa.fql.provider.HfqlRestProviderCtxConfig; import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect; import ca.uhn.fhir.jpa.topic.SubscriptionTopicConfig; import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener; @@ -61,7 +62,8 @@ import static org.junit.jupiter.api.Assertions.fail; SubscriptionTopicConfig.class, JpaBatch2Config.class, Batch2JobsConfig.class, - TestHSearchAddInConfig.DefaultLuceneHeap.class + TestHSearchAddInConfig.DefaultLuceneHeap.class, + HfqlRestProviderCtxConfig.class }) public class TestR5Config { diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml index edd5c0a393b..69f9825d6cf 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml +++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../pom.xml @@ -59,6 +59,11 @@ hapi-fhir-jpaserver-ips ${project.version} + + ca.uhn.hapi.fhir + hapi-fhir-jpaserver-hfql + ${project.version} + com.helger diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java index 47ba8f8ec7f..84e393ce4fd 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java @@ -10,6 +10,7 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import 
ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc; +import ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider; import ca.uhn.fhir.jpa.graphql.GraphQLProvider; import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor; import ca.uhn.fhir.jpa.ips.provider.IpsOperationProvider; @@ -250,6 +251,7 @@ public class TestRestfulServer extends RestfulServer { providers.add(myAppCtx.getBean(JpaSystemProvider.class)); providers.add(myAppCtx.getBean(InstanceReindexProvider.class)); + providers.add(myAppCtx.getBean(HfqlRestProvider.class)); /* * On the DSTU2 endpoint, we want to enable ETag support diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/CommonConfig.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/CommonConfig.java index 0db46aa8638..b28a9367462 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/CommonConfig.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/CommonConfig.java @@ -6,6 +6,7 @@ import ca.uhn.fhir.interceptor.api.IInterceptorService; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.config.ThreadPoolFactoryConfig; import ca.uhn.fhir.jpa.batch2.JpaBatch2Config; +import ca.uhn.fhir.jpa.fql.provider.HfqlRestProviderCtxConfig; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.subscription.channel.config.SubscriptionChannelConfig; import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig; @@ -34,14 +35,13 @@ import org.springframework.context.annotation.Import; SubscriptionSubmitterConfig.class, JpaBatch2Config.class, Batch2JobsConfig.class, - ThreadPoolFactoryConfig.class + ThreadPoolFactoryConfig.class, + HfqlRestProviderCtxConfig.class }) public class CommonConfig { /** * Do some fancy logging to create a nice access log that has details about each incoming request. 
- * - * @return */ @Bean public LoggingInterceptor accessLoggingInterceptor() { diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/test/java/ca/uhn/fhirtest/UhnFhirTestApp.java b/hapi-fhir-jpaserver-uhnfhirtest/src/test/java/ca/uhn/fhirtest/UhnFhirTestApp.java index d5888a9aa87..9b5dd2c68f5 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/test/java/ca/uhn/fhirtest/UhnFhirTestApp.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/test/java/ca/uhn/fhirtest/UhnFhirTestApp.java @@ -12,6 +12,8 @@ import org.hl7.fhir.dstu3.model.Subscription.SubscriptionChannelType; import org.hl7.fhir.dstu3.model.Subscription.SubscriptionStatus; import org.hl7.fhir.instance.model.api.IIdType; +import java.sql.Driver; + import static ca.uhn.fhirtest.config.TestDstu3Config.FHIR_LUCENE_LOCATION_DSTU3; public class UhnFhirTestApp { @@ -20,6 +22,8 @@ public class UhnFhirTestApp { public static void main(String[] args) throws Exception { + org.h2.Driver.load(); + int myPort = 8889; String base = "http://localhost:" + myPort + "/baseR4"; diff --git a/hapi-fhir-server-cds-hooks/pom.xml b/hapi-fhir-server-cds-hooks/pom.xml index 02b890a9402..385e40cba23 100644 --- a/hapi-fhir-server-cds-hooks/pom.xml +++ b/hapi-fhir-server-cds-hooks/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml index 419c8cec6a3..591eefa6abd 100644 --- a/hapi-fhir-server-mdm/pom.xml +++ b/hapi-fhir-server-mdm/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/json/BlockListJson.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/json/BlockListJson.java index 3354030f87f..06582912e81 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/json/BlockListJson.java +++ 
b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/json/BlockListJson.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR - Master Data Management + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ package ca.uhn.fhir.mdm.blocklist.json; import ca.uhn.fhir.model.api.IModelJson; diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/json/BlockListRuleJson.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/json/BlockListRuleJson.java index 954b5cdbbb1..5acf8eab6ca 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/json/BlockListRuleJson.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/json/BlockListRuleJson.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR - Master Data Management + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.fhir.mdm.blocklist.json; import ca.uhn.fhir.model.api.IModelJson; diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/json/BlockedFieldJson.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/json/BlockedFieldJson.java index 89aa40832dc..181f2597749 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/json/BlockedFieldJson.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/json/BlockedFieldJson.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR - Master Data Management + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ package ca.uhn.fhir.mdm.blocklist.json; import ca.uhn.fhir.model.api.IModelJson; diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/svc/IBlockListRuleProvider.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/svc/IBlockListRuleProvider.java index a761f60ae6d..98df4c067d9 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/svc/IBlockListRuleProvider.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/svc/IBlockListRuleProvider.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR - Master Data Management + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ package ca.uhn.fhir.mdm.blocklist.svc; import ca.uhn.fhir.mdm.blocklist.json.BlockListJson; diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/svc/IBlockRuleEvaluationSvc.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/svc/IBlockRuleEvaluationSvc.java index 836235859d1..9231912f707 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/svc/IBlockRuleEvaluationSvc.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/blocklist/svc/IBlockRuleEvaluationSvc.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR - Master Data Management + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.fhir.mdm.blocklist.svc; import org.hl7.fhir.instance.model.api.IAnyResource; diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/similarity/HapiNumericSimilarity.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/similarity/HapiNumericSimilarity.java index 96aa0f0c5ec..f3bb3962165 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/similarity/HapiNumericSimilarity.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/similarity/HapiNumericSimilarity.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR - Master Data Management + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.fhir.mdm.rules.similarity; import ca.uhn.fhir.context.FhirContext; diff --git a/hapi-fhir-server-openapi/pom.xml b/hapi-fhir-server-openapi/pom.xml index 1310d658df0..c9fa2e74c2f 100644 --- a/hapi-fhir-server-openapi/pom.xml +++ b/hapi-fhir-server-openapi/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml index 3cbf7ba03bd..0b53a996257 100644 --- a/hapi-fhir-server/pom.xml +++ b/hapi-fhir-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/SimpleBundleProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/SimpleBundleProvider.java index 0f4e9ad4350..b9f0ee2cf8d 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/SimpleBundleProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/SimpleBundleProvider.java @@ -25,6 +25,7 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; +import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.List; @@ -61,6 +62,15 @@ public class SimpleBundleProvider implements IBundleProvider { this(theList, null); } + /** + * Constructor + * + * @since 6.8.0 + */ + public SimpleBundleProvider(IBaseResource... 
theList) { + this(Arrays.asList(theList), null); + } + public SimpleBundleProvider(List theList, String theUuid) { myList = theList; myUuid = theUuid; diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/LoggingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/LoggingInterceptor.java index 0a9435c84f1..40c74261f2f 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/LoggingInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/LoggingInterceptor.java @@ -159,7 +159,7 @@ public class LoggingInterceptor { StringLookup lookup = new MyLookup(theServletRequest, theException, theRequestDetails); StringSubstitutor subs = new StringSubstitutor(lookup, "${", "}", '\\'); - // Actuall log the line + // Actually log the line String line = subs.replace(myErrorMessageFormat); myLogger.info(line); } diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml index cd3e6ff8e96..8362787a73e 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml index f16508d221f..970b932c69d 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../pom.xml @@ -21,7 +21,7 @@ ca.uhn.hapi.fhir hapi-fhir-caching-api - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml index 6df3fa323c4..02e08752241 100644 --- 
a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml index acecfa6335e..a4ed61d50b8 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml @@ -7,7 +7,7 @@ hapi-fhir ca.uhn.hapi.fhir - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../../pom.xml diff --git a/hapi-fhir-serviceloaders/pom.xml b/hapi-fhir-serviceloaders/pom.xml index 8cd85a97f27..c35da94f679 100644 --- a/hapi-fhir-serviceloaders/pom.xml +++ b/hapi-fhir-serviceloaders/pom.xml @@ -5,7 +5,7 @@ hapi-deployable-pom ca.uhn.hapi.fhir - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml index 4c8870e87a9..0304c22d60f 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml index 528a49f0de9..44fcf6beafc 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT 
hapi-fhir-spring-boot-sample-client-apache diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml index fd04ab88699..49fecaf11bb 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml index a13f1a9c3c1..47351dd792a 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml index 8b655dfcb5e..4476b0f1980 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml index fad308a93b7..7baf3ecdccd 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT 
../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml index d227f9afd6b..7ac8c6d0685 100644 --- a/hapi-fhir-spring-boot/pom.xml +++ b/hapi-fhir-spring-boot/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-sql-migrate/pom.xml b/hapi-fhir-sql-migrate/pom.xml index 2ac6a404a36..add3d4f0070 100644 --- a/hapi-fhir-sql-migrate/pom.xml +++ b/hapi-fhir-sql-migrate/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2-jobs/pom.xml b/hapi-fhir-storage-batch2-jobs/pom.xml index 088f5b64278..f1cc3fea2b3 100644 --- a/hapi-fhir-storage-batch2-jobs/pom.xml +++ b/hapi-fhir-storage-batch2-jobs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2-test-utilities/pom.xml b/hapi-fhir-storage-batch2-test-utilities/pom.xml index b33a242385b..c892b18b709 100644 --- a/hapi-fhir-storage-batch2-test-utilities/pom.xml +++ b/hapi-fhir-storage-batch2-test-utilities/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2/pom.xml b/hapi-fhir-storage-batch2/pom.xml index 946474ba180..c3bc64f4fd4 100644 --- a/hapi-fhir-storage-batch2/pom.xml +++ b/hapi-fhir-storage-batch2/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-cr/pom.xml b/hapi-fhir-storage-cr/pom.xml index 0e067c2195b..40d037e4ba1 100644 --- a/hapi-fhir-storage-cr/pom.xml +++ b/hapi-fhir-storage-cr/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-mdm/pom.xml 
b/hapi-fhir-storage-mdm/pom.xml index 3ab563f001b..749bbd937c8 100644 --- a/hapi-fhir-storage-mdm/pom.xml +++ b/hapi-fhir-storage-mdm/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-test-utilities/pom.xml b/hapi-fhir-storage-test-utilities/pom.xml index 7c43992c8c0..be63f1af44f 100644 --- a/hapi-fhir-storage-test-utilities/pom.xml +++ b/hapi-fhir-storage-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage/pom.xml b/hapi-fhir-storage/pom.xml index 4c7205fad8f..36fa3a79489 100644 --- a/hapi-fhir-storage/pom.xml +++ b/hapi-fhir-storage/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml index 0877617b503..f258cfc11ce 100644 --- a/hapi-fhir-structures-dstu2.1/pom.xml +++ b/hapi-fhir-structures-dstu2.1/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml index 68c23223669..1551e8e737f 100644 --- a/hapi-fhir-structures-dstu2/pom.xml +++ b/hapi-fhir-structures-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml index 1e3aab6e6b6..1a9aefc3675 100644 --- a/hapi-fhir-structures-dstu3/pom.xml +++ b/hapi-fhir-structures-dstu3/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/fluentpath/FhirPathDstu3.java 
b/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/fluentpath/FhirPathDstu3.java index 378da6bbf91..9cb7278e2be 100644 --- a/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/fluentpath/FhirPathDstu3.java +++ b/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/fluentpath/FhirPathDstu3.java @@ -8,6 +8,7 @@ import ca.uhn.fhir.fhirpath.IFhirPathEvaluationContext; import ca.uhn.fhir.i18n.Msg; import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext; import org.hl7.fhir.dstu3.model.Base; +import org.hl7.fhir.dstu3.model.ExpressionNode; import org.hl7.fhir.dstu3.model.IdType; import org.hl7.fhir.dstu3.model.TypeDetails; import org.hl7.fhir.dstu3.utils.FHIRPathEngine; @@ -21,7 +22,7 @@ import javax.annotation.Nonnull; public class FhirPathDstu3 implements IFhirPath { - private FHIRPathEngine myEngine; + private final FHIRPathEngine myEngine; public FhirPathDstu3(FhirContext theCtx) { IValidationSupport validationSupport = theCtx.getValidationSupport(); @@ -31,22 +32,40 @@ public class FhirPathDstu3 implements IFhirPath { @SuppressWarnings("unchecked") @Override public List evaluate(IBase theInput, String thePath, Class theReturnType) { + ExpressionNode parsed; + try { + parsed = myEngine.parse(thePath); + } catch (FHIRException e) { + throw new FhirPathExecutionException(Msg.code(2408) + e); + } + return (List) evaluate(theInput, parsed, theReturnType); + } + + @SuppressWarnings("unchecked") + @Override + public List evaluate( + IBase theInput, IParsedExpression theParsedExpression, Class theReturnType) { + ExpressionNode expressionNode = ((ParsedExpression) theParsedExpression).myParsedExpression; + return (List) evaluate(theInput, expressionNode, theReturnType); + } + + @Nonnull + private List evaluate( + IBase theInput, ExpressionNode expressionNode, Class theReturnType) { List result; try { - result = myEngine.evaluate((Base) theInput, thePath); + result = myEngine.evaluate((Base) theInput, expressionNode); } catch 
(FHIRException e) { throw new FhirPathExecutionException(Msg.code(607) + e); } - for (Base next : result) { + for (IBase next : result) { if (!theReturnType.isAssignableFrom(next.getClass())) { - throw new FhirPathExecutionException( - Msg.code(608) + "FluentPath expression \"" + thePath + "\" returned unexpected type " - + next.getClass().getSimpleName() + " - Expected " + theReturnType.getName()); + throw new FhirPathExecutionException(Msg.code(608) + "FhirPath expression returned unexpected type " + + next.getClass().getSimpleName() + " - Expected " + theReturnType.getName()); } } - - return (List) result; + return result; } @Override @@ -55,8 +74,14 @@ public class FhirPathDstu3 implements IFhirPath { } @Override - public void parse(String theExpression) { - myEngine.parse(theExpression); + public Optional evaluateFirst( + IBase theInput, IParsedExpression theParsedExpression, Class theReturnType) { + return evaluate(theInput, theParsedExpression, theReturnType).stream().findFirst(); + } + + @Override + public IParsedExpression parse(String theExpression) { + return new ParsedExpression(myEngine.parse(theExpression)); } @Override @@ -100,4 +125,13 @@ public class FhirPathDstu3 implements IFhirPath { } }); } + + private static class ParsedExpression implements IParsedExpression { + + private final ExpressionNode myParsedExpression; + + public ParsedExpression(ExpressionNode theParsedExpression) { + myParsedExpression = theParsedExpression; + } + } } diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml index 1b7b695b925..d9241cd91f9 100644 --- a/hapi-fhir-structures-hl7org-dstu2/pom.xml +++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml index 68a6783f394..bc88f2807da 100644 --- a/hapi-fhir-structures-r4/pom.xml +++ 
b/hapi-fhir-structures-r4/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/fluentpath/FhirPathR4.java b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/fluentpath/FhirPathR4.java index 72d6396638c..c8c27d02a28 100644 --- a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/fluentpath/FhirPathR4.java +++ b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/fluentpath/FhirPathR4.java @@ -11,6 +11,7 @@ import org.hl7.fhir.exceptions.PathEngineException; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.r4.hapi.ctx.HapiWorkerContext; import org.hl7.fhir.r4.model.Base; +import org.hl7.fhir.r4.model.ExpressionNode; import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.TypeDetails; import org.hl7.fhir.r4.model.ValueSet; @@ -35,22 +36,40 @@ public class FhirPathR4 implements IFhirPath { @SuppressWarnings("unchecked") @Override public List evaluate(IBase theInput, String thePath, Class theReturnType) { + ExpressionNode parsed; + try { + parsed = myEngine.parse(thePath); + } catch (FHIRException e) { + throw new FhirPathExecutionException(Msg.code(2409) + e); + } + return (List) evaluate(theInput, parsed, theReturnType); + } + + @SuppressWarnings("unchecked") + @Override + public List evaluate( + IBase theInput, IParsedExpression theParsedExpression, Class theReturnType) { + ExpressionNode expressionNode = ((ParsedExpression) theParsedExpression).myParsedExpression; + return (List) evaluate(theInput, expressionNode, theReturnType); + } + + @Nonnull + private List evaluate( + IBase theInput, ExpressionNode expressionNode, Class theReturnType) { List result; try { - result = myEngine.evaluate((Base) theInput, thePath); + result = myEngine.evaluate((Base) theInput, expressionNode); } catch (FHIRException e) { - throw new FhirPathExecutionException(Msg.code(255) + e); + throw 
new FhirPathExecutionException(Msg.code(255) + e.getMessage(), e); } - for (Base next : result) { + for (IBase next : result) { if (!theReturnType.isAssignableFrom(next.getClass())) { - throw new FhirPathExecutionException( - Msg.code(256) + "FluentPath expression \"" + thePath + "\" returned unexpected type " - + next.getClass().getSimpleName() + " - Expected " + theReturnType.getName()); + throw new FhirPathExecutionException(Msg.code(256) + "FhirPath expression returned unexpected type " + + next.getClass().getSimpleName() + " - Expected " + theReturnType.getName()); } } - - return (List) result; + return result; } @Override @@ -59,8 +78,14 @@ public class FhirPathR4 implements IFhirPath { } @Override - public void parse(String theExpression) { - myEngine.parse(theExpression); + public Optional evaluateFirst( + IBase theInput, IParsedExpression theParsedExpression, Class theReturnType) { + return evaluate(theInput, theParsedExpression, theReturnType).stream().findFirst(); + } + + @Override + public IParsedExpression parse(String theExpression) { + return new ParsedExpression(myEngine.parse(theExpression)); } @Override @@ -116,4 +141,13 @@ public class FhirPathR4 implements IFhirPath { } }); } + + private static class ParsedExpression implements IParsedExpression { + + private final ExpressionNode myParsedExpression; + + public ParsedExpression(ExpressionNode theParsedExpression) { + myParsedExpression = theParsedExpression; + } + } } diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/client/BinaryClientTest.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/client/BinaryClientTest.java index 24749141df2..56f6d21f437 100644 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/client/BinaryClientTest.java +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/client/BinaryClientTest.java @@ -29,6 +29,8 @@ import org.mockito.ArgumentCaptor; import org.mockito.internal.stubbing.defaultanswers.ReturnsDeepStubs; import 
java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.MatcherAssert.assertThat; @@ -122,6 +124,7 @@ public class BinaryClientTest { } + private interface IClient extends IBasicClient { @Read(type = Binary.class) diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/client/GenericClientR4Test.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/client/GenericClientR4Test.java index 852c9e1cab4..cc7a3115efa 100644 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/client/GenericClientR4Test.java +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/client/GenericClientR4Test.java @@ -48,6 +48,7 @@ import org.apache.http.client.methods.HttpPut; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.message.BasicHeader; import org.apache.http.message.BasicStatusLine; +import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; import org.hamcrest.core.StringContains; import org.hl7.fhir.instance.model.api.IBaseBundle; @@ -1023,6 +1024,40 @@ public class GenericClientR4Test extends BaseGenericClientR4Test { assertEquals("http://example.com/fhir/$opname", capt.getAllValues().get(0).getURI().toASCIIString()); } + @Test + public void testOperationReturningArbitraryBinaryContentTextual_ReturnResourceType() throws Exception { + IParser p = ourCtx.newXmlParser(); + + Parameters inputParams = new Parameters(); + inputParams.addParameter().setName("name").setValue(new BooleanType(true)); + + final String respString = "VALUE"; + ArgumentCaptor capt = ArgumentCaptor.forClass(HttpUriRequest.class); + when(myHttpClient.execute(capt.capture())).thenReturn(myHttpResponse); + when(myHttpResponse.getStatusLine()).thenReturn(new BasicStatusLine(new ProtocolVersion("HTTP", 1, 1), 200, "OK")); + when(myHttpResponse.getEntity().getContentType()).thenReturn(new BasicHeader("content-type", 
"text/html")); + when(myHttpResponse.getEntity().getContent()).thenAnswer(t -> new ReaderInputStream(new StringReader(respString), StandardCharsets.UTF_8)); + when(myHttpResponse.getAllHeaders()).thenReturn(new Header[]{ + new BasicHeader("content-type", "text/html") + }); + + IGenericClient client = ourCtx.newRestfulGenericClient("http://example.com/fhir"); + + Binary binary = client + .operation() + .onServer() + .named("opname") + .withParameters(inputParams) + .returnResourceType(Binary.class) + .execute(); + + assertEquals(respString, new String(binary.getContent(), Charsets.UTF_8)); + assertEquals("text/html", binary.getContentType()); + + assertEquals("http://example.com/fhir/$opname", capt.getAllValues().get(0).getURI().toASCIIString()); + } + + /** * Invoke an operation that returns HTML * as a response (a HAPI FHIR server could accomplish this by returning diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/interceptor/FhirPathFilterInterceptorTest.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/interceptor/FhirPathFilterInterceptorTest.java index 56f6853282a..bdc8a63bed9 100644 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/interceptor/FhirPathFilterInterceptorTest.java +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/interceptor/FhirPathFilterInterceptorTest.java @@ -134,7 +134,7 @@ public class FhirPathFilterInterceptorTest { String responseText = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8); ourLog.info("Response:\n{}", responseText); assertEquals(400, response.getStatusLine().getStatusCode()); - assertThat(responseText, containsString(Msg.code(327) + "Error parsing FHIRPath expression: "+Msg.code(255) + "org.hl7.fhir.exceptions.PathEngineException: Error evaluating FHIRPath expression: left operand to * can only have 1 value, but has 8 values (@char 1)")); + assertThat(responseText, containsString("left operand to * can only have 1 value, but 
has 8 values")); } } diff --git a/hapi-fhir-structures-r4b/pom.xml b/hapi-fhir-structures-r4b/pom.xml index 4fd819f4ea6..a9df6762948 100644 --- a/hapi-fhir-structures-r4b/pom.xml +++ b/hapi-fhir-structures-r4b/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/fhirpath/FhirPathR4B.java b/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/fhirpath/FhirPathR4B.java index af901839eba..78cc8cbc1e4 100644 --- a/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/fhirpath/FhirPathR4B.java +++ b/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/fhirpath/FhirPathR4B.java @@ -11,6 +11,7 @@ import org.hl7.fhir.exceptions.PathEngineException; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.r4b.hapi.ctx.HapiWorkerContext; import org.hl7.fhir.r4b.model.Base; +import org.hl7.fhir.r4b.model.ExpressionNode; import org.hl7.fhir.r4b.model.IdType; import org.hl7.fhir.r4b.model.TypeDetails; import org.hl7.fhir.r4b.model.ValueSet; @@ -22,7 +23,7 @@ import javax.annotation.Nonnull; public class FhirPathR4B implements IFhirPath { - private FHIRPathEngine myEngine; + private final FHIRPathEngine myEngine; public FhirPathR4B(FhirContext theCtx) { IValidationSupport validationSupport = theCtx.getValidationSupport(); @@ -32,22 +33,40 @@ public class FhirPathR4B implements IFhirPath { @SuppressWarnings("unchecked") @Override public List evaluate(IBase theInput, String thePath, Class theReturnType) { + ExpressionNode parsed; + try { + parsed = myEngine.parse(thePath); + } catch (FHIRException e) { + throw new FhirPathExecutionException(Msg.code(2410) + e); + } + return (List) evaluate(theInput, parsed, theReturnType); + } + + @SuppressWarnings("unchecked") + @Override + public List evaluate( + IBase theInput, IParsedExpression theParsedExpression, Class theReturnType) { + ExpressionNode 
expressionNode = ((ParsedExpression) theParsedExpression).myParsedExpression; + return (List) evaluate(theInput, expressionNode, theReturnType); + } + + @Nonnull + private List evaluate( + IBase theInput, ExpressionNode expressionNode, Class theReturnType) { List result; try { - result = myEngine.evaluate((Base) theInput, thePath); + result = myEngine.evaluate((Base) theInput, expressionNode); } catch (FHIRException e) { throw new FhirPathExecutionException(Msg.code(2154) + e); } - for (Base next : result) { + for (IBase next : result) { if (!theReturnType.isAssignableFrom(next.getClass())) { - throw new FhirPathExecutionException( - Msg.code(2155) + "FluentPath expression \"" + thePath + "\" returned unexpected type " - + next.getClass().getSimpleName() + " - Expected " + theReturnType.getName()); + throw new FhirPathExecutionException(Msg.code(2155) + "FhirPath expression returned unexpected type " + + next.getClass().getSimpleName() + " - Expected " + theReturnType.getName()); } } - - return (List) result; + return result; } @Override @@ -56,8 +75,14 @@ public class FhirPathR4B implements IFhirPath { } @Override - public void parse(String theExpression) { - myEngine.parse(theExpression); + public Optional evaluateFirst( + IBase theInput, IParsedExpression theParsedExpression, Class theReturnType) { + return evaluate(theInput, theParsedExpression, theReturnType).stream().findFirst(); + } + + @Override + public IParsedExpression parse(String theExpression) { + return new ParsedExpression(myEngine.parse(theExpression)); } @Override @@ -113,4 +138,13 @@ public class FhirPathR4B implements IFhirPath { } }); } + + private static class ParsedExpression implements IParsedExpression { + + private final ExpressionNode myParsedExpression; + + public ParsedExpression(ExpressionNode theParsedExpression) { + myParsedExpression = theParsedExpression; + } + } } diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml index 9d4ec1e2e1c..3d85ce48a16 100644 
--- a/hapi-fhir-structures-r5/pom.xml +++ b/hapi-fhir-structures-r5/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/fhirpath/FhirPathR5.java b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/fhirpath/FhirPathR5.java index 79af5cdb729..eb5d3124b9c 100644 --- a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/fhirpath/FhirPathR5.java +++ b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/fhirpath/FhirPathR5.java @@ -11,6 +11,7 @@ import org.hl7.fhir.exceptions.PathEngineException; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.r5.hapi.ctx.HapiWorkerContext; import org.hl7.fhir.r5.model.Base; +import org.hl7.fhir.r5.model.ExpressionNode; import org.hl7.fhir.r5.model.IdType; import org.hl7.fhir.r5.model.TypeDetails; import org.hl7.fhir.r5.model.ValueSet; @@ -22,7 +23,7 @@ import javax.annotation.Nonnull; public class FhirPathR5 implements IFhirPath { - private FHIRPathEngine myEngine; + private final FHIRPathEngine myEngine; public FhirPathR5(FhirContext theCtx) { IValidationSupport validationSupport = theCtx.getValidationSupport(); @@ -30,25 +31,43 @@ public class FhirPathR5 implements IFhirPath { myEngine.setDoNotEnforceAsSingletonRule(true); } - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "unchecked"}) @Override public List evaluate(IBase theInput, String thePath, Class theReturnType) { + ExpressionNode parsed; + try { + parsed = myEngine.parse(thePath); + } catch (FHIRException e) { + throw new FhirPathExecutionException(Msg.code(2411) + e); + } + return (List) evaluate(theInput, parsed, theReturnType); + } + + @SuppressWarnings("unchecked") + @Override + public List evaluate( + IBase theInput, IParsedExpression theParsedExpression, Class theReturnType) { + ExpressionNode expressionNode = ((ParsedExpression) 
theParsedExpression).myParsedExpression; + return (List) evaluate(theInput, expressionNode, theReturnType); + } + + @Nonnull + private List evaluate( + IBase theInput, ExpressionNode expressionNode, Class theReturnType) { List result; try { - result = myEngine.evaluate((Base) theInput, thePath); + result = myEngine.evaluate((Base) theInput, expressionNode); } catch (FHIRException e) { throw new FhirPathExecutionException(Msg.code(198) + e); } - for (Base next : result) { + for (IBase next : result) { if (!theReturnType.isAssignableFrom(next.getClass())) { - throw new FhirPathExecutionException( - Msg.code(199) + "FluentPath expression \"" + thePath + "\" returned unexpected type " - + next.getClass().getSimpleName() + " - Expected " + theReturnType.getName()); + throw new FhirPathExecutionException(Msg.code(199) + "FhirPath expression returned unexpected type " + + next.getClass().getSimpleName() + " - Expected " + theReturnType.getName()); } } - - return (List) result; + return result; } @Override @@ -57,8 +76,14 @@ public class FhirPathR5 implements IFhirPath { } @Override - public void parse(String theExpression) { - myEngine.parse(theExpression); + public Optional evaluateFirst( + IBase theInput, IParsedExpression theParsedExpression, Class theReturnType) { + return evaluate(theInput, theParsedExpression, theReturnType).stream().findFirst(); + } + + @Override + public IParsedExpression parse(String theExpression) { + return new ParsedExpression(myEngine.parse(theExpression)); } @Override @@ -114,4 +139,13 @@ public class FhirPathR5 implements IFhirPath { } }); } + + private static class ParsedExpression implements IParsedExpression { + + private final ExpressionNode myParsedExpression; + + public ParsedExpression(ExpressionNode theParsedExpression) { + myParsedExpression = theParsedExpression; + } + } } diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml index aca2a5ce85d..62ddd54bac4 100644 --- a/hapi-fhir-test-utilities/pom.xml 
+++ b/hapi-fhir-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml index db3587992ea..b8c034fc801 100644 --- a/hapi-fhir-testpage-overlay/pom.xml +++ b/hapi-fhir-testpage-overlay/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../pom.xml @@ -77,6 +77,13 @@ hapi-fhir-structures-r5 ${project.version} + + ca.uhn.hapi.fhir + hapi-fhir-jpaserver-hfql + ${project.version} + compile + + com.google.code.gson @@ -141,6 +148,10 @@ + + org.webjars.npm + ace-builds + org.webjars.npm bootstrap diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/BaseController.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/BaseController.java index 6aca81d697d..0b9d14f3162 100644 --- a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/BaseController.java +++ b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/BaseController.java @@ -101,7 +101,7 @@ public class BaseController { return retVal.toArray(new Header[retVal.size()]); } - private String format(String theResultBody, EncodingEnum theEncodingEnum) { + private static String format(String theResultBody, EncodingEnum theEncodingEnum) { String str = StringEscapeUtils.escapeHtml4(theResultBody); if (str == null || theEncodingEnum == null) { return str; @@ -402,6 +402,11 @@ public class BaseController { theModel.put("requiredParamExtension", ExtensionConstants.PARAM_IS_REQUIRED); theModel.put("conf", capabilityStatement); + + boolean supportsHfql = capabilityStatement.getRestFirstRep().getOperation().stream() + .anyMatch(t -> "hfql-execute".equals(t.getName())); + theModel.put("supportsHfql", supportsHfql); + return capabilityStatement; } @@ -696,4 +701,8 @@ public class BaseController { } return retVal; } + + public static String formatAsJson(String theInput) { + return 
format(defaultString(theInput), EncodingEnum.JSON); + } } diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/Controller.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/Controller.java index e0ad5f3dfdd..93aff29f414 100644 --- a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/Controller.java +++ b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/Controller.java @@ -4,6 +4,11 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.jpa.fql.executor.HfqlDataTypeEnum; +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult; +import ca.uhn.fhir.jpa.fql.jdbc.RemoteHfqlExecutionResult; +import ca.uhn.fhir.jpa.fql.parser.HfqlStatement; +import ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider; import ca.uhn.fhir.model.api.Include; import ca.uhn.fhir.model.dstu2.valueset.ResourceTypeEnum; import ca.uhn.fhir.model.primitive.BoundCodeDt; @@ -14,21 +19,14 @@ import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.client.impl.GenericClient; -import ca.uhn.fhir.rest.gclient.ICreateTyped; -import ca.uhn.fhir.rest.gclient.IHistory; -import ca.uhn.fhir.rest.gclient.IHistoryTyped; -import ca.uhn.fhir.rest.gclient.IHistoryUntyped; -import ca.uhn.fhir.rest.gclient.IQuery; -import ca.uhn.fhir.rest.gclient.IUntypedQuery; import ca.uhn.fhir.rest.gclient.NumberClientParam.IMatches; -import ca.uhn.fhir.rest.gclient.QuantityClientParam; import ca.uhn.fhir.rest.gclient.QuantityClientParam.IAndUnits; -import ca.uhn.fhir.rest.gclient.StringClientParam; -import ca.uhn.fhir.rest.gclient.TokenClientParam; +import ca.uhn.fhir.rest.gclient.*; import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; import ca.uhn.fhir.to.model.HomeRequest; import ca.uhn.fhir.to.model.ResourceRequest; import 
ca.uhn.fhir.to.model.TransactionRequest; +import ca.uhn.fhir.to.util.HfqlRenderingUtil; import ca.uhn.fhir.util.StopWatch; import com.google.gson.stream.JsonWriter; import org.apache.commons.lang3.StringUtils; @@ -41,9 +39,12 @@ import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseConformance; import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.r4.model.Parameters; import org.springframework.ui.ModelMap; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; import java.io.IOException; import java.io.StringWriter; @@ -51,20 +52,20 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.TreeSet; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import static ca.uhn.fhir.rest.server.provider.ProviderConstants.DIFF_OPERATION_NAME; import static ca.uhn.fhir.util.UrlUtil.sanitizeUrlPart; -import static org.apache.commons.lang3.StringUtils.defaultIfEmpty; -import static org.apache.commons.lang3.StringUtils.defaultString; -import static org.apache.commons.lang3.StringUtils.isBlank; -import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.apache.commons.lang3.StringUtils.*; @org.springframework.stereotype.Controller() public class Controller extends BaseController { static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(Controller.class); + public static final int ROW_LIMIT = 200; @RequestMapping(value = {"/about"}) public String actionAbout( @@ -202,7 +203,7 @@ public class Controller extends BaseController { final BindingResult 
theBindingResult, final ModelMap theModel) { addCommonParams(theServletRequest, theRequest, theModel); - ourLog.info(theServletRequest.toString()); + theModel.put("page", "home"); return "home"; } @@ -337,6 +338,98 @@ public class Controller extends BaseController { return "resource"; } + @RequestMapping( + value = {"/hfql"}, + method = RequestMethod.GET) + public String actionHfqlHome( + HttpServletRequest theServletRequest, + @RequestParam(value = "hfql-query", required = false) String theHfqlQuery, + final HomeRequest theRequest, + final BindingResult theBindingResult, + final ModelMap theModel) { + addCommonParamsForHfql(theServletRequest, theRequest, theModel); + + String query = theHfqlQuery; + if (isBlank(query)) { + query = "SELECT\n" + " id AS ID, meta.versionId AS Version,\n" + + " name[0].family AS FamilyName, name[0].given[0] AS GivenName,\n" + + " identifier AS Identifiers\n" + + "FROM\n" + + " Patient"; + } + + theModel.put("query", query); + return "hfql"; + } + + @RequestMapping( + value = {"/hfql"}, + method = RequestMethod.POST) + public String actionHfqlExecuteQuery( + HttpServletRequest theServletRequest, + @RequestParam("hfql-query") String theHfqlQuery, + final HomeRequest theRequest, + final BindingResult theBindingResult, + final ModelMap theModel) { + addCommonParamsForHfql(theServletRequest, theRequest, theModel); + + ourLog.info("Executing HFQL query: {}", theHfqlQuery); + StopWatch sw = new StopWatch(); + + List> rows = new ArrayList<>(); + try { + IHfqlExecutionResult result = executeHfqlStatement(theServletRequest, theHfqlQuery, theRequest, ROW_LIMIT); + while (result.hasNext()) { + List nextRowValues = result.getNextRow().getRowValues(); + if (nextRowValues.size() >= 1) { + List nextRow = nextRowValues.stream() + .map(t -> t != null ? 
t.toString() : null) + .collect(Collectors.toList()); + rows.add(nextRow); + } + } + + List columnNames = result.getStatement().getSelectClauses().stream() + .map(HfqlStatement.SelectClause::getAlias) + .collect(Collectors.toList()); + theModel.put("columnNames", columnNames); + List columnTypes = result.getStatement().getSelectClauses().stream() + .map(t -> t.getDataType().name()) + .collect(Collectors.toList()); + theModel.put("columnTypes", columnTypes); + + } catch (IOException e) { + ourLog.warn("Failed to execute HFQL query: {}", e.toString()); + theModel.put("columnNames", List.of("Error")); + theModel.put("columnTypes", List.of(HfqlDataTypeEnum.STRING)); + rows = List.of(List.of(e.getMessage())); + } + + theModel.put("HfqlRenderingUtil", new HfqlRenderingUtil()); + theModel.put("query", theHfqlQuery); + theModel.put("resultRows", rows); + theModel.put("executionTime", sw.toString()); + + return "hfql"; + } + + @Nonnull + protected IHfqlExecutionResult executeHfqlStatement( + HttpServletRequest theServletRequest, String theHfqlQuery, HomeRequest theRequest, int theRowLimit) + throws IOException { + Parameters requestParameters = + HfqlRestProvider.newQueryRequestParameters(theHfqlQuery, theRowLimit, theRowLimit); + GenericClient client = theRequest.newClient(theServletRequest, getContext(theRequest), myConfig, null); + return new RemoteHfqlExecutionResult(requestParameters, client); + } + + protected org.hl7.fhir.r5.model.CapabilityStatement addCommonParamsForHfql( + HttpServletRequest theServletRequest, HomeRequest theRequest, ModelMap theModel) { + theModel.put("page", "hfql"); + theModel.put("rowLimit", ROW_LIMIT); + return super.addCommonParams(theServletRequest, theRequest, theModel); + } + private void populateModelForResource( HttpServletRequest theServletRequest, HomeRequest theRequest, ModelMap theModel) { org.hl7.fhir.r5.model.CapabilityStatement conformance = @@ -739,10 +832,6 @@ public class Controller extends BaseController { return "result"; } 
- private static ResultType getReturnedTypeBasedOnOperation(@Nullable String operationName) { - return DIFF_OPERATION_NAME.equals(operationName) ? ResultType.PARAMETERS : ResultType.BUNDLE; - } - private void doActionHistory( HttpServletRequest theReq, HomeRequest theRequest, @@ -1102,4 +1191,8 @@ public class Controller extends BaseController { return true; } + + private static ResultType getReturnedTypeBasedOnOperation(@Nullable String operationName) { + return DIFF_OPERATION_NAME.equals(operationName) ? ResultType.PARAMETERS : ResultType.BUNDLE; + } } diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/FhirTesterMvcConfig.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/FhirTesterMvcConfig.java index 7cc02fd2a19..4030fd750ee 100644 --- a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/FhirTesterMvcConfig.java +++ b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/FhirTesterMvcConfig.java @@ -24,6 +24,7 @@ public class FhirTesterMvcConfig implements WebMvcConfigurer { @Override public void addResourceHandlers(@Nonnull ResourceHandlerRegistry theRegistry) { + WebUtil.webJarAddAceBuilds(theRegistry); WebUtil.webJarAddBoostrap(theRegistry); WebUtil.webJarAddJQuery(theRegistry); WebUtil.webJarAddFontAwesome(theRegistry); diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/HomeRequest.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/HomeRequest.java index 0ab72bc77a7..c7db6634533 100644 --- a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/HomeRequest.java +++ b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/HomeRequest.java @@ -13,6 +13,7 @@ import ca.uhn.fhir.to.Controller; import ca.uhn.fhir.to.TesterConfig; import org.springframework.web.bind.annotation.ModelAttribute; +import javax.annotation.Nullable; import javax.servlet.http.HttpServletRequest; import static org.apache.commons.lang3.StringUtils.*; @@ -122,8 +123,11 @@ public class HomeRequest { 
HttpServletRequest theRequest, FhirContext theContext, TesterConfig theConfig, - Controller.CaptureInterceptor theInterceptor) { + @Nullable Controller.CaptureInterceptor theInterceptor) { theContext.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER); + theContext.getRestfulClientFactory().setConnectTimeout(60 * 1000); + theContext.getRestfulClientFactory().setConnectionRequestTimeout(60 * 1000); + theContext.getRestfulClientFactory().setSocketTimeout(60 * 1000); GenericClient retVal; ITestingUiClientFactory clientFactory = theConfig.getClientFactory(); @@ -157,7 +161,9 @@ public class HomeRequest { } } - retVal.registerInterceptor(theInterceptor); + if (theInterceptor != null) { + retVal.registerInterceptor(theInterceptor); + } final String remoteAddr = org.slf4j.MDC.get("req.remoteAddr"); retVal.registerInterceptor(new IClientInterceptor() { diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/util/HfqlRenderingUtil.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/util/HfqlRenderingUtil.java new file mode 100644 index 00000000000..b60b859a8eb --- /dev/null +++ b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/util/HfqlRenderingUtil.java @@ -0,0 +1,10 @@ +package ca.uhn.fhir.to.util; + +import ca.uhn.fhir.to.BaseController; + +public class HfqlRenderingUtil { + + public String formatAsJson(String theInput) { + return BaseController.formatAsJson(theInput); + } +} diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/util/WebUtil.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/util/WebUtil.java index d2c16d0fff9..395831cfecb 100644 --- a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/util/WebUtil.java +++ b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/util/WebUtil.java @@ -47,6 +47,10 @@ public class WebUtil { .addResourceLocations("classpath:/META-INF/resources/webjars/" + name + "/" + version + "/"); } + public static void 
webJarAddAceBuilds(ResourceHandlerRegistry theRegistry) { + WebUtil.addStaticResourceWebJar(theRegistry, "org.webjars.npm", "ace-builds"); + } + public static void webJarAddAwesomeCheckbox(ResourceHandlerRegistry theRegistry) { WebUtil.addStaticResourceWebJar(theRegistry, "org.webjars.bower", "awesome-bootstrap-checkbox"); } diff --git a/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/hfql.html b/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/hfql.html new file mode 100644 index 00000000000..0f6ec7d7149 --- /dev/null +++ b/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/hfql.html @@ -0,0 +1,145 @@ + + + + + <th:block th:include="tmpl-head :: head" /> + <script th:include="tmpl-buttonclick-handler :: handler" /> + + <style type="text/css" media="screen"> + #editor { + width: 100%; + height: 150px; + border-top: 1px solid #AAAAAA; + } + #resultsTableCard { + padding: 0; + } + .resultsTable { + margin-bottom: 0; + width: auto; + } + .card { + margin-bottom: 10px; + } + </style> + + </head> + + <body> + <form action="" method="get" id="outerForm"> + <input type="hidden" id="serverId" name="serverId" th:value="${serverId}"></input> + <input th:if="${_csrf} != null" type="hidden" th:name="${_csrf.parameterName}" th:value="${_csrf.token}" /> + + <div th:replace="tmpl-navbar-top :: top" ></div> + + <div class="container-fluid"> + <div class="row"> + + <div th:replace="tmpl-navbar-left :: left" ></div> + + <div class="col-sm-9 col-sm-offset-3 col-md-9 col-md-offset-3 main" style="height: calc(100% - 200px);"> + + <div th:replace="tmpl-banner :: banner"></div> + + <div class="card" style="margin-top: 10px;"> + <h3 class="card-header">HFQL / SQL Execution</h3> + <div class="card-body"> + This page can be used to execute queries using the HAPI FHIR Query Language (HFQL), + which is a SQL-like syntax for querying FHIR repositories. 
Learn more about + the HFQL syntax at: + <a href="https://smilecdr.com/docs/hfql/">https://smilecdr.com/docs/hfql/</a>. + This UI will display a maximum of [[${rowLimit}]] rows. + </div> + </div> + + <!-- ************************************************ --> + <!-- ** SQL Editor ** --> + <!-- ************************************************ --> + + <div class="card"> + <h3 class="card-header">Query</h3> + <div class="card-body" style="padding: 0;"> + <button type="button" id="execute-btn" class="btn btn-primary" style="margin: 5px;"> + <i class="fas fa-play"></i> Execute + </button> + <button type="button" id="copy-link-btn" class="btn btn-info" style="margin: 5px;"> + <i class="fas fa-link"></i> Link to Query + </button> + + <div id="editor" th:text="${query}"></div> + + <script th:src="@{/resources/ace-builds/src-min-noconflict/ace.js}"></script> + <script> + const editor = ace.edit("editor"); + editor.setTheme("ace/theme/eclipse"); + editor.setTheme("ace/theme/vibrant_ink"); + editor.setTheme("ace/theme/cobalt"); + editor.session.setMode("ace/mode/sql"); + editor.setShowPrintMargin(false); + + $('#execute-btn').click( + function() { + let btn = $(this); + document.getElementById('execute-btn').innerHTML = '<i class="fas fa-spinner fa-spin"></i> Execute'; + handleActionButtonClick(btn); + let value = editor.getValue(); + btn.append($('<input />', { type: 'hidden', name: 'hfql-query', value: value })); + $("#outerForm").attr("method", "post"); + $("#outerForm").attr("action", "hfql").submit(); + } + ); + + $('#copy-link-btn').click( + function() { + let btn = $(this); + handleActionButtonClick(btn); + let value = editor.getValue(); + btn.append($('<input />', { type: 'hidden', name: 'hfql-query', value: value })); + $("#outerForm").attr("method", "get"); + $("#outerForm").attr("action", "hfql").submit(); + } + ); + + </script> + </div> + </div> + + <!-- ************************************************ --> + <!-- ** Query Results ** --> + <!-- 
************************************************ --> + + <div th:if="${resultRows} != null" class="card"> + <h3 class="card-header">Results</h3> + <div class="card-header"> + Query executed in [[${executionTime}]]. + </div> + <div class="card-body" id="resultsTableCard"> + <div class="table-responsive"> + <table class="table table-striped table-bordered table-sm resultsTable" id="resultsTable"> + <thead> + <tr> + <th th:each="columnName : ${columnNames}" th:text="${columnName}"></th> + </tr> + </thead> + <tbody> + <tr th:each="row : ${resultRows}"> + <td th:each="col,colIterStat : ${row}"> + <th:block th:switch="${columnTypes[colIterStat.index]}"> + <th:block th:case="'JSON'" th:utext="${HfqlRenderingUtil.formatAsJson(col)}"/> + <th:block th:case="*" th:text="${col}"/> + </th:block> + </td> + </tr> + </tbody> + </table> + </div> + </div> + </div> + </div> + </div> + + </form> + + <div th:replace="tmpl-footer :: footer" ></div> +</body> +</html> diff --git a/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/home.html b/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/home.html index 07f5851ad3d..b9cef970968 100644 --- a/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/home.html +++ b/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/home.html @@ -57,11 +57,9 @@ <!-- ** Server Actions (no resource selected) ** --> <!-- ************************************************ --> - <div class="panel panel-default" th:if="${resourceName.empty}"> - <div class="panel-heading"> - <h3 class="panel-title">Server Actions</h3> - </div> - <div class="panel-body"> + <div class="card" th:if="${resourceName.empty}"> + <h3 class="card-header">Server Actions</h3> + <div class="card-body"> <div class="container-fluid"> <!-- Conformance --> diff --git a/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/tmpl-banner.html b/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/tmpl-banner.html index 64dc06c0796..de9865e1a34 100644 
--- a/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/tmpl-banner.html +++ b/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/tmpl-banner.html @@ -1,12 +1,12 @@ <!DOCTYPE html> <html lang="en" xmlns:th="http://www.thymeleaf.org"> <div th:fragment="banner"> - <div class="row" style="padding-top: 20px;"> - <div class="col-md-4" style="padding-top: 47px;"> - <img src="img/hapi_fhir_banner_right.png" /> + <div class="row"> + <div class="col-md-4" style="padding-top: 39px; left: -65px;"> + <img src="img/hapi_fhir_banner_right.png" style="height: 40px;"/> </div> <div class="col-md-8"> - <img src="img/hapi_fhir_banner.png" style="height: 150px;" align="right"/> + <img src="img/hapi_fhir_banner.png" style="height: 120px; padding: 5px;" align="right"/> </div> </div> diff --git a/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/tmpl-head.html b/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/tmpl-head.html index 88708a4f393..09edce2e4d6 100644 --- a/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/tmpl-head.html +++ b/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/tmpl-head.html @@ -34,6 +34,8 @@ <link th:href="@{/resources/select2/css/select2.css}" rel="stylesheet"/> <script th:src="@{/resources/select2/js/select2.min.js}"></script> + <script th:src="@{/resources/ace-builds/src/ace.js}"></script> + <script th:src="@{/resources/ace-builds/src/ext-language_tools.js}"></script> <script src="js/RestfulTester.js" type="text/javascript"></script> <link href="css/tester.css" rel="stylesheet"/> diff --git a/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/tmpl-navbar-left.html b/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/tmpl-navbar-left.html index 9ca8521d144..3c8c8b758be 100644 --- a/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/tmpl-navbar-left.html +++ b/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/tmpl-navbar-left.html @@ -107,9 +107,12 @@ 
<h4>Server</h4> <ul class="nav nav-sidebar"> - <li th:class="(${notHome} == null and ${resourceName} != null and ${resourceName.empty}) ? 'active' : ''"> + <li th:class="${page} == 'home' ? 'active' : ''"> <a href="#" onclick="doAction(this, 'home', null);">Server Home/Actions</a> </li> + <li th:if="${supportsHfql}" th:class="${page} == 'hfql' ? 'active' : ''"> + <a href="#" id="leftHfql" onclick="doAction(this, 'hfql', null);">HFQL / SQL</a> + </li> </ul> <h4>Resources</h4> diff --git a/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/window-title.html b/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/window-title.html index 3f4c8a5689a..04370e8e7d3 100644 --- a/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/window-title.html +++ b/hapi-fhir-testpage-overlay/src/main/webapp/WEB-INF/templates/window-title.html @@ -3,4 +3,5 @@ <title th:fragment="home">HAPI FHIR <th:block th:text="${resourceName}"/> - HAPI FHIR Results - HAPI FHIR + HFQL/SQL - HAPI FHIR diff --git a/hapi-fhir-testpage-overlay/src/main/webapp/css/tester.css b/hapi-fhir-testpage-overlay/src/main/webapp/css/tester.css index e5f52ad246c..7c57b9420c7 100644 --- a/hapi-fhir-testpage-overlay/src/main/webapp/css/tester.css +++ b/hapi-fhir-testpage-overlay/src/main/webapp/css/tester.css @@ -220,7 +220,7 @@ body .syntaxhighlighter .line { @media (min-width: 768px) { .sidebar { - top: 51px; + top: 5px; bottom: 0; left: 0; z-index: 1000; diff --git a/hapi-fhir-testpage-overlay/src/test/java/ca/uhn/fhir/jpa/test/FhirTesterConfig.java b/hapi-fhir-testpage-overlay/src/test/java/ca/uhn/fhir/jpa/test/FhirTesterConfig.java index 658a60ce83f..02342b874e5 100644 --- a/hapi-fhir-testpage-overlay/src/test/java/ca/uhn/fhir/jpa/test/FhirTesterConfig.java +++ b/hapi-fhir-testpage-overlay/src/test/java/ca/uhn/fhir/jpa/test/FhirTesterConfig.java @@ -44,23 +44,28 @@ public class FhirTesterConfig { .withBaseUrl("http://localhost:8888/fhir") .withName("Localhost Server") .allowsApiKey() + 
.enableDebugTemplates() .addServer() .withId("hapi") .withFhirVersion(FhirVersionEnum.DSTU2) .withBaseUrl("http://hapi.fhir.org/baseDstu2") .withName("Public HAPI Test Server") .allowsApiKey() + .enableDebugTemplates() .addServer() - .withId("home3") - .withFhirVersion(FhirVersionEnum.DSTU3) - .withBaseUrl("http://hapi.fhir.org/baseDstu3") - .withName("Public HAPI Test Server (STU3)") + .withId("home4") + .withFhirVersion(FhirVersionEnum.R4) + .withBaseUrl("http://hapi.fhir.org/baseR4") + .withName("Public HAPI Test Server (R4)") + .enableDebugTemplates() .addServer() .withId("home") .withFhirVersion(FhirVersionEnum.DSTU2) .withBaseUrl("${serverBase}/baseDstu2") - .withName("Local Tester"); + .withName("Local Tester") + .enableDebugTemplates(); return retVal; } } + diff --git a/hapi-fhir-testpage-overlay/src/test/java/ca/uhn/fhir/jpa/test/OverlayTestApp.java b/hapi-fhir-testpage-overlay/src/test/java/ca/uhn/fhir/jpa/test/OverlayTestApp.java index b17eaa0eea3..c52d63fe68f 100644 --- a/hapi-fhir-testpage-overlay/src/test/java/ca/uhn/fhir/jpa/test/OverlayTestApp.java +++ b/hapi-fhir-testpage-overlay/src/test/java/ca/uhn/fhir/jpa/test/OverlayTestApp.java @@ -1,38 +1,58 @@ package ca.uhn.fhir.jpa.test; import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.model.api.IResource; +import ca.uhn.fhir.jpa.fql.executor.HfqlDataTypeEnum; +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutor; +import ca.uhn.fhir.jpa.fql.executor.StaticHfqlExecutionResult; +import ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider; import ca.uhn.fhir.model.api.Include; import ca.uhn.fhir.model.api.annotation.Description; -import ca.uhn.fhir.model.dstu2.composite.IdentifierDt; -import ca.uhn.fhir.model.dstu2.resource.DiagnosticReport; -import ca.uhn.fhir.model.dstu2.resource.Patient; import ca.uhn.fhir.rest.annotation.IncludeParam; import ca.uhn.fhir.rest.annotation.OptionalParam; import ca.uhn.fhir.rest.annotation.RequiredParam; import ca.uhn.fhir.rest.annotation.Search; import 
ca.uhn.fhir.rest.param.DateRangeParam; import ca.uhn.fhir.rest.param.TokenOrListParam; +import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.server.IResourceProvider; import ca.uhn.fhir.rest.server.RestfulServer; +import ca.uhn.fhir.rest.server.provider.HashMapResourceProvider; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.HandlerCollection; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.eclipse.jetty.webapp.WebAppContext; +import org.hl7.fhir.r4.model.DiagnosticReport; +import org.hl7.fhir.r4.model.Patient; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import javax.annotation.Nonnull; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Set; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + public class OverlayTestApp { private static AnnotationConfigApplicationContext ourAppCtx; - @SuppressWarnings({ "unchecked" }) + @SuppressWarnings({"unchecked"}) public static void main(String[] args) throws Exception { + IHfqlExecutor hfqlExecutor = mock(IHfqlExecutor.class); + List columnNames = List.of("family", "given"); + List columnTypes = List.of(HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.JSON); + List> rows = List.of( + List.of("Simpson", "[\"Homer\", \"Jay\"]"), + List.of("Simpson", "[\"Bart\", \"Barto\"]") + ); + when(hfqlExecutor.executeInitialSearch(any(), any(), any())).thenAnswer(t-> { + Thread.sleep(1000); + return new StaticHfqlExecutionResult("the-search-id", columnNames, columnTypes, rows); + }); { int myPort = 8888; @@ -44,8 +64,12 @@ public class OverlayTestApp { overlayHandler.setResourceBase("hapi-fhir-testpage-overlay/src/main/webapp"); overlayHandler.setParentLoaderPriority(true); - RestfulServer restfulServer = new RestfulServer(FhirContext.forDstu2()); + FhirContext ctx = 
FhirContext.forR4Cached(); + RestfulServer restfulServer = new RestfulServer(ctx); restfulServer.registerProvider(new ProviderWithRequiredAndOptional()); + restfulServer.registerProvider(new HashMapResourceProvider<>(ctx, Patient.class)); + restfulServer.registerProvider(new HfqlRestProvider(hfqlExecutor)); + ServletContextHandler proxyHandler = new ServletContextHandler(); proxyHandler.setContextPath("/"); ServletHolder servletHolder = new ServletHolder(); @@ -58,8 +82,10 @@ public class OverlayTestApp { } - if (true) {return;} - + if (true) { + return; + } + // ourAppCtx = new AnnotationConfigApplicationContext(FhirServerConfig.class); // ServletContextHandler proxyHandler = new ServletContextHandler(); // proxyHandler.setContextPath("/"); @@ -155,15 +181,15 @@ public class OverlayTestApp { @Description(shortDefinition = "This is a query by date!") @Search - public List findDiagnosticReportsByPatient(@RequiredParam(name = DiagnosticReport.SP_SUBJECT + '.' + Patient.SP_IDENTIFIER) IdentifierDt thePatientId, @OptionalParam(name = DiagnosticReport.SP_CODE) TokenOrListParam theNames, - @OptionalParam(name = DiagnosticReport.SP_DATE) DateRangeParam theDateRange, @IncludeParam(allow = { "DiagnosticReport.result" }) Set theIncludes) throws Exception { + public List findDiagnosticReportsByPatient(@RequiredParam(name = DiagnosticReport.SP_SUBJECT + '.' + Patient.SP_IDENTIFIER) TokenParam thePatientId, @OptionalParam(name = DiagnosticReport.SP_CODE) TokenOrListParam theNames, + @OptionalParam(name = DiagnosticReport.SP_DATE) DateRangeParam theDateRange, @IncludeParam(allow = {"DiagnosticReport.result"}) Set theIncludes) throws Exception { return getDiagnosticReports(); } @Description(shortDefinition = "This is a query by issued.. blah blah foo bar blah blah") @Search - public List findDiagnosticReportsByPatientIssued(@RequiredParam(name = DiagnosticReport.SP_SUBJECT + '.' 
+ Patient.SP_IDENTIFIER) IdentifierDt thePatientId, @OptionalParam(name = DiagnosticReport.SP_CODE) TokenOrListParam theNames, - @OptionalParam(name = DiagnosticReport.SP_ISSUED) DateRangeParam theDateRange, @IncludeParam(allow = { "DiagnosticReport.result" }) Set theIncludes) throws Exception { + public List findDiagnosticReportsByPatientIssued(@RequiredParam(name = DiagnosticReport.SP_SUBJECT + '.' + Patient.SP_IDENTIFIER) TokenParam thePatientId, @OptionalParam(name = DiagnosticReport.SP_CODE) TokenOrListParam theNames, + @OptionalParam(name = DiagnosticReport.SP_ISSUED) DateRangeParam theDateRange, @IncludeParam(allow = {"DiagnosticReport.result"}) Set theIncludes) throws Exception { return getDiagnosticReports(); } @@ -191,7 +217,7 @@ public class OverlayTestApp { } @Override - public Class getResourceType() { + public Class getResourceType() { return DiagnosticReport.class; } diff --git a/hapi-fhir-testpage-overlay/src/test/java/ca/uhn/fhir/jpa/test/WebTest.java b/hapi-fhir-testpage-overlay/src/test/java/ca/uhn/fhir/jpa/test/WebTest.java index c2101ddd30f..d1eb4f79a31 100644 --- a/hapi-fhir-testpage-overlay/src/test/java/ca/uhn/fhir/jpa/test/WebTest.java +++ b/hapi-fhir-testpage-overlay/src/test/java/ca/uhn/fhir/jpa/test/WebTest.java @@ -1,6 +1,10 @@ package ca.uhn.fhir.jpa.test; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.fql.executor.HfqlDataTypeEnum; +import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutor; +import ca.uhn.fhir.jpa.fql.executor.StaticHfqlExecutionResult; +import ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.Validate; @@ -20,6 +24,8 @@ import com.gargoylesoftware.htmlunit.html.HtmlPage; import com.gargoylesoftware.htmlunit.html.HtmlTable; import com.gargoylesoftware.htmlunit.html.HtmlTableCell; import com.gargoylesoftware.htmlunit.html.HtmlTableRow; +import 
com.gargoylesoftware.htmlunit.html.HtmlTextArea; +import com.gargoylesoftware.htmlunit.html.XHtmlPage; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.ContextHandler; import org.eclipse.jetty.servlet.ServletContextHandler; @@ -39,7 +45,10 @@ import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.extension.RegisterExtension; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.test.web.servlet.MockMvc; @@ -57,23 +66,33 @@ import java.util.List; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.when; +@ExtendWith(MockitoExtension.class) public class WebTest { private static final Logger ourLog = LoggerFactory.getLogger(WebTest.class); private static final FhirContext ourCtx = FhirContext.forR4Cached(); + private static final HfqlRestProvider ourHfqlProvider = new HfqlRestProvider(); + @RegisterExtension @Order(0) - private static final RestfulServerExtension ourFhirServer = new RestfulServerExtension(ourCtx) - .registerProvider(new MyPatientFakeDocumentController()); + public static final RestfulServerExtension ourFhirServer = new RestfulServerExtension(ourCtx) + .registerProvider(new MyPatientFakeDocumentController()) + .registerProvider(ourHfqlProvider); @RegisterExtension @Order(1) - private static final HashMapResourceProviderExtension ourPatientProvider = new HashMapResourceProviderExtension<>(ourFhirServer, Patient.class); + public static final HashMapResourceProviderExtension ourPatientProvider = new HashMapResourceProviderExtension<>(ourFhirServer, 
Patient.class); protected static MockMvc ourMockMvc; private static Server ourOverlayServer; private WebClient myWebClient; + @Mock + private IHfqlExecutor myHfqlExecutor; @BeforeEach public void before() throws Exception { + ourHfqlProvider.setHfqlExecutor(myHfqlExecutor); + if (ourOverlayServer == null) { AnnotationConfigWebApplicationContext appCtx = new AnnotationConfigWebApplicationContext(); appCtx.register(WebTestFhirTesterConfig.class); @@ -218,6 +237,36 @@ public class WebTest { assertThat(diffPage.asNormalizedText(), containsString("\"resourceType\": \"Parameters\"")); } + + @Test + public void testHfqlExecuteQuery() throws IOException { + // Load home page + HtmlPage page = myWebClient.getPage("http://localhost/"); + // Navigate to HFQL page + HtmlAnchor hfqlNavButton = page.getHtmlElementById("leftHfql"); + HtmlPage hfqlPage = hfqlNavButton.click(); + assertEquals("HFQL/SQL - HAPI FHIR", hfqlPage.getTitleText()); + + // Prepare response + List columnNames = List.of("Family", "Given"); + List columnTypes = List.of(HfqlDataTypeEnum.STRING, HfqlDataTypeEnum.STRING); + List> rows = List.of( + List.of("Simpson", "Homer"), + List.of("Simpson", "Bart") + ); + StaticHfqlExecutionResult result = new StaticHfqlExecutionResult(null, columnNames, columnTypes, rows); + when(myHfqlExecutor.executeInitialSearch(any(), any(), any())).thenReturn(result); + + // Click execute button + HtmlButton executeBtn = (HtmlButton) hfqlPage.getElementById("execute-btn"); + HtmlPage resultsPage = executeBtn.click(); + + HtmlTable table = (HtmlTable) resultsPage.getElementById("resultsTable"); + ourLog.info(table.asXml()); + assertThat(table.asNormalizedText(), containsString("Simpson")); + } + + private void registerAndUpdatePatient() { Patient p = new Patient(); Patient p2 = new Patient(); @@ -310,5 +359,4 @@ public class WebTest { JettyUtil.closeServer(ourOverlayServer); } - } diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml 
b/hapi-fhir-validation-resources-dstu2.1/pom.xml index f2c55a822cc..f2940b20b9a 100644 --- a/hapi-fhir-validation-resources-dstu2.1/pom.xml +++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml index 2f99769aeab..85275e19110 100644 --- a/hapi-fhir-validation-resources-dstu2/pom.xml +++ b/hapi-fhir-validation-resources-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml index d554a46a8db..1116b98e2c0 100644 --- a/hapi-fhir-validation-resources-dstu3/pom.xml +++ b/hapi-fhir-validation-resources-dstu3/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml index 528a836d643..2fc6a8b8ddb 100644 --- a/hapi-fhir-validation-resources-r4/pom.xml +++ b/hapi-fhir-validation-resources-r4/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4b/pom.xml b/hapi-fhir-validation-resources-r4b/pom.xml index 84f316f8ae7..b430db55a99 100644 --- a/hapi-fhir-validation-resources-r4b/pom.xml +++ b/hapi-fhir-validation-resources-r4b/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml index ee6c079d7b1..d3366b0b192 100644 --- a/hapi-fhir-validation-resources-r5/pom.xml +++ b/hapi-fhir-validation-resources-r5/pom.xml @@ -4,7 +4,7 
@@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml index fed8b73e21a..7c04169b4a0 100644 --- a/hapi-fhir-validation/pom.xml +++ b/hapi-fhir-validation/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation/src/test/java/ca/uhn/fhir/fhirpath/FhirPathTest.java b/hapi-fhir-validation/src/test/java/ca/uhn/fhir/fhirpath/FhirPathTest.java new file mode 100644 index 00000000000..b8d73000b8a --- /dev/null +++ b/hapi-fhir-validation/src/test/java/ca/uhn/fhir/fhirpath/FhirPathTest.java @@ -0,0 +1,112 @@ +package ca.uhn.fhir.fhirpath; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.util.FhirTerser; +import ca.uhn.fhir.util.TestUtil; +import org.hl7.fhir.instance.model.api.IBase; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.r4.model.HumanName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +import javax.annotation.Nonnull; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Stream; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.endsWith; +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class FhirPathTest { + + @ParameterizedTest + @MethodSource("provideContexts") + public void testEvaluateNormal(FhirContext theFhirContext) { + IBaseResource resource = createPatientResourceWithTwoNames(theFhirContext); + IFhirPath fp = theFhirContext.newFhirPath(); + List names = fp.evaluate(resource, "Patient.name", IBase.class); + assertEquals(2, names.size()); + } + + @SuppressWarnings("deprecation") + @ParameterizedTest + 
@MethodSource("provideContexts") + public void testEvaluateNormal_LegacyMethod(FhirContext theFhirContext) { + IBaseResource p = createPatientResourceWithTwoNames(theFhirContext); + + IFhirPath fp = theFhirContext.newFluentPath(); + List names = fp.evaluate(p, "Patient.name", IBase.class); + assertEquals(2, names.size()); + } + + @ParameterizedTest + @MethodSource("provideContexts") + public void testEvaluateUnknownPath(FhirContext theFhirContext) { + IBaseResource p = createPatientResourceWithTwoNames(theFhirContext); + + IFhirPath fp = theFhirContext.newFhirPath(); + List names = fp.evaluate(p, "Patient.nameFOO", HumanName.class); + assertEquals(0, names.size()); + } + + @ParameterizedTest + @MethodSource("provideContexts") + public void testEvaluateInvalidPath(FhirContext theFhirContext) { + IBaseResource p = createPatientResourceWithTwoNames(theFhirContext); + + IFhirPath fp = theFhirContext.newFhirPath(); + try { + fp.evaluate(p, "Patient....nameFOO", HumanName.class); + } catch (FhirPathExecutionException e) { + assertThat(e.getMessage(), containsString("termination at unexpected token")); + } + } + + @ParameterizedTest + @MethodSource("provideContexts") + public void testEvaluateWrongType(FhirContext theFhirContext) { + IBaseResource p = createPatientResourceWithTwoNames(theFhirContext); + + Class stringType = theFhirContext.getElementDefinition("string").getImplementingClass(); + + IFhirPath fp = theFhirContext.newFhirPath(); + try { + fp.evaluate(p, "Patient.name", stringType); + } catch (FhirPathExecutionException e) { + String expected = "FhirPath expression returned unexpected type HumanName - Expected " + stringType.getName(); + assertThat(e.getMessage(), + endsWith(expected)); + } + } + + @Nonnull + private static IBaseResource createPatientResourceWithTwoNames(FhirContext theFhirContext) { + IBaseResource resource = theFhirContext.getResourceDefinition("Patient").newInstance(); + FhirTerser terser = theFhirContext.newTerser(); + IBase humanName = 
terser.addElement(resource, "name"); + terser.addElement(humanName, "family", "N1F1"); + terser.addElement(humanName, "given", "N1G1"); + terser.addElement(humanName, "given", "N1G2"); + IBase humanName2 = terser.addElement(resource, "name"); + terser.addElement(humanName2, "family", "N2F1"); + terser.addElement(humanName2, "given", "N2G1"); + terser.addElement(humanName2, "given", "N2G2"); + return resource; + } + + @AfterAll + public static void afterClassClearContext() { + TestUtil.randomizeLocaleAndTimezone(); + } + + public static Stream provideContexts() { + return Arrays + .stream(FhirVersionEnum.values()) + .filter(t -> t.isEqualOrNewerThan(FhirVersionEnum.DSTU3)) + .map(FhirContext::forCached); + } +} diff --git a/hapi-fhir-validation/src/test/java/ca/uhn/fhir/fhirpath/FluentPathTest.java b/hapi-fhir-validation/src/test/java/ca/uhn/fhir/fhirpath/FluentPathTest.java deleted file mode 100644 index be9060ec714..00000000000 --- a/hapi-fhir-validation/src/test/java/ca/uhn/fhir/fhirpath/FluentPathTest.java +++ /dev/null @@ -1,81 +0,0 @@ -package ca.uhn.fhir.fhirpath; - -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.i18n.Msg; -import ca.uhn.fhir.util.TestUtil; -import org.hl7.fhir.dstu3.model.HumanName; -import org.hl7.fhir.dstu3.model.Patient; -import org.hl7.fhir.dstu3.model.StringType; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.Test; - -import java.util.List; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.containsString; -import static org.junit.jupiter.api.Assertions.assertEquals; - -public class FluentPathTest { - - @Test - public void testEvaluateNormal() { - Patient p = new Patient(); - p.addName().setFamily("N1F1").addGiven("N1G1").addGiven("N1G2"); - p.addName().setFamily("N2F1").addGiven("N2G1").addGiven("N2G2"); - - IFhirPath fp = ourCtx.newFluentPath(); - List names = fp.evaluate(p, "Patient.name", HumanName.class); - assertEquals(2, names.size()); - 
assertEquals("N1F1", names.get(0).getFamily()); - assertEquals("N1G1 N1G2", names.get(0).getGivenAsSingleString()); - assertEquals("N2F1", names.get(1).getFamily()); - assertEquals("N2G1 N2G2", names.get(1).getGivenAsSingleString()); - } - - @Test - public void testEvaluateUnknownPath() { - Patient p = new Patient(); - p.addName().setFamily("N1F1").addGiven("N1G1").addGiven("N1G2"); - p.addName().setFamily("N2F1").addGiven("N2G1").addGiven("N2G2"); - - IFhirPath fp = ourCtx.newFluentPath(); - List names = fp.evaluate(p, "Patient.nameFOO", HumanName.class); - assertEquals(0, names.size()); - } - - @Test - public void testEvaluateInvalidPath() { - Patient p = new Patient(); - p.addName().setFamily("N1F1").addGiven("N1G1").addGiven("N1G2"); - p.addName().setFamily("N2F1").addGiven("N2G1").addGiven("N2G2"); - - IFhirPath fp = ourCtx.newFluentPath(); - try { - fp.evaluate(p, "Patient....nameFOO", HumanName.class); - } catch (FhirPathExecutionException e) { - assertThat(e.getMessage(), containsString("termination at unexpected token")); - } - } - - @Test - public void testEvaluateWrongType() { - Patient p = new Patient(); - p.addName().setFamily("N1F1").addGiven("N1G1").addGiven("N1G2"); - p.addName().setFamily("N2F1").addGiven("N2G1").addGiven("N2G2"); - - IFhirPath fp = ourCtx.newFluentPath(); - try { - fp.evaluate(p, "Patient.name", StringType.class); - } catch (FhirPathExecutionException e) { - assertEquals(Msg.code(608) + "FluentPath expression \"Patient.name\" returned unexpected type HumanName - Expected org.hl7.fhir.dstu3.model.StringType", e.getMessage()); - } - } - - private static FhirContext ourCtx = FhirContext.forDstu3(); - - @AfterAll - public static void afterClassClearContext() { - TestUtil.randomizeLocaleAndTimezone(); - } - -} diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml index 778ae12765b..89ccf1a8ca4 100644 --- a/hapi-tinder-plugin/pom.xml +++ b/hapi-tinder-plugin/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 
6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../pom.xml diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml index fedb4568d40..1ce45e0e1db 100644 --- a/hapi-tinder-test/pom.xml +++ b/hapi-tinder-test/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index 1ecf90d0963..51fbef87e5a 100644 --- a/pom.xml +++ b/pom.xml @@ -9,7 +9,7 @@ ca.uhn.hapi.fhir hapi-fhir pom - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT HAPI-FHIR An open-source implementation of the FHIR specification in Java. @@ -112,6 +112,7 @@ hapi-fhir-jpaserver-base hapi-fhir-sql-migrate hapi-fhir-jpaserver-ips + hapi-fhir-jpaserver-hfql hapi-fhir-jpaserver-mdm hapi-fhir-testpage-overlay hapi-fhir-jpaserver-uhnfhirtest @@ -2095,6 +2096,11 @@ thymeleaf-spring5 ${thymeleaf-version} + + org.webjars.npm + ace-builds + 1.22.0 + org.webjars.npm bootstrap diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml index 1f1e7d1a2e6..39eded50466 100644 --- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml +++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml index 4f19ea3d22c..be59219629e 100644 --- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml index 5869b56638a..3c9403e8e9e 100644 --- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.7.14-SNAPSHOT + 6.7.15-SNAPSHOT ../../pom.xml