Initial HFQL SQL Implementation (#5083)

* FQL tests

* Working so far

* Tests working

* Tests passing

* Work on parser

* Tests passing

* Cleanup

* Working

* Work on parser

* Work

* Working

* Work on tests

* Work on driver

* Work

* Ongoing work

* Rename SEARCH to WHERE

* Work on fhirpath

* Test fixes

* Work on statements

* Build fixes

* Spotless application

* Fix CI

* Build fixes

* Clean up changelogs

* Remove redundant gitignore

* Add docs

* Add docs

* Code cleanup

* Build cleanup

* Fix compile error

* Test fixes

* Test fix

* Test fix

* Work on integration

* Work on design

* Work on UI

* Update hapi-fhir-jpaserver-hfql/src/main/java/ca/uhn/fhir/jpa/fql/jdbc/JdbcDriver.java

Co-authored-by: michaelabuckley <michaelabuckley@gmail.com>

* Address review comments

* Review comments

* HFQL work

* Cleanup

* Cleanup

* License headers

* Test fixes

* HAPI version bump

* License header update

* Changelog cleanup

* Fixes

* Test fix

* Fix spotless issues

---------

Co-authored-by: michaelabuckley <michaelabuckley@gmail.com>
James Agnew 2023-07-21 18:51:23 -04:00 committed by GitHub
parent f68f3fbb0f
commit 6d745b4f2d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
164 changed files with 11005 additions and 286 deletions

View File

@ -59,6 +59,8 @@ stages:
module: hapi-fhir-jpaserver-elastic-test-utilities
- name: hapi_fhir_jpaserver_ips
module: hapi-fhir-jpaserver-ips
- name: hapi_fhir_jpaserver_hfql
module: hapi-fhir-jpaserver-hfql
- name: hapi_fhir_jpaserver_mdm
module: hapi-fhir-jpaserver-mdm
- name: hapi_fhir_jpaserver_model

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -29,8 +29,8 @@ public class FhirPathExecutionException extends InternalErrorException {
private static final long serialVersionUID = 1L;
public FhirPathExecutionException(Throwable theCause) {
super(theCause);
public FhirPathExecutionException(String theMessage, Throwable theCause) {
super(theMessage, theCause);
}
public FhirPathExecutionException(String theMessage) {

View File

@ -37,20 +37,55 @@ public interface IFhirPath {
*/
<T extends IBase> List<T> evaluate(IBase theInput, String thePath, Class<T> theReturnType);
/**
* Apply the given FhirPath expression against the given input and return
* all results in a list. Unlike the {@link #evaluate(IBase, String, Class)} method which
* uses a String containing a FHIRPath expression, this method takes a parsed FHIRPath
* expression returned by the {@link #parse(String)} method. This has the advantage
* of avoiding re-parsing expressions if the same expression will be evaluated
* repeatedly.
*
* @param theInput The input object (generally a resource or datatype)
* @param theParsedExpression A parsed FHIRPath expression returned by {@link #parse(String)}
* @param theReturnType The type to return (in order to avoid casting)
* @since 6.8.0
*/
<T extends IBase> List<T> evaluate(IBase theInput, IParsedExpression theParsedExpression, Class<T> theReturnType);
/**
* Apply the given FhirPath expression against the given input and return
* the first match (if any)
*
* @param theInput The input object (generally a resource or datatype)
* @param thePath The fluent path expression
* @param theInput The input object (generally a resource or datatype)
* @param thePath The fluent path expression
* @param theReturnType The type to return (in order to avoid casting)
*/
<T extends IBase> Optional<T> evaluateFirst(IBase theInput, String thePath, Class<T> theReturnType);
/**
* Parses the expression and throws an exception if it can not parse correctly
* Apply the given FhirPath expression against the given input and return
* the first match (if any). Unlike the {@link #evaluateFirst(IBase, String, Class)} method which
* uses a String containing a FHIRPath expression, this method takes a parsed FHIRPath
* expression returned by the {@link #parse(String)} method. This has the advantage
* of avoiding re-parsing expressions if the same expression will be evaluated
* repeatedly.
*
* @param theInput The input object (generally a resource or datatype)
* @param theParsedExpression A parsed FHIRPath expression returned by {@link #parse(String)}
* @param theReturnType The type to return (in order to avoid casting)
* @since 6.8.0
*/
void parse(String theExpression) throws Exception;
<T extends IBase> Optional<T> evaluateFirst(
IBase theInput, IParsedExpression theParsedExpression, Class<T> theReturnType);
/**
* Parses the expression and throws an exception if it can not parse correctly.
* Note that the return type from this method is intended to be a "black box". It can
* be passed back into the {@link #evaluate(IBase, IParsedExpression, Class)}
* method on any FHIRPath instance that comes from the same {@link ca.uhn.fhir.context.FhirContext}
* instance. Any other use will produce unspecified results.
*/
IParsedExpression parse(String theExpression) throws Exception;
/**
* This method can be used optionally to supply an evaluation context for the
@ -61,4 +96,23 @@ public interface IFhirPath {
* @since 6.4.0
*/
void setEvaluationContext(@Nonnull IFhirPathEvaluationContext theEvaluationContext);
/**
* This interface is a marker interface representing a parsed FHIRPath expression.
* Instances of this class will be returned by {@link #parse(String)} and can be
* passed to {@link #evaluate(IBase, IParsedExpression, Class)} and
* {@link #evaluateFirst(IBase, IParsedExpression, Class)}. Using a pre-parsed
* FHIRPath expression can perform much faster in some situations where an
* identical expression will be evaluated many times against different targets,
* since the parsing step doesn't need to be repeated.
* <p>
* Instances of this interface should be treated as a "black box". There are no
* methods that can be used to manipulate parsed FHIRPath expressions.
* </p>
*
* @since 6.8.0
*/
interface IParsedExpression {
// no methods
}
}
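
A usage sketch of the new overloads above (not part of the commit): parse the expression once, then evaluate it repeatedly. The R4 context, resource list, and FHIRPath expression are illustrative assumptions.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.fhirpath.IFhirPath;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.StringType;

import java.util.List;
import java.util.Optional;

public class ParsedExpressionExample {

    public static void printFamilyNames(List<Patient> thePatients) throws Exception {
        IFhirPath fhirPath = FhirContext.forR4().newFhirPath();

        // Parse once. IParsedExpression is an opaque handle that is only valid
        // for FHIRPath instances created from the same FhirContext.
        IFhirPath.IParsedExpression expression = fhirPath.parse("Patient.name.family");

        // Reuse the parsed expression across many resources, skipping re-parsing
        for (Patient patient : thePatients) {
            Optional<StringType> family = fhirPath.evaluateFirst(patient, expression, StringType.class);
            family.ifPresent(f -> System.out.println(f.getValue()));
        }
    }
}
```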

View File

@ -40,6 +40,9 @@ public interface IOperationUntypedWithInput<T> extends IClientExecutable<IOperat
* resource, use this method to specify that resource type. This is useful for certain
* operations (e.g. <code>Patient/NNN/$everything</code>) which return a bundle instead of
* a Parameters resource.
* <p>
* Passing in {@literal Binary.class} allows any arbitrary response to be returned. Any payload at
* all will be read as raw bytes into a Binary resource.
*/
<R extends IBaseResource> IOperationUntypedWithInput<R> returnResourceType(Class<R> theReturnType);
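
A hedged sketch of the behavior described above: requesting Binary.class as the return type lets the client capture an arbitrary payload as raw bytes. The operation name and server URL below are hypothetical.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Binary;
import org.hl7.fhir.r4.model.Parameters;

public class BinaryResponseExample {

    public static byte[] fetchRawOperationOutput() {
        IGenericClient client = FhirContext.forR4()
                .newRestfulGenericClient("http://localhost:8080/fhir"); // placeholder URL

        // Asking for Binary.class causes any response payload at all to be
        // read as raw bytes into a Binary resource.
        Binary response = client.operation()
                .onServer()
                .named("$example-report") // hypothetical operation
                .withParameters(new Parameters())
                .returnResourceType(Binary.class)
                .execute();

        return response.getContent();
    }
}
```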

View File

@ -1,3 +1,22 @@
/*-
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.util;
public final class FhirTypeUtil {

View File

@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
@ -12,7 +12,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -84,6 +84,11 @@
<artifactId>hapi-fhir-jpaserver-ips</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>hapi-fhir-jpaserver-hfql</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>hapi-fhir-jpaserver-mdm</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -136,6 +136,7 @@ import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBinary;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseConformance;
import org.hl7.fhir.instance.model.api.IBaseDatatype;
@ -1422,7 +1423,11 @@ public class GenericClient extends BaseClient implements IGenericClient {
if (myReturnResourceType != null) {
ResourceResponseHandler handler;
handler = new ResourceResponseHandler(myReturnResourceType);
if (IBaseBinary.class.isAssignableFrom(myReturnResourceType)) {
handler = new ResourceOrBinaryResponseHandler();
} else {
handler = new ResourceResponseHandler(myReturnResourceType);
}
Object retVal = invoke(null, handler, invocation);
return retVal;
}

View File

@ -19,9 +19,6 @@
*/
package ca.uhn.fhir.rest.client.impl;
import ca.uhn.fhir.rest.client.api.IBasicClient;
import ca.uhn.fhir.rest.client.api.IClientInterceptor;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpRequestInterceptor;
@ -35,9 +32,10 @@ import org.apache.http.protocol.HttpContext;
import java.io.IOException;
/**
* @deprecated Use {@link ca.uhn.fhir.rest.client.interceptor.BasicAuthInterceptor} instead. Note that BasicAuthInterceptor class is a HAPI client interceptor instead of being a commons-httpclient interceptor, so you register it to your client instance once it's created using {@link IGenericClient#registerInterceptor(IClientInterceptor)} or {@link IBasicClient#registerInterceptor(IClientInterceptor)} instead
* Apache HTTPClient interceptor which adds basic auth
*
* @see ca.uhn.fhir.rest.client.interceptor.BasicAuthInterceptor A HAPI FHIR interceptor that is generally easier to use
*/
@Deprecated
public class HttpBasicAuthInterceptor implements HttpRequestInterceptor {
private String myUsername;
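
Since this class is now deprecated in favour of the HAPI-level interceptor, a minimal sketch of the recommended replacement follows; the server URL and credentials are placeholders.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.interceptor.BasicAuthInterceptor;

public class BasicAuthExample {

    public static IGenericClient newAuthenticatedClient() {
        IGenericClient client = FhirContext.forR4()
                .newRestfulGenericClient("http://localhost:8080/fhir"); // placeholder URL

        // Registered once on the client, the interceptor adds an HTTP Basic
        // Authorization header to every outgoing request.
        client.registerInterceptor(new BasicAuthInterceptor("someUser", "somePassword"));
        return client;
    }
}
```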

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -233,6 +233,13 @@
<!-- Don't include in standard distribution -->
<scope>provided</scope>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-hfql</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
@ -277,6 +284,7 @@
<descriptor>${project.basedir}/src/assembly/hapi-fhir-standard-distribution.xml</descriptor>
<descriptor>${project.basedir}/src/assembly/hapi-fhir-android-distribution.xml</descriptor>
<descriptor>${project.basedir}/src/assembly/hapi-fhir-cli.xml</descriptor>
<descriptor>${project.basedir}/src/assembly/hapi-fhir-hfql-jdbc-driver.xml</descriptor>
</descriptors>
<finalName>hapi-fhir-${project.version}</finalName>
</configuration>

View File

@ -6,7 +6,6 @@
<formats>
<format>zip</format>
<format>tar.bz2</format>
</formats>
<includeBaseDirectory>false</includeBaseDirectory>

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
<id>jpaserver-example</id>
<id>hfql-jdbc-driver</id>
<formats>
<format>zip</format>
@ -11,11 +11,10 @@
<fileSets>
<fileSet>
<directory>${project.basedir}/../hapi-fhir-jpaserver-example</directory>
<directory>${project.basedir}/../hapi-fhir-jpaserver-hfql/target/</directory>
<outputDirectory>/</outputDirectory>
<includes>
<include>pom.xml</include>
<include>src/**</include>
<include>hapi-fhir-hfql-jdbc-*.jar</include>
</includes>
</fileSet>
</fileSets>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -0,0 +1,7 @@
---
type: add
issue: 5083
title: "The IFhirPath evaluator interface now has an additional overload of the
`evaluate` method which takes in a parsed expression returned by the
`parse` method. This can be used to improve performance in cases where the same
expression is being used repeatedly."

View File

@ -0,0 +1,5 @@
---
type: add
issue: 5083
title: "A new SQL-like evaluator called the HAPI FHIR Query Language (HFQL)
has been added."

View File

@ -0,0 +1,5 @@
---
type: add
issue: 5115
title: "A new experimental SQL-like query syntax called HFQL (HAPI FHIR Query Language)
has been added."

View File

@ -0,0 +1,5 @@
---
type: add
issue: 5115
title: "The Generic/Fluent client can now handle arbitrary (ie. non-FHIR) responses from $operation
invocation by specifying a response resource type of Binary."

View File

@ -85,6 +85,9 @@ page.server_jpa_partitioning.enabling_in_hapi_fhir=Enabling Partitioning in HAPI
section.server_jpa_batch.title=JPA Server: Batch Processing
page.server_jpa_batch.introduction=Batch Introduction
section.hfql.title=JPA Server: HFQL (SQL) Driver
page.hfql.hfql=HFQL Module
section.clinical_reasoning.title=Clinical Reasoning
page.clinical_reasoning.overview=Clinical Reasoning Overview
page.clinical_reasoning.cql=CQL

View File

@ -0,0 +1,56 @@
# HFQL Driver: SQL For FHIR Repositories
<div class="helpInfoCalloutBox">
This is an <a href="https://smilecdr.com/docs/introduction/maturity_model.html">experimental module</a>. Use with caution. This API is likely to change.
</div>
The HAPI FHIR JPA server can optionally be configured to support SQL-like queries against the FHIR repository. This module is intended for analytical queries. It is not optimized for performance, and may take a long time to produce results.
# Syntax
This module uses a proprietary flavour of SQL that is specific to HAPI FHIR. It is similar to the [Firely Query Language](https://simplifier.net/docs/fql), although the two differ in a number of ways.
A simple example query is shown below:
```sql
SELECT
name.family as family,
name.given as given,
birthDate,
identifier.where(system='http://hl7.org/fhir/sid/us-ssn').value as SSN
FROM
Patient
WHERE
active = true
```
See [SQL Syntax](https://smilecdr.com/docs/hfql/sql_syntax.html) for details on this syntax.
# JDBC Driver
When HFQL is enabled on the server, a JDBC-compatible driver is available. This can be used to query the FHIR server directly from a JDBC compliant database browser.
This module has been tested with [DBeaver](https://dbeaver.io/), a free and excellent database browser. Other JDBC compatible database tools may also work, but because not all JDBC API methods have been implemented in the driver, tools that rely on unimplemented methods may fail. Please let us know in the [Google Group](https://groups.google.com/g/hapi-fhir) if you encounter issues or have suggestions.
The JDBC driver can be downloaded from the [GitHub Releases site](https://github.com/hapifhir/hapi-fhir/releases). It can also be built from sources by executing the following command:
```bash
mvn -DskipTests -P DIST clean install -pl :hapi-fhir-jpaserver-hfql -am
```
To use this driver with your database tool, import the JDBC JAR and apply the following settings:
<table class="table table-striped table-condensed">
<thead><tr><th>Setting</th><th>Description</th></tr></thead>
<tbody>
<tr>
<td>Class Name</td><td>ca.uhn.fhir.jpa.fql.jdbc.JdbcDriver</td>
</tr><tr>
<td>URL</td><td>jdbc:hapifhirql:[server_base_url]</td>
</tr><tr>
<td>Username</td><td rowspan="2">If provided, the username/password will be added as an HTTP Basic Authorization header on all requests to the server.</td>
</tr><tr>
<td>Password</td>
</tr>
</tbody>
</table>
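
The driver can also be used programmatically from plain JDBC code. The following is a minimal sketch; the server base URL, credentials, and query are placeholders for your own environment:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HfqlJdbcExample {

    public static void main(String[] args) throws Exception {
        // Class name and URL prefix as documented in the table above
        Class.forName("ca.uhn.fhir.jpa.fql.jdbc.JdbcDriver");
        String url = "jdbc:hapifhirql:http://localhost:8080/fhir"; // placeholder base URL

        try (Connection connection = DriverManager.getConnection(url, "someUser", "somePassword");
                Statement statement = connection.createStatement();
                ResultSet results = statement.executeQuery(
                        "SELECT name.family, birthDate FROM Patient WHERE active = true")) {
            while (results.next()) {
                System.out.println(results.getString(1) + " " + results.getString(2));
            }
        }
    }
}
```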

View File

@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -182,6 +182,11 @@
<artifactId>hapi-fhir-jpaserver-model</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-hfql</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-ips</artifactId>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -2559,7 +2559,7 @@ public class QueryStack {
mySearchParamRegistry.getActiveSearchParam(theResourceName, fullName);
if (fullChainParam != null) {
List<IQueryParameterType> swappedParamTypes = nextAnd.stream()
.map(t -> toParameterType(fullChainParam, null, t.getValueAsQueryToken(myFhirContext)))
.map(t -> newParameterInstance(fullChainParam, null, t.getValueAsQueryToken(myFhirContext)))
.collect(Collectors.toList());
List<List<IQueryParameterType>> params = List.of(swappedParamTypes);
Condition predicate = createPredicateSearchParameter(
@ -2660,15 +2660,15 @@ public class QueryStack {
mySqlBuilder.addPredicate(predicate);
}
public IQueryParameterType toParameterType(
public IQueryParameterType newParameterInstance(
RuntimeSearchParam theParam, String theQualifier, String theValueAsQueryToken) {
IQueryParameterType qp = toParameterType(theParam);
IQueryParameterType qp = newParameterInstance(theParam);
qp.setValueAsQueryToken(myFhirContext, theParam.getName(), theQualifier, theValueAsQueryToken);
return qp;
}
private IQueryParameterType toParameterType(RuntimeSearchParam theParam) {
private IQueryParameterType newParameterInstance(RuntimeSearchParam theParam) {
IQueryParameterType qp;
switch (theParam.getParamType()) {
@ -2694,8 +2694,8 @@ public class QueryStack {
throw new InternalErrorException(Msg.code(1224) + "Parameter " + theParam.getName() + " has "
+ compositeOf.size() + " composite parts. Don't know how to handle this.");
}
IQueryParameterType leftParam = toParameterType(compositeOf.get(0));
IQueryParameterType rightParam = toParameterType(compositeOf.get(1));
IQueryParameterType leftParam = newParameterInstance(compositeOf.get(0));
IQueryParameterType rightParam = newParameterInstance(compositeOf.get(1));
qp = new CompositeParam<>(leftParam, rightParam);
break;
case URI:
@ -2876,7 +2876,7 @@ public class QueryStack {
if (RestSearchParameterTypeEnum.REFERENCE.equals(nextSearchParam.getParamType())) {
orValues.add(new ReferenceParam(nextQualifier, "", theTargetValue));
} else {
IQueryParameterType qp = toParameterType(nextSearchParam);
IQueryParameterType qp = newParameterInstance(nextSearchParam);
qp.setValueAsQueryToken(myFhirContext, nextSearchParam.getName(), null, theTargetValue);
orValues.add(qp);
}

View File

@ -692,7 +692,7 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
type.setValueAsQueryToken(getFhirContext(), theParamName, qualifier, resourceId);
chainValue = type;
} else {
chainValue = myQueryStack.toParameterType(param, qualifier, resourceId);
chainValue = myQueryStack.newParameterInstance(param, qualifier, resourceId);
}
return chainValue;

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -0,0 +1,68 @@
<project>
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<artifactId>hapi-fhir-jpaserver-hfql</artifactId>
<packaging>jar</packaging>
<name>HAPI FHIR JPA Server - HFQL Driver</name>
<dependencies>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-base</artifactId>
<version>${project.version}</version>
</dependency>
<!-- Provided -->
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<scope>provided</scope>
</dependency>
<!-- Test -->
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-test-utilities</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<profiles>
<profile>
<id>DIST</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
<attach>false</attach>
<finalName>hapi-fhir-hfql-jdbc-${project.version}</finalName>
<appendAssemblyId>false</appendAssemblyId>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

View File

@ -0,0 +1,46 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.executor;
import java.math.BigDecimal;
import java.sql.Date;
import java.sql.Types;
public enum HfqlDataTypeEnum {
STRING(Types.VARCHAR, String.class),
JSON(Types.VARCHAR, String.class),
INTEGER(Types.INTEGER, Integer.class),
BOOLEAN(Types.BOOLEAN, Boolean.class),
DATE(Types.DATE, Date.class),
TIMESTAMP(Types.TIMESTAMP_WITH_TIMEZONE, Date.class),
LONGINT(Types.BIGINT, Long.class),
TIME(Types.TIME, String.class),
DECIMAL(Types.DECIMAL, BigDecimal.class);
private final int mySqlType;
HfqlDataTypeEnum(int theSqlType, Class<?> theJavaType) {
mySqlType = theSqlType;
}
public int getSqlType() {
return mySqlType;
}
}

View File

@ -0,0 +1,921 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.executor;
import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.fhirpath.FhirPathExecutionException;
import ca.uhn.fhir.fhirpath.IFhirPath;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.fql.parser.HfqlFhirPathParser;
import ca.uhn.fhir.jpa.fql.parser.HfqlStatement;
import ca.uhn.fhir.jpa.fql.parser.HfqlStatementParser;
import ca.uhn.fhir.jpa.fql.util.HfqlConstants;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
import ca.uhn.fhir.model.api.IQueryParameterAnd;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.DateOrListParam;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.QualifierDetails;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.server.IPagingProvider;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.collect.Lists;
import org.apache.commons.collections4.ListUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.DateTimeType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.math.BigDecimal;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
import static org.apache.commons.lang3.StringUtils.isBlank;
/**
* This class could be considered the main entrypoint into the HFQL executor.
* It receives a raw HFQL query, parses it, executes it, and returns a result set.
* Conceptually the {@link #executeInitialSearch(String, Integer, RequestDetails)}
* method can be thought of like the JPA DAO <code>search</code> method, and the
* {@link #executeContinuation(HfqlStatement, String, int, Integer, RequestDetails)}
* can be thought of like loading a subsequent page of the search results.
* <p>
* Both of these methods return an {@link IHfqlExecutionResult}, which is essentially
* a result row iterator.
*/
public class HfqlExecutor implements IHfqlExecutor {
public static final int BATCH_SIZE = 1000;
public static final String[] EMPTY_STRING_ARRAY = new String[0];
public static final Set<GroupByKey> NULL_GROUP_BY_KEY = Set.of(new GroupByKey(List.of()));
private static final Logger ourLog = LoggerFactory.getLogger(HfqlExecutor.class);
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private FhirContext myFhirContext;
@Autowired
private IPagingProvider myPagingProvider;
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
/**
* Constructor
*/
public HfqlExecutor() {
super();
}
@Override
public IHfqlExecutionResult executeInitialSearch(
String theStatement, Integer theLimit, RequestDetails theRequestDetails) {
try {
return doExecuteInitialSearch(theStatement, theLimit, theRequestDetails);
} catch (Exception e) {
ourLog.warn("Failed to execute HFQL statement", e);
return StaticHfqlExecutionResult.withError(defaultIfNull(e.getMessage(), "(no message)"));
}
}
@Nonnull
private IHfqlExecutionResult doExecuteInitialSearch(
String theStatement, Integer theLimit, RequestDetails theRequestDetails) {
HfqlStatementParser parser = new HfqlStatementParser(myFhirContext, theStatement);
HfqlStatement statement = parser.parse();
IFhirResourceDao dao = myDaoRegistry.getResourceDao(statement.getFromResourceName());
if (dao == null) {
throw new DataFormatException(
Msg.code(2406) + "Unknown or unsupported FROM type: " + statement.getFromResourceName());
}
massageSelectColumnNames(statement);
populateSelectColumnDataTypes(statement);
SearchParameterMap map = new SearchParameterMap();
addHfqlWhereClausesToSearchParameterMap(statement, map);
Integer limit = theLimit;
if (statement.hasOrderClause()) {
/*
* If we're ordering search results, we need to load all available data in order
* to sort it because we handle ordering in application code currently. A good
* future optimization would be to handle ordering in the database when possible,
* but we can't always do that because the query can specify an order on any
* arbitrary FHIRPath expression.
*/
limit = null;
} else if (statement.getLimit() != null) {
limit = limit == null ? statement.getLimit() : Math.min(limit, statement.getLimit());
}
HfqlExecutionContext executionContext = new HfqlExecutionContext(myFhirContext.newFhirPath());
IBundleProvider outcome = dao.search(map, theRequestDetails);
Predicate<IBaseResource> whereClausePredicate = newWhereClausePredicate(executionContext, statement);
IHfqlExecutionResult executionResult;
if (statement.hasCountClauses()) {
executionResult = executeCountClause(statement, executionContext, outcome, whereClausePredicate);
} else {
executionResult = new LocalSearchHfqlExecutionResult(
statement, outcome, executionContext, limit, 0, whereClausePredicate, myFhirContext);
}
if (statement.hasOrderClause()) {
executionResult = createOrderedResult(statement, executionResult);
}
return executionResult;
}
private void addHfqlWhereClausesToSearchParameterMap(HfqlStatement statement, SearchParameterMap map) {
List<HfqlStatement.WhereClause> searchClauses = statement.getWhereClauses();
for (HfqlStatement.WhereClause nextSearchClause : searchClauses) {
if (nextSearchClause.getOperator() != HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH) {
continue;
}
if (!"id".equals(nextSearchClause.getLeft())) {
throw new InvalidRequestException(
Msg.code(2412) + "search_match function can only be applied to the id element");
}
if (nextSearchClause.getRight().size() != 2) {
throw new InvalidRequestException(Msg.code(2413) + "search_match function requires 2 arguments");
}
List<String> argumentStrings = nextSearchClause.getRightAsStrings();
String paramName = argumentStrings.get(0);
String paramValueUnsplit = argumentStrings.get(1);
List<String> paramValues = QualifiedParamList.splitQueryStringByCommasIgnoreEscape(null, paramValueUnsplit);
if (paramName.equals(Constants.PARAM_ID)) {
map.add(Constants.PARAM_ID, new TokenOrListParam(null, paramValues.toArray(EMPTY_STRING_ARRAY)));
} else if (paramName.equals(Constants.PARAM_LASTUPDATED)) {
DateOrListParam param = new DateOrListParam();
for (String nextValue : paramValues) {
param.addOr(new DateParam(nextValue));
}
map.add(Constants.PARAM_LASTUPDATED, param);
} else if (paramName.startsWith("_")) {
throw newInvalidRequestExceptionUnknownSearchParameter(paramName);
} else {
QualifierDetails qualifiedParamName = QualifierDetails.extractQualifiersFromParameterName(paramName);
RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(
statement.getFromResourceName(), qualifiedParamName.getParamName());
if (searchParam == null) {
throw newInvalidRequestExceptionUnknownSearchParameter(paramName);
}
QualifiedParamList values = new QualifiedParamList();
values.setQualifier(qualifiedParamName.getWholeQualifier());
values.addAll(paramValues);
IQueryParameterAnd<?> andParam = JpaParamUtil.parseQueryParams(
myFhirContext, searchParam.getParamType(), paramName, List.of(values));
map.add(qualifiedParamName.getParamName(), andParam);
}
}
}
private IHfqlExecutionResult createOrderedResult(
HfqlStatement theStatement, IHfqlExecutionResult theExecutionResult) {
List<IHfqlExecutionResult.Row> rows = new ArrayList<>();
while (theExecutionResult.hasNext()) {
IHfqlExecutionResult.Row nextRow = theExecutionResult.getNextRow();
rows.add(nextRow);
Validate.isTrue(
rows.size() <= HfqlConstants.ORDER_AND_GROUP_LIMIT,
"Can not ORDER BY result sets over %d results",
HfqlConstants.ORDER_AND_GROUP_LIMIT);
}
List<Integer> orderColumnIndexes = theStatement.getOrderByClauses().stream()
.map(t -> {
int index = theStatement.findSelectClauseIndex(t.getClause());
if (index == -1) {
throw new InvalidRequestException(
Msg.code(2407) + "Invalid/unknown ORDER BY clause: " + t.getClause());
}
return index;
})
.collect(Collectors.toList());
List<Boolean> orderAscending = theStatement.getOrderByClauses().stream()
.map(HfqlStatement.OrderByClause::isAscending)
.collect(Collectors.toList());
Comparator<IHfqlExecutionResult.Row> comparator = null;
for (int i = 0; i < orderColumnIndexes.size(); i++) {
int columnIndex = orderColumnIndexes.get(i);
HfqlDataTypeEnum dataType = theExecutionResult
.getStatement()
.getSelectClauses()
.get(columnIndex)
.getDataType();
Comparator<IHfqlExecutionResult.Row> nextComparator = newRowComparator(columnIndex, dataType);
if (!orderAscending.get(i)) {
nextComparator = nextComparator.reversed();
}
if (comparator == null) {
comparator = nextComparator;
} else {
comparator = comparator.thenComparing(nextComparator);
}
}
rows.sort(comparator);
for (int i = 0; i < rows.size(); i++) {
rows.set(i, rows.get(i).toRowOffset(i));
}
List<List<Object>> rowData =
rows.stream().map(IHfqlExecutionResult.Row::getRowValues).collect(Collectors.toList());
return new StaticHfqlExecutionResult(null, theStatement, rowData);
}
@Override
public IHfqlExecutionResult executeContinuation(
HfqlStatement theStatement,
String theSearchId,
int theStartingOffset,
Integer theLimit,
RequestDetails theRequestDetails) {
IBundleProvider resultList = myPagingProvider.retrieveResultList(theRequestDetails, theSearchId);
HfqlExecutionContext executionContext = new HfqlExecutionContext(myFhirContext.newFhirPath());
Predicate<IBaseResource> whereClausePredicate = newWhereClausePredicate(executionContext, theStatement);
return new LocalSearchHfqlExecutionResult(
theStatement,
resultList,
executionContext,
theLimit,
theStartingOffset,
whereClausePredicate,
myFhirContext);
}
private IHfqlExecutionResult executeCountClause(
HfqlStatement theStatement,
HfqlExecutionContext theExecutionContext,
IBundleProvider theOutcome,
Predicate<IBaseResource> theWhereClausePredicate) {
Set<String> selectClauses = theStatement.getSelectClauses().stream()
.filter(t -> t.getOperator() == HfqlStatement.SelectClauseOperator.SELECT)
.map(HfqlStatement.SelectClause::getClause)
.collect(Collectors.toSet());
for (String next : selectClauses) {
if (!theStatement.getGroupByClauses().contains(next)) {
throw newInvalidRequestCountWithSelectOnNonGroupedClause(next);
}
}
Set<String> countClauses = theStatement.getSelectClauses().stream()
.filter(t -> t.getOperator() == HfqlStatement.SelectClauseOperator.COUNT)
.map(HfqlStatement.SelectClause::getClause)
.collect(Collectors.toSet());
Map<GroupByKey, Map<String, AtomicInteger>> keyCounter = new HashMap<>();
int offset = 0;
int batchSize = 1000;
while (theOutcome.size() == null || theOutcome.sizeOrThrowNpe() > offset) {
List<IBaseResource> resources = theOutcome.getResources(offset, offset + batchSize);
for (IBaseResource nextResource : resources) {
if (nextResource != null && theWhereClausePredicate.test(nextResource)) {
List<List<String>> groupByClauseValues = new ArrayList<>();
for (String nextClause : theStatement.getGroupByClauses()) {
List<String> nextClauseValues =
theExecutionContext.evaluate(nextResource, nextClause, IPrimitiveType.class).stream()
.map(IPrimitiveType::getValueAsString)
.collect(Collectors.toList());
if (nextClauseValues.isEmpty()) {
nextClauseValues.add(null);
}
groupByClauseValues.add(nextClauseValues);
}
Set<GroupByKey> allKeys = createCrossProduct(groupByClauseValues);
for (GroupByKey nextKey : allKeys) {
Map<String, AtomicInteger> counts = keyCounter.computeIfAbsent(nextKey, t -> new HashMap<>());
if (keyCounter.size() >= HfqlConstants.ORDER_AND_GROUP_LIMIT) {
throw new InvalidRequestException(Msg.code(2402) + "Can not group on > "
+ HfqlConstants.ORDER_AND_GROUP_LIMIT + " terms");
}
for (String nextCountClause : countClauses) {
if (!nextCountClause.equals("*")) {
if (theExecutionContext
.evaluateFirst(nextResource, nextCountClause, IBase.class)
.isEmpty()) {
continue;
}
}
counts.computeIfAbsent(nextCountClause, k -> new AtomicInteger())
.incrementAndGet();
}
}
}
}
offset += batchSize;
}
List<List<Object>> rows = new ArrayList<>();
for (Map.Entry<GroupByKey, Map<String, AtomicInteger>> nextEntry : keyCounter.entrySet()) {
List<Object> nextRow = new ArrayList<>();
for (HfqlStatement.SelectClause nextSelectClause : theStatement.getSelectClauses()) {
if (nextSelectClause.getOperator() == HfqlStatement.SelectClauseOperator.SELECT) {
int groupByIndex = theStatement.getGroupByClauses().indexOf(nextSelectClause.getClause());
nextRow.add(nextEntry.getKey().getNames().get(groupByIndex));
} else {
AtomicInteger counter = nextEntry.getValue().get(nextSelectClause.getClause());
if (counter != null) {
nextRow.add(counter.intValue());
} else {
nextRow.add(0);
}
}
}
rows.add(nextRow);
}
return new StaticHfqlExecutionResult(null, theStatement, rows);
}
private Set<GroupByKey> createCrossProduct(List<List<String>> theGroupByClauseValues) {
if (theGroupByClauseValues.isEmpty()) {
return NULL_GROUP_BY_KEY;
}
Set<GroupByKey> retVal = new HashSet<>();
List<String> valueHolder = new ArrayList<>();
createCrossProductRecurse(theGroupByClauseValues, retVal, valueHolder);
return retVal;
}
private void createCrossProductRecurse(
List<List<String>> theGroupByClauseValues,
Set<GroupByKey> theGroupsSetToPopulate,
List<String> theCurrentValueChain) {
List<String> nextOptions = theGroupByClauseValues.get(0);
for (String nextOption : nextOptions) {
theCurrentValueChain.add(nextOption);
if (theGroupByClauseValues.size() == 1) {
theGroupsSetToPopulate.add(new GroupByKey(theCurrentValueChain));
} else {
createCrossProductRecurse(
theGroupByClauseValues.subList(1, theGroupByClauseValues.size()),
theGroupsSetToPopulate,
theCurrentValueChain);
}
theCurrentValueChain.remove(theCurrentValueChain.size() - 1);
}
}
private Predicate<IBaseResource> newWhereClausePredicate(
HfqlExecutionContext theExecutionContext, HfqlStatement theStatement) {
return r -> {
for (HfqlStatement.WhereClause nextWhereClause : theStatement.getWhereClauses()) {
boolean haveMatch;
try {
switch (nextWhereClause.getOperator()) {
case SEARCH_MATCH:
// These are handled earlier so we don't need to test here
haveMatch = true;
break;
case UNARY_BOOLEAN: {
haveMatch = evaluateWhereClauseUnaryBoolean(theExecutionContext, r, nextWhereClause);
break;
}
case EQUALS:
case IN:
default: {
haveMatch = evaluateWhereClauseBinaryEqualsOrIn(theExecutionContext, r, nextWhereClause);
break;
}
}
} catch (FhirPathExecutionException e) {
throw new InvalidRequestException(Msg.code(2403) + "Unable to evaluate FHIRPath expression \""
+ nextWhereClause.getLeft() + "\". Error: " + e.getMessage());
}
if (!haveMatch) {
return false;
}
}
return true;
};
}
private void populateSelectColumnDataTypes(HfqlStatement statement) {
HfqlFhirPathParser fhirPathParser = new HfqlFhirPathParser(myFhirContext);
for (HfqlStatement.SelectClause nextSelectClause : statement.getSelectClauses()) {
HfqlDataTypeEnum nextType;
if (nextSelectClause.getOperator() == HfqlStatement.SelectClauseOperator.COUNT) {
nextType = HfqlDataTypeEnum.INTEGER;
} else {
String clause = nextSelectClause.getClause();
if (clause.equals("meta.versionId")) {
// FHIR's versionId field is a string, but in HAPI FHIR JPA it can only ever be a long so we'll
// use that type
nextType = HfqlDataTypeEnum.LONGINT;
} else {
nextType = fhirPathParser.determineDatatypeForPath(statement.getFromResourceName(), clause);
nextType = defaultIfNull(nextType, HfqlDataTypeEnum.STRING);
}
}
nextSelectClause.setDataType(nextType);
}
}
/**
* This method replaces a SELECT-ed column named "*" with a collection of
* available column names for the given resource type.
*/
private void massageSelectColumnNames(HfqlStatement theHfqlStatement) {
List<HfqlStatement.SelectClause> selectClauses = theHfqlStatement.getSelectClauses();
for (int i = 0; i < selectClauses.size(); i++) {
HfqlStatement.SelectClause selectClause = selectClauses.get(i);
if (selectClause.getOperator() == HfqlStatement.SelectClauseOperator.SELECT) {
if ("*".equals(selectClause.getClause())) {
resolveAndReplaceStarInSelectClauseAtIndex(theHfqlStatement, selectClauses, i);
}
}
}
}
private void resolveAndReplaceStarInSelectClauseAtIndex(
HfqlStatement theHfqlStatement, List<HfqlStatement.SelectClause> theSelectClauses, int theIndex) {
String resourceName = theHfqlStatement.getFromResourceName();
TreeSet<String> allLeafPaths = findLeafPaths(resourceName);
theSelectClauses.remove(theIndex);
List<String> reversedLeafPaths = new ArrayList<>(allLeafPaths);
reversedLeafPaths = Lists.reverse(reversedLeafPaths);
reversedLeafPaths.forEach(t -> theSelectClauses.add(theIndex, new HfqlStatement.SelectClause(t).setAlias(t)));
}
@Nonnull
private TreeSet<String> findLeafPaths(String theResourceName) {
TreeSet<String> allLeafPaths = new TreeSet<>();
RuntimeResourceDefinition def = myFhirContext.getResourceDefinition(theResourceName);
for (BaseRuntimeChildDefinition nextChild : def.getChildren()) {
for (String next : nextChild.getValidChildNames()) {
if (!"extension".equals(next) && !"modifierExtension".equals(next)) {
allLeafPaths.add(next);
}
}
}
return allLeafPaths;
}
/**
* Columns to return, per {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])}
* <OL>
* <LI><B>TABLE_CAT</B> String {@code =>} table catalog (may be {@code null})
* <LI><B>TABLE_SCHEM</B> String {@code =>} table schema (may be {@code null})
* <LI><B>TABLE_NAME</B> String {@code =>} table name
* <LI><B>TABLE_TYPE</B> String {@code =>} table type. Typical types are "TABLE",
* "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY",
* "LOCAL TEMPORARY", "ALIAS", "SYNONYM".
* <LI><B>REMARKS</B> String {@code =>} explanatory comment on the table (may be {@code null})
* <LI><B>TYPE_CAT</B> String {@code =>} the types catalog (may be {@code null})
* <LI><B>TYPE_SCHEM</B> String {@code =>} the types schema (may be {@code null})
* <LI><B>TYPE_NAME</B> String {@code =>} type name (may be {@code null})
* <LI><B>SELF_REFERENCING_COL_NAME</B> String {@code =>} name of the designated
* "identifier" column of a typed table (may be {@code null})
* <LI><B>REF_GENERATION</B> String {@code =>} specifies how values in
* SELF_REFERENCING_COL_NAME are created. Values are
* "SYSTEM", "USER", "DERIVED". (may be {@code null})
* </OL>
*/
@Override
public IHfqlExecutionResult introspectTables() {
List<String> columns = List.of(
"TABLE_CAT",
"TABLE_SCHEM",
"TABLE_NAME",
"TABLE_TYPE",
"REMARKS",
"TYPE_CAT",
"TYPE_SCHEM",
"TYPE_NAME",
"SELF_REFERENCING_COL_NAME",
"REF_GENERATION");
List<HfqlDataTypeEnum> dataTypes = List.of(
HfqlDataTypeEnum.STRING,
HfqlDataTypeEnum.STRING,
HfqlDataTypeEnum.STRING,
HfqlDataTypeEnum.STRING,
HfqlDataTypeEnum.STRING,
HfqlDataTypeEnum.STRING,
HfqlDataTypeEnum.STRING,
HfqlDataTypeEnum.STRING,
HfqlDataTypeEnum.STRING,
HfqlDataTypeEnum.STRING);
List<List<Object>> rows = new ArrayList<>();
TreeSet<String> resourceTypes = new TreeSet<>(myFhirContext.getResourceTypes());
for (String next : resourceTypes) {
rows.add(Lists.newArrayList(null, null, next, "TABLE", null, null, null, null, null, null));
}
return new StaticHfqlExecutionResult(null, columns, dataTypes, rows);
}
/**
* Columns from {@link java.sql.DatabaseMetaData#getColumns(String, String, String, String)}
*
* <OL>
* <LI><B>TABLE_CAT</B> String {@code =>} table catalog (may be {@code null})
* <LI><B>TABLE_SCHEM</B> String {@code =>} table schema (may be {@code null})
* <LI><B>TABLE_NAME</B> String {@code =>} table name
* <LI><B>COLUMN_NAME</B> String {@code =>} column name
* <LI><B>DATA_TYPE</B> int {@code =>} SQL type from java.sql.Types
* <LI><B>TYPE_NAME</B> String {@code =>} Data source dependent type name,
* for a UDT the type name is fully qualified
* <LI><B>COLUMN_SIZE</B> int {@code =>} column size.
* <LI><B>BUFFER_LENGTH</B> is not used.
* <LI><B>DECIMAL_DIGITS</B> int {@code =>} the number of fractional digits. Null is returned for data types where
* DECIMAL_DIGITS is not applicable.
* <LI><B>NUM_PREC_RADIX</B> int {@code =>} Radix (typically either 10 or 2)
* <LI><B>NULLABLE</B> int {@code =>} is NULL allowed.
* <UL>
* <LI> columnNoNulls - might not allow {@code NULL} values
* <LI> columnNullable - definitely allows {@code NULL} values
* <LI> columnNullableUnknown - nullability unknown
* </UL>
* <LI><B>REMARKS</B> String {@code =>} comment describing column (may be {@code null})
* <LI><B>COLUMN_DEF</B> String {@code =>} default value for the column, which should be interpreted as a string when the value is enclosed in single quotes (may be {@code null})
* <LI><B>SQL_DATA_TYPE</B> int {@code =>} unused
* <LI><B>SQL_DATETIME_SUB</B> int {@code =>} unused
* <LI><B>CHAR_OCTET_LENGTH</B> int {@code =>} for char types the
* maximum number of bytes in the column
* <LI><B>ORDINAL_POSITION</B> int {@code =>} index of column in table
* (starting at 1)
* <LI><B>IS_NULLABLE</B> String {@code =>} ISO rules are used to determine the nullability for a column.
* <UL>
* <LI> YES --- if the column can include NULLs
* <LI> NO --- if the column cannot include NULLs
* <LI> empty string --- if the nullability for the
* column is unknown
* </UL>
* <LI><B>SCOPE_CATALOG</B> String {@code =>} catalog of table that is the scope
* of a reference attribute ({@code null} if DATA_TYPE isn't REF)
* <LI><B>SCOPE_SCHEMA</B> String {@code =>} schema of table that is the scope
* of a reference attribute ({@code null} if the DATA_TYPE isn't REF)
* <LI><B>SCOPE_TABLE</B> String {@code =>} table name that is the scope
* of a reference attribute ({@code null} if the DATA_TYPE isn't REF)
* <LI><B>SOURCE_DATA_TYPE</B> short {@code =>} source type of a distinct type or user-generated
* Ref type, SQL type from java.sql.Types ({@code null} if DATA_TYPE
* isn't DISTINCT or user-generated REF)
* <LI><B>IS_AUTOINCREMENT</B> String {@code =>} Indicates whether this column is auto incremented
* <UL>
* <LI> YES --- if the column is auto incremented
* <LI> NO --- if the column is not auto incremented
* <LI> empty string --- if it cannot be determined whether the column is auto incremented
* </UL>
* <LI><B>IS_GENERATEDCOLUMN</B> String {@code =>} Indicates whether this is a generated column
* <UL>
* <LI> YES --- if this is a generated column
* <LI> NO --- if this is not a generated column
* <LI> empty string --- if it cannot be determined whether this is a generated column
* </UL>
* </OL>
*
* @param theTableName The table name or null
* @param theColumnName The column name or null
*/
@Override
public IHfqlExecutionResult introspectColumns(@Nullable String theTableName, @Nullable String theColumnName) {
List<String> columns = List.of(
"TABLE_CAT",
"TABLE_SCHEM",
"TABLE_NAME",
"COLUMN_NAME",
"DATA_TYPE",
"TYPE_NAME",
"COLUMN_SIZE",
"BUFFER_LENGTH",
"DECIMAL_DIGITS",
"NUM_PREC_RADIX",
"NULLABLE",
"REMARKS",
"COLUMN_DEF",
"SQL_DATA_TYPE",
"SQL_DATETIME_SUB",
"CHAR_OCTET_LENGTH",
"ORDINAL_POSITION",
"IS_NULLABLE",
"SCOPE_CATALOG",
"SCOPE_SCHEMA",
"SCOPE_TABLE",
"SOURCE_DATA_TYPE",
"IS_AUTOINCREMENT",
"IS_GENERATEDCOLUMN");
List<HfqlDataTypeEnum> dataTypes = List.of(
HfqlDataTypeEnum.STRING, // TABLE_CAT
HfqlDataTypeEnum.STRING, // TABLE_SCHEM
HfqlDataTypeEnum.STRING, // TABLE_NAME
HfqlDataTypeEnum.STRING, // COLUMN_NAME
HfqlDataTypeEnum.INTEGER, // DATA_TYPE
HfqlDataTypeEnum.STRING, // TYPE_NAME
HfqlDataTypeEnum.INTEGER, // COLUMN_SIZE
HfqlDataTypeEnum.STRING, // BUFFER_LENGTH
HfqlDataTypeEnum.INTEGER, // DECIMAL_DIGITS
HfqlDataTypeEnum.INTEGER, // NUM_PREC_RADIX
HfqlDataTypeEnum.INTEGER, // NULLABLE
HfqlDataTypeEnum.STRING, // REMARKS
HfqlDataTypeEnum.STRING, // COLUMN_DEF
HfqlDataTypeEnum.INTEGER, // SQL_DATA_TYPE
HfqlDataTypeEnum.INTEGER, // SQL_DATETIME_SUB
HfqlDataTypeEnum.INTEGER, // CHAR_OCTET_LENGTH
HfqlDataTypeEnum.INTEGER, // ORDINAL_POSITION
HfqlDataTypeEnum.STRING, // IS_NULLABLE
HfqlDataTypeEnum.STRING, // SCOPE_CATALOG
HfqlDataTypeEnum.STRING, // SCOPE_SCHEMA
HfqlDataTypeEnum.STRING, // SCOPE_TABLE
HfqlDataTypeEnum.STRING, // SOURCE_DATA_TYPE
HfqlDataTypeEnum.STRING, // IS_AUTOINCREMENT
HfqlDataTypeEnum.STRING // IS_GENERATEDCOLUMN
);
List<List<Object>> rows = new ArrayList<>();
for (String nextResourceType : new TreeSet<>(myFhirContext.getResourceTypes())) {
if (isBlank(theTableName) || theTableName.equals(nextResourceType)) {
TreeSet<String> leafPaths = findLeafPaths(nextResourceType);
int position = 1;
for (String nextLeafPath : leafPaths) {
if (isBlank(theColumnName) || theColumnName.equals(nextLeafPath)) {
rows.add(Lists.newArrayList(
null,
null,
nextResourceType,
nextLeafPath,
Types.VARCHAR,
"string",
-1,
null,
null,
null,
1, // nullable
null,
null,
null,
null,
null,
position++,
"YES",
null,
null,
null,
null,
"NO",
"NO"));
}
}
}
}
return new StaticHfqlExecutionResult(null, columns, dataTypes, rows);
}
@SuppressWarnings("unchecked")
static Comparator<IHfqlExecutionResult.Row> newRowComparator(int columnIndex, HfqlDataTypeEnum dataType) {
return Comparator.comparing(new RowValueExtractor(columnIndex, dataType));
}
private static boolean evaluateWhereClauseUnaryBoolean(
HfqlExecutionContext theExecutionContext, IBaseResource r, HfqlStatement.WhereClause theNextWhereClause) {
boolean haveMatch = false;
assert theNextWhereClause.getRight().isEmpty();
List<IPrimitiveType> values =
theExecutionContext.evaluate(r, theNextWhereClause.getLeft(), IPrimitiveType.class);
for (IPrimitiveType<?> nextValue : values) {
if (Boolean.TRUE.equals(nextValue.getValue())) {
haveMatch = true;
break;
}
}
return haveMatch;
}
private static boolean evaluateWhereClauseBinaryEqualsOrIn(
HfqlExecutionContext theExecutionContext, IBaseResource r, HfqlStatement.WhereClause theNextWhereClause) {
boolean haveMatch = false;
List<IBase> values = theExecutionContext.evaluate(r, theNextWhereClause.getLeft(), IBase.class);
for (IBase nextValue : values) {
for (String nextRight : theNextWhereClause.getRight()) {
String expression = "$this = " + nextRight;
IPrimitiveType outcome = theExecutionContext
.evaluateFirst(nextValue, expression, IPrimitiveType.class)
.orElseThrow(IllegalStateException::new);
Boolean value = (Boolean) outcome.getValue();
haveMatch = value;
if (haveMatch) {
break;
}
}
if (haveMatch) {
break;
}
}
return haveMatch;
}
@Nonnull
private static InvalidRequestException newInvalidRequestExceptionUnknownSearchParameter(String theParamName) {
return new InvalidRequestException(
"Unknown/unsupported search parameter: " + UrlUtil.sanitizeUrlPart(theParamName));
}
@Nonnull
private static InvalidRequestException newInvalidRequestCountWithSelectOnNonGroupedClause(String theClause) {
return new InvalidRequestException(
"Unable to select on non-grouped column in a count expression: " + UrlUtil.sanitizeUrlPart(theClause));
}
private static class RowValueExtractor implements Function<IHfqlExecutionResult.Row, Comparable> {
private final int myColumnIndex;
private final HfqlDataTypeEnum myDataType;
public RowValueExtractor(int theColumnIndex, HfqlDataTypeEnum theDataType) {
myColumnIndex = theColumnIndex;
myDataType = theDataType;
}
@Override
public Comparable apply(IHfqlExecutionResult.Row theRow) {
Comparable retVal = (Comparable) theRow.getRowValues().get(myColumnIndex);
switch (myDataType) {
case STRING:
case TIME:
case JSON:
retVal = defaultIfNull(retVal, "");
break;
case LONGINT:
case INTEGER:
if (retVal instanceof Number) {
return retVal;
} else if (retVal == null) {
retVal = Long.MIN_VALUE;
} else {
retVal = Long.parseLong((String) retVal);
}
break;
case BOOLEAN:
if (retVal == null) {
retVal = Boolean.FALSE;
} else {
retVal = Boolean.parseBoolean((String) retVal);
}
break;
case DATE:
case TIMESTAMP:
if (retVal != null) {
retVal = new DateTimeType((String) retVal).getValue();
}
if (retVal == null) {
retVal = new Date(Long.MIN_VALUE);
}
break;
case DECIMAL:
if (retVal == null) {
retVal = BigDecimal.valueOf(Long.MIN_VALUE);
} else {
retVal = new BigDecimal((String) retVal);
}
break;
}
return retVal;
}
}
private static class GroupByKey {
private final int myHashCode;
private List<String> myNames;
/**
* @param theNames A copy of the list will be stored
*/
public GroupByKey(List<String> theNames) {
myNames = new ArrayList<>(theNames);
HashCodeBuilder hashCodeBuilder = new HashCodeBuilder();
myNames.forEach(hashCodeBuilder::append);
myHashCode = hashCodeBuilder.toHashCode();
}
@Override
public boolean equals(Object theO) {
boolean retVal = false;
if (theO instanceof GroupByKey) {
List<String> otherNames = ((GroupByKey) theO).myNames;
retVal = ListUtils.isEqualList(myNames, otherNames);
}
return retVal;
}
@Override
public int hashCode() {
return myHashCode;
}
public List<String> getNames() {
return myNames;
}
}
public static class HfqlExecutionContext {
private final Map<String, IFhirPath.IParsedExpression> myFhirPathExpressionMap = new HashMap<>();
private final IFhirPath myFhirPath;
public HfqlExecutionContext(IFhirPath theFhirPath) {
myFhirPath = theFhirPath;
}
public <T extends IBase> List<T> evaluate(IBase theInput, String thePath, Class<T> theReturnType) {
IFhirPath.IParsedExpression parsedExpression = getParsedExpression(thePath);
return myFhirPath.evaluate(theInput, parsedExpression, theReturnType);
}
<T extends IBase> Optional<T> evaluateFirst(IBase theInput, String thePath, Class<T> theReturnType) {
IFhirPath.IParsedExpression parsedExpression = getParsedExpression(thePath);
return myFhirPath.evaluateFirst(theInput, parsedExpression, theReturnType);
}
private IFhirPath.IParsedExpression getParsedExpression(String thePath) {
IFhirPath.IParsedExpression parsedExpression = myFhirPathExpressionMap.get(thePath);
if (parsedExpression == null) {
try {
parsedExpression = myFhirPath.parse(thePath);
} catch (Exception e) {
throw new InvalidRequestException(Msg.code(2404) + e.getMessage(), e);
}
myFhirPathExpressionMap.put(thePath, parsedExpression);
}
return parsedExpression;
}
}
}


@ -0,0 +1,89 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.executor;
import ca.uhn.fhir.jpa.fql.parser.HfqlStatement;
import java.util.List;
/**
* This interface represents a ResultSet returned by the HFQL query layer in
* {@link IHfqlExecutor}. Think of it as roughly equivalent to the JDBC
* {@link java.sql.ResultSet} except that it's the internal version of that.
* <p>
* There are several implementations of this interface:
* <ul>
* <li>
* {@link LocalSearchHfqlExecutionResult} - Implementation backed by a database search.
* This is used inside the HAPI FHIR server that is handling HFQL queries.
* </li>
* <li>
* {@link StaticHfqlExecutionResult} - Static implementation with fixed results. This is
* usually used to represent errors and failed queries inside the HAPI FHIR server.
* </li>
* <li>
* {@link ca.uhn.fhir.jpa.fql.jdbc.RemoteHfqlExecutionResult} - This is used inside the
* JDBC driver (i.e. remote from the HAPI FHIR server) and holds results that have
* been received over the wire.
* </li>
* </ul>
* </p>
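* <p>
* A minimal consumption sketch (hypothetical caller; {@code result} is assumed
* to have been obtained from an {@link IHfqlExecutor}):
* </p>
* <pre>{@code
* while (result.hasNext()) {
*     IHfqlExecutionResult.Row row = result.getNextRow();
*     List<Object> values = row.getRowValues();
*     // process one row of selected column values here
* }
* result.close();
* }</pre>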
*/
public interface IHfqlExecutionResult {
int ROW_OFFSET_ERROR = -1;
boolean hasNext();
Row getNextRow();
boolean isClosed();
void close();
String getSearchId();
int getLimit();
HfqlStatement getStatement();
class Row {
private final List<Object> myRowValues;
private final int myRowOffset;
public Row(int theRowOffset, List<Object> theRowValues) {
myRowOffset = theRowOffset;
myRowValues = theRowValues;
}
public int getRowOffset() {
return myRowOffset;
}
public List<Object> getRowValues() {
return myRowValues;
}
public Row toRowOffset(int theRowOffset) {
return new Row(theRowOffset, myRowValues);
}
}
}


@ -0,0 +1,70 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.executor;
import ca.uhn.fhir.jpa.fql.parser.HfqlStatement;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import javax.annotation.Nullable;
public interface IHfqlExecutor {
/**
* Execute a FQL query and return the first page of data
*
* @param theStatement The FQL statement to execute
* @param theLimit The maximum number of records to retrieve
* @param theRequestDetails The request details associated with the request
* @return Returns a {@link IHfqlExecutionResult result object}. Note that the returned object is not thread safe.
*/
IHfqlExecutionResult executeInitialSearch(String theStatement, Integer theLimit, RequestDetails theRequestDetails);
/**
* Load a subsequent page of data from a search initiated by a call to {@link #executeInitialSearch(String, Integer, RequestDetails)}.
*
* @param theStatement The parsed statement from the initial search. Available through a call to {@link IHfqlExecutionResult#getStatement()}.
* @param theSearchId The search ID from the initial search. Available through a call to {@link IHfqlExecutionResult#getSearchId()}.
* @param theLimit The maximum number of results to return (across all pages)
* @param theRequestDetails The request details associated with the request
* @param theStartingOffset The row offset count for the first result to return. This should be set to one higher than the last value returned by {@link IHfqlExecutionResult.Row#getRowOffset()}.
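* <p>
* A paging sketch, assuming the caller has drained the initial result and kept a
* reference to it (variable names here are illustrative only):
* </p>
* <pre>{@code
* int lastOffset = -1;
* while (initialResult.hasNext()) {
*     lastOffset = initialResult.getNextRow().getRowOffset();
* }
* IHfqlExecutionResult nextPage = executor.executeContinuation(
*     initialResult.getStatement(), initialResult.getSearchId(),
*     lastOffset + 1, null, requestDetails);
* }</pre>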
*/
IHfqlExecutionResult executeContinuation(
HfqlStatement theStatement,
String theSearchId,
int theStartingOffset,
Integer theLimit,
RequestDetails theRequestDetails);
/**
* Provides a list of "tables", which are actually resource types, in order to
* support the JDBC {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])}
* query.
*/
IHfqlExecutionResult introspectTables();
/**
* Provides a list of "columns", which are actually valid FHIRPath expressions
* that can be selected against a resource.
*
* @param theTableName The table name or null
* @param theColumnName The column name or null
*/
IHfqlExecutionResult introspectColumns(@Nullable String theTableName, @Nullable String theColumnName);
}


@ -0,0 +1,230 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.executor;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.fql.parser.HfqlStatement;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.function.Predicate;
/**
* @see IHfqlExecutionResult for information about the purpose of this class
*/
public class LocalSearchHfqlExecutionResult implements IHfqlExecutionResult {
private static final Logger ourLog = LoggerFactory.getLogger(LocalSearchHfqlExecutionResult.class);
private final IBundleProvider mySearchResult;
private final HfqlExecutor.HfqlExecutionContext myExecutionContext;
private final Integer myLimit;
private final HfqlStatement myStatement;
private final Predicate<IBaseResource> myWhereClausePredicate;
private final IParser myParser;
private int myTotalRowsFetched = 0;
private int myNextSearchResultRow;
private int myNextBatchRow = 0;
private List<IBaseResource> myNextBatch;
private IBaseResource myNextResource;
private boolean myExhausted = false;
private int myNextResourceSearchRow;
private Row myErrorRow;
public LocalSearchHfqlExecutionResult(
HfqlStatement theStatement,
IBundleProvider theSearchResult,
HfqlExecutor.HfqlExecutionContext theExecutionContext,
Integer theLimit,
int theInitialOffset,
Predicate<IBaseResource> theWhereClausePredicate,
FhirContext theFhirContext) {
myStatement = theStatement;
mySearchResult = theSearchResult;
myExecutionContext = theExecutionContext;
myLimit = theLimit;
myNextSearchResultRow = theInitialOffset;
myWhereClausePredicate = theWhereClausePredicate;
myParser = theFhirContext.newJsonParser();
}
@Override
public boolean hasNext() {
fetchNextResource();
return myNextResource != null;
}
private void fetchNextResource() {
if (myNextResource != null) {
return;
}
try {
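// Page through the search results in fixed-size batches until we find a
// resource that passes the WHERE clause predicate, or we run out of data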
while (myNextResource == null && !myExhausted) {
if (myNextBatch == null) {
int from = myNextSearchResultRow;
int to = myNextSearchResultRow + HfqlExecutor.BATCH_SIZE;
myNextBatch = mySearchResult.getResources(from, to);
ourLog.info(
"HFQL fetching resources {}-{} - Total {} fetched, {} retained and limit {}",
from,
to,
myNextSearchResultRow,
myTotalRowsFetched,
myLimit);
myNextBatchRow = 0;
myNextSearchResultRow += HfqlExecutor.BATCH_SIZE;
}
if (myNextBatch.isEmpty()) {
myExhausted = true;
} else if (myNextBatch.size() > myNextBatchRow) {
myNextResource = myNextBatch.get(myNextBatchRow);
myNextResourceSearchRow = (myNextSearchResultRow - HfqlExecutor.BATCH_SIZE) + myNextBatchRow;
myNextBatchRow++;
} else {
myNextBatch = null;
}
if (myNextResource != null && !myWhereClausePredicate.test(myNextResource)) {
myNextResource = null;
}
}
if (myNextResource != null) {
myTotalRowsFetched++;
if (myLimit != null && myTotalRowsFetched >= myLimit) {
myExhausted = true;
}
}
} catch (Exception e) {
createAndStoreErrorRow(e.getMessage());
}
}
@Override
public Row getNextRow() {
fetchNextResource();
if (myErrorRow != null) {
Row errorRow = myErrorRow;
myErrorRow = null;
return errorRow;
}
Validate.isTrue(myNextResource != null, "No more results");
List<Object> values = new ArrayList<>();
for (int columnIndex = 0; columnIndex < myStatement.getSelectClauses().size(); columnIndex++) {
HfqlStatement.SelectClause nextColumn =
myStatement.getSelectClauses().get(columnIndex);
String clause = nextColumn.getClause();
HfqlDataTypeEnum columnDataType = nextColumn.getDataType();
List<IBase> columnValues;
try {
columnValues = myExecutionContext.evaluate(myNextResource, clause, IBase.class);
} catch (Exception e) {
String errorMessage =
"Failed to evaluate FHIRPath expression \"" + clause + "\". Error: " + e.getMessage();
return createAndStoreErrorRow(errorMessage);
}
String value = null;
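// JSON columns are rendered as a JSON array containing every matching value;
// primitive values are quoted and escaped by hand because the parser encodes
// one element at a time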
if (columnDataType == HfqlDataTypeEnum.JSON) {
StringBuilder b = new StringBuilder();
b.append("[");
for (Iterator<IBase> valueIter = columnValues.iterator(); valueIter.hasNext(); ) {
IBase next = valueIter.next();
if (next instanceof IPrimitiveType) {
b.append('"');
String encodedValue = encodeValue(next);
encodedValue = encodedValue.replace("\\", "\\\\").replace("\"", "\\\"");
b.append(encodedValue);
b.append('"');
} else {
b.append(encodeValue(next));
}
if (valueIter.hasNext()) {
b.append(", ");
}
}
b.append("]");
value = b.toString();
} else {
if (!columnValues.isEmpty()) {
IBase firstColumnValue = columnValues.get(0);
value = encodeValue(firstColumnValue);
}
}
values.add(value);
}
myNextResource = null;
return new Row(myNextResourceSearchRow, values);
}
private String encodeValue(IBase firstColumnValue) {
String value = null;
if (firstColumnValue instanceof IIdType) {
value = ((IIdType) firstColumnValue).getIdPart();
} else if (firstColumnValue != null) {
value = myParser.encodeToString(firstColumnValue);
}
return value;
}
private Row createAndStoreErrorRow(String errorMessage) {
myExhausted = true;
myNextResource = null;
myErrorRow = new Row(IHfqlExecutionResult.ROW_OFFSET_ERROR, List.of(errorMessage));
return myErrorRow;
}
@Override
public boolean isClosed() {
return false;
}
@Override
public void close() {
// ignore
}
@Override
public String getSearchId() {
return mySearchResult.getUuid();
}
@Override
public int getLimit() {
return myLimit != null ? myLimit : -1;
}
@Override
public HfqlStatement getStatement() {
return myStatement;
}
}


@ -0,0 +1,124 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.executor;
import ca.uhn.fhir.jpa.fql.parser.HfqlStatement;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import javax.annotation.Nullable;
/**
* @see IHfqlExecutionResult for information about the purpose of this class
*/
public class StaticHfqlExecutionResult implements IHfqlExecutionResult {
private final String mySearchId;
private final Iterator<List<Object>> myRowsIterator;
private int myNextRowOffset;
private final HfqlStatement myStatement;
/**
* Constructor for an empty result
*
* @param theSearchId The search ID associated with this result
*/
public StaticHfqlExecutionResult(@Nullable String theSearchId) {
this(theSearchId, new HfqlStatement(), Collections.emptyList());
}
/**
* Constructor for a result with explicitly provided column names, data types, and rows
*
* @param theSearchId The search ID associated with this result
*/
public StaticHfqlExecutionResult(
@Nullable String theSearchId,
List<String> theColumnNames,
List<HfqlDataTypeEnum> theDataTypes,
List<List<Object>> theRows) {
this(theSearchId, toStatement(theColumnNames, theDataTypes), theRows);
}
private static HfqlStatement toStatement(List<String> theColumnNames, List<HfqlDataTypeEnum> theDataTypes) {
assert theColumnNames.size() == theDataTypes.size();
HfqlStatement retVal = new HfqlStatement();
for (int i = 0; i < theColumnNames.size(); i++) {
retVal.addSelectClause(theColumnNames.get(i))
.setAlias(theColumnNames.get(i))
.setDataType(theDataTypes.get(i));
}
return retVal;
}
/**
* Constructor
*/
public StaticHfqlExecutionResult(
@Nullable String theSearchId, HfqlStatement theStatement, List<List<Object>> theRows) {
mySearchId = theSearchId;
myStatement = theStatement;
myRowsIterator = theRows.iterator();
myNextRowOffset = 0;
}
@Override
public boolean hasNext() {
return myRowsIterator.hasNext();
}
@Override
public Row getNextRow() {
return new Row(myNextRowOffset++, myRowsIterator.next());
}
@Override
public boolean isClosed() {
return false;
}
@Override
public void close() {
// ignore
}
@Override
public String getSearchId() {
return mySearchId;
}
@Override
public int getLimit() {
return 0;
}
@Override
public HfqlStatement getStatement() {
return myStatement;
}
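/**
* Creates a single-row result containing only the given error message. This is
* used to report failed queries, with the row offset set to
* {@link IHfqlExecutionResult#ROW_OFFSET_ERROR}.
*/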
public static IHfqlExecutionResult withError(String theErrorMessage) {
StaticHfqlExecutionResult retVal = new StaticHfqlExecutionResult(
null, List.of("Error"), List.of(HfqlDataTypeEnum.STRING), List.of(List.of(theErrorMessage)));
retVal.myNextRowOffset = IHfqlExecutionResult.ROW_OFFSET_ERROR;
return retVal;
}
}


@ -0,0 +1,83 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.jdbc;
import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult;
import ca.uhn.fhir.rest.client.impl.HttpBasicAuthInterceptor;
import ca.uhn.fhir.util.IoUtil;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.lang3.Validate;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.hl7.fhir.r4.model.Parameters;
import java.sql.SQLException;
import java.util.concurrent.TimeUnit;
import static ca.uhn.fhir.jpa.fql.util.HfqlConstants.DEFAULT_FETCH_SIZE;
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
* This is the HTTP/REST client used by the JDBC driver to talk to the FHIR server.
* We don't use the HAPI FHIR REST client even though we're talking to a HAPI FHIR
* REST server because the operation we're calling returns CSV data instead of
* FHIR data. Instead, we just use the Apache HttpClient directly.
* <p>
* Ideally, in the future we'd like to explore using JDK primitives instead of
* the Apache client or HAPI FHIR in order to reduce the dependencies required
* in the JDBC driver, but that is left as future work.
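* </p>
* <p>
* A construction and usage sketch (the URL, credentials, and fetch size are
* illustrative; the request parameters are assumed to have been built via
* {@code HfqlRestProvider#newQueryRequestParameters}):
* </p>
* <pre>{@code
* HfqlRestClient client = new HfqlRestClient("http://localhost:8000", "admin", "password");
* try {
*     IHfqlExecutionResult result = client.execute(requestParameters, true, 1000);
*     // iterate the result rows here
* } finally {
*     client.close();
* }
* }</pre>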
*/
public class HfqlRestClient {
public static final CSVFormat CSV_FORMAT = CSVFormat.DEFAULT.withRecordSeparator('\n');
private final String myBaseUrl;
private final CloseableHttpClient myClient;
public HfqlRestClient(String theBaseUrl, String theUsername, String thePassword) {
myBaseUrl = theBaseUrl;
PoolingHttpClientConnectionManager connectionManager =
new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS);
connectionManager.setMaxTotal(99);
connectionManager.setDefaultMaxPerRoute(99);
HttpClientBuilder httpClientBuilder = HttpClientBuilder.create()
.setConnectionManager(connectionManager)
.setMaxConnPerRoute(99);
if (isNotBlank(theUsername) && isNotBlank(thePassword)) {
httpClientBuilder.addInterceptorLast(new HttpBasicAuthInterceptor(theUsername, thePassword));
}
myClient = httpClientBuilder.build();
}
public IHfqlExecutionResult execute(
Parameters theRequestParameters, boolean theSupportsContinuations, Integer theFetchSize)
throws SQLException {
Integer fetchSize = theFetchSize;
fetchSize = defaultIfNull(fetchSize, DEFAULT_FETCH_SIZE);
Validate.isTrue(fetchSize > 0, "theFetchSize must be a positive integer, got: %s", fetchSize);
return new RemoteHfqlExecutionResult(
theRequestParameters, myBaseUrl, myClient, fetchSize, theSupportsContinuations);
}
public void close() {
IoUtil.closeQuietly(myClient);
}
}


@ -0,0 +1,353 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.jdbc;
import ca.uhn.fhir.i18n.Msg;
import java.sql.Array;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.NClob;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Struct;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Executor;
import javax.annotation.Nonnull;
class JdbcConnection implements Connection {
private final String myServerUrl;
private boolean myClosed;
private HfqlRestClient myClient;
private String myUsername;
private String myPassword;
public JdbcConnection(String theServerUrl) {
myServerUrl = theServerUrl;
}
@Override
public Statement createStatement() {
return new JdbcStatement(this);
}
@Override
public PreparedStatement prepareStatement(String sql) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public CallableStatement prepareCall(String sql) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String nativeSQL(String sql) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean getAutoCommit() {
return false;
}
@Override
public void setAutoCommit(boolean autoCommit) {
// nothing
}
@Override
public void commit() {
// nothing
}
@Override
public void rollback() {
// nothing
}
@Override
public void close() {
myClosed = true;
}
@Override
public boolean isClosed() {
return myClosed;
}
@Override
public DatabaseMetaData getMetaData() {
return new JdbcDatabaseMetadata(this, getClient());
}
@Override
public boolean isReadOnly() {
return true;
}
@Override
public void setReadOnly(boolean readOnly) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String getCatalog() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public void setCatalog(String catalog) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getTransactionIsolation() {
return Connection.TRANSACTION_READ_COMMITTED;
}
@Override
public void setTransactionIsolation(int level) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public SQLWarning getWarnings() {
return null;
}
@Override
public void clearWarnings() {
// nothing
}
@Override
public Statement createStatement(int resultSetType, int resultSetConcurrency) {
return createStatement();
}
@Override
public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency)
throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public Map<String, Class<?>> getTypeMap() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getHoldability() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public void setHoldability(int holdability) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public Savepoint setSavepoint() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public Savepoint setSavepoint(String name) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public void rollback(Savepoint savepoint) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public void releaseSavepoint(Savepoint savepoint) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) {
return createStatement(resultSetType, resultSetConcurrency);
}
@Override
public PreparedStatement prepareStatement(
String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public CallableStatement prepareCall(
String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public Clob createClob() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public Blob createBlob() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public NClob createNClob() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public SQLXML createSQLXML() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean isValid(int timeout) {
return true;
}
@Override
public void setClientInfo(String name, String value) {
// ignore
}
@Override
public String getClientInfo(String name) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public Properties getClientInfo() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public void setClientInfo(Properties properties) {
// ignore
}
@Override
public Array createArrayOf(String typeName, Object[] elements) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public Struct createStruct(String typeName, Object[] attributes) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String getSchema() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public void setSchema(String schema) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public void abort(Executor executor) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getNetworkTimeout() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public <T> T unwrap(Class<T> theInterface) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean isWrapperFor(Class<?> theInterface) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
public HfqlRestClient getClient() {
if (myClient == null) {
myClient = new HfqlRestClient(myServerUrl, myUsername, myPassword);
}
return myClient;
}
public void setUsername(String theUsername) {
myUsername = theUsername;
}
public void setPassword(String thePassword) {
myPassword = thePassword;
}
@Nonnull
static SQLException newSqlExceptionForUnsupportedOperation() {
return new SQLException(Msg.code(2394) + "This JDBC method is not yet supported by the HFQL JDBC Driver");
}
@Nonnull
static SQLFeatureNotSupportedException newSqlExceptionForFeatureNotSupported() {
return new SQLFeatureNotSupportedException(
Msg.code(2398) + "This JDBC method is not yet supported by the HFQL JDBC Driver");
}
}


@ -0,0 +1,956 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.jdbc;
import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult;
import ca.uhn.fhir.jpa.fql.util.HfqlConstants;
import ca.uhn.fhir.util.VersionUtil;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.Parameters;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.RowIdLifetime;
import java.sql.SQLException;
import static ca.uhn.fhir.jpa.fql.jdbc.JdbcConnection.newSqlExceptionForUnsupportedOperation;
public class JdbcDatabaseMetadata implements DatabaseMetaData {
private final Connection myConnection;
private final HfqlRestClient myRestClient;
public JdbcDatabaseMetadata(Connection theConnection, HfqlRestClient theRestClient) {
myConnection = theConnection;
myRestClient = theRestClient;
}
@Override
public boolean allProceduresAreCallable() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean allTablesAreSelectable() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String getURL() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String getUserName() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean isReadOnly() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean nullsAreSortedHigh() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean nullsAreSortedLow() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean nullsAreSortedAtStart() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean nullsAreSortedAtEnd() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String getDatabaseProductName() throws SQLException {
return "HAPI FHIR";
}
@Override
public String getDatabaseProductVersion() throws SQLException {
return VersionUtil.getVersion();
}
@Override
public String getDriverName() throws SQLException {
return "HAPI FHIR FQL JDBC";
}
@Override
public String getDriverVersion() throws SQLException {
return VersionUtil.getVersion();
}
@Override
public int getDriverMajorVersion() {
return 1;
}
@Override
public int getDriverMinorVersion() {
return 1;
}
@Override
public boolean usesLocalFiles() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean usesLocalFilePerTable() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsMixedCaseIdentifiers() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean storesUpperCaseIdentifiers() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean storesLowerCaseIdentifiers() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean storesMixedCaseIdentifiers() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean storesUpperCaseQuotedIdentifiers() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean storesLowerCaseQuotedIdentifiers() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean storesMixedCaseQuotedIdentifiers() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String getIdentifierQuoteString() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String getSQLKeywords() throws SQLException {
return "";
}
@Override
public String getNumericFunctions() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String getStringFunctions() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String getSystemFunctions() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String getTimeDateFunctions() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String getSearchStringEscape() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String getExtraNameCharacters() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsAlterTableWithAddColumn() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsAlterTableWithDropColumn() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsColumnAliasing() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean nullPlusNonNullIsNull() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsConvert() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsConvert(int fromType, int toType) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsTableCorrelationNames() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsDifferentTableCorrelationNames() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsExpressionsInOrderBy() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsOrderByUnrelated() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsGroupBy() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsGroupByUnrelated() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsGroupByBeyondSelect() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsLikeEscapeClause() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsMultipleResultSets() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsMultipleTransactions() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsNonNullableColumns() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsMinimumSQLGrammar() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsCoreSQLGrammar() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsExtendedSQLGrammar() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsANSI92EntryLevelSQL() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsANSI92IntermediateSQL() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsANSI92FullSQL() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsIntegrityEnhancementFacility() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsOuterJoins() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsFullOuterJoins() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsLimitedOuterJoins() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String getSchemaTerm() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String getProcedureTerm() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String getCatalogTerm() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean isCatalogAtStart() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public String getCatalogSeparator() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsSchemasInDataManipulation() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsSchemasInProcedureCalls() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsSchemasInTableDefinitions() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsSchemasInIndexDefinitions() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsSchemasInPrivilegeDefinitions() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsCatalogsInDataManipulation() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsCatalogsInProcedureCalls() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsCatalogsInTableDefinitions() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsCatalogsInIndexDefinitions() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsCatalogsInPrivilegeDefinitions() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsPositionedDelete() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsPositionedUpdate() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsSelectForUpdate() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsStoredProcedures() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsSubqueriesInComparisons() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsSubqueriesInExists() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsSubqueriesInIns() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsSubqueriesInQuantifieds() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsCorrelatedSubqueries() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsUnion() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsUnionAll() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsOpenCursorsAcrossCommit() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsOpenCursorsAcrossRollback() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsOpenStatementsAcrossCommit() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsOpenStatementsAcrossRollback() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxBinaryLiteralLength() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxCharLiteralLength() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxColumnNameLength() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxColumnsInGroupBy() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxColumnsInIndex() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxColumnsInOrderBy() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxColumnsInSelect() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxColumnsInTable() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxConnections() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxCursorNameLength() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxIndexLength() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxSchemaNameLength() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxProcedureNameLength() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxCatalogNameLength() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxRowSize() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean doesMaxRowSizeIncludeBlobs() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxStatementLength() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxStatements() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxTableNameLength() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxTablesInSelect() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getMaxUserNameLength() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getDefaultTransactionIsolation() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsTransactions() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsTransactionIsolationLevel(int level) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsDataDefinitionAndDataManipulationTransactions() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsDataManipulationTransactionsOnly() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean dataDefinitionCausesTransactionCommit() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean dataDefinitionIgnoredInTransactions() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getProcedures(String catalog, String schemaPattern, String procedureNamePattern)
throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getProcedureColumns(
String catalog, String schemaPattern, String procedureNamePattern, String columnNamePattern)
throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getTables(String catalog, String schemaPattern, String tableNamePattern, String[] types)
throws SQLException {
Parameters input = new Parameters();
input.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_INTROSPECT_TABLES));
IHfqlExecutionResult outcome = myRestClient.execute(input, false, null);
return new JdbcResultSet(outcome);
}
@Override
public ResultSet getSchemas() throws SQLException {
// Empty result set
return new JdbcResultSet();
}
@Override
public ResultSet getCatalogs() throws SQLException {
// Empty result set
return new JdbcResultSet();
}
@Override
public ResultSet getTableTypes() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern)
throws SQLException {
Parameters input = new Parameters();
input.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_INTROSPECT_COLUMNS));
IHfqlExecutionResult outcome = myRestClient.execute(input, false, null);
return new JdbcResultSet(outcome);
}
@Override
public ResultSet getColumnPrivileges(String catalog, String schema, String table, String columnNamePattern)
throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getTablePrivileges(String catalog, String schemaPattern, String tableNamePattern)
throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getBestRowIdentifier(String catalog, String schema, String table, int scope, boolean nullable)
throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getVersionColumns(String catalog, String schema, String table) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getPrimaryKeys(String catalog, String schema, String table) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getImportedKeys(String catalog, String schema, String table) throws SQLException {
return new JdbcResultSet();
}
@Override
public ResultSet getExportedKeys(String catalog, String schema, String table) throws SQLException {
return new JdbcResultSet();
}
@Override
public ResultSet getCrossReference(
String parentCatalog,
String parentSchema,
String parentTable,
String foreignCatalog,
String foreignSchema,
String foreignTable)
throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getTypeInfo() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getIndexInfo(String catalog, String schema, String table, boolean unique, boolean approximate)
throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsResultSetType(int type) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsResultSetConcurrency(int type, int concurrency) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean ownUpdatesAreVisible(int type) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean ownDeletesAreVisible(int type) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean ownInsertsAreVisible(int type) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean othersUpdatesAreVisible(int type) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean othersDeletesAreVisible(int type) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean othersInsertsAreVisible(int type) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean updatesAreDetected(int type) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean deletesAreDetected(int type) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean insertsAreDetected(int type) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsBatchUpdates() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getUDTs(String catalog, String schemaPattern, String typeNamePattern, int[] types)
throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public Connection getConnection() throws SQLException {
return myConnection;
}
@Override
public boolean supportsSavepoints() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsNamedParameters() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsMultipleOpenResults() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsGetGeneratedKeys() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getSuperTypes(String catalog, String schemaPattern, String typeNamePattern) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getSuperTables(String catalog, String schemaPattern, String tableNamePattern) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getAttributes(
String catalog, String schemaPattern, String typeNamePattern, String attributeNamePattern)
throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsResultSetHoldability(int holdability) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getResultSetHoldability() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getDatabaseMajorVersion() throws SQLException {
return Integer.parseInt(VersionUtil.getVersion().split("\\.")[0]);
}
@Override
public int getDatabaseMinorVersion() throws SQLException {
return Integer.parseInt(VersionUtil.getVersion().split("\\.")[1]);
}
@Override
public int getJDBCMajorVersion() throws SQLException {
return Integer.parseInt(VersionUtil.getVersion().split("\\.")[0]);
}
@Override
public int getJDBCMinorVersion() throws SQLException {
return Integer.parseInt(VersionUtil.getVersion().split("\\.")[1]);
}
@Override
public int getSQLStateType() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean locatorsUpdateCopy() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsStatementPooling() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public RowIdLifetime getRowIdLifetime() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getSchemas(String catalog, String schemaPattern) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean supportsStoredFunctionsUsingCallSyntax() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean autoCommitFailureClosesAllResultSets() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getClientInfoProperties() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getFunctions(String catalog, String schemaPattern, String functionNamePattern)
throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getFunctionColumns(
String catalog, String schemaPattern, String functionNamePattern, String columnNamePattern)
throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getPseudoColumns(
String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern)
throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean generatedKeyAlwaysReturned() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public <T> T unwrap(Class<T> theInterface) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean isWrapperFor(Class<?> theInterface) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
}


@ -0,0 +1,117 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.jdbc;
import java.io.PrintStream;
import java.sql.*;
import java.util.Properties;
import java.util.logging.Logger;
/**
* This is the JDBC driver class for the HFQL driver. It is intended to be
* imported into a JDBC-compliant database tool, and implements the basic
* functionality required to introspect the "database" and execute queries.
* <p>
* Connections returned by this driver are only semi-stateful. In a normal
* JDBC driver, each connection represents an open and persistent TCP
* connection to the server with shared state between the client and the
* server, but in this driver we keep most of the state in the client. When
* a query is executed it is translated into a FHIR search (with further
* processing on the search results happening in
* {@link ca.uhn.fhir.jpa.fql.executor.HfqlExecutor}).
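* <p>
* A typical usage sketch (the URL, credentials, and query are illustrative):
* </p>
* <pre>{@code
* Properties props = new Properties();
* props.setProperty("user", "someuser");
* props.setProperty("password", "somepassword");
* try (Connection connection = DriverManager.getConnection(
*         "jdbc:hapifhirql:http://localhost:8000", props)) {
*     Statement statement = connection.createStatement();
*     ResultSet resultSet = statement.executeQuery("SELECT name.family FROM Patient");
* }
* }</pre>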
*/
public class JdbcDriver implements Driver {
private static final JdbcDriver INSTANCE = new JdbcDriver();
public static final String URL_PREFIX = "jdbc:hapifhirql:";
private static boolean ourRegistered;
static {
load();
}
@Override
public Connection connect(String theUrl, Properties theProperties) throws SQLException {
if (!acceptsURL(theUrl)) {
// Per the JDBC contract, return null rather than failing on a URL we don't own
return null;
}
String serverUrl = theUrl.substring(URL_PREFIX.length());
JdbcConnection connection = new JdbcConnection(serverUrl);
connection.setUsername(theProperties.getProperty("user", null));
connection.setPassword(theProperties.getProperty("password", null));
return connection;
}
@Override
public boolean acceptsURL(String theUrl) {
return theUrl.startsWith(URL_PREFIX);
}
@Override
public DriverPropertyInfo[] getPropertyInfo(String theUrl, Properties theInfo) {
return new DriverPropertyInfo[0];
}
@Override
public int getMajorVersion() {
return 1;
}
@Override
public int getMinorVersion() {
return 0;
}
@Override
public boolean jdbcCompliant() {
return false;
}
@Override
public Logger getParentLogger() {
return Logger.getLogger(getClass().getPackageName());
}
public static synchronized Driver load() {
try {
if (!ourRegistered) {
ourRegistered = true;
DriverManager.registerDriver(INSTANCE);
}
} catch (SQLException e) {
logException(e);
}
return INSTANCE;
}
private static void logException(SQLException e) {
PrintStream out = System.out;
e.printStackTrace(out);
}
public static synchronized void unload() {
try {
if (ourRegistered) {
ourRegistered = false;
DriverManager.deregisterDriver(INSTANCE);
}
} catch (SQLException e) {
logException(e);
}
}
}


@ -0,0 +1,276 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.jdbc;
import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult;
import ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider;
import ca.uhn.fhir.jpa.fql.util.HfqlConstants;
import org.hl7.fhir.r4.model.Parameters;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import static ca.uhn.fhir.jpa.fql.jdbc.JdbcConnection.newSqlExceptionForUnsupportedOperation;
class JdbcStatement implements Statement {
private final JdbcConnection myConnection;
private int myMaxRows;
private int myFetchSize = HfqlConstants.DEFAULT_FETCH_SIZE;
private JdbcResultSet myResultSet;
public JdbcStatement(JdbcConnection theConnection) {
myConnection = theConnection;
}
@Override
public ResultSet executeQuery(String sql) throws SQLException {
execute(sql);
return getResultSet();
}
@Override
public int executeUpdate(String sql) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public void close() {
// ignored
}
@Override
public int getMaxFieldSize() {
return 0;
}
@Override
public void setMaxFieldSize(int max) {
// ignored
}
@Override
public int getMaxRows() {
return myMaxRows;
}
@Override
public void setMaxRows(int theMaxRows) {
myMaxRows = theMaxRows;
}
@Override
public void setEscapeProcessing(boolean enable) {
// ignored
}
@Override
public int getQueryTimeout() {
return 0;
}
@Override
public void setQueryTimeout(int seconds) {
// ignored
}
@Override
public void cancel() {
// ignored
}
@Override
public SQLWarning getWarnings() {
return null;
}
@Override
public void clearWarnings() {
// ignored
}
@Override
public void setCursorName(String name) {
// ignored
}
@Override
public boolean execute(String sql) throws SQLException {
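// Translate the HFQL string into an extended-operation request and execute it
// over REST, allowing the server to page results back via continuations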
Integer limit = null;
if (getMaxRows() > 0) {
limit = getMaxRows();
}
int fetchSize = myFetchSize;
Parameters input = HfqlRestProvider.newQueryRequestParameters(sql, limit, fetchSize);
IHfqlExecutionResult result = myConnection.getClient().execute(input, true, getFetchSize());
myResultSet = new JdbcResultSet(result, this);
return true;
}
@Override
public ResultSet getResultSet() {
return myResultSet;
}
@Override
public int getUpdateCount() {
return 0;
}
@Override
public boolean getMoreResults() {
return false;
}
@Override
public int getFetchDirection() {
return ResultSet.FETCH_FORWARD;
}
@Override
public void setFetchDirection(int direction) {
// ignored
}
@Override
public int getFetchSize() {
return myFetchSize;
}
@Override
public void setFetchSize(int theFetchSize) {
myFetchSize = theFetchSize;
}
@Override
public int getResultSetConcurrency() {
return ResultSet.CONCUR_READ_ONLY;
}
@Override
public int getResultSetType() {
return ResultSet.TYPE_FORWARD_ONLY;
}
@Override
public void addBatch(String sql) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public void clearBatch() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int[] executeBatch() {
return new int[0];
}
@Override
public Connection getConnection() {
return myConnection;
}
@Override
public boolean getMoreResults(int current) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public ResultSet getGeneratedKeys() throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int executeUpdate(String sql, String[] columnNames) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean execute(String sql, int[] columnIndexes) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public boolean execute(String sql, String[] columnNames) throws SQLException {
throw newSqlExceptionForUnsupportedOperation();
}
@Override
public int getResultSetHoldability() {
return ResultSet.CLOSE_CURSORS_AT_COMMIT;
}
@Override
public boolean isClosed() {
return false;
}
@Override
public boolean isPoolable() {
return false;
}
@Override
public void setPoolable(boolean thePoolable) {
// ignored
}
@Override
public void closeOnCompletion() {
// ignored
}
@Override
public boolean isCloseOnCompletion() {
return false;
}
@Override
public <T> T unwrap(Class<T> theInterface) {
return null;
}
@Override
public boolean isWrapperFor(Class<?> theInterface) {
return false;
}
}
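
As a usage sketch for the statement implementation above: setMaxRows() is forwarded as the query's limit parameter (only when positive) and setFetchSize() sets the page size used for continuation fetches. The Connection here is assumed to come from the HFQL driver:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

class JdbcStatementUsageSketch {
	static void printFamilyNames(Connection connection) throws SQLException {
		try (Statement statement = connection.createStatement()) {
			statement.setMaxRows(500); // forwarded as the "limit" request parameter (ignored when <= 0)
			statement.setFetchSize(100); // page size for server-side continuation fetches
			try (ResultSet resultSet = statement.executeQuery("SELECT name.family FROM Patient")) {
				// The cursor is forward-only and read-only, per getResultSetType()/getResultSetConcurrency()
				while (resultSet.next()) {
					System.out.println(resultSet.getString(1));
				}
			}
		}
	}
}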


@@ -0,0 +1,302 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.jdbc;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult;
import ca.uhn.fhir.jpa.fql.parser.HfqlStatement;
import ca.uhn.fhir.jpa.fql.util.HfqlConstants;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.client.apache.ResourceEntity;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.IoUtil;
import ca.uhn.fhir.util.JsonUtil;
import ca.uhn.fhir.util.ValidateUtil;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.lang3.Validate;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.Binary;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.DateType;
import org.hl7.fhir.r4.model.DecimalType;
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.servlet.http.HttpServletResponse;
import static ca.uhn.fhir.jpa.fql.util.HfqlConstants.PROTOCOL_VERSION;
import static org.apache.commons.lang3.StringUtils.defaultIfBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
* This implementation of {@link IHfqlExecutionResult} is intended to be used within
 * a remote client (ie a JDBC driver). It invokes
* the {@link ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider#executeFql(IPrimitiveType, IPrimitiveType, IPrimitiveType, IPrimitiveType, IPrimitiveType, IPrimitiveType, IPrimitiveType, IPrimitiveType, IPrimitiveType, RequestDetails, HttpServletResponse)}
 * operation on a FHIR server, then parses the response and returns the results.
*
* @see IHfqlExecutionResult for more information about the purpose of this class
*/
public class RemoteHfqlExecutionResult implements IHfqlExecutionResult {
private final boolean mySupportsContinuations;
private final String myBaseUrl;
private final CloseableHttpClient myClient;
private final int myFetchSize;
private String mySearchId;
private int myLimit;
private InputStreamReader myReader;
private Iterator<CSVRecord> myIterator;
private int myCurrentFetchCount;
private CloseableHttpResponse myRequest;
private int myLastRowNumber;
private boolean myExhausted;
private HfqlStatement myStatement;
public RemoteHfqlExecutionResult(
Parameters theRequestParameters,
String theBaseUrl,
CloseableHttpClient theClient,
int theFetchSize,
boolean theSupportsContinuations)
throws SQLException {
myBaseUrl = theBaseUrl;
myClient = theClient;
myFetchSize = theFetchSize;
mySupportsContinuations = theSupportsContinuations;
HttpPost post = new HttpPost(myBaseUrl + "/" + HfqlConstants.HFQL_EXECUTE);
post.setEntity(new ResourceEntity(FhirContext.forR4Cached(), theRequestParameters));
try {
myRequest = myClient.execute(post);
validateResponse();
myReader = new InputStreamReader(myRequest.getEntity().getContent(), StandardCharsets.UTF_8);
CSVParser csvParser = new CSVParser(myReader, HfqlRestClient.CSV_FORMAT);
myIterator = csvParser.iterator();
readHeaderRows(true);
} catch (IOException e) {
throw new SQLException(Msg.code(2400) + e.getMessage(), e);
}
}
public RemoteHfqlExecutionResult(Parameters theRequestParameters, IGenericClient theClient) throws IOException {
myBaseUrl = null;
myClient = null;
myFetchSize = 100;
mySupportsContinuations = false;
Binary response = theClient
.operation()
.onServer()
.named(HfqlConstants.HFQL_EXECUTE)
.withParameters(theRequestParameters)
.returnResourceType(Binary.class)
.execute();
String contentType = defaultIfBlank(response.getContentType(), "");
if (contentType.contains(";")) {
contentType = contentType.substring(0, contentType.indexOf(';'));
}
contentType = contentType.trim();
Validate.isTrue(Constants.CT_TEXT_CSV.equals(contentType), "Unexpected content-type: %s", contentType);
myReader = new InputStreamReader(new ByteArrayInputStream(response.getContent()), StandardCharsets.UTF_8);
CSVParser csvParser = new CSVParser(myReader, HfqlRestClient.CSV_FORMAT);
myIterator = csvParser.iterator();
readHeaderRows(true);
}
private void validateResponse() {
Validate.isTrue(
myRequest.getStatusLine().getStatusCode() == 200,
"Server returned wrong status: %d",
myRequest.getStatusLine().getStatusCode());
}
private void readHeaderRows(boolean theFirstPage) {
// Protocol version
CSVRecord protocolVersionRow = myIterator.next();
String protocolVersion = protocolVersionRow.get(0);
ValidateUtil.isTrueOrThrowInvalidRequest(
PROTOCOL_VERSION.equals(protocolVersion),
"Wrong protocol version, expected %s but got %s",
PROTOCOL_VERSION,
protocolVersion);
// Search ID, Limit, Parsed Statement
CSVRecord searchIdRow = myIterator.next();
mySearchId = searchIdRow.get(0);
myLimit = Integer.parseInt(searchIdRow.get(1));
String statementJsonString = searchIdRow.get(2);
if (theFirstPage && isNotBlank(statementJsonString)) {
myStatement = JsonUtil.deserialize(statementJsonString, HfqlStatement.class);
}
myCurrentFetchCount = 0;
}
@Override
public boolean hasNext() {
if (myExhausted) {
return false;
}
boolean hasNext = myIterator.hasNext();
if (!hasNext && myCurrentFetchCount < myFetchSize) {
myExhausted = true;
close();
} else if (!hasNext) {
close();
if (mySupportsContinuations) {
hasNext = executeContinuationSearch();
}
}
return hasNext;
}
@Override
public Row getNextRow() {
Validate.isTrue(!myExhausted, "Search is exhausted. This is a bug.");
List<Object> columnValues = new ArrayList<>();
boolean first = true;
CSVRecord nextRecord = myIterator.next();
myCurrentFetchCount++;
for (String next : nextRecord) {
if (first) {
first = false;
myLastRowNumber = Integer.parseInt(next);
continue;
}
columnValues.add(next);
}
for (int i = 0; i < columnValues.size(); i++) {
String existingValue = (String) columnValues.get(i);
if (isNotBlank(existingValue)) {
Object newValue = null;
switch (myStatement.getSelectClauses().get(i).getDataType()) {
case STRING:
case JSON:
// No action
break;
case TIME:
// No action (we represent times as strings internally)
break;
case INTEGER:
newValue = Integer.parseInt(existingValue);
break;
case BOOLEAN:
newValue = Boolean.parseBoolean(existingValue);
break;
case DATE:
DateType dateType = new DateType();
dateType.setValueAsString(existingValue);
newValue = dateType.getValue();
break;
case TIMESTAMP:
DateTimeType dateTimeType = new DateTimeType();
dateTimeType.setValueAsString(existingValue);
newValue = dateTimeType.getValue();
break;
case LONGINT:
newValue = Long.parseLong(existingValue);
break;
case DECIMAL:
newValue = new DecimalType(existingValue).getValue();
break;
}
if (newValue != null) {
columnValues.set(i, newValue);
}
} else {
columnValues.set(i, null);
}
}
return new Row(myLastRowNumber, columnValues);
}
private boolean executeContinuationSearch() {
boolean hasNext;
HttpPost post = new HttpPost(myBaseUrl + "/" + HfqlConstants.HFQL_EXECUTE);
Parameters input = new Parameters();
input.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_SEARCH_CONTINUATION));
input.addParameter(HfqlConstants.PARAM_CONTINUATION, new StringType(mySearchId));
input.addParameter(HfqlConstants.PARAM_OFFSET, new IntegerType(myLastRowNumber + 1));
input.addParameter(HfqlConstants.PARAM_LIMIT, new IntegerType(myLimit));
input.addParameter(HfqlConstants.PARAM_FETCH_SIZE, new IntegerType(myFetchSize));
input.addParameter(HfqlConstants.PARAM_STATEMENT, new StringType(JsonUtil.serialize(myStatement, false)));
post.setEntity(new ResourceEntity(FhirContext.forR4Cached(), input));
try {
myRequest = myClient.execute(post);
validateResponse();
myReader = new InputStreamReader(myRequest.getEntity().getContent(), StandardCharsets.UTF_8);
CSVParser csvParser = new CSVParser(myReader, HfqlRestClient.CSV_FORMAT);
myIterator = csvParser.iterator();
readHeaderRows(false);
} catch (IOException e) {
throw new InternalErrorException(Msg.code(2399) + e.getMessage(), e);
}
hasNext = myIterator.hasNext();
return hasNext;
}
@Override
public boolean isClosed() {
return myRequest == null;
}
@Override
public void close() {
IoUtil.closeQuietly(myReader);
IoUtil.closeQuietly(myRequest);
myRequest = null;
}
@Override
public String getSearchId() {
return mySearchId;
}
@Override
public int getLimit() {
return myLimit;
}
@Override
public HfqlStatement getStatement() {
return myStatement;
}
}
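
Reading readHeaderRows() and getNextRow() together, the CSV payload the client consumes looks roughly like the sketch below. All values are invented for illustration; the real protocol version string and quoting are whatever the server emits:

class HfqlWireFormatSketch {
	// Invented example payload, following the row layout parsed above:
	static final String EXAMPLE_CSV_PAYLOAD = "1\n" // row 1: protocol version, compared against PROTOCOL_VERSION
			+ "search-id-123,1000,{\"select\":[]}\n" // row 2: search ID, limit, statement JSON (first page only)
			+ "0,Simpson,Homer\n" // data rows: row number first, then one value per SELECT column
			+ "1,Simpson,Marge\n";
}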


@@ -0,0 +1,218 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.parser;
import ca.uhn.fhir.context.*;
import ca.uhn.fhir.jpa.fql.executor.HfqlDataTypeEnum;
import org.apache.commons.text.WordUtils;
import java.util.Map;
import javax.annotation.Nullable;
import static java.util.Map.entry;
public class HfqlFhirPathParser {
private static final Map<String, HfqlDataTypeEnum> FHIR_DATATYPE_TO_FQL_DATATYPE;
static {
FHIR_DATATYPE_TO_FQL_DATATYPE = Map.ofEntries(
entry("base64Binary", HfqlDataTypeEnum.STRING),
entry("boolean", HfqlDataTypeEnum.BOOLEAN),
entry("canonical", HfqlDataTypeEnum.STRING),
entry("code", HfqlDataTypeEnum.STRING),
entry("date", HfqlDataTypeEnum.DATE),
entry("dateTime", HfqlDataTypeEnum.TIMESTAMP),
entry("decimal", HfqlDataTypeEnum.DECIMAL),
entry("id", HfqlDataTypeEnum.STRING),
entry("instant", HfqlDataTypeEnum.TIMESTAMP),
entry("integer", HfqlDataTypeEnum.INTEGER),
entry("integer64", HfqlDataTypeEnum.LONGINT),
entry("markdown", HfqlDataTypeEnum.STRING),
entry("oid", HfqlDataTypeEnum.STRING),
entry("positiveInt", HfqlDataTypeEnum.INTEGER),
entry("string", HfqlDataTypeEnum.STRING),
entry("time", HfqlDataTypeEnum.TIME),
entry("unsignedInt", HfqlDataTypeEnum.INTEGER),
entry("uri", HfqlDataTypeEnum.STRING),
entry("url", HfqlDataTypeEnum.STRING),
entry("uuid", HfqlDataTypeEnum.STRING),
entry("xhtml", HfqlDataTypeEnum.STRING));
}
private final FhirContext myFhirContext;
/**
* Constructor
*/
public HfqlFhirPathParser(FhirContext theFhirContext) {
myFhirContext = theFhirContext;
}
/**
* Given a FHIRPath expression (and a resource type that it applies to), this
* class tries to determine the {@link HfqlDataTypeEnum HFQL Data Type} that the
* values will be when the expression is resolved. This is not nearly foolproof,
* so it is a best-effort determination. If a repeating element is encountered,
* this method returns {@link HfqlDataTypeEnum#JSON}; if the type can't be
* determined at all, it returns <code>null</code>.
*/
public HfqlDataTypeEnum determineDatatypeForPath(String theResourceType, String theFhirPath) {
BaseRuntimeElementCompositeDefinition<?> currentElementDefinition =
myFhirContext.getResourceDefinition(theResourceType);
RuntimePrimitiveDatatypeDefinition leafDefinition = null;
HfqlLexer lexer = new HfqlLexer(theFhirPath);
boolean firstToken = true;
boolean potentiallyRepeatableAtCurrentPath = false;
while (lexer.hasNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART)) {
HfqlLexerToken nextToken = lexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART);
String nextTokenString = nextToken.getToken();
// If the first token is the resource type, we can ignore that
if (firstToken) {
firstToken = false;
if (nextTokenString.equals(theResourceType)) {
continue;
}
}
if (".".equals(nextTokenString)) {
continue;
}
/*
* If there's a round bracket then this is a function name and not an
* element name. In this case we'll just move on to the next element.
* We're making the naive assumption here that the function is a filtering
* function such as in "Patient.identifier.where(system='http://foo').value"
* so that we can just skip the filter function and continue to navigate
* the element names as though the filter wasn't there. This is probably
* not going to hold true always, but it should be good enough for our
* basic type guessing.
*
* One specific case though that we deal with is the functions that take
* a collection and reduce it to a single element. In that case we assume
* we can't have a collection.
*/
if (nextTokenString.contains("(")) {
String keyword = nextToken.asKeyword();
switch (keyword) {
case "FIRST()":
case "LAST()":
potentiallyRepeatableAtCurrentPath = false;
break;
case "TOINTEGER()":
if (!lexer.hasNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART)) {
return HfqlDataTypeEnum.INTEGER;
}
break;
}
continue;
}
/*
* If the element has an offset operator (e.g. "name[3]") then
* ignore it since we only care about the element name part.
*/
boolean hasArrayIndex = false;
int leftSquareBracketIndex = nextTokenString.indexOf('[');
if (leftSquareBracketIndex != -1 && nextTokenString.endsWith("]")) {
nextTokenString = nextTokenString.substring(0, leftSquareBracketIndex);
hasArrayIndex = true;
}
BaseRuntimeChildDefinition childDefForNode = currentElementDefinition.getChildByName(nextTokenString);
if (childDefForNode == null) {
childDefForNode = currentElementDefinition.getChildByName(nextTokenString + "[x]");
if (childDefForNode != null) {
if (lexer.peekNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART)
.getToken()
.equals(".")) {
lexer.consumeNextToken();
}
if (lexer.hasNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART)) {
String token = lexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART)
.getToken();
if (token.startsWith("ofType(") && token.endsWith(")")) {
String type = token.substring(7, token.length() - 1);
nextTokenString = nextTokenString + WordUtils.capitalize(type);
}
}
}
}
if (childDefForNode != null) {
if (childDefForNode.getMax() != 1 && !hasArrayIndex) {
potentiallyRepeatableAtCurrentPath = true;
}
if (childDefForNode.getValidChildNames().contains(nextTokenString)) {
BaseRuntimeElementDefinition<?> elementDefForNode = childDefForNode.getChildByName(nextTokenString);
if (elementDefForNode != null) {
if (elementDefForNode instanceof BaseRuntimeElementCompositeDefinition) {
currentElementDefinition = (BaseRuntimeElementCompositeDefinition<?>) elementDefForNode;
continue;
} else if (elementDefForNode instanceof RuntimePrimitiveDatatypeDefinition) {
leafDefinition = (RuntimePrimitiveDatatypeDefinition) elementDefForNode;
continue;
}
}
}
}
break;
}
if (potentiallyRepeatableAtCurrentPath) {
return HfqlDataTypeEnum.JSON;
}
if (leafDefinition != null) {
String typeName = leafDefinition.getName();
return getHfqlDataTypeForFhirType(typeName);
}
return null;
}
static HfqlDataTypeEnum getHfqlDataTypeForFhirType(String theTypeName) {
return FHIR_DATATYPE_TO_FQL_DATATYPE.get(theTypeName);
}
@Nullable
private static String getNextFhirPathPartTokenOrNull(HfqlLexer lexer) {
String finalToken = null;
if (lexer.hasNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART)) {
finalToken = lexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART)
.getToken();
}
if (".".equals(finalToken)) {
if (lexer.hasNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART)) {
finalToken = lexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART)
.getToken();
}
}
return finalToken;
}
}
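
To make the traversal above concrete, a sketch of the expected behaviour. The return values are inferred from the mapping table and the repetition logic, not taken from the project's test suite:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.fql.executor.HfqlDataTypeEnum;

class DatatypeGuessSketch {
	public static void main(String[] args) {
		HfqlFhirPathParser parser = new HfqlFhirPathParser(FhirContext.forR4Cached());

		// Non-repeating primitive: the "date" type maps to DATE
		HfqlDataTypeEnum birthDate = parser.determineDatatypeForPath("Patient", "Patient.birthDate");

		// "name" repeats and has no index, so the value is a collection -> JSON
		HfqlDataTypeEnum family = parser.determineDatatypeForPath("Patient", "Patient.name.family");

		// An explicit array index removes the repetition -> STRING
		HfqlDataTypeEnum firstFamily = parser.determineDatatypeForPath("Patient", "Patient.name[0].family");

		System.out.println(birthDate + " " + family + " " + firstFamily); // expected: DATE JSON STRING
	}
}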


@@ -0,0 +1,255 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.parser;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.apache.commons.lang3.Validate;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Nonnull;
import static java.lang.Character.isWhitespace;
/**
* Just a simple lexer used to parse HFQL queries and FHIRPath expressions. The lexer
* returns a stream of tokens and can use different lexing rules depending on the
* {@link HfqlLexerOptions} passed in.
*/
class HfqlLexer {
private final char[] myInput;
private final StringBuilder myBuffer = new StringBuilder();
private int myPosition = 0;
private int myLine = 0;
private int myColumn = 0;
private int myParenDepth = 0;
private LexerState myState = LexerState.INITIAL;
private String myNextToken;
private int myNextTokenLine;
private int myNextTokenColumn;
private int myNextTokenStartPosition;
private HfqlLexerOptions myNextTokenOptions;
public HfqlLexer(String theInput) {
myInput = theInput.toCharArray();
}
/**
* Returns the next token. Fails if no token remains, so check
* {@link #hasNextToken(HfqlLexerOptions)} first.
*/
@Nonnull
public HfqlLexerToken getNextToken() {
return getNextToken(HfqlLexerOptions.HFQL_TOKEN);
}
/**
* Returns the next token using the given options. Fails if no token remains,
* so check {@link #hasNextToken(HfqlLexerOptions)} first.
*/
@Nonnull
public HfqlLexerToken getNextToken(@Nonnull HfqlLexerOptions theOptions) {
lexNextToken(theOptions);
Validate.notBlank(myNextToken, "No next token is available");
HfqlLexerToken token = new HfqlLexerToken(myNextToken, myNextTokenLine, myNextTokenColumn);
myNextToken = null;
return token;
}
private void lexNextToken(@Nonnull HfqlLexerOptions theOptions) {
if (myNextToken != null) {
if (theOptions == myNextTokenOptions) {
// Already have a token, no action needed
return;
} else {
// Rewind because the options have changed
myNextToken = null;
myPosition = myNextTokenStartPosition;
}
}
while (true) {
if (myPosition == myInput.length) {
if (myBuffer.length() > 0) {
if (myState == LexerState.IN_SINGLE_QUOTED_STRING || myParenDepth > 0) {
throw new InvalidRequestException(
Msg.code(2401) + "Unexpected end of string at position " + describePosition());
}
setNextToken(theOptions, myBuffer.toString());
}
return;
}
char nextChar = myInput[myPosition];
handleNextChar(theOptions, nextChar);
if (myNextToken != null) {
return;
}
myPosition++;
if (nextChar == '\n') {
myLine++;
myColumn = 0;
} else if (nextChar != '\r') {
myColumn++;
}
}
}
private void setNextToken(@Nonnull HfqlLexerOptions theOptions, String theNextToken) {
myNextTokenOptions = theOptions;
myNextToken = theNextToken;
myBuffer.setLength(0);
myState = LexerState.INITIAL;
}
private void handleNextChar(@Nonnull HfqlLexerOptions theOptions, final char theNextChar) {
if (theOptions.isSlurpParens()) {
if (theNextChar == '(') {
myParenDepth++;
} else if (theNextChar == ')') {
myParenDepth--;
}
}
switch (myState) {
case INITIAL: {
if (isWhitespace(theNextChar)) {
return;
}
if (theNextChar == '\'') {
myNextTokenLine = myLine;
myNextTokenColumn = myColumn;
myState = LexerState.IN_SINGLE_QUOTED_STRING;
myBuffer.append(theNextChar);
return;
}
if (theOptions.getSingleCharTokenCharacters().contains(theNextChar)) {
myNextTokenStartPosition = myPosition;
setNextToken(theOptions, Character.toString(theNextChar));
myPosition++;
return;
}
if (theOptions.getMultiCharTokenCharacters().contains(theNextChar)) {
myNextTokenStartPosition = myPosition;
myNextTokenOptions = theOptions;
myNextTokenLine = myLine;
myNextTokenColumn = myColumn;
myState = LexerState.IN_TOKEN;
myBuffer.append(theNextChar);
return;
}
break;
}
case IN_TOKEN: {
if (theOptions.getMultiCharTokenCharacters().contains(theNextChar)) {
myBuffer.append(theNextChar);
return;
}
if (myParenDepth > 0) {
myBuffer.append(theNextChar);
return;
}
setNextToken(theOptions, myBuffer.toString());
return;
}
case IN_SINGLE_QUOTED_STRING: {
if (theNextChar == '\'') {
myBuffer.append(theNextChar);
myPosition++;
setNextToken(theOptions, myBuffer.toString());
return;
}
if (theNextChar == '\\') {
if (myPosition < myInput.length - 1) {
char followingChar = myInput[myPosition + 1];
if (followingChar == '\'') {
myBuffer.append(followingChar);
myPosition++;
return;
}
}
}
myBuffer.append(theNextChar);
return;
}
}
throw new DataFormatException(Msg.code(2405) + "Unexpected character at position " + describePosition() + ": '"
+ theNextChar + "' (" + (int) theNextChar + ")");
}
private String describePosition() {
return "[line " + myLine + ", column " + myColumn + "]";
}
public List<String> allTokens() {
return allTokens(HfqlLexerOptions.HFQL_TOKEN);
}
public List<String> allTokens(@Nonnull HfqlLexerOptions theOptions) {
ArrayList<String> retVal = new ArrayList<>();
while (hasNextToken(theOptions)) {
retVal.add(getNextToken(theOptions).toString());
}
return retVal;
}
public boolean hasNextToken(@Nonnull HfqlLexerOptions theOptions) {
lexNextToken(theOptions);
return myNextToken != null;
}
/**
* This method should only be called if there is a token already available
* (meaning that {@link #hasNextToken(HfqlLexerOptions)}
* has been called and returned <code>true</code>).
*/
public void consumeNextToken() {
Validate.isTrue(myNextToken != null);
myNextToken = null;
}
public HfqlLexerToken peekNextToken(HfqlLexerOptions theOptions) {
lexNextToken(theOptions);
if (myNextToken == null) {
return null;
}
return new HfqlLexerToken(myNextToken, myNextTokenLine, myNextTokenColumn);
}
private enum LexerState {
INITIAL,
IN_SINGLE_QUOTED_STRING,
IN_TOKEN
}
}
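
A short sketch of the lexer in action under the default rules. The expected tokens are inferred from the HFQL_TOKEN character sets defined below, and since HfqlLexer is package-private this assumes a caller in the same package:

class HfqlLexerSketch {
	public static void main(String[] args) {
		HfqlLexer lexer = new HfqlLexer("SELECT name.family FROM Patient WHERE active = 'true'");
		// Expected: [SELECT, name.family, FROM, Patient, WHERE, active, =, 'true']
		// Note that the quoted string keeps its quote marks.
		System.out.println(lexer.allTokens());
	}
}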


@@ -0,0 +1,116 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.parser;
import java.util.Set;
public enum HfqlLexerOptions {
/**
* Standard HFQL tokenization rules for when we're not expecting anything
* more specialized.
*/
HFQL_TOKEN(
Set.of(
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N',
'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '0', '1', '2', '3', '4', '5', '6', '7',
'8', '9', '.', '[', ']', '_'),
Set.of(',', '=', '(', ')', '|', ':', '*'),
false),
/**
* A FHIR search parameter name.
*/
SEARCH_PARAMETER_NAME(
Set.of(
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N',
'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '0', '1', '2', '3', '4', '5', '6', '7',
'8', '9', '_', ':', '.', '-'),
Set.of(),
false),
/**
* A complete FHIRPath expression.
*/
FHIRPATH_EXPRESSION(
Set.of(
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N',
'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '0', '1', '2', '3', '4', '5', '6', '7',
'8', '9', '.', '[', ']', '_', '(', ')', '!', '~', '<', '>', '+', '-'),
Set.of(',', '|', ':', '*', '='),
true),
/**
* Returns individual dot-parts of a FHIRPath expression as individual tokens, and also returns
* dots as separate tokens.
*/
FHIRPATH_EXPRESSION_PART(
Set.of(
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N',
'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '0', '1', '2', '3', '4', '5', '6', '7',
'8', '9', '[', ']', '_', '(', ')', '+', '-'),
Set.of(',', '=', '|', ':', '*', '.'),
true);
private final Set<Character> myMultiCharTokenCharacters;
private final boolean mySlurpParens;
private final Set<Character> mySingleCharTokenCharacters;
HfqlLexerOptions(
Set<Character> theMultiCharTokenCharacters,
Set<Character> theSingleCharTokenCharacters,
boolean theSlurpParens) {
myMultiCharTokenCharacters = theMultiCharTokenCharacters;
mySingleCharTokenCharacters = theSingleCharTokenCharacters;
mySlurpParens = theSlurpParens;
if (mySlurpParens) {
assert myMultiCharTokenCharacters.contains('(');
assert !mySingleCharTokenCharacters.contains('(');
}
}
/**
* These characters are treated as a single character token if they are found
*/
public Set<Character> getSingleCharTokenCharacters() {
return mySingleCharTokenCharacters;
}
/**
* These characters are valid as a part of a multi-character token
*/
public Set<Character> getMultiCharTokenCharacters() {
return myMultiCharTokenCharacters;
}
/**
* If we encounter a ( character in the token, should we grab everything until we find a
* matching ) character, regardless of which characters and whitespace are found between
* the parens?
*/
public boolean isSlurpParens() {
return mySlurpParens;
}
}
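
The difference between the two FHIRPath modes matters for the type-guessing code earlier. A sketch, with expected output inferred from the character sets above and a same-package caller assumed:

class LexerOptionsSketch {
	public static void main(String[] args) {
		String path = "name.where(use='official').family";

		// FHIRPATH_EXPRESSION treats '.' as part of a token and slurps parens,
		// so the whole path comes back as a single token:
		// [name.where(use='official').family]
		System.out.println(new HfqlLexer(path).allTokens(HfqlLexerOptions.FHIRPATH_EXPRESSION));

		// FHIRPATH_EXPRESSION_PART treats '.' as a single-character token, splitting the parts:
		// [name, ., where(use='official'), ., family]
		System.out.println(new HfqlLexer(path).allTokens(HfqlLexerOptions.FHIRPATH_EXPRESSION_PART));
	}
}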


@@ -0,0 +1,85 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.parser;
import org.apache.commons.lang3.StringUtils;
import java.util.Locale;
import javax.annotation.Nonnull;
class HfqlLexerToken {
@Nonnull
public final String myToken;
private final int myLine;
private final int myColumn;
HfqlLexerToken(@Nonnull String theToken, int theLine, int theColumn) {
myToken = theToken;
myLine = theLine;
myColumn = theColumn;
}
@Nonnull
String getToken() {
return myToken;
}
int getLine() {
return myLine;
}
int getColumn() {
return myColumn;
}
/**
* Returns the token as a normalized keyword string. Normalization
* returns an upper-cased version of the token.
*/
@Nonnull
public String asKeyword() {
return myToken.toUpperCase(Locale.US);
}
@Nonnull
public String asString() {
return myToken;
}
@Nonnull
public String describePosition() {
return "[line=" + getLine() + ", column=" + getColumn() + "]";
}
public boolean isQuotedString() {
return StringUtils.startsWith(myToken, "'") && StringUtils.endsWith(myToken, "'");
}
@Override
public String toString() {
return myToken;
}
public Integer asInteger() throws NumberFormatException {
return Integer.parseInt(getToken());
}
}
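
A trivial sketch of the normalization rules (again assuming a same-package caller, since the class is package-private):

class HfqlLexerTokenSketch {
	public static void main(String[] args) {
		HfqlLexerToken token = new HfqlLexerToken("select", 1, 0);
		System.out.println(token.asKeyword()); // SELECT -- keyword comparisons are case-insensitive
		System.out.println(token.isQuotedString()); // false
		System.out.println(token.describePosition()); // [line=1, column=0]
	}
}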


@@ -0,0 +1,325 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.parser;
import ca.uhn.fhir.jpa.fql.executor.HfqlDataTypeEnum;
import ca.uhn.fhir.model.api.IModelJson;
import ca.uhn.fhir.util.ValidateUtil;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
* This class represents a parsed HFQL expression tree. It is useful for
* passing over the wire, but it should not be considered a stable model (in
* other words, don't persist these things long-term).
*/
public class HfqlStatement implements IModelJson {
@JsonProperty("select")
@JsonInclude(JsonInclude.Include.NON_EMPTY)
private List<SelectClause> mySelectClauses = new ArrayList<>();
@JsonProperty("where")
@JsonInclude(JsonInclude.Include.NON_EMPTY)
private List<WhereClause> myWhereClauses = new ArrayList<>();
@JsonProperty("groupBy")
@JsonInclude(JsonInclude.Include.NON_EMPTY)
private List<String> myGroupByClauses = new ArrayList<>();
@JsonProperty("orderBy")
@JsonInclude(JsonInclude.Include.NON_EMPTY)
private List<OrderByClause> myOrderByClauses = new ArrayList<>();
@JsonProperty("fromResourceName")
private String myFromResourceName;
@JsonProperty("limit")
private Integer myLimit;
public List<SelectClause> getSelectClauses() {
return mySelectClauses;
}
public String getFromResourceName() {
return myFromResourceName;
}
public void setFromResourceName(String theFromResourceName) {
myFromResourceName = theFromResourceName;
}
@Nonnull
public SelectClause addSelectClause(@Nonnull String theClause) {
SelectClauseOperator operator = SelectClauseOperator.SELECT;
return addSelectClause(theClause, operator);
}
@Nonnull
public SelectClause addSelectClause(@Nonnull String theClause, @Nonnull SelectClauseOperator operator) {
SelectClause clause = new SelectClause();
clause.setClause(theClause);
clause.setOperator(operator);
mySelectClauses.add(clause);
return clause;
}
public WhereClause addWhereClause() {
WhereClause clause = new WhereClause();
myWhereClauses.add(clause);
return clause;
}
public void addWhereClause(String theLeft, WhereClauseOperatorEnum theOperator) {
WhereClause whereClause = addWhereClause();
whereClause.setLeft(theLeft);
whereClause.setOperator(theOperator);
}
public List<WhereClause> getWhereClauses() {
return myWhereClauses;
}
@Nullable
public Integer getLimit() {
return myLimit;
}
public void setLimit(Integer theLimit) {
myLimit = theLimit;
}
public void addGroupByClause(String theGroupByClause) {
ValidateUtil.isNotBlankOrThrowIllegalArgument(theGroupByClause, "theGroupByClause must not be null or blank");
getGroupByClauses().add(theGroupByClause);
}
public List<String> getGroupByClauses() {
if (myGroupByClauses == null) {
myGroupByClauses = new ArrayList<>();
}
return myGroupByClauses;
}
public boolean hasCountClauses() {
return getSelectClauses().stream().anyMatch(t -> t.getOperator() == SelectClauseOperator.COUNT);
}
public OrderByClause addOrderByClause(String theClause, boolean theAscending) {
ValidateUtil.isNotBlankOrThrowIllegalArgument(theClause, "theClause must not be null or blank");
OrderByClause clause = new OrderByClause();
clause.setClause(theClause);
clause.setAscending(theAscending);
getOrderByClauses().add(clause);
return clause;
}
public List<OrderByClause> getOrderByClauses() {
if (myOrderByClauses == null) {
myOrderByClauses = new ArrayList<>();
}
return myOrderByClauses;
}
public int findSelectClauseIndex(String theClause) {
for (int i = 0; i < getSelectClauses().size(); i++) {
if (theClause.equals(getSelectClauses().get(i).getClause())
|| theClause.equals(getSelectClauses().get(i).getAlias())) {
return i;
}
}
return -1;
}
public boolean hasOrderClause() {
return !getOrderByClauses().isEmpty();
}
public List<String> toSelectedColumnAliases() {
return mySelectClauses.stream().map(SelectClause::getAlias).collect(Collectors.toList());
}
public List<HfqlDataTypeEnum> toSelectedColumnDataTypes() {
return mySelectClauses.stream().map(SelectClause::getDataType).collect(Collectors.toList());
}
public SelectClause addSelectClauseAndAlias(String theSelectClause) {
return addSelectClause(theSelectClause).setAlias(theSelectClause);
}
public enum WhereClauseOperatorEnum {
EQUALS,
IN,
UNARY_BOOLEAN,
SEARCH_MATCH
}
public enum SelectClauseOperator {
SELECT,
COUNT
}
public static class OrderByClause implements IModelJson {
@JsonProperty("clause")
private String myClause;
@JsonProperty("ascending")
private boolean myAscending;
public String getClause() {
return myClause;
}
public void setClause(String theClause) {
myClause = theClause;
}
public boolean isAscending() {
return myAscending;
}
public void setAscending(boolean theAscending) {
myAscending = theAscending;
}
}
public static class SelectClause implements IModelJson {
@JsonProperty("clause")
private String myClause;
@JsonProperty("alias")
private String myAlias;
@JsonProperty("operator")
private SelectClauseOperator myOperator;
@JsonProperty("dataType")
private HfqlDataTypeEnum myDataType;
/**
* Constructor
*/
public SelectClause() {
// nothing
}
/**
* Constructor
*
* @param theClause The clause (will be used as both the clause and the alias)
*/
public SelectClause(String theClause) {
setOperator(SelectClauseOperator.SELECT);
setClause(theClause);
}
public HfqlDataTypeEnum getDataType() {
return myDataType;
}
public SelectClause setDataType(HfqlDataTypeEnum theDataType) {
myDataType = theDataType;
return this;
}
public SelectClauseOperator getOperator() {
return myOperator;
}
public void setOperator(SelectClauseOperator theOperator) {
myOperator = theOperator;
}
public String getAlias() {
return myAlias;
}
public SelectClause setAlias(String theAlias) {
myAlias = theAlias;
return this;
}
public String getClause() {
return myClause;
}
public void setClause(String theClause) {
myClause = theClause;
}
}
public static class WhereClause implements IModelJson {
@JsonProperty("left")
private String myLeft;
@JsonProperty("operator")
private WhereClauseOperatorEnum myOperator;
@JsonProperty("right")
private List<String> myRight = new ArrayList<>();
public WhereClauseOperatorEnum getOperator() {
return myOperator;
}
public void setOperator(WhereClauseOperatorEnum theOperator) {
myOperator = theOperator;
}
public String getLeft() {
return myLeft;
}
public void setLeft(String theLeft) {
myLeft = theLeft;
}
public List<String> getRight() {
return myRight;
}
public void addRight(String theRight) {
myRight.add(theRight);
}
/**
* Returns the {@link #getRight() right} values as raw strings. That
* means that any surrounding quote marks are stripped.
*/
public List<String> getRightAsStrings() {
List<String> retVal = new ArrayList<>();
for (String next : getRight()) {
if (next.startsWith("'")) {
next = next.substring(1, next.length() - 1);
}
retVal.add(next);
}
return retVal;
}
}
}
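
As a sketch of the model in use, building by hand roughly what the parser (below) would produce for a simple query. The JSON shape in the comment follows the @JsonProperty annotations above and is illustrative only:

import ca.uhn.fhir.util.JsonUtil;

class HfqlStatementSketch {
	public static void main(String[] args) {
		HfqlStatement statement = new HfqlStatement();
		statement.setFromResourceName("Patient");
		statement.addSelectClauseAndAlias("name.family");
		statement.addWhereClause("active = true", HfqlStatement.WhereClauseOperatorEnum.UNARY_BOOLEAN);
		statement.setLimit(100);

		// Serializes to something like:
		// {"select":[{"clause":"name.family","alias":"name.family","operator":"SELECT"}],
		//  "where":[{"left":"active = true","operator":"UNARY_BOOLEAN"}],
		//  "fromResourceName":"Patient","limit":100}
		System.out.println(JsonUtil.serialize(statement, false));
	}
}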


@@ -0,0 +1,588 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.parser;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.util.UrlUtil;
import org.apache.commons.lang3.Validate;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class HfqlStatementParser {
public static final String KEYWORD_AND = "AND";
public static final String KEYWORD_WHERE = "WHERE";
public static final String KEYWORD_SELECT = "SELECT";
public static final String KEYWORD_FROM = "FROM";
public static final String KEYWORD_LIMIT = "LIMIT";
public static final String KEYWORD_GROUP = "GROUP";
public static final String KEYWORD_ORDER = "ORDER";
public static final String KEYWORD_TRUE = "TRUE";
public static final String KEYWORD_FALSE = "FALSE";
private static final Set<String> DIRECTIVE_KEYWORDS =
Set.of(KEYWORD_FROM, KEYWORD_GROUP, KEYWORD_LIMIT, KEYWORD_ORDER, KEYWORD_WHERE, KEYWORD_SELECT);
private final HfqlLexer myLexer;
private final FhirContext myFhirContext;
private BaseState myState;
private HfqlStatement myStatement;
public HfqlStatementParser(FhirContext theFhirContext, String theInput) {
myFhirContext = theFhirContext;
myLexer = new HfqlLexer(theInput);
myState = new InitialState();
}
/**
* This method may only be called once for a given instance
*/
public HfqlStatement parse() {
Validate.isTrue(myStatement == null, "Already completed parsing");
myStatement = new HfqlStatement();
while (myLexer.hasNextToken(myState.getLexerOptions())) {
HfqlLexerToken nextToken = myLexer.getNextToken(myState.getLexerOptions());
myState.consume(nextToken);
}
if (isBlank(myStatement.getFromResourceName())) {
throw newExceptionUnexpectedTokenExpectToken(null, KEYWORD_FROM);
}
if (myStatement.getSelectClauses().isEmpty()) {
throw newExceptionUnexpectedTokenExpectToken(null, KEYWORD_SELECT);
}
Set<String> existingAliases = new HashSet<>();
for (HfqlStatement.SelectClause next : myStatement.getSelectClauses()) {
if (isNotBlank(next.getAlias())) {
if (!existingAliases.add(next.getAlias())) {
throw new DataFormatException(Msg.code(2414) + "Duplicate SELECT column alias: "
+ UrlUtil.sanitizeUrlPart(next.getAlias()));
}
}
}
for (HfqlStatement.SelectClause next : myStatement.getSelectClauses()) {
if (isBlank(next.getAlias())) {
String candidateAlias = next.getClause();
int nextSuffix = 2;
while (existingAliases.contains(candidateAlias)) {
candidateAlias = next.getClause() + nextSuffix;
nextSuffix++;
}
existingAliases.add(candidateAlias);
next.setAlias(candidateAlias);
}
}
return myStatement;
}
@Nonnull
private HfqlLexerToken getNextTokenRequired(@Nonnull HfqlLexerOptions theOptions) {
if (!myLexer.hasNextToken(theOptions)) {
throw newExceptionUnexpectedToken(null);
}
return myLexer.getNextToken(theOptions);
}
@Nonnull
private static DataFormatException newExceptionUnexpectedToken(@Nullable HfqlLexerToken theToken) {
return newExceptionUnexpectedTokenExpectDescription(theToken, null);
}
@Nonnull
private static DataFormatException newExceptionUnexpectedTokenExpectToken(
@Nullable HfqlLexerToken theToken, @Nonnull String theExpectedToken) {
return newExceptionUnexpectedTokenExpectDescription(theToken, "\"" + theExpectedToken + "\"");
}
@Nonnull
private static DataFormatException newExceptionUnexpectedTokenExpectDescription(
@Nullable HfqlLexerToken theToken, @Nullable String theExpectedDescription) {
StringBuilder b = new StringBuilder();
b.append("Unexpected ");
if (theToken != null) {
b.append("token");
} else {
b.append("end of stream");
}
if (theExpectedDescription != null) {
b.append(" (expected ");
b.append(theExpectedDescription);
b.append(")");
}
if (theToken != null) {
b.append(" at position ");
b.append(theToken.describePosition());
b.append(": ");
b.append(theToken.getToken());
}
String message = b.toString();
return new DataFormatException(message);
}
@Nonnull
private static DataFormatException newExceptionUnknownResourceType(HfqlLexerToken theToken, String resourceType) {
return new DataFormatException("Invalid FROM statement. Unknown resource type '" + resourceType
+ "' at position: " + theToken.describePosition());
}
private static void validateNotPresent(List<?> theClauses, HfqlLexerToken theKeyword) {
if (!theClauses.isEmpty()) {
throw newExceptionUnexpectedToken(theKeyword);
}
}
private static void validateNotPresent(Object theValue, HfqlLexerToken theKeyword) {
if (theValue != null) {
throw newExceptionUnexpectedToken(theKeyword);
}
}
/**
* No tokens consumed yet
*/
public class InitialState extends BaseRootState {
// nothing
}
/**
* Have consumed a 'from' token but not a resource type yet
*/
public class StateFromStart extends BaseState {
@Override
void consume(HfqlLexerToken theToken) {
String resourceType = theToken.asString();
if (!myFhirContext.getResourceTypes().contains(resourceType)) {
throw newExceptionUnknownResourceType(theToken, resourceType);
}
myStatement.setFromResourceName(resourceType);
myState = new StateFromAfter();
}
}
/**
* Have consumed a 'from' token and a resource type
*/
public class StateFromAfter extends BaseRootState {
// nothing
}
/**
* We're in the select statement
*/
public class StateInSelect extends BaseState {
@Nonnull
@Override
public HfqlLexerOptions getLexerOptions() {
return HfqlLexerOptions.FHIRPATH_EXPRESSION;
}
@Override
void consume(HfqlLexerToken theToken) {
String asKeyword = theToken.asKeyword();
HfqlStatement.SelectClause clause;
if (asKeyword.startsWith("COUNT(") && asKeyword.endsWith(")")) {
String countClause = theToken.asString().substring("COUNT(".length(), asKeyword.length() - 1);
clause = myStatement.addSelectClause(countClause, HfqlStatement.SelectClauseOperator.COUNT);
clause.setAlias(theToken.getToken());
} else {
String string = theToken.asString();
clause = myStatement.addSelectClause(string);
}
myState = new StateInSelectAfterClause(clause);
}
}
private class StateInSelectAfterClause extends StateSelectAfterClauseFinal {
public StateInSelectAfterClause(HfqlStatement.SelectClause theSelectClause) {
super(theSelectClause);
}
@Override
void consume(HfqlLexerToken theToken) {
if (theToken.getToken().equals(":")) {
HfqlLexerToken nextToken = getNextTokenRequired(HfqlLexerOptions.FHIRPATH_EXPRESSION);
String clause = nextToken.asString();
String alias = mySelectClause.getClause();
mySelectClause.setAlias(alias);
mySelectClause.setClause(clause);
myState = new StateSelectAfterClauseFinal(mySelectClause);
} else if (theToken.asKeyword().equals("AS")) {
HfqlLexerToken nextToken = getNextTokenRequired(HfqlLexerOptions.HFQL_TOKEN);
String alias = nextToken.asString();
mySelectClause.setAlias(alias);
myState = new StateSelectAfterClauseFinal(mySelectClause);
} else {
super.consume(theToken);
}
}
}
private class StateSelectAfterClauseFinal extends BaseRootState {
protected final HfqlStatement.SelectClause mySelectClause;
private StateSelectAfterClauseFinal(HfqlStatement.SelectClause theSelectClause) {
mySelectClause = theSelectClause;
}
@Nonnull
@Override
public HfqlLexerOptions getLexerOptions() {
return HfqlLexerOptions.FHIRPATH_EXPRESSION;
}
@Override
void consume(HfqlLexerToken theToken) {
if (theToken.getToken().equals(",")) {
myState = new StateInSelect();
} else if (!DIRECTIVE_KEYWORDS.contains(theToken.asKeyword())) {
String newClause = mySelectClause.getClause() + " " + theToken.getToken();
mySelectClause.setClause(newClause);
} else {
super.consume(theToken);
}
}
}
private class StateInWhereInitial extends BaseState {
@Nonnull
@Override
public HfqlLexerOptions getLexerOptions() {
return HfqlLexerOptions.FHIRPATH_EXPRESSION;
}
@Override
void consume(HfqlLexerToken theToken) {
HfqlStatement.WhereClause whereClause = myStatement.addWhereClause();
String token = theToken.getToken();
whereClause.setLeft(token);
whereClause.setOperator(HfqlStatement.WhereClauseOperatorEnum.UNARY_BOOLEAN);
myState = new StateInWhereAfterLeft(whereClause);
}
}
private class StateInWhereAfterLeft extends BaseRootState {
private final HfqlStatement.WhereClause myWhereClause;
public StateInWhereAfterLeft(HfqlStatement.WhereClause theWhereClause) {
myWhereClause = theWhereClause;
}
@Nonnull
@Override
public HfqlLexerOptions getLexerOptions() {
return HfqlLexerOptions.FHIRPATH_EXPRESSION;
}
@Override
void consume(HfqlLexerToken theToken) {
if ("=".equals(theToken.getToken())) {
myWhereClause.setOperator(HfqlStatement.WhereClauseOperatorEnum.EQUALS);
myState = new StateInWhereAfterOperatorEquals(myWhereClause);
} else if ("IN".equals(theToken.asKeyword())) {
HfqlLexerToken nextToken = getNextTokenRequired(HfqlLexerOptions.HFQL_TOKEN);
switch (nextToken.asKeyword()) {
case "(":
myWhereClause.setOperator(HfqlStatement.WhereClauseOperatorEnum.IN);
myState = new StateInWhereAfterOperatorIn(myWhereClause);
return;
case "SEARCH_MATCH":
myWhereClause.setOperator(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH);
HfqlLexerToken argumentsToken = getNextTokenRequired(HfqlLexerOptions.HFQL_TOKEN);
String token = argumentsToken.getToken();
if (!token.equals("(")) {
throw newExceptionUnexpectedTokenExpectToken(theToken, "(");
}
myState = new StateInWhereSearchMatch(myWhereClause);
return;
}
throw newExceptionUnexpectedTokenExpectToken(theToken, "(");
} else {
myWhereClause.setOperator(HfqlStatement.WhereClauseOperatorEnum.UNARY_BOOLEAN);
HfqlLexerToken nextToken = theToken;
if (!KEYWORD_AND.equals(nextToken.asKeyword()) && !DIRECTIVE_KEYWORDS.contains(nextToken.asKeyword())) {
StringBuilder expression = new StringBuilder(myWhereClause.getLeft());
while (true) {
expression.append(' ').append(nextToken.getToken());
if (myLexer.hasNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION)) {
nextToken = myLexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION);
String nextTokenAsKeyword = nextToken.asKeyword();
if (KEYWORD_AND.equals(nextTokenAsKeyword)
|| DIRECTIVE_KEYWORDS.contains(nextTokenAsKeyword)) {
break;
}
} else {
nextToken = null;
break;
}
}
myWhereClause.setLeft(expression.toString());
}
if (nextToken != null) {
super.consume(nextToken);
}
}
}
}
private class StateInWhereAfterOperatorEquals extends BaseState {
private final HfqlStatement.WhereClause myWhereClause;
public StateInWhereAfterOperatorEquals(HfqlStatement.WhereClause theWhereClause) {
myWhereClause = theWhereClause;
}
@Override
void consume(HfqlLexerToken theToken) {
String token = theToken.getToken();
String keyword = theToken.asKeyword();
if (KEYWORD_TRUE.equals(keyword) || KEYWORD_FALSE.equals(keyword)) {
token = keyword.toLowerCase(Locale.US);
} else if (!theToken.isQuotedString()) {
throw newExceptionUnexpectedTokenExpectDescription(theToken, "quoted string");
}
myWhereClause.addRight(token);
myState = new StateAfterWhere();
}
}
private class StateInWhereAfterOperatorIn extends BaseState {
private final HfqlStatement.WhereClause myWhereClause;
public StateInWhereAfterOperatorIn(HfqlStatement.WhereClause theWhereClause) {
myWhereClause = theWhereClause;
}
@Override
void consume(HfqlLexerToken theToken) {
myWhereClause.addRight(theToken.getToken());
if (myLexer.peekNextToken(getLexerOptions()) != null) {
if (myLexer.peekNextToken(getLexerOptions()).getToken().equals("|")) {
myLexer.consumeNextToken();
return;
} else if (myLexer.peekNextToken(getLexerOptions()).getToken().equals(",")) {
myLexer.consumeNextToken();
return;
} else if (myLexer.peekNextToken(getLexerOptions()).getToken().equals(")")) {
myLexer.consumeNextToken();
myState = new StateAfterWhere();
return;
}
}
throw newExceptionUnexpectedToken(myLexer.peekNextToken(getLexerOptions()));
}
}
private class StateInWhereSearchMatch extends BaseState {
private final HfqlStatement.WhereClause myWhereClause;
public StateInWhereSearchMatch(HfqlStatement.WhereClause theWhereClause) {
myWhereClause = theWhereClause;
}
@Override
void consume(HfqlLexerToken theToken) {
if (")".equals(theToken.getToken())) {
myState = new StateAfterWhere();
} else {
myWhereClause.addRight(theToken.getToken());
HfqlLexerToken nextToken = getNextTokenRequired(getLexerOptions());
if (")".equals(nextToken.getToken())) {
myState = new StateAfterWhere();
} else if (!",".equals(nextToken.getToken())) {
throw newExceptionUnexpectedTokenExpectToken(nextToken, ",");
}
}
}
}
private class StateAfterWhere extends BaseRootState {
@Override
void consume(HfqlLexerToken theToken) {
String keyword = theToken.asKeyword();
if (keyword.equals(KEYWORD_AND)) {
myState = new StateInWhereInitial();
} else {
super.consume(theToken);
}
}
}
private class LimitState extends BaseState {
@Override
void consume(HfqlLexerToken theToken) {
try {
myStatement.setLimit(theToken.asInteger());
} catch (NumberFormatException e) {
throw newExceptionUnexpectedTokenExpectDescription(theToken, "integer value");
}
}
}
private abstract class BaseRootState extends BaseState {
@Override
void consume(HfqlLexerToken theToken) {
String keyword = theToken.asKeyword();
switch (keyword) {
/*
* Update DIRECTIVE_KEYWORDS if you add new
* keywords here!
*/
case KEYWORD_WHERE:
validateNotPresent(myStatement.getWhereClauses(), theToken);
myState = new StateInWhereInitial();
break;
case KEYWORD_SELECT:
validateNotPresent(myStatement.getSelectClauses(), theToken);
myState = new StateInSelect();
break;
case KEYWORD_FROM:
validateNotPresent(myStatement.getFromResourceName(), theToken);
myState = new StateFromStart();
break;
case KEYWORD_LIMIT:
validateNotPresent(myStatement.getLimit(), theToken);
myState = new LimitState();
break;
case KEYWORD_GROUP:
validateNotPresent(myStatement.getGroupByClauses(), theToken);
myState = new StateGroup();
break;
case KEYWORD_ORDER:
validateNotPresent(myStatement.getOrderByClauses(), theToken);
myState = new OrderState();
break;
default:
if (myStatement.getWhereClauses().isEmpty()) {
throw newExceptionUnexpectedTokenExpectToken(theToken, KEYWORD_SELECT);
} else {
throw newExceptionUnexpectedToken(theToken);
}
}
}
}
private class StateGroup extends BaseState {
@Override
void consume(HfqlLexerToken theToken) {
if (!"BY".equals(theToken.asKeyword())) {
throw newExceptionUnexpectedTokenExpectToken(theToken, "BY");
}
myState = new StateGroupBy();
}
}
private class StateGroupBy extends BaseState {
@Override
void consume(HfqlLexerToken theToken) {
myStatement.addGroupByClause(theToken.asString());
if (myLexer.hasNextToken(HfqlLexerOptions.HFQL_TOKEN)
&& ","
.equals(myLexer.peekNextToken(HfqlLexerOptions.HFQL_TOKEN)
.getToken())) {
myLexer.consumeNextToken();
} else {
myState = new StateAfterGroupBy();
}
}
}
private class StateAfterGroupBy extends BaseRootState {
// nothing
}
private class OrderState extends BaseState {
@Override
void consume(HfqlLexerToken theToken) {
if (!"BY".equals(theToken.asKeyword())) {
throw newExceptionUnexpectedTokenExpectToken(theToken, "BY");
}
myState = new OrderByState();
}
}
private class OrderByState extends BaseState {
@Nonnull
@Override
public HfqlLexerOptions getLexerOptions() {
return HfqlLexerOptions.FHIRPATH_EXPRESSION;
}
@Override
void consume(HfqlLexerToken theToken) {
HfqlStatement.OrderByClause clause = myStatement.addOrderByClause(theToken.getToken(), true);
myState = new OrderByAfterState(clause);
}
}
private class OrderByAfterState extends BaseRootState {
private final HfqlStatement.OrderByClause myClause;
public OrderByAfterState(HfqlStatement.OrderByClause theClause) {
myClause = theClause;
}
@Override
void consume(HfqlLexerToken theToken) {
if ("ASC".equals(theToken.asKeyword())) {
myClause.setAscending(true);
} else if ("DESC".equals(theToken.asKeyword())) {
myClause.setAscending(false);
} else if (",".equals(theToken.getToken())) {
myState = new OrderByState();
} else {
super.consume(theToken);
}
}
}
private abstract static class BaseState {
abstract void consume(HfqlLexerToken theToken);
@Nonnull
public HfqlLexerOptions getLexerOptions() {
return HfqlLexerOptions.HFQL_TOKEN;
}
}
}
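
Tying the state machine together, a sketch of a full parse, with the expected values traced through the states above:

import ca.uhn.fhir.context.FhirContext;

class HfqlParserSketch {
	public static void main(String[] args) {
		String input = "SELECT name.family, name.given FROM Patient WHERE active = 'true' LIMIT 10";
		HfqlStatement statement = new HfqlStatementParser(FhirContext.forR4Cached(), input).parse();

		System.out.println(statement.getFromResourceName()); // Patient
		System.out.println(statement.toSelectedColumnAliases()); // [name.family, name.given]
		System.out.println(statement.getWhereClauses().get(0).getOperator()); // EQUALS
		System.out.println(statement.getLimit()); // 10
	}
}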


@@ -0,0 +1,226 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.provider;
import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult;
import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutor;
import ca.uhn.fhir.jpa.fql.parser.HfqlStatement;
import ca.uhn.fhir.jpa.fql.util.HfqlConstants;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.util.DatatypeUtil;
import ca.uhn.fhir.util.JsonUtil;
import ca.uhn.fhir.util.ValidateUtil;
import ca.uhn.fhir.util.VersionUtil;
import org.apache.commons.csv.CSVPrinter;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import static ca.uhn.fhir.jpa.fql.jdbc.HfqlRestClient.CSV_FORMAT;
import static ca.uhn.fhir.rest.api.Constants.CHARSET_UTF8_CTSUFFIX;
import static ca.uhn.fhir.rest.api.Constants.CT_TEXT_CSV;
import static ca.uhn.fhir.util.DatatypeUtil.toStringValue;
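/**
 * REST provider that exposes the <code>$hfql-execute</code> operation used by the
 * HFQL JDBC driver.
 */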
public class HfqlRestProvider {
@Autowired
private IHfqlExecutor myHfqlExecutor;
/**
* Constructor
*/
public HfqlRestProvider() {
this(null);
}
/**
* Constructor
*/
public HfqlRestProvider(IHfqlExecutor theHfqlExecutor) {
myHfqlExecutor = theHfqlExecutor;
}
public IHfqlExecutor getHfqlExecutor() {
return myHfqlExecutor;
}
public void setHfqlExecutor(IHfqlExecutor theHfqlExecutor) {
myHfqlExecutor = theHfqlExecutor;
}
/**
* This method implements the <code>$hfql-execute</code> operation, which is
* the FHIR operation that the HFQL JDBC client uses to talk to the server. All
* communication between the client and the server goes through this operation. The
* response is not FHIR, however: responses from this operation use a custom CSV
* format that is understood by the client. See
* {@link #streamResponseCsv(HttpServletResponse, int, IHfqlExecutionResult, boolean, HfqlStatement)}
* for details of that format.
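* <p>
* As a rough illustration (the values here are hypothetical, and a comma delimiter
* is assumed), a two-row response page might look like:
* <pre>
* 1,HAPI FHIR 6.7.15-SNAPSHOT
* my-search-id,123,"{...parsed statement as JSON...}"
* 0,Simpson,Homer
* 3,Simpson,Marge
* </pre>
* The first record carries the protocol and server versions, the second carries the
* search ID, limit, and (on the initial page only) the parsed statement serialized as
* JSON, and each subsequent record is a row offset followed by that row's values.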
*/
@Operation(name = HfqlConstants.HFQL_EXECUTE, manualResponse = true)
public void executeFql(
@OperationParam(name = HfqlConstants.PARAM_ACTION, typeName = "code", min = 0, max = 1)
IPrimitiveType<String> theAction,
@OperationParam(name = HfqlConstants.PARAM_QUERY, typeName = "string", min = 0, max = 1)
IPrimitiveType<String> theQuery,
@OperationParam(name = HfqlConstants.PARAM_STATEMENT, typeName = "string", min = 0, max = 1)
IPrimitiveType<String> theStatement,
@OperationParam(name = HfqlConstants.PARAM_CONTINUATION, typeName = "string", min = 0, max = 1)
IPrimitiveType<String> theContinuation,
@OperationParam(name = HfqlConstants.PARAM_LIMIT, typeName = "integer", min = 0, max = 1)
IPrimitiveType<Integer> theLimit,
@OperationParam(name = HfqlConstants.PARAM_OFFSET, typeName = "integer", min = 0, max = 1)
IPrimitiveType<Integer> theOffset,
@OperationParam(name = HfqlConstants.PARAM_FETCH_SIZE, typeName = "integer", min = 0, max = 1)
IPrimitiveType<Integer> theFetchSize,
@OperationParam(name = HfqlConstants.PARAM_INTROSPECT_TABLE_NAME, typeName = "string", min = 0, max = 1)
IPrimitiveType<String> theIntrospectTableName,
@OperationParam(name = HfqlConstants.PARAM_INTROSPECT_COLUMN_NAME, typeName = "string", min = 0, max = 1)
IPrimitiveType<String> theIntrospectColumnName,
RequestDetails theRequestDetails,
HttpServletResponse theServletResponse)
throws IOException {
String action = toStringValue(theAction);
int fetchSize = parseFetchSize(theFetchSize);
Integer limit = parseLimit(theLimit);
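// Dispatch on the requested action: initial search, paging continuation, or table/column introspection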
switch (action) {
case HfqlConstants.PARAM_ACTION_SEARCH: {
String query = toStringValue(theQuery);
IHfqlExecutionResult outcome = getHfqlExecutor().executeInitialSearch(query, limit, theRequestDetails);
streamResponseCsv(theServletResponse, fetchSize, outcome, true, outcome.getStatement());
break;
}
case HfqlConstants.PARAM_ACTION_SEARCH_CONTINUATION: {
String continuation = toStringValue(theContinuation);
ValidateUtil.isTrueOrThrowInvalidRequest(
theOffset != null && theOffset.hasValue(), "No offset supplied");
int startingOffset = theOffset.getValue();
String statement = DatatypeUtil.toStringValue(theStatement);
ValidateUtil.isNotBlankOrThrowIllegalArgument(statement, "No statement provided");
HfqlStatement statementJson = JsonUtil.deserialize(statement, HfqlStatement.class);
IHfqlExecutionResult outcome = myHfqlExecutor.executeContinuation(
statementJson, continuation, startingOffset, limit, theRequestDetails);
streamResponseCsv(theServletResponse, fetchSize, outcome, false, outcome.getStatement());
break;
}
case HfqlConstants.PARAM_ACTION_INTROSPECT_TABLES: {
IHfqlExecutionResult outcome = myHfqlExecutor.introspectTables();
streamResponseCsv(theServletResponse, fetchSize, outcome, true, outcome.getStatement());
break;
}
case HfqlConstants.PARAM_ACTION_INTROSPECT_COLUMNS: {
String tableName = toStringValue(theIntrospectTableName);
String columnName = toStringValue(theIntrospectColumnName);
IHfqlExecutionResult outcome = myHfqlExecutor.introspectColumns(tableName, columnName);
streamResponseCsv(theServletResponse, fetchSize, outcome, true, outcome.getStatement());
break;
}
}
}
@Nullable
private static Integer parseLimit(IPrimitiveType<Integer> theLimit) {
Integer limit = null;
if (theLimit != null) {
limit = theLimit.getValue();
}
return limit;
}
private static int parseFetchSize(IPrimitiveType<Integer> theFetchSize) {
int fetchSize = HfqlConstants.DEFAULT_FETCH_SIZE;
if (theFetchSize != null && theFetchSize.getValue() != null) {
fetchSize = theFetchSize.getValue();
}
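// A fetch size of zero is a sentinel meaning "use the maximum allowed"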
if (fetchSize == 0) {
fetchSize = HfqlConstants.MAX_FETCH_SIZE;
}
ValidateUtil.isTrueOrThrowInvalidRequest(
fetchSize >= HfqlConstants.MIN_FETCH_SIZE && fetchSize <= HfqlConstants.MAX_FETCH_SIZE,
"Fetch size must be between %d and %d",
HfqlConstants.MIN_FETCH_SIZE,
HfqlConstants.MAX_FETCH_SIZE);
return fetchSize;
}
private static void streamResponseCsv(
HttpServletResponse theServletResponse,
int theFetchSize,
IHfqlExecutionResult theResult,
boolean theInitialPage,
HfqlStatement theStatement)
throws IOException {
theServletResponse.setStatus(200);
theServletResponse.setContentType(CT_TEXT_CSV + CHARSET_UTF8_CTSUFFIX);
try (ServletOutputStream outputStream = theServletResponse.getOutputStream()) {
Appendable out = new OutputStreamWriter(outputStream, StandardCharsets.UTF_8);
CSVPrinter csvWriter = new CSVPrinter(out, CSV_FORMAT);
// Protocol version
csvWriter.printRecord(HfqlConstants.PROTOCOL_VERSION, "HAPI FHIR " + VersionUtil.getVersion());
// Search ID, Limit, Parsed FQL Statement
String searchId = theResult.getSearchId();
String parsedFqlStatement = "";
if (theInitialPage && theStatement != null) {
parsedFqlStatement = JsonUtil.serialize(theStatement, false);
}
csvWriter.printRecord(searchId, theResult.getLimit(), parsedFqlStatement);
// Print the rows
int recordCount = 0;
while (recordCount++ < theFetchSize && theResult.hasNext()) {
IHfqlExecutionResult.Row nextRow = theResult.getNextRow();
csvWriter.print(nextRow.getRowOffset());
csvWriter.printRecord(nextRow.getRowValues());
}
csvWriter.close(true);
}
}
@Nonnull
public static Parameters newQueryRequestParameters(String theSql, Integer theLimit, int theFetchSize) {
Parameters input = new Parameters();
input.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_SEARCH));
input.addParameter(HfqlConstants.PARAM_QUERY, new StringType(theSql));
if (theLimit != null) {
input.addParameter(HfqlConstants.PARAM_LIMIT, new IntegerType(theLimit));
}
input.addParameter(HfqlConstants.PARAM_FETCH_SIZE, new IntegerType(theFetchSize));
return input;
}
}


@@ -0,0 +1,42 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.provider;
import ca.uhn.fhir.jpa.fql.executor.HfqlExecutor;
import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
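/**
 * Spring configuration that registers the HFQL executor and its REST provider. Both
 * beans are lazy, so they are only instantiated if HFQL is actually used.
 */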
@Configuration
public class HfqlRestProviderCtxConfig {
@Bean
@Lazy
public IHfqlExecutor fqlExecutor() {
return new HfqlExecutor();
}
@Bean
@Lazy
public HfqlRestProvider fqlRestProvider() {
return new HfqlRestProvider();
}
}


@@ -0,0 +1,48 @@
/*-
* #%L
* HAPI FHIR JPA Server - HFQL Driver
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.fql.util;
public class HfqlConstants {
public static final String HFQL_EXECUTE = "$hfql-execute";
public static final String PARAM_QUERY = "query";
public static final String PARAM_STATEMENT = "statement";
public static final String PARAM_CONTINUATION = "continuation";
public static final String PARAM_LIMIT = "limit";
public static final String PARAM_OFFSET = "offset";
public static final String PARAM_FETCH_SIZE = "fetchSize";
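/**
 * Version of the client/server wire protocol; sent as the first CSV record of every response
 */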
public static final String PROTOCOL_VERSION = "1";
public static final String PARAM_ACTION = "action";
public static final String PARAM_ACTION_SEARCH = "search";
public static final String PARAM_ACTION_SEARCH_CONTINUATION = "searchContinuation";
public static final String PARAM_ACTION_INTROSPECT_TABLES = "introspectTables";
public static final String PARAM_ACTION_INTROSPECT_COLUMNS = "introspectColumns";
public static final int MIN_FETCH_SIZE = 1;
public static final int DEFAULT_FETCH_SIZE = 1000;
public static final int MAX_FETCH_SIZE = 10000;
public static final String PARAM_INTROSPECT_TABLE_NAME = "introspectTableName";
public static final String PARAM_INTROSPECT_COLUMN_NAME = "introspectColumnName";
/**
* This is the maximum number of results that can be sorted or grouped on
*/
public static final int ORDER_AND_GROUP_LIMIT = 10000;
private HfqlConstants() {}
}


@@ -0,0 +1,184 @@
package ca.uhn.fhir.jpa.fql.jdbc;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.fql.executor.HfqlDataTypeEnum;
import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult;
import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutor;
import ca.uhn.fhir.jpa.fql.executor.StaticHfqlExecutionResult;
import ca.uhn.fhir.jpa.fql.parser.HfqlStatement;
import ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider;
import ca.uhn.fhir.jpa.fql.util.HfqlConstants;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.test.utilities.server.RestfulServerExtension;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.util.Base64Utils;
import javax.annotation.Nonnull;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
public class HfqlRestClientTest {
private static final FhirContext ourCtx = FhirContext.forR4Cached();
private static final String USERNAME = "some-username";
private static final String PASSWORD = "some-password";
private static final HeaderCaptureInterceptor ourHeaderCaptureInterceptor = new HeaderCaptureInterceptor();
@Mock
private IHfqlExecutor myFqlExecutor;
@Mock
private IHfqlExecutionResult myMockFqlResult0;
@Mock
private IHfqlExecutionResult myMockFqlResult1;
@InjectMocks
private static final HfqlRestProvider ourProvider = new HfqlRestProvider();
@RegisterExtension
public static final RestfulServerExtension ourServer = new RestfulServerExtension(ourCtx)
.registerProvider(ourProvider)
.registerInterceptor(ourHeaderCaptureInterceptor);
@Captor
private ArgumentCaptor<String> myStatementCaptor;
@Captor
private ArgumentCaptor<RequestDetails> myRequestDetailsCaptor;
@Captor
private ArgumentCaptor<Integer> myLimitCaptor;
private HfqlRestClient myClient;
@BeforeEach
public void beforeEach() {
ourHeaderCaptureInterceptor.clear();
myClient = new HfqlRestClient(ourServer.getBaseUrl(), USERNAME, PASSWORD);
}
@AfterEach
public void afterEach() {
myClient.close();
}
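/**
 * With a fetch size of 2, the client should consume the first page and then issue
 * continuation requests against the same search ID (at offsets 4 and 8 here) until a
 * page comes back with no rows.
 */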
@Test
public void testExecuteSearchAndContinuation() throws SQLException {
String sql = "from Patient select name.family, name.given where name.family = 'Simpson'";
String searchId = "my-search-id";
HfqlStatement statement = createFakeStatement();
when(myMockFqlResult0.getStatement()).thenReturn(statement);
when(myMockFqlResult0.hasNext()).thenReturn(true, true, true);
when(myMockFqlResult0.getNextRow()).thenReturn(
new IHfqlExecutionResult.Row(0, List.of("Simpson", "Homer")),
new IHfqlExecutionResult.Row(3, List.of("Simpson", "Marge")),
// Fetch size is 2 so this one shouldn't get returned in the first pass
new IHfqlExecutionResult.Row(5, List.of("Simpson", "Maggie"))
);
when(myMockFqlResult0.getSearchId()).thenReturn(searchId);
when(myMockFqlResult0.getLimit()).thenReturn(123);
when(myFqlExecutor.executeInitialSearch(eq(sql), any(), any())).thenReturn(myMockFqlResult0);
when(myMockFqlResult1.getStatement()).thenReturn(statement);
when(myMockFqlResult1.hasNext()).thenReturn(true, true, false);
when(myMockFqlResult1.getNextRow()).thenReturn(
new IHfqlExecutionResult.Row(5, List.of("Simpson", "Maggie")),
new IHfqlExecutionResult.Row(7, List.of("Simpson", "Lisa"))
);
when(myMockFqlResult1.getSearchId()).thenReturn(searchId);
when(myMockFqlResult1.getLimit()).thenReturn(123);
when(myFqlExecutor.executeContinuation(any(), eq(searchId), eq(4), eq(123), any())).thenReturn(myMockFqlResult1);
when(myFqlExecutor.executeContinuation(any(), eq(searchId), eq(8), eq(123), any())).thenReturn(new StaticHfqlExecutionResult(searchId));
Parameters input = new Parameters();
input.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_SEARCH));
input.addParameter(HfqlConstants.PARAM_QUERY, new StringType(sql));
input.addParameter(HfqlConstants.PARAM_LIMIT, new IntegerType(123));
input.addParameter(HfqlConstants.PARAM_FETCH_SIZE, new IntegerType(2));
IHfqlExecutionResult result = myClient.execute(input, true, 2);
IHfqlExecutionResult.Row nextRow;
assertTrue(result.hasNext());
nextRow = result.getNextRow();
assertEquals(0, nextRow.getRowOffset());
assertThat(nextRow.getRowValues(), contains("Simpson", "Homer"));
assertTrue(result.hasNext());
nextRow = result.getNextRow();
assertEquals(3, nextRow.getRowOffset());
assertThat(nextRow.getRowValues(), contains("Simpson", "Marge"));
assertTrue(result.hasNext());
nextRow = result.getNextRow();
assertEquals(5, nextRow.getRowOffset());
assertThat(nextRow.getRowValues(), contains("Simpson", "Maggie"));
assertTrue(result.hasNext());
nextRow = result.getNextRow();
assertEquals(7, nextRow.getRowOffset());
assertThat(nextRow.getRowValues(), contains("Simpson", "Lisa"));
assertFalse(result.hasNext());
verify(myFqlExecutor, times(1)).executeInitialSearch(myStatementCaptor.capture(), myLimitCaptor.capture(), myRequestDetailsCaptor.capture());
assertEquals(sql, myStatementCaptor.getValue());
String expectedAuthHeader = Constants.HEADER_AUTHORIZATION_VALPREFIX_BASIC + Base64Utils.encodeToString((USERNAME + ":" + PASSWORD).getBytes(StandardCharsets.UTF_8));
String actual = ourHeaderCaptureInterceptor.getCapturedHeaders().get(0).get(Constants.HEADER_AUTHORIZATION).get(0);
assertEquals(expectedAuthHeader, actual);
assertEquals(123, myLimitCaptor.getValue().intValue());
}
@Nonnull
public static HfqlStatement createFakeStatement() {
HfqlStatement statement = new HfqlStatement();
statement.setFromResourceName("Patient");
statement.addSelectClause("name[0].family").setAlias("name[0].family").setDataType(HfqlDataTypeEnum.STRING);
statement.addSelectClause("name[0].given[0]").setAlias("name[0].given[0]").setDataType(HfqlDataTypeEnum.STRING);
return statement;
}
@Interceptor
public static class HeaderCaptureInterceptor {
private final List<Map<String, List<String>>> myCapturedHeaders = new ArrayList<>();
@Hook(Pointcut.SERVER_INCOMING_REQUEST_PRE_HANDLED)
public void capture(ServletRequestDetails theServletRequestDetails) {
myCapturedHeaders.add(theServletRequestDetails.getHeaders());
}
public void clear() {
myCapturedHeaders.clear();
}
public List<Map<String, List<String>>> getCapturedHeaders() {
return myCapturedHeaders;
}
}
}


@@ -0,0 +1,327 @@
package ca.uhn.fhir.jpa.fql.jdbc;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.fql.executor.HfqlDataTypeEnum;
import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult;
import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutor;
import ca.uhn.fhir.jpa.fql.parser.HfqlStatement;
import ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.test.utilities.server.RestfulServerExtension;
import com.google.common.collect.Lists;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.DateType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.jdbc.UncategorizedSQLException;
import org.springframework.jdbc.core.ColumnMapRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.util.Base64Utils;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.List;
import java.util.Map;
import static ca.uhn.fhir.jpa.fql.jdbc.HfqlRestClientTest.createFakeStatement;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@SuppressWarnings({"SqlDialectInspection", "SqlNoDataSourceInspection"})
@ExtendWith(MockitoExtension.class)
public class JdbcDriverTest {
public static final String SOME_USERNAME = "some-username";
public static final String SOME_PASSWORD = "some-password";
private static final FhirContext ourCtx = FhirContext.forR4Cached();
private static final HfqlRestClientTest.HeaderCaptureInterceptor ourHeaderCaptureInterceptor = new HfqlRestClientTest.HeaderCaptureInterceptor();
@Mock
private IHfqlExecutor myFqlExecutor;
@Mock
private IHfqlExecutionResult myMockFqlResult;
@InjectMocks
private HfqlRestProvider myProvider = new HfqlRestProvider();
@RegisterExtension
public RestfulServerExtension myServer = new RestfulServerExtension(ourCtx)
.registerProvider(myProvider)
.registerInterceptor(ourHeaderCaptureInterceptor);
private BasicDataSource myDs;
@BeforeEach
public void beforeEach() throws SQLException {
JdbcDriver.load();
myDs = new BasicDataSource();
myDs.setUrl(JdbcDriver.URL_PREFIX + myServer.getBaseUrl());
myDs.setUsername(SOME_USERNAME);
myDs.setPassword(SOME_PASSWORD);
myDs.start();
ourHeaderCaptureInterceptor.clear();
}
@AfterEach
public void afterEach() throws SQLException {
myDs.close();
JdbcDriver.unload();
}
@Test
public void testExecuteStatement() {
HfqlStatement statement = createFakeStatement();
when(myFqlExecutor.executeInitialSearch(any(), any(), any())).thenReturn(myMockFqlResult);
when(myMockFqlResult.getStatement()).thenReturn(statement);
when(myMockFqlResult.hasNext()).thenReturn(true, true, false);
when(myMockFqlResult.getNextRow()).thenReturn(
new IHfqlExecutionResult.Row(0, List.of("Simpson", "Homer")),
new IHfqlExecutionResult.Row(3, List.of("Simpson", "Marge"))
);
when(myMockFqlResult.getSearchId()).thenReturn("my-search-id");
when(myMockFqlResult.getLimit()).thenReturn(999);
String input = """
from Patient
select name.family, name.given
""";
JdbcTemplate jdbcTemplate = new JdbcTemplate(myDs);
List<Map<String, Object>> outcome = jdbcTemplate.query(input, new ColumnMapRowMapper());
assertEquals(2, outcome.size());
String expectedAuthHeader = Constants.HEADER_AUTHORIZATION_VALPREFIX_BASIC + Base64Utils.encodeToString((SOME_USERNAME + ":" + SOME_PASSWORD).getBytes(StandardCharsets.UTF_8));
String actual = ourHeaderCaptureInterceptor.getCapturedHeaders().get(0).get(Constants.HEADER_AUTHORIZATION).get(0);
assertEquals(expectedAuthHeader, actual);
}
@Test
public void testExecuteStatement_ReturnsError() {
String errorMessage = "this is an error!";
HfqlStatement statement = createFakeStatement();
when(myFqlExecutor.executeInitialSearch(any(), any(), any())).thenReturn(myMockFqlResult);
when(myMockFqlResult.getStatement()).thenReturn(statement);
when(myMockFqlResult.hasNext()).thenReturn(true, false);
when(myMockFqlResult.getNextRow()).thenReturn(
new IHfqlExecutionResult.Row(IHfqlExecutionResult.ROW_OFFSET_ERROR, List.of(errorMessage))
);
when(myMockFqlResult.getSearchId()).thenReturn("my-search-id");
when(myMockFqlResult.getLimit()).thenReturn(999);
String input = """
from Patient
select name.family, name.given
""";
JdbcTemplate jdbcTemplate = new JdbcTemplate(myDs);
try {
jdbcTemplate.query(input, new ColumnMapRowMapper());
fail();
} catch (UncategorizedSQLException e) {
assertEquals(SQLException.class, e.getCause().getClass());
assertEquals(Msg.code(2395) + "this is an error!", e.getCause().getMessage());
}
}
@Test
public void testDataTypes() throws SQLException {
// Setup
HfqlStatement hfqlStatement = new HfqlStatement();
hfqlStatement.setFromResourceName("Patient");
hfqlStatement.addSelectClauseAndAlias("col.string").setDataType(HfqlDataTypeEnum.STRING);
hfqlStatement.addSelectClauseAndAlias("col.date").setDataType(HfqlDataTypeEnum.DATE);
hfqlStatement.addSelectClauseAndAlias("col.boolean").setDataType(HfqlDataTypeEnum.BOOLEAN);
hfqlStatement.addSelectClauseAndAlias("col.time").setDataType(HfqlDataTypeEnum.TIME);
hfqlStatement.addSelectClauseAndAlias("col.decimal").setDataType(HfqlDataTypeEnum.DECIMAL);
hfqlStatement.addSelectClauseAndAlias("col.integer").setDataType(HfqlDataTypeEnum.INTEGER);
hfqlStatement.addSelectClauseAndAlias("col.longint").setDataType(HfqlDataTypeEnum.LONGINT);
hfqlStatement.addSelectClauseAndAlias("col.timestamp").setDataType(HfqlDataTypeEnum.TIMESTAMP);
when(myMockFqlResult.getStatement()).thenReturn(hfqlStatement);
when(myFqlExecutor.executeInitialSearch(any(), any(), any())).thenReturn(myMockFqlResult);
when(myMockFqlResult.hasNext()).thenReturn(true, false);
when(myMockFqlResult.getNextRow()).thenReturn(
new IHfqlExecutionResult.Row(0, List.of("a-string", "2023-02-02", "true", "12:23:22", "100.123", "123", "987", "2023-02-12T10:01:02.234Z"))
);
when(myMockFqlResult.getSearchId()).thenReturn("my-search-id");
when(myMockFqlResult.getLimit()).thenReturn(999);
String input = """
select col.string, col.date, col.boolean, col.time, col.decimal, col.integer, col.longint, col.timestamp
from Patient
""";
// Test
Connection connection = myDs.getConnection();
Statement statement = connection.createStatement();
assertTrue(statement.execute(input));
ResultSet resultSet = statement.getResultSet();
// Verify
assertTrue(resultSet.next());
assertEquals("a-string", resultSet.getString("col.string"));
assertEquals(new DateType("2023-02-02").getValue(), resultSet.getDate("col.date"));
assertEquals(true, resultSet.getBoolean("col.boolean"));
assertEquals("12:23:22", resultSet.getTime("col.time").toString());
assertEquals(new BigDecimal("100.123"), resultSet.getBigDecimal("col.decimal"));
assertEquals(new BigDecimal("100.123"), resultSet.getBigDecimal("col.decimal", 100));
assertEquals(100.123f, resultSet.getFloat("col.decimal"));
assertEquals(100.123d, resultSet.getDouble("col.decimal"));
assertEquals(123, resultSet.getInt("col.integer"));
assertEquals(987L, resultSet.getLong("col.longint"));
assertEquals(new Timestamp(new DateTimeType("2023-02-12T10:01:02.234Z").getValue().getTime()), resultSet.getTimestamp("col.timestamp"));
// Using getObject
assertEquals("a-string", resultSet.getObject("col.string"));
assertEquals(new DateType("2023-02-02").getValue(), resultSet.getObject("col.date"));
assertEquals(true, resultSet.getObject("col.boolean"));
assertEquals("12:23:22", resultSet.getObject("col.time").toString());
assertEquals(new BigDecimal("100.123"), resultSet.getObject("col.decimal"));
assertEquals(123, resultSet.getObject("col.integer"));
assertEquals(987L, resultSet.getObject("col.longint"));
assertEquals(new Timestamp(new DateTimeType("2023-02-12T10:01:02.234Z").getValue().getTime()), resultSet.getObject("col.timestamp"));
assertThrows(SQLException.class, () -> resultSet.getString(0));
assertThrows(SQLException.class, () -> resultSet.getString(999));
assertThrows(SQLException.class, () -> resultSet.getString("foo"));
}
@Test
public void testDatatypes_TimestampPrecision() throws SQLException {
// Setup
when(myFqlExecutor.executeInitialSearch(any(), any(), any())).thenReturn(myMockFqlResult);
HfqlStatement fakeStatement = createFakeStatement();
fakeStatement.getSelectClauses().clear();
fakeStatement.addSelectClause("col.time").setAlias("col.time").setDataType(HfqlDataTypeEnum.TIME);
when(myMockFqlResult.getStatement()).thenReturn(fakeStatement);
when(myMockFqlResult.hasNext()).thenReturn(true, true, true, true, true, false);
when(myMockFqlResult.getNextRow()).thenReturn(
new IHfqlExecutionResult.Row(0, List.of("12:23")),
new IHfqlExecutionResult.Row(1, List.of("12:23:10")),
new IHfqlExecutionResult.Row(2, List.of("12:23:11.0")),
new IHfqlExecutionResult.Row(3, List.of("12:23:12.12")),
new IHfqlExecutionResult.Row(4, List.of("12:23:13.123"))
);
when(myMockFqlResult.getSearchId()).thenReturn("my-search-id");
when(myMockFqlResult.getLimit()).thenReturn(999);
String input = "select col.time from Patient";
// Test
Connection connection = myDs.getConnection();
Statement statement = connection.createStatement();
assertTrue(statement.execute(input));
ResultSet resultSet = statement.getResultSet();
// Verify
assertTrue(resultSet.next());
assertEquals("12:23:00", resultSet.getTime("col.time").toString());
assertTrue(resultSet.next());
assertEquals("12:23:10", resultSet.getTime("col.time").toString());
assertTrue(resultSet.next());
assertEquals("12:23:11", resultSet.getTime("col.time").toString());
assertTrue(resultSet.next());
assertEquals("12:23:12", resultSet.getTime("col.time").toString());
assertTrue(resultSet.next());
assertEquals("12:23:13", resultSet.getTime("col.time").toString());
assertFalse(resultSet.next());
verify(myFqlExecutor, times(1)).executeInitialSearch(any(), any(), any());
verify(myFqlExecutor, times(0)).executeContinuation(any(), any(), anyInt(), any(), any());
}
@Test
public void testIntrospectTables() throws SQLException {
when(myFqlExecutor.introspectTables()).thenReturn(myMockFqlResult);
HfqlStatement statement = new HfqlStatement();
statement.addSelectClause("TABLE_NAME").setAlias("TABLE_NAME").setDataType(HfqlDataTypeEnum.STRING);
when(myMockFqlResult.getStatement()).thenReturn(statement);
when(myMockFqlResult.hasNext()).thenReturn(true, false);
when(myMockFqlResult.getNextRow()).thenReturn(new IHfqlExecutionResult.Row(0, List.of("Account")));
Connection connection = myDs.getConnection();
DatabaseMetaData metadata = connection.getMetaData();
ResultSet tables = metadata.getTables(null, null, null, null);
assertTrue(tables.isBeforeFirst());
assertTrue(tables.next());
assertFalse(tables.isBeforeFirst());
assertEquals("Account", tables.getString(1));
assertEquals("Account", tables.getString("TABLE_NAME"));
}
@Test
public void testIntrospectColumns() throws SQLException {
when(myFqlExecutor.introspectColumns(any(), any())).thenReturn(myMockFqlResult);
HfqlStatement statement = new HfqlStatement();
statement.addSelectClauseAndAlias("COLUMN_NAME").setDataType(HfqlDataTypeEnum.STRING);
statement.addSelectClauseAndAlias("DATA_TYPE").setDataType(HfqlDataTypeEnum.INTEGER);
when(myMockFqlResult.getStatement()).thenReturn(statement);
when(myMockFqlResult.hasNext()).thenReturn(true, true, false);
when(myMockFqlResult.getNextRow()).thenReturn(
new IHfqlExecutionResult.Row(0, Lists.newArrayList("foo", Types.VARCHAR)),
new IHfqlExecutionResult.Row(1, Lists.newArrayList("bar", null))
);
Connection connection = myDs.getConnection();
DatabaseMetaData metadata = connection.getMetaData();
ResultSet tables = metadata.getColumns(null, null, null, null);
// Row 1
assertTrue(tables.next());
assertEquals("foo", tables.getString(1));
assertEquals("foo", tables.getString("COLUMN_NAME"));
assertFalse(tables.wasNull());
assertEquals(Types.VARCHAR, tables.getInt(2));
assertEquals(Types.VARCHAR, tables.getInt("DATA_TYPE"));
assertFalse(tables.wasNull());
// Row 2
assertTrue(tables.next());
assertEquals("bar", tables.getString(1));
assertEquals("bar", tables.getString("COLUMN_NAME"));
assertEquals(0, tables.getInt(2));
assertEquals(0, tables.getInt("DATA_TYPE"));
assertTrue(tables.wasNull());
// No more rows
assertFalse(tables.next());
// Invalid columns
assertThrows(SQLException.class, () -> tables.getString(0));
assertThrows(SQLException.class, () -> tables.getString(999));
assertThrows(SQLException.class, () -> tables.getString("foo"));
}
@Test
public void testMetadata_ImportedAndExportedKeys() throws SQLException {
Connection connection = myDs.getConnection();
DatabaseMetaData metadata = connection.getMetaData();
assertFalse(metadata.getImportedKeys(null, null, null).next());
assertFalse(metadata.getExportedKeys(null, null, null).next());
}
}


@@ -0,0 +1,60 @@
package ca.uhn.fhir.jpa.fql.parser;
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimePrimitiveDatatypeDefinition;
import ca.uhn.fhir.jpa.fql.executor.HfqlDataTypeEnum;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
public class HfqlFhirPathParserTest {
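/**
 * Paths that can still return multiple values are typed as JSON (a collection), while
 * paths narrowed to a single element (e.g. by an index or first()) resolve to a
 * concrete primitive type.
 */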
@ParameterizedTest
@CsvSource(value = {
// Good
"Patient , Patient.name.family , JSON",
"Patient , Patient.name[0].family , STRING",
"Patient , Patient.name.family[0] , JSON",
"Patient , Patient.name[0].family[0] , STRING",
"Patient , Patient.name.given.getValue().is(System.string) , JSON",
"Patient , Patient.name.given.getValue().is(System.string).first() , STRING",
"Patient , Patient.identifier.where(system='foo').system , JSON",
"Patient , Patient.identifier.where(system='foo').first().system , STRING",
"Observation , Observation.value.ofType(Quantity).value , DECIMAL",
"Patient , name.family , JSON",
"Patient , name[0].family[0] , STRING",
"Patient , name.given.getValue().is(System.string) , JSON",
"Patient , identifier.where(system='foo').system , JSON",
"Patient , identifier[0].where(system='foo').system , STRING",
"Observation , value.ofType(Quantity).value , DECIMAL",
"Patient , Patient.meta.versionId.toInteger() , INTEGER",
"Patient , Patient.identifier , JSON",
// Bad
"Patient , foo , ",
})
void testDetermineDatatypeForPath(String theResourceType, String theFhirPath, HfqlDataTypeEnum theExpectedType) {
HfqlFhirPathParser svc = new HfqlFhirPathParser(FhirContext.forR4Cached());
HfqlDataTypeEnum actual = svc.determineDatatypeForPath(theResourceType, theFhirPath);
assertEquals(theExpectedType, actual);
}
@Test
void testAllFhirDataTypesHaveMappings() {
FhirContext ctx = FhirContext.forR5Cached();
int foundCount = 0;
for (BaseRuntimeElementDefinition<?> next : ctx.getElementDefinitions()) {
if (next instanceof RuntimePrimitiveDatatypeDefinition) {
assertNotNull(HfqlFhirPathParser.getHfqlDataTypeForFhirType(next.getName()), () -> "No mapping for type: " + next.getName());
foundCount++;
}
}
assertEquals(21, foundCount);
}
}


@@ -0,0 +1,166 @@
package ca.uhn.fhir.jpa.fql.parser;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
public class HfqlLexerTest {
@Test
void testSimpleStatement() {
String input = """
from Patient
select
name.given[0],
name.family
""";
List<String> allTokens = new HfqlLexer(input).allTokens();
assertThat(allTokens, contains(
"from", "Patient", "select", "name.given[0]", ",", "name.family"
));
}
@Test
void testSelectStar() {
String input = """
from Patient
select
*
""";
List<String> allTokens = new HfqlLexer(input).allTokens();
assertThat(allTokens, contains(
"from", "Patient", "select", "*"
));
}
@Test
void testQuotedString() {
String input = """
from
Patient
where
name.given = 'Foo \\' Chalmers'
select
name.given[0],\s
name.family
""";
List<String> allTokens = new HfqlLexer(input).allTokens();
assertThat(allTokens, contains(
"from", "Patient", "where",
"name.given", "=", "'Foo ' Chalmers'",
"select", "name.given[0]",
",", "name.family"
));
}
@Test
void testSearchParamWithQualifiers() {
String input = """
from
Patient
search
_has:Observation:subject:device.identifier='1234-5'
select
name.family
""";
HfqlLexer hfqlLexer = new HfqlLexer(input);
assertEquals("from", hfqlLexer.getNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken());
assertEquals("Patient", hfqlLexer.getNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken());
assertEquals("search", hfqlLexer.getNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken());
assertEquals("_has:Observation:subject:device.identifier", hfqlLexer.getNextToken(HfqlLexerOptions.SEARCH_PARAMETER_NAME).getToken());
assertEquals("=", hfqlLexer.getNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken());
assertEquals("'1234-5'", hfqlLexer.getNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken());
assertEquals("select", hfqlLexer.getNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken());
assertEquals("name.family", hfqlLexer.getNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken());
}
@Test
void testInList() {
String input = """
from StructureDefinition
where url in ('foo' | 'bar')
select
Name: name,
URL: url
""";
List<String> allTokens = new HfqlLexer(input).allTokens();
assertThat(allTokens, contains(
"from", "StructureDefinition", "where",
"url", "in", "(", "'foo'", "|", "'bar'", ")",
"select",
"Name", ":", "name", ",",
"URL", ":", "url"
));
}
@Test
void testFhirPathSelector() {
String input = """
from Patient
select
( Observation.value.ofType ( Quantity ) ).unit,
name.family.length()
""";
HfqlLexer lexer = new HfqlLexer(input);
assertEquals("from", lexer.getNextToken().getToken());
assertEquals("Patient", lexer.getNextToken().getToken());
assertEquals("select", lexer.getNextToken().getToken());
assertEquals("( Observation.value.ofType ( Quantity ) ).unit", lexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION).getToken());
assertEquals(",", lexer.getNextToken().getToken());
assertEquals("name.family.length()", lexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION).getToken());
}
@Test
void testOptionChangeIsRespected() {
// Setup
String input = """
from Patient
select
( Observation.value.ofType ( Quantity ) ).unit,
name.family.length()
""";
HfqlLexer lexer = new HfqlLexer(input);
assertEquals("from", lexer.getNextToken().getToken());
assertEquals("Patient", lexer.getNextToken().getToken());
assertEquals("select", lexer.getNextToken().getToken());
// Test + Verify
assertEquals("(", lexer.peekNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken());
assertEquals("( Observation.value.ofType ( Quantity ) ).unit", lexer.peekNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION).getToken());
assertEquals("(", lexer.peekNextToken(HfqlLexerOptions.HFQL_TOKEN).getToken());
assertEquals("( Observation.value.ofType ( Quantity ) ).unit", lexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION).getToken());
}
@ParameterizedTest
@CsvSource({
"token1 token2 'token3, HFQL_TOKEN",
"foo.bar(blah, FHIRPATH_EXPRESSION",
"foo.bar((blah.baz), FHIRPATH_EXPRESSION",
})
void testIncompleteFragment_String(String theInput, HfqlLexerOptions theOptions) {
HfqlLexer lexer = new HfqlLexer(theInput);
try {
while (lexer.hasNextToken(theOptions)) {
lexer.consumeNextToken();
}
fail();
} catch (InvalidRequestException e) {
assertThat(e.getMessage(), containsString("Unexpected end of string"));
}
}
}


@@ -0,0 +1,615 @@
package ca.uhn.fhir.jpa.fql.parser;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.DataFormatException;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import java.util.stream.Collectors;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.empty;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
@TestMethodOrder(MethodOrderer.MethodName.class)
@SuppressWarnings("SqlDialectInspection")
public class HfqlStatementParserTest {
private static final FhirContext ourCtx = FhirContext.forR4Cached();
@Test
public void testCountAndGroup() {
String input = """
select Count(*), name.given, name.family
from Patient
group by name.given, name.family
""";
HfqlStatement statement = parse(input);
assertEquals("Patient", statement.getFromResourceName());
assertEquals(3, statement.getSelectClauses().size());
assertEquals("*", statement.getSelectClauses().get(0).getClause());
assertEquals(HfqlStatement.SelectClauseOperator.COUNT, statement.getSelectClauses().get(0).getOperator());
assertEquals("Count(*)", statement.getSelectClauses().get(0).getAlias());
assertEquals("name.given", statement.getSelectClauses().get(1).getClause());
assertEquals(HfqlStatement.SelectClauseOperator.SELECT, statement.getSelectClauses().get(1).getOperator());
assertEquals("name.family", statement.getSelectClauses().get(2).getClause());
assertEquals(HfqlStatement.SelectClauseOperator.SELECT, statement.getSelectClauses().get(2).getOperator());
assertEquals(2, statement.getGroupByClauses().size());
assertThat(statement.getGroupByClauses(), contains("name.given", "name.family"));
}
@Test
public void testFromSelect() {
String input = """
from Patient
select
name.given[0],
name.family
""";
HfqlStatement statement = parse(input);
assertEquals("Patient", statement.getFromResourceName());
assertEquals(2, statement.getSelectClauses().size());
assertEquals("name.given[0]", statement.getSelectClauses().get(0).getClause());
assertEquals("name.given[0]", statement.getSelectClauses().get(0).getAlias());
assertEquals(HfqlStatement.SelectClauseOperator.SELECT, statement.getSelectClauses().get(0).getOperator());
assertEquals("name.family", statement.getSelectClauses().get(1).getClause());
assertEquals("name.family", statement.getSelectClauses().get(1).getAlias());
}
@Test
public void testSelect_SearchMatchWithEscapedCommaInArgument() {
String input = """
select name.given
from Patient
where
id in search_match('name', 'A,B\\,B')
""";
HfqlStatement statement = parse(input);
assertEquals("Patient", statement.getFromResourceName());
assertEquals(1, statement.getSelectClauses().size());
assertEquals("name.given", statement.getSelectClauses().get(0).getClause());
assertEquals("name.given", statement.getSelectClauses().get(0).getAlias());
assertEquals(HfqlStatement.SelectClauseOperator.SELECT, statement.getSelectClauses().get(0).getOperator());
assertEquals(1, statement.getWhereClauses().size());
assertEquals("id", statement.getWhereClauses().get(0).getLeft());
assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(0).getOperator());
assertThat(statement.getWhereClauses().get(0).getRight(), contains("'name'", "'A,B\\,B'"));
}
@Test
public void testSelect_ValueWithPrefix() {
String input = """
SELECT id
FROM Observation
WHERE
id in search_match('value-quantity', 'lt500') AND
Patient.meta.versionId = '2' AND
value.ofType(string).lower().contains('running')
ORDER BY id DESC
""";
HfqlStatement statement = parse(input);
assertEquals("Observation", statement.getFromResourceName());
assertEquals(3, statement.getWhereClauses().size());
assertEquals("id", statement.getWhereClauses().get(0).getLeft());
assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(0).getOperator());
assertThat(statement.getWhereClauses().get(0).getRight(), contains("'value-quantity'", "'lt500'"));
assertEquals("Patient.meta.versionId", statement.getWhereClauses().get(1).getLeft());
assertEquals(HfqlStatement.WhereClauseOperatorEnum.EQUALS, statement.getWhereClauses().get(1).getOperator());
assertThat(statement.getWhereClauses().get(1).getRight(), contains("'2'"));
assertEquals("value.ofType(string).lower().contains('running')", statement.getWhereClauses().get(2).getLeft());
assertEquals(HfqlStatement.WhereClauseOperatorEnum.UNARY_BOOLEAN, statement.getWhereClauses().get(2).getOperator());
assertThat(statement.getWhereClauses().get(2).getRight(), empty());
assertEquals(1, statement.getOrderByClauses().size());
assertEquals("id", statement.getOrderByClauses().get(0).getClause());
assertFalse(statement.getOrderByClauses().get(0).isAscending());
}
@Test
public void testWhere_UnaryBooleanAsLastStatement() {
String input = """
SELECT id
FROM Observation
WHERE
id in search_match('code', 'http://loinc.org|34752-6')
AND
value.ofType(string).lower().contains('running')
""";
HfqlStatement statement = parse(input);
assertEquals("Observation", statement.getFromResourceName());
assertEquals(2, statement.getWhereClauses().size());
assertEquals("id", statement.getWhereClauses().get(0).getLeft());
assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(0).getOperator());
assertThat(statement.getWhereClauses().get(0).getRight(), contains("'code'", "'http://loinc.org|34752-6'"));
assertEquals("value.ofType(string).lower().contains('running')", statement.getWhereClauses().get(1).getLeft());
assertEquals(HfqlStatement.WhereClauseOperatorEnum.UNARY_BOOLEAN, statement.getWhereClauses().get(1).getOperator());
assertThat(statement.getWhereClauses().get(1).getRight(), empty());
}
@Test
public void testSelectFrom() {
String input = """
select
name.given[0],
name.family
from Patient
""";
HfqlStatement statement = parse(input);
assertEquals("Patient", statement.getFromResourceName());
assertEquals(2, statement.getSelectClauses().size());
assertEquals("name.given[0]", statement.getSelectClauses().get(0).getClause());
assertEquals("name.given[0]", statement.getSelectClauses().get(0).getAlias());
assertEquals("name.family", statement.getSelectClauses().get(1).getClause());
assertEquals("name.family", statement.getSelectClauses().get(1).getAlias());
}
@Test
public void testSelectComplexFhirPath_StringConcat() {
String input = """
SELECT FullName: Patient.name.given + ' ' + Patient.name.family
FROM Patient
""";
HfqlStatement statement = parse(input);
assertEquals("Patient", statement.getFromResourceName());
assertEquals(1, statement.getSelectClauses().size());
assertEquals("Patient.name.given + ' ' + Patient.name.family", statement.getSelectClauses().get(0).getClause());
assertEquals("FullName", statement.getSelectClauses().get(0).getAlias());
}
@Test
public void testSelectComplexFhirPath_StringConcat2() {
String input = """
SELECT
COL1: identifier[0].system + '|' + identifier[0].value,
identifier[0].system + '|' + identifier[0].value AS COL2,
identifier[0].system + '|' + identifier[0].value
FROM
Patient
""";
HfqlStatement statement = parse(input);
assertEquals("Patient", statement.getFromResourceName());
assertEquals(3, statement.getSelectClauses().size());
assertEquals("identifier[0].system + '|' + identifier[0].value", statement.getSelectClauses().get(0).getClause());
assertEquals("COL1", statement.getSelectClauses().get(0).getAlias());
assertEquals("identifier[0].system + '|' + identifier[0].value", statement.getSelectClauses().get(1).getClause());
assertEquals("COL2", statement.getSelectClauses().get(1).getAlias());
assertEquals("identifier[0].system + '|' + identifier[0].value", statement.getSelectClauses().get(2).getClause());
assertEquals("identifier[0].system + '|' + identifier[0].value", statement.getSelectClauses().get(2).getAlias());
}
@Test
public void testSelectDuplicateColumnsWithNoAlias() {
String input = """
SELECT
name, name, name
FROM
Patient
""";
HfqlStatement statement = parse(input);
assertEquals("Patient", statement.getFromResourceName());
assertEquals(3, statement.getSelectClauses().size());
assertEquals("name", statement.getSelectClauses().get(0).getClause());
assertEquals("name", statement.getSelectClauses().get(0).getAlias());
assertEquals("name", statement.getSelectClauses().get(1).getClause());
assertEquals("name2", statement.getSelectClauses().get(1).getAlias());
assertEquals("name", statement.getSelectClauses().get(2).getClause());
assertEquals("name3", statement.getSelectClauses().get(2).getAlias());
}
@Test
public void testSelectAs() {
String input = """
SELECT Patient.name.given + ' ' + Patient.name.family as FullName
FROM Patient
""";
HfqlStatement statement = parse(input);
assertEquals("Patient", statement.getFromResourceName());
assertEquals(1, statement.getSelectClauses().size());
assertEquals("Patient.name.given + ' ' + Patient.name.family", statement.getSelectClauses().get(0).getClause());
assertEquals("FullName", statement.getSelectClauses().get(0).getAlias());
}
@Test
public void testSelectWhere_GreaterThan() {
String input = """
select id
from Observation
where
value.ofType(Quantity).value > 100
""";
HfqlStatement statement = parse(input);
assertEquals(1, statement.getWhereClauses().size());
assertEquals("value.ofType(Quantity).value > 100", statement.getWhereClauses().get(0).getLeft());
assertEquals(HfqlStatement.WhereClauseOperatorEnum.UNARY_BOOLEAN, statement.getWhereClauses().get(0).getOperator());
assertEquals(0, statement.getWhereClauses().get(0).getRight().size());
}
@Test
public void testSelectOrderBy() {
String input = """
select id, name.family
from Observation
order by name.family, count(*)
""";
HfqlStatement statement = parse(input);
assertThat(statement.getSelectClauses().stream().map(t -> t.getAlias()).collect(Collectors.toList()), contains(
"id", "name.family"
));
assertEquals(2, statement.getOrderByClauses().size());
assertEquals("name.family", statement.getOrderByClauses().get(0).getClause());
assertTrue(statement.getOrderByClauses().get(0).isAscending());
assertEquals("count(*)", statement.getOrderByClauses().get(1).getClause());
assertTrue(statement.getOrderByClauses().get(1).isAscending());
}
@Test
public void testSelectOrderBy_Directional() {
String input = """
select id, name.family
from Observation
order by name.family DESC, id ASC
""";
HfqlStatement statement = parse(input);
assertThat(statement.getSelectClauses().stream().map(t -> t.getAlias()).collect(Collectors.toList()), contains(
"id", "name.family"
));
assertEquals(2, statement.getOrderByClauses().size());
assertEquals("name.family", statement.getOrderByClauses().get(0).getClause());
assertFalse(statement.getOrderByClauses().get(0).isAscending());
assertEquals("id", statement.getOrderByClauses().get(1).getClause());
assertTrue(statement.getOrderByClauses().get(1).isAscending());
}
private HfqlStatement parse(String theInput) {
return new HfqlStatementParser(ourCtx, theInput).parse();
}
@Test
public void testFromWhereSelect() {
String input = """
from
Patient
where
name.given = 'Foo \\' Chalmers' and
name.family = 'blah'
select
name.given[0],
name.family
""";
HfqlStatement statement = parse(input);
assertEquals("Patient", statement.getFromResourceName());
assertEquals(2, statement.getSelectClauses().size());
assertEquals("name.given[0]", statement.getSelectClauses().get(0).getClause());
assertEquals("name.family", statement.getSelectClauses().get(1).getClause());
assertEquals(2, statement.getWhereClauses().size());
assertEquals("name.given", statement.getWhereClauses().get(0).getLeft());
assertEquals(HfqlStatement.WhereClauseOperatorEnum.EQUALS, statement.getWhereClauses().get(0).getOperator());
assertThat(statement.getWhereClauses().get(0).getRight(), contains("'Foo ' Chalmers'"));
assertEquals("name.family", statement.getWhereClauses().get(1).getLeft());
assertThat(statement.getWhereClauses().get(1).getRight(), contains("'blah'"));
assertEquals(HfqlStatement.WhereClauseOperatorEnum.EQUALS, statement.getWhereClauses().get(1).getOperator());
}
@Test
public void testFromSearchWhereSelect() {
String input = """
from
Observation
where
subject.name in ('foo' | 'bar')
and
id in search_match('_id', '123')
and
status = 'final'
select
id
""";
HfqlStatement statement = parse(input);
assertEquals("Observation", statement.getFromResourceName());
assertEquals(1, statement.getSelectClauses().size());
assertEquals("id", statement.getSelectClauses().get(0).getClause());
assertEquals(3, statement.getWhereClauses().size());
assertEquals("subject.name", statement.getWhereClauses().get(0).getLeft());
assertEquals(HfqlStatement.WhereClauseOperatorEnum.IN, statement.getWhereClauses().get(0).getOperator());
assertThat(statement.getWhereClauses().get(0).getRight(), contains("'foo'", "'bar'"));
assertEquals("id", statement.getWhereClauses().get(1).getLeft());
assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(1).getOperator());
assertThat(statement.getWhereClauses().get(1).getRight(), contains("'_id'", "'123'"));
assertEquals("status", statement.getWhereClauses().get(2).getLeft());
assertEquals(HfqlStatement.WhereClauseOperatorEnum.EQUALS, statement.getWhereClauses().get(2).getOperator());
assertThat(statement.getWhereClauses().get(2).getRight(), contains("'final'"));
}
@Test
public void testFromWhereSelect_RichSearchExpression() {
String input = """
from
Observation
where
id in search_match('_has:Observation:subject:device.identifier', '1234-5')
select
id
""";
HfqlStatement statement = parse(input);
assertEquals("Observation", statement.getFromResourceName());
assertEquals(1, statement.getSelectClauses().size());
assertEquals("id", statement.getSelectClauses().get(0).getClause());
assertEquals(1, statement.getWhereClauses().size());
assertEquals("id", statement.getWhereClauses().get(0).getLeft());
assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(0).getOperator());
assertThat(statement.getWhereClauses().get(0).getRight(), contains("'_has:Observation:subject:device.identifier'", "'1234-5'"));
}
@Test
public void testFromSearchWhereSelectLimit() {
String input = """
from
Observation
where
id in search_match('subject.name', 'foo', 'bar')
and
id in search_match('_id', '123')
and
id in search_match('status', 'final')
select
id
limit 123
""";
HfqlStatement statement = parse(input);
assertEquals("Observation", statement.getFromResourceName());
assertEquals(1, statement.getSelectClauses().size());
assertEquals("id", statement.getSelectClauses().get(0).getClause());
assertEquals(3, statement.getWhereClauses().size());
assertEquals("id", statement.getWhereClauses().get(0).getLeft());
assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(0).getOperator());
assertThat(statement.getWhereClauses().get(0).getRight(), contains("'subject.name'", "'foo'", "'bar'"));
assertEquals("id", statement.getWhereClauses().get(1).getLeft());
assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(1).getOperator());
assertThat(statement.getWhereClauses().get(1).getRight(), contains("'_id'", "'123'"));
assertEquals("id", statement.getWhereClauses().get(2).getLeft());
assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(2).getOperator());
assertThat(statement.getWhereClauses().get(2).getRight(), contains("'status'", "'final'"));
assertEquals(123, statement.getLimit());
}
@Test
public void testNamedSelectClauseWithFhirPath() {
String input = """
select
Weight: value.ofType(Quantity).value,
Unit: value.ofType(Quantity).unit
from Observation
""";
HfqlStatement statement = parse(input);
assertEquals("Observation", statement.getFromResourceName());
assertEquals(2, statement.getSelectClauses().size());
assertEquals("value.ofType(Quantity).value", statement.getSelectClauses().get(0).getClause());
assertEquals("Weight", statement.getSelectClauses().get(0).getAlias());
assertEquals("value.ofType(Quantity).unit", statement.getSelectClauses().get(1).getClause());
assertEquals("Unit", statement.getSelectClauses().get(1).getAlias());
}
@Test
public void testFromWhereSelect_InClauseAndNamedSelects() {
// One select with spaces, one without
String input = """
from
StructureDefinition
where
id in search_match('url', 'foo', 'bar')
select
Name : name,
URL:url
""";
HfqlStatement statement = parse(input);
assertEquals("StructureDefinition", statement.getFromResourceName());
assertEquals(2, statement.getSelectClauses().size());
assertEquals("name", statement.getSelectClauses().get(0).getClause());
assertEquals("Name", statement.getSelectClauses().get(0).getAlias());
assertEquals("url", statement.getSelectClauses().get(1).getClause());
assertEquals("URL", statement.getSelectClauses().get(1).getAlias());
assertEquals(1, statement.getWhereClauses().size());
assertEquals("id", statement.getWhereClauses().get(0).getLeft());
assertEquals(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH, statement.getWhereClauses().get(0).getOperator());
assertThat(statement.getWhereClauses().get(0).getRight(), contains(
"'url'", "'foo'", "'bar'"
));
}
@Test
public void testError_InvalidStart() {
String input = """
blah""";
DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input));
assertEquals("Unexpected token (expected \"SELECT\") at position [line=0, column=0]: blah", ex.getMessage());
}
@Test
public void testError_DuplicateSelectAliases() {
String input = """
SELECT id as id, name as id
FROM Patient
""";
DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input));
assertEquals("HAPI-2414: Duplicate SELECT column alias: id", ex.getMessage());
}
@Test
public void testError_InvalidOrder() {
String input = """
select id
from Patient
order foo
""";
DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input));
assertEquals("Unexpected token (expected \"BY\") at position [line=2, column=6]: foo", ex.getMessage());
}
@Test
public void testError_InvalidFrom() {
String input = """
from Blah""";
DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input));
assertEquals("Invalid FROM statement. Unknown resource type 'Blah' at position: [line=0, column=5]", ex.getMessage());
}
@Test
public void testError_InvalidLimit() {
String input = """
from Patient
select name.given
limit foo
""";
DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input));
assertEquals("Unexpected token (expected integer value) at position [line=2, column=6]: foo", ex.getMessage());
}
@Test
public void testError_InvalidSelect_EqualsParens() {
String input = """
from
Patient
where
name.given = ('Foo')
""";
DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input));
assertEquals("Unexpected token (expected quoted string) at position [line=3, column=3]: (", ex.getMessage());
}
@Test
public void testError_InvalidSelect_InWithoutParens() {
String input = """
from
Patient
where
name.given in 'Foo'
""";
DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input));
assertEquals("Unexpected token (expected \"(\") at position [line=3, column=14]: in", ex.getMessage());
}
@Test
public void testError_InvalidSelect_InWithoutPipe() {
String input = """
from
Patient
where
name.given in ('foo' 'bar')
""";
DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input));
assertEquals("Unexpected token at position [line=3, column=22]: 'bar'", ex.getMessage());
}
@Test
public void testError_InvalidSelect_InWithoutContent() {
String input = """
from
Patient
where
name.given in
""";
DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input));
assertEquals("Unexpected end of stream", ex.getMessage());
}
@Test
public void testError_InvalidSelect_InWithoutEnd() {
String input = """
from
Patient
where
name.given in ('foo' | 'bar'
""";
DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input));
assertEquals("Unexpected end of stream", ex.getMessage());
}
@Test
public void testError_MultipleWhere() {
String input = """
from
Patient
where
_id = '123'
where
name.family = 'Foo'
select
name.given[0],
name.family
""";
DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input));
assertEquals("Unexpected token at position [line=4, column=0]: where", ex.getMessage());
}
@Test
public void testError_MultipleFrom() {
String input = """
from
Patient
select
name.given[0],
name.family
from
Patient
""";
DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input));
assertEquals("Unexpected token at position [line=5, column=0]: from", ex.getMessage());
}
@Test
public void testError_NoText() {
String input = " \n ";
DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input));
assertEquals("Unexpected end of stream (expected \"FROM\")", ex.getMessage());
}
@Test
public void testError_MissingSelect() {
String input = """
from Patient where""";
DataFormatException ex = assertThrows(DataFormatException.class, () -> parse(input));
assertEquals("Unexpected end of stream (expected \"SELECT\")", ex.getMessage());
}
}
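
For orientation, the parse(...) helper used throughout this test class is a thin wrapper around the parser itself. A standalone sketch follows; it assumes the HfqlStatementParser introduced in this PR is constructed from a FhirContext plus the statement text, so check the parser sources in hapi-fhir-jpaserver-hfql for the exact shape:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.fql.parser.HfqlStatement;
import ca.uhn.fhir.jpa.fql.parser.HfqlStatementParser;

// Sketch only: parse an HFQL statement and inspect the resulting model.
public class HfqlParseSketch {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4Cached();
		String input = """
				select name[0].family, name[0].given[0]
				from Patient
				where id in search_match('identifier', 'foo|bar')
				""";
		HfqlStatement statement = new HfqlStatementParser(ctx, input).parse();
		// The parsed model exposes the same accessors asserted in the tests above.
		System.out.println(statement.getFromResourceName());     // Patient
		System.out.println(statement.getSelectClauses().size()); // 2
	}
}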

View File

@ -0,0 +1,239 @@
package ca.uhn.fhir.jpa.fql.provider;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.fql.executor.HfqlDataTypeEnum;
import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutionResult;
import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutor;
import ca.uhn.fhir.jpa.fql.parser.HfqlStatement;
import ca.uhn.fhir.jpa.fql.util.HfqlConstants;
import ca.uhn.fhir.rest.client.apache.ResourceEntity;
import ca.uhn.fhir.test.utilities.HttpClientExtension;
import ca.uhn.fhir.test.utilities.server.RestfulServerExtension;
import ca.uhn.fhir.util.JsonUtil;
import ca.uhn.fhir.util.VersionUtil;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import static ca.uhn.fhir.jpa.fql.jdbc.HfqlRestClientTest.createFakeStatement;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.notNull;
import static org.mockito.Mockito.anyInt;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.isNull;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
public class HfqlRestProviderTest {
private static final FhirContext ourCtx = FhirContext.forR4Cached();
private static final Logger ourLog = LoggerFactory.getLogger(HfqlRestProviderTest.class);
@RegisterExtension
public HttpClientExtension myHttpClient = new HttpClientExtension();
@Mock
private IHfqlExecutor myFqlExecutor;
@Mock
private IHfqlExecutionResult myMockFqlResult;
@InjectMocks
private HfqlRestProvider myProvider = new HfqlRestProvider();
@RegisterExtension
public RestfulServerExtension myServer = new RestfulServerExtension(ourCtx)
.registerProvider(myProvider);
@Captor
private ArgumentCaptor<String> myStatementCaptor;
@Captor
private ArgumentCaptor<Integer> myLimitCaptor;
@Captor
private ArgumentCaptor<Integer> myOffsetCaptor;
@Test
public void testExecuteInitialSearch() throws IOException {
// Setup
HfqlStatement statement = createFakeStatement();
when(myFqlExecutor.executeInitialSearch(any(), any(), any())).thenReturn(myMockFqlResult);
when(myMockFqlResult.getStatement()).thenReturn(statement);
when(myMockFqlResult.hasNext()).thenReturn(true, true, false);
when(myMockFqlResult.getNextRow()).thenReturn(
new IHfqlExecutionResult.Row(0, List.of("Simpson", "Homer")),
new IHfqlExecutionResult.Row(3, List.of("Simpson", "Marge"))
);
when(myMockFqlResult.getSearchId()).thenReturn("my-search-id");
when(myMockFqlResult.getLimit()).thenReturn(999);
String select = "from Patient select foo";
Parameters request = new Parameters();
request.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_SEARCH));
request.addParameter(HfqlConstants.PARAM_QUERY, new StringType(select));
request.addParameter(HfqlConstants.PARAM_LIMIT, new IntegerType(100));
HttpPost fetch = new HttpPost(myServer.getBaseUrl() + "/" + HfqlConstants.HFQL_EXECUTE);
fetch.setEntity(new ResourceEntity(ourCtx, request));
// Test
try (CloseableHttpResponse response = myHttpClient.execute(fetch)) {
// Verify
String outcome = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
String expected = """
1,HAPI FHIR THE-VERSION
my-search-id,999,"{""select"":[{""clause"":""name[0].family"",""alias"":""name[0].family"",""operator"":""SELECT"",""dataType"":""STRING""},{""clause"":""name[0].given[0]"",""alias"":""name[0].given[0]"",""operator"":""SELECT"",""dataType"":""STRING""}],""fromResourceName"":""Patient""}"
0,Simpson,Homer
3,Simpson,Marge
""".replace("THE-VERSION", VersionUtil.getVersion());
assertEquals(expected.trim(), outcome.trim());
assertEquals(200, response.getStatusLine().getStatusCode());
assertThat(response.getEntity().getContentType().getValue(), startsWith("text/csv;"));
verify(myFqlExecutor, times(1)).executeInitialSearch(myStatementCaptor.capture(), myLimitCaptor.capture(), notNull());
assertEquals(select, myStatementCaptor.getValue());
assertEquals(100, myLimitCaptor.getValue());
}
}
@Test
public void testExecuteContinuation() throws IOException {
// Setup
when(myFqlExecutor.executeContinuation(any(), any(), anyInt(), isNull(), any())).thenReturn(myMockFqlResult);
when(myMockFqlResult.hasNext()).thenReturn(true, true, false);
when(myMockFqlResult.getNextRow()).thenReturn(
new IHfqlExecutionResult.Row(4, List.of("Simpson", "Homer")),
new IHfqlExecutionResult.Row(6, List.of("Simpson", "Marge"))
);
when(myMockFqlResult.getSearchId()).thenReturn("my-search-id");
when(myMockFqlResult.getLimit()).thenReturn(-1);
String continuation = "the-continuation-id";
Parameters request = new Parameters();
request.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_SEARCH_CONTINUATION));
request.addParameter(HfqlConstants.PARAM_CONTINUATION, new StringType(continuation));
request.addParameter(HfqlConstants.PARAM_STATEMENT, new StringType(JsonUtil.serialize(createFakeStatement())));
request.addParameter(HfqlConstants.PARAM_OFFSET, new IntegerType(99));
ourLog.info("Request: {}", ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(request));
HttpPost fetch = new HttpPost(myServer.getBaseUrl() + "/" + HfqlConstants.HFQL_EXECUTE);
fetch.setEntity(new ResourceEntity(ourCtx, request));
// Test
try (CloseableHttpResponse response = myHttpClient.execute(fetch)) {
// Verify
String outcome = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
String expected = """
1,HAPI FHIR THE-VERSION
my-search-id,-1,
4,Simpson,Homer
6,Simpson,Marge
""".replace("THE-VERSION", VersionUtil.getVersion());
assertEquals(expected.trim(), outcome.trim());
assertEquals(200, response.getStatusLine().getStatusCode());
assertThat(response.getEntity().getContentType().getValue(), startsWith("text/csv;"));
verify(myFqlExecutor, times(1)).executeContinuation(any(), myStatementCaptor.capture(), myOffsetCaptor.capture(), myLimitCaptor.capture(), notNull());
assertEquals(continuation, myStatementCaptor.getValue());
assertEquals(null, myLimitCaptor.getValue());
assertEquals(99, myOffsetCaptor.getValue());
}
}
@Test
public void testIntrospectTables() throws IOException {
// Setup
when(myFqlExecutor.introspectTables()).thenReturn(myMockFqlResult);
when(myMockFqlResult.hasNext()).thenReturn(true, true, false);
HfqlStatement statement = new HfqlStatement();
statement.addSelectClauseAndAlias("TABLE_NAME").setDataType(HfqlDataTypeEnum.STRING);
when(myMockFqlResult.getStatement()).thenReturn(statement);
when(myMockFqlResult.getNextRow()).thenReturn(
new IHfqlExecutionResult.Row(0, List.of("Account")),
new IHfqlExecutionResult.Row(6, List.of("Patient"))
);
when(myMockFqlResult.getSearchId()).thenReturn(null);
when(myMockFqlResult.getLimit()).thenReturn(-1);
Parameters request = new Parameters();
request.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_INTROSPECT_TABLES));
HttpPost fetch = new HttpPost(myServer.getBaseUrl() + "/" + HfqlConstants.HFQL_EXECUTE);
fetch.setEntity(new ResourceEntity(ourCtx, request));
// Test
try (CloseableHttpResponse response = myHttpClient.execute(fetch)) {
// Verify
String outcome = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
String expected = """
1,HAPI FHIR THE-VERSION
,-1,"{""select"":[{""clause"":""TABLE_NAME"",""alias"":""TABLE_NAME"",""operator"":""SELECT"",""dataType"":""STRING""}]}"
0,Account
6,Patient
""".replace("THE-VERSION", VersionUtil.getVersion());
assertEquals(expected.trim(), outcome.trim());
assertEquals(200, response.getStatusLine().getStatusCode());
assertThat(response.getEntity().getContentType().getValue(), startsWith("text/csv;"));
}
}
@Test
public void testIntrospectColumns() throws IOException {
// Setup
when(myFqlExecutor.introspectColumns(eq("FOO"), eq("BAR"))).thenReturn(myMockFqlResult);
when(myMockFqlResult.hasNext()).thenReturn(true, true, false);
HfqlStatement statement = new HfqlStatement();
statement.addSelectClauseAndAlias("COLUMN_NAME").setDataType(HfqlDataTypeEnum.STRING);
when(myMockFqlResult.getStatement()).thenReturn(statement);
when(myMockFqlResult.getNextRow()).thenReturn(
new IHfqlExecutionResult.Row(0, List.of("FOO")),
new IHfqlExecutionResult.Row(6, List.of("BAR"))
);
when(myMockFqlResult.getSearchId()).thenReturn(null);
when(myMockFqlResult.getLimit()).thenReturn(-1);
Parameters request = new Parameters();
request.addParameter(HfqlConstants.PARAM_ACTION, new CodeType(HfqlConstants.PARAM_ACTION_INTROSPECT_COLUMNS));
request.addParameter(HfqlConstants.PARAM_INTROSPECT_TABLE_NAME, new StringType("FOO"));
request.addParameter(HfqlConstants.PARAM_INTROSPECT_COLUMN_NAME, new StringType("BAR"));
HttpPost fetch = new HttpPost(myServer.getBaseUrl() + "/" + HfqlConstants.HFQL_EXECUTE);
fetch.setEntity(new ResourceEntity(ourCtx, request));
// Test
try (CloseableHttpResponse response = myHttpClient.execute(fetch)) {
// Verify
String outcome = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
String expected = """
1,HAPI FHIR THE-VERSION
,-1,"{""select"":[{""clause"":""COLUMN_NAME"",""alias"":""COLUMN_NAME"",""operator"":""SELECT"",""dataType"":""STRING""}]}"
0,FOO
6,BAR
""".replace("THE-VERSION", VersionUtil.getVersion());
assertEquals(expected.trim(), outcome.trim());
assertEquals(200, response.getStatusLine().getStatusCode());
assertThat(response.getEntity().getContentType().getValue(), startsWith("text/csv;"));
}
}
}
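
The expected strings in these tests also pin down the wire format: the first CSV row carries a protocol version and the server's product version, the second row carries the search ID, the limit, and the JSON-serialized HfqlStatement, and every later row is a data row prefixed with its row offset. A deliberately naive consumer sketch, using only the standard library (the real HfqlRestClient does proper quote-aware CSV parsing; this only illustrates the framing):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

class HfqlCsvFramingSketch {
	static void read(InputStream responseBody) throws IOException {
		try (BufferedReader reader = new BufferedReader(new InputStreamReader(responseBody, StandardCharsets.UTF_8))) {
			String versionRow = reader.readLine(); // e.g. "1,HAPI FHIR 6.7.15-SNAPSHOT"
			String headerRow = reader.readLine();  // e.g. "my-search-id,999,<statement JSON>"
			System.out.println(versionRow + " / " + headerRow);
			String dataRow;
			while ((dataRow = reader.readLine()) != null) {
				// Each data row is "<rowOffset>,<col1>,<col2>,..."; splitting on the
				// first comma only, since quoted cells would break a full split.
				String[] parts = dataRow.split(",", 2);
				int rowOffset = Integer.parseInt(parts[0]);
				String columns = parts.length > 1 ? parts[1] : "";
				System.out.println(rowOffset + " -> " + columns);
			}
		}
	}
}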

View File

@ -0,0 +1,14 @@
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
</pattern>
</encoder>
</appender>
<root level="info">
<appender-ref ref="STDOUT" />
</root>
</configuration>

View File

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,8 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -1,3 +1,22 @@
/*-
* #%L
* HAPI FHIR JPA Server - Master Data Management
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.mdm.svc;
import ca.uhn.fhir.context.FhirContext;

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,6 +5,7 @@ import ca.uhn.fhir.batch2.jobs.reindex.ReindexProvider;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
import ca.uhn.fhir.jpa.dao.r5.BaseJpaR5Test;
import ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider;
import ca.uhn.fhir.jpa.graphql.GraphQLProvider;
import ca.uhn.fhir.jpa.provider.DiffProvider;
import ca.uhn.fhir.jpa.provider.JpaCapabilityStatementProvider;
@ -71,6 +72,7 @@ public abstract class BaseResourceProviderR5Test extends BaseJpaR5Test {
s.registerProvider(myAppCtx.getBean(SubscriptionTriggeringProvider.class));
s.registerProvider(myAppCtx.getBean(TerminologyUploaderProvider.class));
s.registerProvider(myAppCtx.getBean(ValueSetOperationProvider.class));
s.registerProvider(myAppCtx.getBean(HfqlRestProvider.class));
s.setPagingProvider(myAppCtx.getBean(DatabaseBackedPagingProvider.class));

View File

@ -0,0 +1,55 @@
package ca.uhn.fhir.jpa.provider.r5;
import ca.uhn.fhir.jpa.fql.util.HfqlConstants;
import ca.uhn.fhir.rest.client.apache.ResourceEntity;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.hl7.fhir.r5.model.IntegerType;
import org.hl7.fhir.r5.model.Parameters;
import org.hl7.fhir.r5.model.StringType;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class ResourceProviderR5FqlTest extends BaseResourceProviderR5Test {
@Test
public void testFqlQuery() throws IOException {
// Setup
for (int i = 0; i < 20; i++) {
createPatient(withActiveTrue(), withIdentifier("foo", "bar"), withFamily("Simpson" + i), withGiven("Homer"));
}
String select = """
select name[0].family, name[0].given[0]
from Patient
where id in search_match('identifier', 'foo|bar')
""";
Parameters request = new Parameters();
request.addParameter(HfqlConstants.PARAM_ACTION, new StringType(HfqlConstants.PARAM_ACTION_SEARCH));
request.addParameter(HfqlConstants.PARAM_QUERY, new StringType(select));
request.addParameter(HfqlConstants.PARAM_LIMIT, new IntegerType(100));
request.addParameter(HfqlConstants.PARAM_FETCH_SIZE, new IntegerType(5));
HttpPost fetch = new HttpPost(myServer.getBaseUrl() + "/" + HfqlConstants.HFQL_EXECUTE);
fetch.setEntity(new ResourceEntity(myFhirContext, request));
// Test
try (CloseableHttpResponse response = ourHttpClient.execute(fetch)) {
// Verify
assertEquals(200, response.getStatusLine().getStatusCode());
String outcome = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
assertThat(outcome, containsString("0,Simpson0,Homer"));
assertThat(outcome, containsString("1,Simpson1,Homer"));
}
}
}
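
Worth noting about the query shape in this test: the search_match clause narrows the candidate set with an ordinary FHIR search (conceptually Patient?identifier=foo|bar), and the FHIRPath select expressions are then evaluated against each matching resource.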

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -58,6 +58,11 @@
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-hfql</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.hl7.fhir.testcases</groupId>

View File

@ -27,6 +27,7 @@ import ca.uhn.fhir.jpa.binstore.MemoryBinaryStorageSvcImpl;
import ca.uhn.fhir.jpa.config.HapiJpaConfig;
import ca.uhn.fhir.jpa.config.r5.JpaR5Config;
import ca.uhn.fhir.jpa.config.util.HapiEntityManagerFactoryUtil;
import ca.uhn.fhir.jpa.fql.provider.HfqlRestProviderCtxConfig;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect;
import ca.uhn.fhir.jpa.topic.SubscriptionTopicConfig;
import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener;
@ -61,7 +62,8 @@ import static org.junit.jupiter.api.Assertions.fail;
SubscriptionTopicConfig.class,
JpaBatch2Config.class,
Batch2JobsConfig.class,
TestHSearchAddInConfig.DefaultLuceneHeap.class
TestHSearchAddInConfig.DefaultLuceneHeap.class,
HfqlRestProviderCtxConfig.class
})
public class TestR5Config {

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -59,6 +59,11 @@
<artifactId>hapi-fhir-jpaserver-ips</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-hfql</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.helger</groupId>

View File

@ -10,6 +10,7 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc;
import ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider;
import ca.uhn.fhir.jpa.graphql.GraphQLProvider;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.ips.provider.IpsOperationProvider;
@ -250,6 +251,7 @@ public class TestRestfulServer extends RestfulServer {
providers.add(myAppCtx.getBean(JpaSystemProvider.class));
providers.add(myAppCtx.getBean(InstanceReindexProvider.class));
providers.add(myAppCtx.getBean(HfqlRestProvider.class));
/*
* On the DSTU2 endpoint, we want to enable ETag support

View File

@ -6,6 +6,7 @@ import ca.uhn.fhir.interceptor.api.IInterceptorService;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.config.ThreadPoolFactoryConfig;
import ca.uhn.fhir.jpa.batch2.JpaBatch2Config;
import ca.uhn.fhir.jpa.fql.provider.HfqlRestProviderCtxConfig;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.subscription.channel.config.SubscriptionChannelConfig;
import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig;
@ -34,14 +35,13 @@ import org.springframework.context.annotation.Import;
SubscriptionSubmitterConfig.class,
JpaBatch2Config.class,
Batch2JobsConfig.class,
ThreadPoolFactoryConfig.class
ThreadPoolFactoryConfig.class,
HfqlRestProviderCtxConfig.class
})
public class CommonConfig {
/**
* Do some fancy logging to create a nice access log that has details about each incoming request.
*
* @return
*/
@Bean
public LoggingInterceptor accessLoggingInterceptor() {

View File

@ -12,6 +12,8 @@ import org.hl7.fhir.dstu3.model.Subscription.SubscriptionChannelType;
import org.hl7.fhir.dstu3.model.Subscription.SubscriptionStatus;
import org.hl7.fhir.instance.model.api.IIdType;
import java.sql.Driver;
import static ca.uhn.fhirtest.config.TestDstu3Config.FHIR_LUCENE_LOCATION_DSTU3;
public class UhnFhirTestApp {
@ -20,6 +22,8 @@ public class UhnFhirTestApp {
public static void main(String[] args) throws Exception {
org.h2.Driver.load();
int myPort = 8889;
String base = "http://localhost:" + myPort + "/baseR4";
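
The new java.sql.Driver import reflects what this test app can now exercise: the HFQL JDBC driver added in this PR. A connection sketch against the base URL above; the jdbc:hapifhirhfql:url= scheme is an assumption based on this PR's JdbcDriver, so check that class for the exact URL form:

// Sketch: query the test server over JDBC once hapi-fhir-jpaserver-hfql is on the classpath.
Class.forName("ca.uhn.fhir.jpa.fql.jdbc.JdbcDriver"); // driver class added by this PR
try (java.sql.Connection conn = java.sql.DriverManager.getConnection("jdbc:hapifhirhfql:url=" + base);
	 java.sql.Statement stmt = conn.createStatement();
	 java.sql.ResultSet rs = stmt.executeQuery("select name[0].family, name[0].given[0] from Patient")) {
	while (rs.next()) {
		System.out.println(rs.getString(1) + ", " + rs.getString(2));
	}
}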

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -1,3 +1,22 @@
/*-
* #%L
* HAPI FHIR - Master Data Management
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.mdm.blocklist.json;
import ca.uhn.fhir.model.api.IModelJson;

View File

@ -1,3 +1,22 @@
/*-
* #%L
* HAPI FHIR - Master Data Management
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.mdm.blocklist.json;
import ca.uhn.fhir.model.api.IModelJson;

View File

@ -1,3 +1,22 @@
/*-
* #%L
* HAPI FHIR - Master Data Management
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.mdm.blocklist.json;
import ca.uhn.fhir.model.api.IModelJson;

View File

@ -1,3 +1,22 @@
/*-
* #%L
* HAPI FHIR - Master Data Management
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.mdm.blocklist.svc;
import ca.uhn.fhir.mdm.blocklist.json.BlockListJson;

View File

@ -1,3 +1,22 @@
/*-
* #%L
* HAPI FHIR - Master Data Management
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.mdm.blocklist.svc;
import org.hl7.fhir.instance.model.api.IAnyResource;

View File

@ -1,3 +1,22 @@
/*-
* #%L
* HAPI FHIR - Master Data Management
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.mdm.rules.similarity;
import ca.uhn.fhir.context.FhirContext;

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -25,6 +25,7 @@ import org.apache.commons.lang3.builder.ToStringBuilder;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
@ -61,6 +62,15 @@ public class SimpleBundleProvider implements IBundleProvider {
this(theList, null);
}
/**
* Constructor
*
* @since 6.8.0
*/
public SimpleBundleProvider(IBaseResource... theList) {
this(Arrays.asList(theList), null);
}
public SimpleBundleProvider(List<? extends IBaseResource> theList, String theUuid) {
myList = theList;
myUuid = theUuid;
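
A quick usage note on the varargs constructor added above: it removes the Arrays.asList ceremony at call sites (patientA and patientB here are illustrative):

// Before: callers had to wrap resources in a List themselves.
IBundleProvider before = new SimpleBundleProvider(Arrays.asList(patientA, patientB));
// After: the new varargs constructor (@since 6.8.0) accepts resources directly.
IBundleProvider after = new SimpleBundleProvider(patientA, patientB);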

View File

@ -159,7 +159,7 @@ public class LoggingInterceptor {
StringLookup lookup = new MyLookup(theServletRequest, theException, theRequestDetails);
StringSubstitutor subs = new StringSubstitutor(lookup, "${", "}", '\\');
// Actuall log the line
// Actually log the line
String line = subs.replace(myErrorMessageFormat);
myLogger.info(line);
}

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -21,7 +21,7 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-caching-api</artifactId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>6.7.14-SNAPSHOT</version>
<version>6.7.15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

Some files were not shown because too many files have changed in this diff.