Merge branch 'master' into do-20231213-core-bump-6-2-6
commit 833483bf3c
@ -1,4 +1,4 @@
|
|||
---
|
||||
type: fix
|
||||
issue: 5452
|
||||
title: "Swapped from using `javax.*` to `jakarta.*` packages. This is a breaking change for a large majority of people who write custom code against HAPI-FHIR. Please see [the migration guide](/docs/interceptors/jakarta_upgrade.md) for more information."
|
||||
title: "Swapped from using `javax.*` to `jakarta.*` packages. This is a breaking change for a large majority of people who write custom code against HAPI-FHIR. Please see [the migration guide](/hapi-fhir/docs/interceptors/jakarta_upgrade.html) for more information."
|
||||
|
|
|
@ -0,0 +1,7 @@
|
|||
---
|
||||
type: fix
|
||||
jira: SMILE-7664
|
||||
title: "The HFQL/SQL engine incorrectly parsed expressions containing a `>=` or
|
||||
`<=` comparator in a WHERE clause. This has been corrected. Additionally, the
|
||||
execution engine has been optimized to apply clauses against the `meta.lastUpdated`
|
||||
path more efficiently by using the equivalent search parameter automatically."
|
|
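As an editorial illustration only (not part of the changelog), the sketch below shows the kind of statement this fix addresses, mirroring the tests added later in this commit; `myHfqlExecutor` and `mySrd` are assumed to come from the same test fixture used there:

```java
// A ">=" comparator in a WHERE clause is now lexed as a single token and,
// because the path is meta.lastUpdated, applied as the equivalent
// _lastUpdated search parameter (roughly "_lastUpdated=ge2023-10-09")
// instead of being evaluated row-by-row as FHIRPath.
String statement = """
		SELECT
		   id, birthDate, meta.lastUpdated
		FROM
		   Patient
		WHERE
		   meta.lastUpdated >= '2023-10-09'
		""";
myHfqlExecutor.executeInitialSearch(statement, null, mySrd);
```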
@ -0,0 +1,7 @@
|
|||
---
|
||||
type: fix
|
||||
issue: 5547
|
||||
title: "Previously LIKE queries against resources would perform poorly on PostgreSQL if the database locale/collation was not 'C'.
|
||||
This has been resolved by checking hfj_spidx_string.sp_value_normalized and hfj_spidx_uri.sp_uri column
|
||||
collations during migration and if either or both are non C, create a new btree varchar_pattern_ops on the
|
||||
hash values. If both column collations are 'C', do not create any new indexes."
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
type: perf
|
||||
issue: 5555
|
||||
title: "Previously, resource body content went into one of 2 columns on the HFJ_RES_VER table:
|
||||
RES_TEXT if the size was above a configurable threshold, or RES_TEXT_VC if it was below that
|
||||
threshold. Performance testing has shown that the latter is always faster, and that on
|
||||
Postgres the use of the former is particularly problematic since it maps to the
|
||||
largeobject table which isn't the recommended way of storing high frequency objects.
|
||||
The configurable threshold is now ignored, and the latter column is always used. Any legacy
|
||||
data in the former column will still be read however."
|
|
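A minimal editorial sketch of the read-side behaviour described above (this is not the actual HAPI-FHIR implementation; the helper method and the `decodeResource` call are stand-ins): new rows always carry their body in `RES_TEXT_VC`, while legacy rows may still need to be decoded from the `RES_TEXT` bytes.

```java
// Sketch only: prefer the always-used VARCHAR column, fall back to the
// legacy LOB column for rows written by older versions.
private String readResourceBody(ResourceHistoryTable theEntity) {
	String text = theEntity.getResourceTextVc();
	if (text != null) {
		return text; // new storage: inline JSON text
	}
	// Legacy storage: decode the RES_TEXT bytes according to their encoding
	// (JSON or gzipped JSONC) - decodeResource(..) is assumed for this sketch.
	return decodeResource(theEntity.getResource(), theEntity.getEncoding());
}
```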
@ -1 +1,18 @@
|
|||
This release contains a large breaking change for authors of interceptors. Internally, HAPI-FHIR has swapped from using `javax.*` to `jakarta.*` packages. Please see [the migration guide](/docs/interceptors/jakarta_upgrade.md) for more information. Without manual intervention, the majority of interceptors will fail at runtime unless they are upgraded.
|
||||
This release contains a large breaking change for authors of interceptors. Internally, HAPI-FHIR has swapped from using `javax.*` to `jakarta.*` packages. Please see [the migration guide](/hapi-fhir/docs/interceptors/jakarta_upgrade.html) for more information. Without manual intervention, the majority of interceptors will fail at runtime unless they are upgraded.
|
||||
|
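As an illustration (editorial, not part of the original notes), the most common change needed in custom interceptor or server code is simply updating the servlet imports, for example:

```java
// Before this release:
// import javax.servlet.http.HttpServletRequest;
// import javax.servlet.http.HttpServletResponse;

// After this release:
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
```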
||||
## Possible New Indexes on PostgreSQL
|
||||
|
||||
* This affects only clients running PostgreSQL whose database locale/collation is NOT 'C'
|
||||
* For those clients, the migration will detect this condition and add new indexes to:
|
||||
* hfj_spidx_string
|
||||
* hfj_spidx_uri
|
||||
* This is meant to address performance issues for these clients on GET queries whose resulting SQL uses "LIKE" clauses
|
||||
|
||||
These are the new indexes that will be created:
|
||||
|
||||
```sql
|
||||
CREATE INDEX idx_sp_string_hash_nrm_pattern_ops ON public.hfj_spidx_string USING btree (hash_norm_prefix, sp_value_normalized varchar_pattern_ops, res_id, partition_id);
|
||||
```
|
||||
```sql
|
||||
CREATE UNIQUE INDEX idx_sp_uri_hash_identity_pattern_ops ON public.hfj_spidx_uri USING btree (hash_identity, sp_uri varchar_pattern_ops, res_id, partition_id);
|
||||
```
|
||||
|
|
|
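As an editorial aside, clients can check in advance whether these indexes will be created by inspecting the database default collation, which the migration itself also consults (a hedged JDBC sketch; `dataSource` is an assumed variable, and note the migration additionally checks per-column collations):

```java
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import javax.sql.DataSource;

// Sketch: if this prints anything other than "C", expect the two new indexes.
static void printCollation(DataSource dataSource) throws Exception {
	try (Connection conn = dataSource.getConnection();
			PreparedStatement ps = conn.prepareStatement(
					"SELECT datcollate FROM pg_database WHERE datname = current_database()");
			ResultSet rs = ps.executeQuery()) {
		if (rs.next()) {
			String collation = rs.getString(1);
			boolean indexesExpected = !"C".equals(collation);
			System.out.println("Default collation: " + collation
					+ " (new indexes expected: " + indexesExpected + ")");
		}
	}
}
```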
@ -19,7 +19,6 @@
|
|||
*/
|
||||
package ca.uhn.fhir.jpa.model.dialect;
|
||||
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import org.hibernate.dialect.DatabaseVersion;
|
||||
import org.hibernate.dialect.H2Dialect;
|
||||
|
||||
|
@ -38,7 +37,7 @@ public class HapiFhirH2Dialect extends H2Dialect {
|
|||
|
||||
/**
|
||||
* As of Hibernate 6, generated schemas include a column level check constraint that enforces valid values
|
||||
* for columns that back an Enum type. For example, the column definition for {@link ResourceTable#getFhirVersion()}
|
||||
* for columns that back an Enum type. For example, the column definition for <code>ResourceTable#getFhirVersion()</code>
|
||||
* would look like:
|
||||
* <pre>
|
||||
* RES_VERSION varchar(7) check (RES_VERSION in ('DSTU2','DSTU2_HL7ORG','DSTU2_1','DSTU3','R4','R4B','R5')),
|
|
@ -48,8 +48,10 @@ public final class HapiEntityManagerFactoryUtil {
|
|||
ConfigurableListableBeanFactory myConfigurableListableBeanFactory,
|
||||
FhirContext theFhirContext,
|
||||
JpaStorageSettings theStorageSettings) {
|
||||
|
||||
LocalContainerEntityManagerFactoryBean retVal =
|
||||
new HapiFhirLocalContainerEntityManagerFactoryBean(myConfigurableListableBeanFactory);
|
||||
|
||||
configureEntityManagerFactory(retVal, theFhirContext, theStorageSettings);
|
||||
return retVal;
|
||||
}
|
||||
|
|
|
@ -148,9 +148,7 @@ import org.springframework.transaction.support.TransactionSynchronization;
|
|||
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
|
@ -645,7 +643,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
theEntity.setResourceType(toResourceName(theResource));
|
||||
}
|
||||
|
||||
byte[] resourceBinary;
|
||||
String resourceText;
|
||||
ResourceEncodingEnum encoding;
|
||||
boolean changed = false;
|
||||
|
@ -662,7 +659,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
if (address != null) {
|
||||
|
||||
encoding = ResourceEncodingEnum.ESR;
|
||||
resourceBinary = null;
|
||||
resourceText = address.getProviderId() + ":" + address.getLocation();
|
||||
changed = true;
|
||||
|
||||
|
@ -680,19 +676,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
theEntity.setFhirVersion(myContext.getVersion().getVersion());
|
||||
|
||||
HashFunction sha256 = Hashing.sha256();
|
||||
HashCode hashCode;
|
||||
String encodedResource = encodeResource(theResource, encoding, excludeElements, myContext);
|
||||
if (myStorageSettings.getInlineResourceTextBelowSize() > 0
|
||||
&& encodedResource.length() < myStorageSettings.getInlineResourceTextBelowSize()) {
|
||||
resourceText = encodedResource;
|
||||
resourceBinary = null;
|
||||
encoding = ResourceEncodingEnum.JSON;
|
||||
hashCode = sha256.hashUnencodedChars(encodedResource);
|
||||
} else {
|
||||
resourceText = null;
|
||||
resourceBinary = getResourceBinary(encoding, encodedResource);
|
||||
hashCode = sha256.hashBytes(resourceBinary);
|
||||
}
|
||||
resourceText = encodeResource(theResource, encoding, excludeElements, myContext);
|
||||
encoding = ResourceEncodingEnum.JSON;
|
||||
HashCode hashCode = sha256.hashUnencodedChars(resourceText);
|
||||
|
||||
String hashSha256 = hashCode.toString();
|
||||
if (!hashSha256.equals(theEntity.getHashSha256())) {
|
||||
|
@ -710,7 +696,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
} else {
|
||||
|
||||
encoding = null;
|
||||
resourceBinary = null;
|
||||
resourceText = null;
|
||||
}
|
||||
|
||||
|
@ -728,7 +713,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
changed = true;
|
||||
}
|
||||
|
||||
resourceBinary = null;
|
||||
resourceText = null;
|
||||
encoding = ResourceEncodingEnum.DEL;
|
||||
}
|
||||
|
@ -753,46 +737,19 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
if (currentHistoryVersion == null || !currentHistoryVersion.hasResource()) {
|
||||
changed = true;
|
||||
} else {
|
||||
changed = !Arrays.equals(currentHistoryVersion.getResource(), resourceBinary);
|
||||
changed = !StringUtils.equals(currentHistoryVersion.getResourceTextVc(), resourceText);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
EncodedResource retVal = new EncodedResource();
|
||||
retVal.setEncoding(encoding);
|
||||
retVal.setResourceBinary(resourceBinary);
|
||||
retVal.setResourceText(resourceText);
|
||||
retVal.setChanged(changed);
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
/**
|
||||
* helper for returning the encoded byte array of the input resource string based on the encoding.
|
||||
*
|
||||
* @param encoding the encoding to use
|
||||
* @param encodedResource the resource to encode
|
||||
* @return byte array of the resource
|
||||
*/
|
||||
@Nonnull
|
||||
private byte[] getResourceBinary(ResourceEncodingEnum encoding, String encodedResource) {
|
||||
byte[] resourceBinary;
|
||||
switch (encoding) {
|
||||
case JSON:
|
||||
resourceBinary = encodedResource.getBytes(StandardCharsets.UTF_8);
|
||||
break;
|
||||
case JSONC:
|
||||
resourceBinary = GZipUtil.compress(encodedResource);
|
||||
break;
|
||||
default:
|
||||
case DEL:
|
||||
case ESR:
|
||||
resourceBinary = new byte[0];
|
||||
break;
|
||||
}
|
||||
return resourceBinary;
|
||||
}
|
||||
|
||||
/**
|
||||
* helper to format the meta element for serialization of the resource.
|
||||
*
|
||||
|
@ -1437,8 +1394,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
List<String> excludeElements = new ArrayList<>(8);
|
||||
getExcludedElements(historyEntity.getResourceType(), excludeElements, theResource.getMeta());
|
||||
String encodedResourceString = encodeResource(theResource, encoding, excludeElements, myContext);
|
||||
byte[] resourceBinary = getResourceBinary(encoding, encodedResourceString);
|
||||
boolean changed = !Arrays.equals(historyEntity.getResource(), resourceBinary);
|
||||
boolean changed = !StringUtils.equals(historyEntity.getResourceTextVc(), encodedResourceString);
|
||||
|
||||
historyEntity.setUpdated(theTransactionDetails.getTransactionDate());
|
||||
|
||||
|
@ -1450,19 +1406,14 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
return historyEntity;
|
||||
}
|
||||
|
||||
if (getStorageSettings().getInlineResourceTextBelowSize() > 0
|
||||
&& encodedResourceString.length() < getStorageSettings().getInlineResourceTextBelowSize()) {
|
||||
populateEncodedResource(encodedResource, encodedResourceString, null, ResourceEncodingEnum.JSON);
|
||||
} else {
|
||||
populateEncodedResource(encodedResource, null, resourceBinary, encoding);
|
||||
}
|
||||
populateEncodedResource(encodedResource, encodedResourceString, ResourceEncodingEnum.JSON);
|
||||
}
|
||||
|
||||
/*
|
||||
* Save the resource itself to the resourceHistoryTable
|
||||
*/
|
||||
historyEntity = myEntityManager.merge(historyEntity);
|
||||
historyEntity.setEncoding(encodedResource.getEncoding());
|
||||
historyEntity.setResource(encodedResource.getResourceBinary());
|
||||
historyEntity.setResourceTextVc(encodedResource.getResourceText());
|
||||
myResourceHistoryTableDao.save(historyEntity);
|
||||
|
||||
|
@ -1472,12 +1423,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
}
|
||||
|
||||
private void populateEncodedResource(
|
||||
EncodedResource encodedResource,
|
||||
String encodedResourceString,
|
||||
byte[] theResourceBinary,
|
||||
ResourceEncodingEnum theEncoding) {
|
||||
EncodedResource encodedResource, String encodedResourceString, ResourceEncodingEnum theEncoding) {
|
||||
encodedResource.setResourceText(encodedResourceString);
|
||||
encodedResource.setResourceBinary(theResourceBinary);
|
||||
encodedResource.setEncoding(theEncoding);
|
||||
}
|
||||
|
||||
|
@ -1542,7 +1489,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
}
|
||||
|
||||
historyEntry.setEncoding(theChanged.getEncoding());
|
||||
historyEntry.setResource(theChanged.getResourceBinary());
|
||||
historyEntry.setResourceTextVc(theChanged.getResourceText());
|
||||
|
||||
ourLog.debug("Saving history entry ID[{}] for RES_ID[{}]", historyEntry.getId(), historyEntry.getResourceId());
|
||||
|
|
|
@ -1689,19 +1689,17 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
if (historyEntity.getEncoding() == ResourceEncodingEnum.JSONC
|
||||
|| historyEntity.getEncoding() == ResourceEncodingEnum.JSON) {
|
||||
byte[] resourceBytes = historyEntity.getResource();
|
||||
|
||||
// Always migrate data out of the bytes column
|
||||
if (resourceBytes != null) {
|
||||
String resourceText = decodeResource(resourceBytes, historyEntity.getEncoding());
|
||||
if (myStorageSettings.getInlineResourceTextBelowSize() > 0
|
||||
&& resourceText.length() < myStorageSettings.getInlineResourceTextBelowSize()) {
|
||||
ourLog.debug(
|
||||
"Storing text of resource {} version {} as inline VARCHAR",
|
||||
entity.getResourceId(),
|
||||
historyEntity.getVersion());
|
||||
historyEntity.setResourceTextVc(resourceText);
|
||||
historyEntity.setResource(null);
|
||||
historyEntity.setEncoding(ResourceEncodingEnum.JSON);
|
||||
changed = true;
|
||||
}
|
||||
ourLog.debug(
|
||||
"Storing text of resource {} version {} as inline VARCHAR",
|
||||
entity.getResourceId(),
|
||||
historyEntity.getVersion());
|
||||
historyEntity.setResourceTextVc(resourceText);
|
||||
historyEntity.setEncoding(ResourceEncodingEnum.JSON);
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) {
|
||||
|
@ -2071,6 +2069,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public <PID extends IResourcePersistentId<?>> Stream<PID> searchForIdStream(
|
||||
SearchParameterMap theParams,
|
||||
RequestDetails theRequest,
|
||||
|
|
|
@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
|
|||
class EncodedResource {
|
||||
|
||||
private boolean myChanged;
|
||||
private byte[] myResource;
|
||||
private ResourceEncodingEnum myEncoding;
|
||||
private String myResourceText;
|
||||
|
||||
|
@ -36,14 +35,6 @@ class EncodedResource {
|
|||
myEncoding = theEncoding;
|
||||
}
|
||||
|
||||
public byte[] getResourceBinary() {
|
||||
return myResource;
|
||||
}
|
||||
|
||||
public void setResourceBinary(byte[] theResource) {
|
||||
myResource = theResource;
|
||||
}
|
||||
|
||||
public boolean isChanged() {
|
||||
return myChanged;
|
||||
}
|
||||
|
|
|
@ -79,4 +79,16 @@ public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryT
|
|||
@Modifying
|
||||
@Query("DELETE FROM ResourceHistoryTable t WHERE t.myId = :pid")
|
||||
void deleteByPid(@Param("pid") Long theId);
|
||||
|
||||
/**
|
||||
* This method is only for use in unit tests - it is used to move the stored resource body contents from the new
|
||||
* <code>RES_TEXT_VC</code> column to the legacy <code>RES_TEXT</code> column, which is where data may have
|
||||
* been stored by versions of HAPI FHIR prior to 7.0.0
|
||||
*
|
||||
* @since 7.0.0
|
||||
*/
|
||||
@Modifying
|
||||
@Query(
|
||||
"UPDATE ResourceHistoryTable r SET r.myResourceTextVc = null, r.myResource = :text, r.myEncoding = 'JSONC' WHERE r.myId = :pid")
|
||||
void updateNonInlinedContents(@Param("text") byte[] theText, @Param("pid") long thePid);
|
||||
}
|
||||
|
|
|
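A usage sketch (editorial; test code only, as the Javadoc above notes): `resourceJson` and `versionPid` are placeholder variables, and the body is gzip-compressed to match the `JSONC` encoding the query writes.

```java
// Move a resource body back into the legacy RES_TEXT column so the
// read-path migration out of that column can be exercised in a test.
byte[] legacyBytes = GZipUtil.compress(resourceJson);
myResourceHistoryTableDao.updateNonInlinedContents(legacyBytes, versionPid);
```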
@ -62,6 +62,28 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
// H2, Derby, MariaDB, and MySql automatically add indexes to foreign keys
|
||||
public static final DriverTypeEnum[] NON_AUTOMATIC_FK_INDEX_PLATFORMS =
|
||||
new DriverTypeEnum[] {DriverTypeEnum.POSTGRES_9_4, DriverTypeEnum.ORACLE_12C, DriverTypeEnum.MSSQL_2012};
|
||||
private static final String QUERY_FOR_COLUMN_COLLATION_TEMPLATE = "WITH defcoll AS (\n"
|
||||
+ " SELECT datcollate AS coll\n"
|
||||
+ " FROM pg_database\n"
|
||||
+ " WHERE datname = current_database())\n"
|
||||
+ ", collation_by_column AS (\n"
|
||||
+ " SELECT a.attname,\n"
|
||||
+ " CASE WHEN c.collname = 'default'\n"
|
||||
+ " THEN defcoll.coll\n"
|
||||
+ " ELSE c.collname\n"
|
||||
+ " END AS my_collation\n"
|
||||
+ " FROM pg_attribute AS a\n"
|
||||
+ " CROSS JOIN defcoll\n"
|
||||
+ " LEFT JOIN pg_collation AS c ON a.attcollation = c.oid\n"
|
||||
+ " WHERE a.attrelid = '%s'::regclass\n"
|
||||
+ " AND a.attnum > 0\n"
|
||||
+ " AND attname = '%s'\n"
|
||||
+ ")\n"
|
||||
+ "SELECT TRUE as result\n"
|
||||
+ "FROM collation_by_column\n"
|
||||
+ "WHERE EXISTS (SELECT 1\n"
|
||||
+ " FROM collation_by_column\n"
|
||||
+ " WHERE my_collation != 'C')";
|
||||
private final Set<FlagEnum> myFlags;
|
||||
|
||||
/**
|
||||
|
@ -141,6 +163,30 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
batch2JobInstanceTable.addColumn("20231128.1", "USER_NAME").nullable().type(ColumnTypeEnum.STRING, 200);
|
||||
|
||||
batch2JobInstanceTable.addColumn("20231128.2", "CLIENT_ID").nullable().type(ColumnTypeEnum.STRING, 200);
|
||||
|
||||
{
|
||||
version.executeRawSql(
|
||||
"20231212.1",
|
||||
"CREATE INDEX idx_sp_string_hash_nrm_pattern_ops ON public.hfj_spidx_string USING btree (hash_norm_prefix, sp_value_normalized varchar_pattern_ops, res_id, partition_id)")
|
||||
.onlyAppliesToPlatforms(DriverTypeEnum.POSTGRES_9_4)
|
||||
.onlyIf(
|
||||
String.format(
|
||||
QUERY_FOR_COLUMN_COLLATION_TEMPLATE,
|
||||
"HFJ_SPIDX_STRING".toLowerCase(),
|
||||
"SP_VALUE_NORMALIZED".toLowerCase()),
|
||||
"Column HFJ_SPIDX_STRING.SP_VALUE_NORMALIZED already has a collation of 'C' so doing nothing");
|
||||
|
||||
version.executeRawSql(
|
||||
"20231212.2",
|
||||
"CREATE UNIQUE INDEX idx_sp_uri_hash_identity_pattern_ops ON public.hfj_spidx_uri USING btree (hash_identity, sp_uri varchar_pattern_ops, res_id, partition_id)")
|
||||
.onlyAppliesToPlatforms(DriverTypeEnum.POSTGRES_9_4)
|
||||
.onlyIf(
|
||||
String.format(
|
||||
QUERY_FOR_COLUMN_COLLATION_TEMPLATE,
|
||||
"HFJ_SPIDX_URI".toLowerCase(),
|
||||
"SP_URI".toLowerCase()),
|
||||
"Column HFJ_SPIDX_STRING.SP_VALUE_NORMALIZED already has a collation of 'C' so doing nothing");
|
||||
}
|
||||
}
|
||||
|
||||
protected void init680() {
|
||||
|
@ -1509,11 +1555,12 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
Builder.BuilderWithTableName nrmlTable = version.onTable("HFJ_SPIDX_QUANTITY_NRML");
|
||||
nrmlTable.addColumn("20210111.1", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT);
|
||||
nrmlTable.addColumn("20210111.2", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY);
|
||||
// - The fk name is generated from Hibernate, have to use this name here
|
||||
// Disabled - superseded by 20220304.33
|
||||
nrmlTable
|
||||
.addForeignKey("20210111.3", "FKRCJOVMUH5KC0O6FVBLE319PYV")
|
||||
.toColumn("RES_ID")
|
||||
.references("HFJ_RESOURCE", "RES_ID");
|
||||
.references("HFJ_RESOURCE", "RES_ID")
|
||||
.doNothing();
|
||||
|
||||
Builder.BuilderWithTableName quantityTable = version.onTable("HFJ_SPIDX_QUANTITY");
|
||||
quantityTable
|
||||
|
|
|
@ -0,0 +1,40 @@
|
|||
package ca.uhn.fhir.jpa.entity;
|
||||
|
||||
import ca.uhn.fhir.util.ClasspathUtil;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
|
||||
public class GeneratedSchemaTest {
|
||||
|
||||
/**
|
||||
* Make sure that the RES_TEXT_VC column, which is supposed to be an unlimited-length
|
||||
* string datatype, actually uses an appropriate datatype on the various databases
|
||||
* we care about.
|
||||
*/
|
||||
@Test
|
||||
public void testVerifyLongVarcharColumnDefinition() {
|
||||
validateLongVarcharDatatype("cockroachdb.sql", "varchar(2147483647)");
|
||||
validateLongVarcharDatatype("derby.sql", "clob");
|
||||
validateLongVarcharDatatype("mysql.sql", "longtext");
|
||||
validateLongVarcharDatatype("mariadb.sql", "longtext");
|
||||
|
||||
validateLongVarcharDatatype("h2.sql", "clob");
|
||||
validateLongVarcharDatatype("postgres.sql", "text");
|
||||
validateLongVarcharDatatype("oracle.sql", "clob");
|
||||
validateLongVarcharDatatype("sqlserver.sql", "varchar(max)");
|
||||
|
||||
}
|
||||
|
||||
private static void validateLongVarcharDatatype(String schemaName, String expectedDatatype) {
|
||||
String schema = ClasspathUtil.loadResource("ca/uhn/hapi/fhir/jpa/docs/database/" + schemaName);
|
||||
String[] lines = StringUtils.split(schema, '\n');
|
||||
String resTextVc = Arrays.stream(lines).filter(t -> t.contains("RES_TEXT_VC ")).findFirst().orElseThrow();
|
||||
assertThat("Wrong type in " + schemaName, resTextVc, containsString("RES_TEXT_VC " + expectedDatatype));
|
||||
}
|
||||
|
||||
}
|
|
@ -42,6 +42,7 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
|||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.param.DateOrListParam;
|
||||
import ca.uhn.fhir.rest.param.DateParam;
|
||||
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
|
||||
import ca.uhn.fhir.rest.param.ParameterUtil;
|
||||
import ca.uhn.fhir.rest.param.QualifierDetails;
|
||||
import ca.uhn.fhir.rest.param.TokenOrListParam;
|
||||
|
@ -199,18 +200,77 @@ public class HfqlExecutor implements IHfqlExecutor {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* If the user has included a WHERE clause that has a FHIRPath expression but
|
||||
* could actually be satisfied by a Search Parameter, we'll insert a
|
||||
* search_match expression so that it's more efficient.
|
||||
*/
|
||||
private void massageWhereClauses(HfqlStatement theStatement) {
|
||||
ResourceSearchParams activeSearchParams =
|
||||
mySearchParamRegistry.getActiveSearchParams(theStatement.getFromResourceName());
|
||||
String fromResourceName = theStatement.getFromResourceName();
|
||||
ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(fromResourceName);
|
||||
|
||||
for (HfqlStatement.WhereClause nextWhereClause : theStatement.getWhereClauses()) {
|
||||
|
||||
String left = null;
|
||||
List<String> rightValues = null;
|
||||
String comparator;
|
||||
if (isDataValueWhereClause(nextWhereClause)) {
|
||||
if ("id".equals(nextWhereClause.getLeft())) {
|
||||
left = nextWhereClause.getLeft();
|
||||
comparator = "";
|
||||
rightValues = nextWhereClause.getRightAsStrings();
|
||||
} else if (nextWhereClause.getOperator() == HfqlStatement.WhereClauseOperatorEnum.UNARY_BOOLEAN
|
||||
&& nextWhereClause.getRightAsStrings().size() > 1) {
|
||||
left = nextWhereClause.getLeft();
|
||||
rightValues = nextWhereClause
|
||||
.getRightAsStrings()
|
||||
.subList(1, nextWhereClause.getRightAsStrings().size());
|
||||
switch (nextWhereClause.getRightAsStrings().get(0)) {
|
||||
case "=":
|
||||
comparator = "";
|
||||
break;
|
||||
case "<":
|
||||
comparator = ParamPrefixEnum.LESSTHAN.getValue();
|
||||
break;
|
||||
case "<=":
|
||||
comparator = ParamPrefixEnum.LESSTHAN_OR_EQUALS.getValue();
|
||||
break;
|
||||
case ">":
|
||||
comparator = ParamPrefixEnum.GREATERTHAN.getValue();
|
||||
break;
|
||||
case ">=":
|
||||
comparator = ParamPrefixEnum.GREATERTHAN_OR_EQUALS.getValue();
|
||||
break;
|
||||
case "!=":
|
||||
comparator = ParamPrefixEnum.NOT_EQUAL.getValue();
|
||||
break;
|
||||
case "~":
|
||||
comparator = ParamPrefixEnum.APPROXIMATE.getValue();
|
||||
break;
|
||||
default:
|
||||
left = null;
|
||||
comparator = null;
|
||||
rightValues = null;
|
||||
}
|
||||
} else {
|
||||
comparator = null;
|
||||
}
|
||||
|
||||
if (left != null) {
|
||||
if (isFhirPathExpressionEquivalent("id", left, fromResourceName)) {
|
||||
// This is an expression for Resource.id
|
||||
nextWhereClause.setLeft("id");
|
||||
nextWhereClause.setOperator(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH);
|
||||
String joinedParamValues = nextWhereClause.getRightAsStrings().stream()
|
||||
.map(ParameterUtil::escape)
|
||||
String joinedParamValues =
|
||||
rightValues.stream().map(ParameterUtil::escape).collect(Collectors.joining(","));
|
||||
nextWhereClause.setRight(Constants.PARAM_ID, joinedParamValues);
|
||||
} else if (isFhirPathExpressionEquivalent("meta.lastUpdated", left, fromResourceName)) {
|
||||
// This is an expression for Resource.meta.lastUpdated
|
||||
nextWhereClause.setLeft("id");
|
||||
nextWhereClause.setOperator(HfqlStatement.WhereClauseOperatorEnum.SEARCH_MATCH);
|
||||
String joinedParamValues = rightValues.stream()
|
||||
.map(value -> comparator + ParameterUtil.escape(value))
|
||||
.collect(Collectors.joining(","));
|
||||
nextWhereClause.setRight("_id", joinedParamValues);
|
||||
nextWhereClause.setRight(Constants.PARAM_LASTUPDATED, joinedParamValues);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
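To make the effect of this rewriting concrete (editorial note, mirroring `HfqlExecutorFhirPathTranslationToSearchParamTest` added later in this commit): a clause such as `WHERE meta.lastUpdated >= '2023'` reaches the DAO as a `_lastUpdated` search parameter carrying the matching prefix, rather than being evaluated as FHIRPath per row.

```java
// From the test's perspective, using the map captured from the mocked DAO:
SearchParameterMap map = mySearchParameterMapCaptor.getValue();
DateParam param = (DateParam) map.get("_lastUpdated").get(0).get(0);
assertEquals("2023", param.getValueAsString());
assertEquals(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, param.getPrefix());
```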
@ -490,8 +550,12 @@ public class HfqlExecutor implements IHfqlExecutor {
|
|||
}
|
||||
}
|
||||
} catch (FhirPathExecutionException e) {
|
||||
String expression =
|
||||
nextWhereClause.getOperator() == HfqlStatement.WhereClauseOperatorEnum.UNARY_BOOLEAN
|
||||
? nextWhereClause.asUnaryExpression()
|
||||
: nextWhereClause.getLeft();
|
||||
throw new InvalidRequestException(Msg.code(2403) + "Unable to evaluate FHIRPath expression \""
|
||||
+ nextWhereClause.getLeft() + "\". Error: " + e.getMessage());
|
||||
+ expression + "\". Error: " + e.getMessage());
|
||||
}
|
||||
|
||||
if (!haveMatch) {
|
||||
|
@ -777,6 +841,17 @@ public class HfqlExecutor implements IHfqlExecutor {
|
|||
return new StaticHfqlExecutionResult(null, columns, dataTypes, rows);
|
||||
}
|
||||
|
||||
private static boolean isFhirPathExpressionEquivalent(
|
||||
String wantedExpression, String actualExpression, String fromResourceName) {
|
||||
if (wantedExpression.equals(actualExpression)) {
|
||||
return true;
|
||||
}
|
||||
if (("Resource." + wantedExpression).equals(actualExpression)) {
|
||||
return true;
|
||||
}
|
||||
return (fromResourceName + "." + wantedExpression).equals(actualExpression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns {@literal true} if a where clause has an operator of
|
||||
* {@link ca.uhn.fhir.jpa.fql.parser.HfqlStatement.WhereClauseOperatorEnum#EQUALS}
|
||||
|
@ -796,9 +871,10 @@ public class HfqlExecutor implements IHfqlExecutor {
|
|||
private static boolean evaluateWhereClauseUnaryBoolean(
|
||||
HfqlExecutionContext theExecutionContext, IBaseResource r, HfqlStatement.WhereClause theNextWhereClause) {
|
||||
boolean haveMatch = false;
|
||||
assert theNextWhereClause.getRight().isEmpty();
|
||||
List<IPrimitiveType> values =
|
||||
theExecutionContext.evaluate(r, theNextWhereClause.getLeft(), IPrimitiveType.class);
|
||||
|
||||
String fullExpression = theNextWhereClause.asUnaryExpression();
|
||||
|
||||
List<IPrimitiveType> values = theExecutionContext.evaluate(r, fullExpression, IPrimitiveType.class);
|
||||
for (IPrimitiveType<?> nextValue : values) {
|
||||
if (Boolean.TRUE.equals(nextValue.getValue())) {
|
||||
haveMatch = true;
|
||||
|
|
|
@ -139,6 +139,22 @@ class HfqlLexer {
|
|||
return;
|
||||
}
|
||||
|
||||
for (String nextMultiCharToken : theOptions.getMultiCharTokens()) {
|
||||
boolean haveStringStartingHere = true;
|
||||
for (int i = 0; i < nextMultiCharToken.length(); i++) {
|
||||
if (myInput.length <= myPosition + 1
|
||||
|| nextMultiCharToken.charAt(i) != myInput[myPosition + i]) {
|
||||
haveStringStartingHere = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (haveStringStartingHere) {
|
||||
setNextToken(theOptions, nextMultiCharToken);
|
||||
myPosition += nextMultiCharToken.length();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (theNextChar == '\'') {
|
||||
myNextTokenLine = myLine;
|
||||
myNextTokenColumn = myColumn;
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
*/
|
||||
package ca.uhn.fhir.jpa.fql.parser;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
public enum HfqlLexerOptions {
|
||||
|
@ -28,18 +29,20 @@ public enum HfqlLexerOptions {
|
|||
* more specialized.
|
||||
*/
|
||||
HFQL_TOKEN(
|
||||
List.of(">=", "<=", "!="),
|
||||
Set.of(
|
||||
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
|
||||
'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N',
|
||||
'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '0', '1', '2', '3', '4', '5', '6', '7',
|
||||
'8', '9', '.', '[', ']', '_'),
|
||||
Set.of(',', '=', '(', ')', '|', ':', '*'),
|
||||
'8', '9', '.', '[', ']', '_', '~'),
|
||||
Set.of(',', '=', '(', ')', '|', ':', '*', '<', '>', '!'),
|
||||
false),
|
||||
|
||||
/**
|
||||
* A FHIR search parameter name.
|
||||
*/
|
||||
SEARCH_PARAMETER_NAME(
|
||||
List.of(),
|
||||
Set.of(
|
||||
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
|
||||
'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N',
|
||||
|
@ -52,12 +55,13 @@ public enum HfqlLexerOptions {
|
|||
* A complete FHIRPath expression.
|
||||
*/
|
||||
FHIRPATH_EXPRESSION(
|
||||
List.of(">=", "<=", "!="),
|
||||
Set.of(
|
||||
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
|
||||
'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N',
|
||||
'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '0', '1', '2', '3', '4', '5', '6', '7',
|
||||
'8', '9', '.', '[', ']', '_', '(', ')', '!', '~', '<', '>', '+', '-'),
|
||||
Set.of(',', '|', ':', '*', '='),
|
||||
'8', '9', '.', '[', ']', '_', '(', ')', '+', '-'),
|
||||
Set.of(',', '|', ':', '*', '=', '<', '>', '!', '~'),
|
||||
true),
|
||||
|
||||
/**
|
||||
|
@ -65,22 +69,26 @@ public enum HfqlLexerOptions {
|
|||
* dots as separate tokens.
|
||||
*/
|
||||
FHIRPATH_EXPRESSION_PART(
|
||||
List.of(">=", "<=", "!="),
|
||||
Set.of(
|
||||
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
|
||||
'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N',
|
||||
'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '0', '1', '2', '3', '4', '5', '6', '7',
|
||||
'8', '9', '[', ']', '_', '(', ')', '+', '-'),
|
||||
Set.of(',', '=', '|', ':', '*', '.'),
|
||||
Set.of(',', '=', '|', ':', '*', '<', '>', '!', '~', '.'),
|
||||
true);
|
||||
|
||||
private final Set<Character> myMultiCharTokenCharacters;
|
||||
private final boolean mySlurpParens;
|
||||
private final Set<Character> mySingleCharTokenCharacters;
|
||||
private final List<String> myMultiCharTokens;
|
||||
|
||||
HfqlLexerOptions(
|
||||
List<String> theMultiCharTokens,
|
||||
Set<Character> theMultiCharTokenCharacters,
|
||||
Set<Character> theSingleCharTokenCharacters,
|
||||
boolean theSlurpParens) {
|
||||
myMultiCharTokens = theMultiCharTokens;
|
||||
myMultiCharTokenCharacters = theMultiCharTokenCharacters;
|
||||
mySingleCharTokenCharacters = theSingleCharTokenCharacters;
|
||||
mySlurpParens = theSlurpParens;
|
||||
|
@ -91,6 +99,14 @@ public enum HfqlLexerOptions {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Each of these strings is always treated as a single token when its characters
|
||||
* are found in sequence
|
||||
*/
|
||||
public List<String> getMultiCharTokens() {
|
||||
return myMultiCharTokens;
|
||||
}
|
||||
|
||||
/**
|
||||
* These characters are treated as a single character token if they are found
|
||||
*/
|
||||
|
|
|
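For illustration (editorial, mirroring the lexer test added in this commit): with these options a two-character comparator such as `>=` is kept as one token instead of being split into `>` and `=`.

```java
// HFQL_TOKEN mode: "meta.lastUpdated" and ">=" each come back as single tokens.
List<String> tokens = new HfqlLexer("WHERE meta.lastUpdated >= '2023-10-09'")
		.allTokens(HfqlLexerOptions.HFQL_TOKEN);
// tokens: [WHERE, meta.lastUpdated, >=, '2023-10-09']
```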
@ -32,6 +32,8 @@ import java.util.Arrays;
|
|||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.join;
|
||||
|
||||
/**
|
||||
* This class represents a parsed HFQL expression tree. It is useful for
|
||||
* passing over the wire, but it should not be considered a stable model (in
|
||||
|
@ -327,5 +329,14 @@ public class HfqlStatement implements IModelJson {
|
|||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a concatenation of the {@link #getLeft() left} and all of the {@link #getRight() right} expressions,
|
||||
* each joined by a single space. This is useful for obtaining expressions of
|
||||
* type {@link WhereClauseOperatorEnum#UNARY_BOOLEAN}.
|
||||
*/
|
||||
public String asUnaryExpression() {
|
||||
return getLeft() + " " + join(getRight(), ' ');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
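For example (editorial, consistent with the parser test updated in this commit), a clause parsed from `value.ofType(Quantity).value > 100` ends up with `left = "value.ofType(Quantity).value"` and `right = [">", "100"]`, so rejoining them recovers the full boolean FHIRPath expression:

```java
// "statement" is assumed to be the parsed HfqlStatement for that query.
HfqlStatement.WhereClause clause = statement.getWhereClauses().get(0);
String expression = clause.asUnaryExpression();
// expression is "value.ofType(Quantity).value > 100"
```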
@ -331,10 +331,9 @@ public class HfqlStatementParser {
|
|||
|
||||
HfqlLexerToken nextToken = theToken;
|
||||
if (!KEYWORD_AND.equals(nextToken.asKeyword()) && !DIRECTIVE_KEYWORDS.contains(nextToken.asKeyword())) {
|
||||
StringBuilder expression = new StringBuilder(myWhereClause.getLeft());
|
||||
while (true) {
|
||||
expression.append(' ').append(nextToken.getToken());
|
||||
myWhereClause.addRight(nextToken.getToken());
|
||||
|
||||
while (true) {
|
||||
if (myLexer.hasNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION)) {
|
||||
nextToken = myLexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION);
|
||||
String nextTokenAsKeyword = nextToken.asKeyword();
|
||||
|
@ -342,13 +341,12 @@ public class HfqlStatementParser {
|
|||
|| DIRECTIVE_KEYWORDS.contains(nextTokenAsKeyword)) {
|
||||
break;
|
||||
}
|
||||
myWhereClause.addRight(nextToken.getToken());
|
||||
} else {
|
||||
nextToken = null;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
myWhereClause.setLeft(expression.toString());
|
||||
}
|
||||
|
||||
if (nextToken != null) {
|
||||
|
|
|
@ -0,0 +1,175 @@
|
|||
package ca.uhn.fhir.jpa.fql.executor;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
|
||||
import ca.uhn.fhir.rest.server.IPagingProvider;
|
||||
import ca.uhn.fhir.rest.server.SimpleBundleProvider;
|
||||
import ca.uhn.fhir.rest.server.util.FhirContextSearchParamRegistry;
|
||||
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.r4.model.DateType;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.hl7.fhir.r4.model.Quantity;
|
||||
import org.hl7.fhir.r4.model.StringType;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.Captor;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.Spy;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
public abstract class BaseHfqlExecutorTest {
|
||||
|
||||
protected final RequestDetails mySrd = new SystemRequestDetails();
|
||||
@Spy
|
||||
protected FhirContext myCtx = FhirContext.forR4Cached();
|
||||
@Mock
|
||||
protected DaoRegistry myDaoRegistry;
|
||||
@Mock
|
||||
protected IPagingProvider myPagingProvider;
|
||||
@Spy
|
||||
protected ISearchParamRegistry mySearchParamRegistry = new FhirContextSearchParamRegistry(myCtx);
|
||||
@InjectMocks
|
||||
protected HfqlExecutor myHfqlExecutor = new HfqlExecutor();
|
||||
@Captor
|
||||
protected ArgumentCaptor<SearchParameterMap> mySearchParameterMapCaptor;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected <T extends IBaseResource> IFhirResourceDao<T> initDao(Class<T> theType) {
|
||||
IFhirResourceDao<T> retVal = mock(IFhirResourceDao.class);
|
||||
String type = myCtx.getResourceType(theType);
|
||||
when(myDaoRegistry.getResourceDao(type)).thenReturn(retVal);
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
protected static List<List<Object>> readAllRowValues(IHfqlExecutionResult result) {
|
||||
List<List<Object>> rowValues = new ArrayList<>();
|
||||
while (result.hasNext()) {
|
||||
rowValues.add(new ArrayList<>(result.getNextRow().getRowValues()));
|
||||
}
|
||||
return rowValues;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
protected static Observation createCardiologyNoteObservation(String id, String noteText) {
|
||||
Observation obs = new Observation();
|
||||
obs.setId(id);
|
||||
obs.getCode().addCoding()
|
||||
.setSystem("http://loinc.org")
|
||||
.setCode("34752-6");
|
||||
obs.setValue(new StringType(noteText));
|
||||
return obs;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
protected static Observation createWeightObservationWithKilos(String obsId, long kg) {
|
||||
Observation obs = new Observation();
|
||||
obs.setId(obsId);
|
||||
obs.getCode().addCoding()
|
||||
.setSystem("http://loinc.org")
|
||||
.setCode("29463-7");
|
||||
obs.setValue(new Quantity(null, kg, "http://unitsofmeasure.org", "kg", "kg"));
|
||||
return obs;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
protected static SimpleBundleProvider createProviderWithSparseNames() {
|
||||
Patient patientNoValues = new Patient();
|
||||
patientNoValues.setActive(true);
|
||||
Patient patientFamilyNameOnly = new Patient();
|
||||
patientFamilyNameOnly.addName().setFamily("Simpson");
|
||||
Patient patientGivenNameOnly = new Patient();
|
||||
patientGivenNameOnly.addName().addGiven("Homer");
|
||||
Patient patientBothNames = new Patient();
|
||||
patientBothNames.addName().setFamily("Simpson").addGiven("Homer");
|
||||
return new SimpleBundleProvider(List.of(
|
||||
patientNoValues, patientFamilyNameOnly, patientGivenNameOnly, patientBothNames));
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
protected static SimpleBundleProvider createProviderWithSomeSimpsonsAndFlanders() {
|
||||
return new SimpleBundleProvider(
|
||||
createPatientHomerSimpson(),
|
||||
createPatientNedFlanders(),
|
||||
createPatientBartSimpson(),
|
||||
createPatientLisaSimpson(),
|
||||
createPatientMaggieSimpson()
|
||||
);
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
protected static SimpleBundleProvider createProviderWithSomeSimpsonsAndFlandersWithSomeDuplicates() {
|
||||
return new SimpleBundleProvider(
|
||||
createPatientHomerSimpson(),
|
||||
createPatientHomerSimpson(),
|
||||
createPatientNedFlanders(),
|
||||
createPatientNedFlanders(),
|
||||
createPatientBartSimpson(),
|
||||
createPatientLisaSimpson(),
|
||||
createPatientMaggieSimpson());
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
protected static Patient createPatientMaggieSimpson() {
|
||||
Patient maggie = new Patient();
|
||||
maggie.addName().setFamily("Simpson").addGiven("Maggie").addGiven("Evelyn");
|
||||
maggie.addIdentifier().setSystem("http://system").setValue("value4");
|
||||
return maggie;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
protected static Patient createPatientLisaSimpson() {
|
||||
Patient lisa = new Patient();
|
||||
lisa.getMeta().setVersionId("1");
|
||||
lisa.addName().setFamily("Simpson").addGiven("Lisa").addGiven("Marie");
|
||||
lisa.addIdentifier().setSystem("http://system").setValue("value3");
|
||||
return lisa;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
protected static Patient createPatientBartSimpson() {
|
||||
Patient bart = new Patient();
|
||||
bart.getMeta().setVersionId("3");
|
||||
bart.addName().setFamily("Simpson").addGiven("Bart").addGiven("El Barto");
|
||||
bart.addIdentifier().setSystem("http://system").setValue("value2");
|
||||
return bart;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
protected static Patient createPatientNedFlanders() {
|
||||
Patient nedFlanders = new Patient();
|
||||
nedFlanders.getMeta().setVersionId("1");
|
||||
nedFlanders.addName().setFamily("Flanders").addGiven("Ned");
|
||||
nedFlanders.addIdentifier().setSystem("http://system").setValue("value1");
|
||||
return nedFlanders;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
protected static Patient createPatientHomerSimpson() {
|
||||
Patient homer = new Patient();
|
||||
homer.setId("HOMER0");
|
||||
homer.getMeta().setVersionId("2");
|
||||
homer.addName().setFamily("Simpson").addGiven("Homer").addGiven("Jay");
|
||||
homer.addIdentifier().setSystem("http://system").setValue("value0");
|
||||
homer.setBirthDateElement(new DateType("1950-01-01"));
|
||||
return homer;
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,103 @@
|
|||
package ca.uhn.fhir.jpa.fql.executor;
|
||||
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.rest.param.DateParam;
|
||||
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.CsvSource;
|
||||
import org.junit.jupiter.params.provider.ValueSource;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.Mockito.times;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
/**
|
||||
* We should auto-translate FHIRPath expressions like
|
||||
* <code>id</code> or <code>meta.lastUpdated</code>
|
||||
* to an equivalent search parameter since that's more efficient
|
||||
*/
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
public class HfqlExecutorFhirPathTranslationToSearchParamTest extends BaseHfqlExecutorTest {
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource(textBlock = """
|
||||
id , true
|
||||
Resource.id , true
|
||||
Patient.id , true
|
||||
foo.id , false
|
||||
"""
|
||||
)
|
||||
public void testId(String theExpression, boolean theShouldConvert) {
|
||||
IFhirResourceDao<Patient> patientDao = initDao(Patient.class);
|
||||
when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders());
|
||||
|
||||
String statement = """
|
||||
SELECT
|
||||
id, birthDate, meta.lastUpdated
|
||||
FROM
|
||||
Patient
|
||||
WHERE
|
||||
id = 'ABC123'
|
||||
""";
|
||||
statement = statement.replace(" id =", " " + theExpression + " =");
|
||||
|
||||
myHfqlExecutor.executeInitialSearch(statement, null, mySrd);
|
||||
|
||||
verify(patientDao, times(1)).search(mySearchParameterMapCaptor.capture(), any());
|
||||
SearchParameterMap map = mySearchParameterMapCaptor.getValue();
|
||||
if (theShouldConvert) {
|
||||
assertEquals(1, map.get("_id").size());
|
||||
assertEquals(1, map.get("_id").get(0).size());
|
||||
assertNull(((TokenParam) map.get("_id").get(0).get(0)).getSystem());
|
||||
assertEquals("ABC123", ((TokenParam) map.get("_id").get(0).get(0)).getValue());
|
||||
} else {
|
||||
assertNull(map.get("_id"));
|
||||
}
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource(textBlock = """
|
||||
meta.lastUpdated = '2023' , 2023 ,
|
||||
meta.lastUpdated > '2023' , 2023 , GREATERTHAN
|
||||
meta.lastUpdated >= '2023' , 2023 , GREATERTHAN_OR_EQUALS
|
||||
meta.lastUpdated < '2023' , 2023 , LESSTHAN
|
||||
meta.lastUpdated <= '2023' , 2023 , LESSTHAN_OR_EQUALS
|
||||
meta.lastUpdated != '2023' , 2023 , NOT_EQUAL
|
||||
meta.lastUpdated ~ '2023' , 2023 , APPROXIMATE
|
||||
"""
|
||||
)
|
||||
public void testLastUpdated(String theExpression, String theExpectedParamValue, ParamPrefixEnum theExpectedParamPrefix) {
|
||||
IFhirResourceDao<Patient> patientDao = initDao(Patient.class);
|
||||
when(patientDao.search(any(), any())).thenReturn(createProviderWithSomeSimpsonsAndFlanders());
|
||||
|
||||
String statement = """
|
||||
SELECT
|
||||
id, birthDate, meta.lastUpdated
|
||||
FROM
|
||||
Patient
|
||||
WHERE
|
||||
meta.lastUpdated = '2023'
|
||||
""";
|
||||
statement = statement.replace("meta.lastUpdated = '2023'", theExpression);
|
||||
|
||||
myHfqlExecutor.executeInitialSearch(statement, null, mySrd);
|
||||
|
||||
verify(patientDao, times(1)).search(mySearchParameterMapCaptor.capture(), any());
|
||||
SearchParameterMap map = mySearchParameterMapCaptor.getValue();
|
||||
assertEquals(1, map.get("_lastUpdated").size());
|
||||
assertEquals(1, map.get("_lastUpdated").get(0).size());
|
||||
assertEquals(theExpectedParamValue, ((DateParam) map.get("_lastUpdated").get(0).get(0)).getValueAsString());
|
||||
assertEquals(theExpectedParamPrefix, ((DateParam) map.get("_lastUpdated").get(0).get(0)).getPrefix());
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -59,22 +59,7 @@ import static org.mockito.Mockito.times;
|
|||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
public class HfqlExecutorTest {
|
||||
|
||||
private final RequestDetails mySrd = new SystemRequestDetails();
|
||||
@Spy
|
||||
private FhirContext myCtx = FhirContext.forR4Cached();
|
||||
@Mock
|
||||
private DaoRegistry myDaoRegistry;
|
||||
@Mock
|
||||
private IPagingProvider myPagingProvider;
|
||||
@Spy
|
||||
private ISearchParamRegistry mySearchParamRegistry = new FhirContextSearchParamRegistry(myCtx);
|
||||
@InjectMocks
|
||||
private HfqlExecutor myHfqlExecutor = new HfqlExecutor();
|
||||
@Captor
|
||||
private ArgumentCaptor<SearchParameterMap> mySearchParameterMapCaptor;
|
||||
public class HfqlExecutorTest extends BaseHfqlExecutorTest {
|
||||
|
||||
@Test
|
||||
public void testContinuation() {
|
||||
|
@ -1253,126 +1238,4 @@ public class HfqlExecutorTest {
|
|||
assertErrorMessage(result, "HAPI-2429: Resource type Patient does not have a root element named 'Blah'");
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private <T extends IBaseResource> IFhirResourceDao<T> initDao(Class<T> theType) {
|
||||
IFhirResourceDao<T> retVal = mock(IFhirResourceDao.class);
|
||||
String type = myCtx.getResourceType(theType);
|
||||
when(myDaoRegistry.getResourceDao(type)).thenReturn(retVal);
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private static List<List<Object>> readAllRowValues(IHfqlExecutionResult result) {
|
||||
List<List<Object>> rowValues = new ArrayList<>();
|
||||
while (result.hasNext()) {
|
||||
rowValues.add(new ArrayList<>(result.getNextRow().getRowValues()));
|
||||
}
|
||||
return rowValues;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private static Observation createCardiologyNoteObservation(String id, String noteText) {
|
||||
Observation obs = new Observation();
|
||||
obs.setId(id);
|
||||
obs.getCode().addCoding()
|
||||
.setSystem("http://loinc.org")
|
||||
.setCode("34752-6");
|
||||
obs.setValue(new StringType(noteText));
|
||||
return obs;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private static Observation createWeightObservationWithKilos(String obsId, long kg) {
|
||||
Observation obs = new Observation();
|
||||
obs.setId(obsId);
|
||||
obs.getCode().addCoding()
|
||||
.setSystem("http://loinc.org")
|
||||
.setCode("29463-7");
|
||||
obs.setValue(new Quantity(null, kg, "http://unitsofmeasure.org", "kg", "kg"));
|
||||
return obs;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private static SimpleBundleProvider createProviderWithSparseNames() {
|
||||
Patient patientNoValues = new Patient();
|
||||
patientNoValues.setActive(true);
|
||||
Patient patientFamilyNameOnly = new Patient();
|
||||
patientFamilyNameOnly.addName().setFamily("Simpson");
|
||||
Patient patientGivenNameOnly = new Patient();
|
||||
patientGivenNameOnly.addName().addGiven("Homer");
|
||||
Patient patientBothNames = new Patient();
|
||||
patientBothNames.addName().setFamily("Simpson").addGiven("Homer");
|
||||
return new SimpleBundleProvider(List.of(
|
||||
patientNoValues, patientFamilyNameOnly, patientGivenNameOnly, patientBothNames));
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private static SimpleBundleProvider createProviderWithSomeSimpsonsAndFlanders() {
|
||||
return new SimpleBundleProvider(
|
||||
createPatientHomerSimpson(),
|
||||
createPatientNedFlanders(),
|
||||
createPatientBartSimpson(),
|
||||
createPatientLisaSimpson(),
|
||||
createPatientMaggieSimpson()
|
||||
);
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private static SimpleBundleProvider createProviderWithSomeSimpsonsAndFlandersWithSomeDuplicates() {
|
||||
return new SimpleBundleProvider(
|
||||
createPatientHomerSimpson(),
|
||||
createPatientHomerSimpson(),
|
||||
createPatientNedFlanders(),
|
||||
createPatientNedFlanders(),
|
||||
createPatientBartSimpson(),
|
||||
createPatientLisaSimpson(),
|
||||
createPatientMaggieSimpson());
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private static Patient createPatientMaggieSimpson() {
|
||||
Patient maggie = new Patient();
|
||||
maggie.addName().setFamily("Simpson").addGiven("Maggie").addGiven("Evelyn");
|
||||
maggie.addIdentifier().setSystem("http://system").setValue("value4");
|
||||
return maggie;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private static Patient createPatientLisaSimpson() {
|
||||
Patient lisa = new Patient();
|
||||
lisa.getMeta().setVersionId("1");
|
||||
lisa.addName().setFamily("Simpson").addGiven("Lisa").addGiven("Marie");
|
||||
lisa.addIdentifier().setSystem("http://system").setValue("value3");
|
||||
return lisa;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private static Patient createPatientBartSimpson() {
|
||||
Patient bart = new Patient();
|
||||
bart.getMeta().setVersionId("3");
|
||||
bart.addName().setFamily("Simpson").addGiven("Bart").addGiven("El Barto");
|
||||
bart.addIdentifier().setSystem("http://system").setValue("value2");
|
||||
return bart;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private static Patient createPatientNedFlanders() {
|
||||
Patient nedFlanders = new Patient();
|
||||
nedFlanders.getMeta().setVersionId("1");
|
||||
nedFlanders.addName().setFamily("Flanders").addGiven("Ned");
|
||||
nedFlanders.addIdentifier().setSystem("http://system").setValue("value1");
|
||||
return nedFlanders;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private static Patient createPatientHomerSimpson() {
|
||||
Patient homer = new Patient();
|
||||
homer.setId("HOMER0");
|
||||
homer.getMeta().setVersionId("2");
|
||||
homer.addName().setFamily("Simpson").addGiven("Homer").addGiven("Jay");
|
||||
homer.addIdentifier().setSystem("http://system").setValue("value0");
|
||||
homer.setBirthDateElement(new DateType("1950-01-01"));
|
||||
return homer;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -5,6 +5,7 @@ import org.junit.jupiter.api.Test;
|
|||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.CsvSource;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
|
@ -144,6 +145,76 @@ public class HfqlLexerTest {
|
|||
assertEquals("( Observation.value.ofType ( Quantity ) ).unit", lexer.getNextToken(HfqlLexerOptions.FHIRPATH_EXPRESSION).getToken());
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource(textBlock = """
|
||||
>= , false , HFQL_TOKEN
|
||||
<= , false , HFQL_TOKEN
|
||||
!= , false , HFQL_TOKEN
|
||||
= , false , HFQL_TOKEN
|
||||
>= , true , HFQL_TOKEN
|
||||
<= , true , HFQL_TOKEN
|
||||
!= , true , HFQL_TOKEN
|
||||
~ , true , HFQL_TOKEN
|
||||
= , true , HFQL_TOKEN
|
||||
>= , false , FHIRPATH_EXPRESSION
|
||||
<= , false , FHIRPATH_EXPRESSION
|
||||
!= , false , FHIRPATH_EXPRESSION
|
||||
= , false , FHIRPATH_EXPRESSION
|
||||
>= , true , FHIRPATH_EXPRESSION
|
||||
<= , true , FHIRPATH_EXPRESSION
|
||||
!= , true , FHIRPATH_EXPRESSION
|
||||
~ , true , FHIRPATH_EXPRESSION
|
||||
= , true , FHIRPATH_EXPRESSION
|
||||
>= , false , FHIRPATH_EXPRESSION_PART
|
||||
<= , false , FHIRPATH_EXPRESSION_PART
|
||||
!= , false , FHIRPATH_EXPRESSION_PART
|
||||
= , false , FHIRPATH_EXPRESSION_PART
|
||||
>= , true , FHIRPATH_EXPRESSION_PART
|
||||
<= , true , FHIRPATH_EXPRESSION_PART
|
||||
!= , true , FHIRPATH_EXPRESSION_PART
|
||||
~ , true , FHIRPATH_EXPRESSION_PART
|
||||
= , true , FHIRPATH_EXPRESSION_PART
|
||||
"""
|
||||
)
|
||||
void testComparators(String theComparator, boolean thePad, HfqlLexerOptions theOptions) {
|
||||
String input = """
|
||||
SELECT
|
||||
id
|
||||
FROM
|
||||
Patient
|
||||
WHERE
|
||||
meta.lastUpdated >= '2023-10-09'
|
||||
""";
|
||||
|
||||
String comparator = theComparator.trim();
|
||||
if (thePad) {
|
||||
input = input.replace(" >= ", " " + comparator + " ");
|
||||
} else {
|
||||
input = input.replace(" >= ", comparator);
|
||||
}
|
||||
|
||||
List<String> allTokens = new HfqlLexer(input).allTokens(theOptions);
|
||||
|
||||
List<String> expectedItems = new ArrayList<>();
|
||||
expectedItems.add("SELECT");
|
||||
expectedItems.add("id");
|
||||
expectedItems.add("FROM");
|
||||
expectedItems.add("Patient");
|
||||
expectedItems.add("WHERE");
|
||||
if (theOptions == HfqlLexerOptions.FHIRPATH_EXPRESSION_PART) {
|
||||
expectedItems.add("meta");
|
||||
expectedItems.add(".");
|
||||
expectedItems.add("lastUpdated");
|
||||
} else {
|
||||
expectedItems.add("meta.lastUpdated");
|
||||
}
|
||||
expectedItems.add(comparator);
|
||||
expectedItems.add("'2023-10-09'");
|
||||
|
||||
assertThat(allTokens.toString(), allTokens, contains(expectedItems.toArray(new String[0])));
|
||||
}
|
||||
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource({
|
||||
"token1 token2 'token3, HFQL_TOKEN",
|
||||
|
|
|
@ -246,9 +246,9 @@ public class HfqlStatementParserTest {
|
|||
|
||||
HfqlStatement statement = parse(input);
|
||||
assertEquals(1, statement.getWhereClauses().size());
|
||||
assertEquals("value.ofType(Quantity).value > 100", statement.getWhereClauses().get(0).getLeft());
|
||||
assertEquals("value.ofType(Quantity).value", statement.getWhereClauses().get(0).getLeft());
|
||||
assertThat(statement.getWhereClauses().get(0).getRightAsStrings(), contains(">", "100"));
|
||||
assertEquals(HfqlStatement.WhereClauseOperatorEnum.UNARY_BOOLEAN, statement.getWhereClauses().get(0).getOperator());
|
||||
assertEquals(0, statement.getWhereClauses().get(0).getRight().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
|
@@ -25,14 +25,15 @@ import ca.uhn.fhir.rest.api.Constants;
import jakarta.persistence.*;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.Length;
import org.hibernate.annotations.OptimisticLock;
import org.hibernate.type.SqlTypes;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;

import static org.apache.commons.lang3.StringUtils.defaultString;

@Entity
@Table(
	name = ResourceHistoryTable.HFJ_RES_VER,

@@ -57,7 +58,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
	public static final int ENCODING_COL_LENGTH = 5;

	public static final String HFJ_RES_VER = "HFJ_RES_VER";
	public static final int RES_TEXT_VC_MAX_LENGTH = 4000;
	private static final long serialVersionUID = 1L;

	@Id

@@ -86,13 +86,15 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
	@OneToMany(mappedBy = "myResourceHistory", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
	private Collection<ResourceHistoryTag> myTags;

	/**
	 * Note: No setter for this field because it's only a legacy way of storing data now.
	 */
	@Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = true)
	@Lob()
	@OptimisticLock(excluded = true)
	private byte[] myResource;

	@Column(name = "RES_TEXT_VC", length = RES_TEXT_VC_MAX_LENGTH, nullable = true)
	@JdbcTypeCode(SqlTypes.LONG32VARCHAR)
	@Column(name = "RES_TEXT_VC", nullable = true, length = Length.LONG32)
	@OptimisticLock(excluded = true)
	private String myResourceTextVc;

@@ -153,7 +155,8 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
	}

	public void setResourceTextVc(String theResourceTextVc) {
		myResourceTextVc = theResourceTextVc;
		myResource = null;
		myResourceTextVc = defaultString(theResourceTextVc);
	}

	public ResourceHistoryProvenanceEntity getProvenance() {

@@ -209,10 +212,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
		return myResource;
	}

	public void setResource(byte[] theResource) {
		myResource = theResource;
	}

	@Override
	public Long getResourceId() {
		return myResourceId;
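As the entity comment above notes, the `RES_TEXT` LOB column is now only a legacy storage location while new writes go to `RES_TEXT_VC`. A minimal sketch of how a consumer of this entity might read the body either way, reusing the `GZipUtil` helper seen elsewhere in this change (the variable names and the fallback logic are illustrative assumptions, not part of this diff):

	// Hypothetical reader: prefer the new varchar column, fall back to the legacy compressed LOB
	ResourceHistoryTable entity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(pid, version);
	String resourceBody;
	if (entity.getResourceTextVc() != null) {
		resourceBody = entity.getResourceTextVc();
	} else {
		// Legacy rows written before this change still carry GZip-compressed bytes in RES_TEXT
		resourceBody = GZipUtil.decompress(entity.getResource());
	}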
@@ -41,14 +41,12 @@ public class ConsumeFilesStepR4Test extends BasePartitioningR4Test {
	public void before() throws Exception {
		super.before();
		myPartitionSettings.setPartitioningEnabled(false);
		myStorageSettings.setInlineResourceTextBelowSize(10000);
	}

	@AfterEach
	@Override
	public void after() {
		super.after();
		myStorageSettings.setInlineResourceTextBelowSize(new JpaStorageSettings().getInlineResourceTextBelowSize());
	}

	@Test
@@ -91,7 +91,6 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
		myStorageSettings.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
		myStorageSettings.setIndexOnContainedResources(new JpaStorageSettings().isIndexOnContainedResources());
		myStorageSettings.setIndexOnContainedResourcesRecursively(new JpaStorageSettings().isIndexOnContainedResourcesRecursively());
		myStorageSettings.setInlineResourceTextBelowSize(new JpaStorageSettings().getInlineResourceTextBelowSize());
	}

	@Test
@ -1,102 +1,61 @@
|
|||
package ca.uhn.fhir.jpa.dao.r4;
|
||||
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
|
||||
import ca.uhn.fhir.jpa.dao.GZipUtil;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.hl7.fhir.r4.model.IdType;
|
||||
import ca.uhn.fhir.rest.param.DateRangeParam;
|
||||
import ca.uhn.fhir.rest.param.HistorySearchDateRangeParam;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.util.HashMap;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
public class FhirResourceDaoR4InlineResourceModeTest extends BaseJpaR4Test {
|
||||
|
||||
@BeforeEach
|
||||
public void beforeSetDao() {
|
||||
myStorageSettings.setInlineResourceTextBelowSize(5000);
|
||||
}
|
||||
|
||||
@AfterEach
|
||||
public void afterResetDao() {
|
||||
myStorageSettings.setInlineResourceTextBelowSize(new JpaStorageSettings().getInlineResourceTextBelowSize());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCreateWithInlineResourceTextStorage() {
|
||||
Patient patient = new Patient();
|
||||
patient.setActive(true);
|
||||
Long resourceId = myPatientDao.create(patient).getId().getIdPartAsLong();
|
||||
public void testRetrieveNonInlinedResource() {
|
||||
IIdType id = createPatient(withActiveTrue());
|
||||
Long pid = id.getIdPartAsLong();
|
||||
|
||||
patient = new Patient();
|
||||
patient.setId("Patient/" + resourceId);
|
||||
patient.setActive(false);
|
||||
myPatientDao.update(patient);
|
||||
relocateResourceTextToCompressedColumn(pid, 1L);
|
||||
|
||||
runInTransaction(() -> {
|
||||
// Version 1
|
||||
ResourceHistoryTable entity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(resourceId, 1);
|
||||
assertNull(entity.getResource());
|
||||
assertThat(entity.getResourceTextVc(), containsString("\"active\":true"));
|
||||
// Version 2
|
||||
entity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(resourceId, 2);
|
||||
assertNull(entity.getResource());
|
||||
assertThat(entity.getResourceTextVc(), containsString("\"active\":false"));
|
||||
runInTransaction(()->{
|
||||
ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(pid, 1);
|
||||
assertNotNull(historyEntity.getResource());
|
||||
assertNull(historyEntity.getResourceTextVc());
|
||||
assertEquals(ResourceEncodingEnum.JSONC, historyEntity.getEncoding());
|
||||
});
|
||||
|
||||
patient = myPatientDao.read(new IdType("Patient/" + resourceId));
|
||||
assertFalse(patient.getActive());
|
||||
// Read
|
||||
validatePatient(myPatientDao.read(id.withVersion(null), mySrd));
|
||||
|
||||
patient = (Patient) myPatientDao.search(SearchParameterMap.newSynchronous()).getAllResources().get(0);
|
||||
assertFalse(patient.getActive());
|
||||
// VRead
|
||||
validatePatient(myPatientDao.read(id.withVersion("1"), mySrd));
|
||||
|
||||
// Search (Sync)
|
||||
validatePatient(myPatientDao.search(SearchParameterMap.newSynchronous(), mySrd).getResources(0, 1).get(0));
|
||||
|
||||
// Search (Async)
|
||||
validatePatient(myPatientDao.search(new SearchParameterMap(), mySrd).getResources(0, 1).get(0));
|
||||
|
||||
// History
|
||||
validatePatient(myPatientDao.history(id, new HistorySearchDateRangeParam(new HashMap<>(), new DateRangeParam(), 0), mySrd).getResources(0, 1).get(0));
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testDontUseInlineAboveThreshold() {
|
||||
String veryLongFamilyName = StringUtils.leftPad("", 6000, 'a');
|
||||
|
||||
Patient patient = new Patient();
|
||||
patient.setActive(true);
|
||||
patient.addName().setFamily(veryLongFamilyName);
|
||||
Long resourceId = myPatientDao.create(patient).getId().getIdPartAsLong();
|
||||
|
||||
runInTransaction(() -> {
|
||||
// Version 1
|
||||
ResourceHistoryTable entity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(resourceId, 1);
|
||||
assertNotNull(entity.getResource());
|
||||
assertNull(entity.getResourceTextVc());
|
||||
});
|
||||
|
||||
patient = myPatientDao.read(new IdType("Patient/" + resourceId));
|
||||
assertEquals(veryLongFamilyName, patient.getNameFirstRep().getFamily());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testNopOnUnchangedUpdate() {
|
||||
Patient patient = new Patient();
|
||||
patient.setActive(true);
|
||||
Long resourceId = myPatientDao.create(patient).getId().getIdPartAsLong();
|
||||
|
||||
patient = new Patient();
|
||||
patient.setId("Patient/" + resourceId);
|
||||
patient.setActive(true);
|
||||
DaoMethodOutcome updateOutcome = myPatientDao.update(patient);
|
||||
assertEquals("1", updateOutcome.getId().getVersionIdPart());
|
||||
assertTrue(updateOutcome.isNop());
|
||||
|
||||
private void validatePatient(IBaseResource theRead) {
|
||||
assertTrue(((Patient)theRead).getActive());
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@@ -1031,26 +1031,15 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test

	@ParameterizedTest
	@CsvSource({
		// NoOp OptimisticLock OptimizeMode ExpectedSelect ExpectedUpdate
		" false, false, CURRENT_VERSION, 2, 1",
		" true, false, CURRENT_VERSION, 2, 0",
		" false, true, CURRENT_VERSION, 12, 1",
		" true, true, CURRENT_VERSION, 12, 0",
		" false, false, ALL_VERSIONS, 12, 10",
		" true, false, ALL_VERSIONS, 12, 0",
		" false, true, ALL_VERSIONS, 22, 10",
		" true, true, ALL_VERSIONS, 22, 0",
		// OptimisticLock OptimizeMode ExpectedSelect ExpectedUpdate
		" false, CURRENT_VERSION, 2, 0",
		" true, CURRENT_VERSION, 12, 0",
		" false, ALL_VERSIONS, 12, 0",
		" true, ALL_VERSIONS, 22, 0",
	})
	public void testReindexJob_OptimizeStorage(boolean theNoOp, boolean theOptimisticLock, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageModeEnum, int theExpectedSelectCount, int theExpectedUpdateCount) {
	public void testReindexJob_OptimizeStorage(boolean theOptimisticLock, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageModeEnum, int theExpectedSelectCount, int theExpectedUpdateCount) {
		// Setup

		// In no-op mode, the inlining is already in the state it needs to be in
		if (theNoOp) {
			myStorageSettings.setInlineResourceTextBelowSize(10000);
		} else {
			myStorageSettings.setInlineResourceTextBelowSize(0);
		}

		ResourceIdListWorkChunkJson data = new ResourceIdListWorkChunkJson();
		IIdType patientId = createPatient(withActiveTrue());
		for (int i = 0; i < 10; i++) {
@@ -274,7 +274,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
			ResourceHistoryTable newHistory = table.toHistory(true);
			ResourceHistoryTable currentHistory = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(table.getId(), 1L);
			newHistory.setEncoding(currentHistory.getEncoding());
			newHistory.setResource(currentHistory.getResource());
			newHistory.setResourceTextVc(currentHistory.getResourceTextVc());
			myResourceHistoryTableDao.save(newHistory);
		});

@@ -2928,7 +2928,7 @@
			ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1L);
			String newContent = myFhirContext.newJsonParser().encodeResourceToString(p);
			newContent = newContent.replace("male", "foo");
			table.setResource(newContent.getBytes(Charsets.UTF_8));
			table.setResourceTextVc(newContent);
			table.setEncoding(ResourceEncodingEnum.JSON);
			myResourceHistoryTableDao.save(table);
		}
@@ -620,11 +620,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
		template.execute((TransactionCallback<ResourceTable>) t -> {
			ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), id.getVersionIdPartAsLong());
			resourceHistoryTable.setEncoding(ResourceEncodingEnum.JSON);
			try {
				resourceHistoryTable.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
			} catch (UnsupportedEncodingException e) {
				throw new Error(e);
			}
			resourceHistoryTable.setResourceTextVc("{\"resourceType\":\"FOO\"}");
			myResourceHistoryTableDao.save(resourceHistoryTable);

			ResourceTable table = myResourceTableDao.findById(id.getIdPartAsLong()).orElseThrow(IllegalStateException::new);

@@ -1917,11 +1913,11 @@

		Patient p = new Patient();
		p.addIdentifier().setSystem("urn:system").setValue(methodName);
		myPatientDao.create(p, mySrd).getId();
		myPatientDao.create(p, mySrd);

		p = new Patient();
		p.addIdentifier().setSystem("urn:system").setValue(methodName);
		myPatientDao.create(p, mySrd).getId();
		myPatientDao.create(p, mySrd);

		Observation o = new Observation();
		o.getCode().setText("Some Observation");
@@ -87,6 +87,11 @@ public class ReindexJobTest extends BaseJpaR4Test {
			createPatient(withActiveTrue());
		}

		// Move resource text to compressed storage, which we don't write to anymore but legacy
		// data may exist that was previously stored there, so we're simulating that.
		List<ResourceHistoryTable> allHistoryEntities = runInTransaction(() -> myResourceHistoryTableDao.findAll());
		allHistoryEntities.forEach(t->relocateResourceTextToCompressedColumn(t.getResourceId(), t.getVersion()));

		runInTransaction(()->{
			assertEquals(20, myResourceHistoryTableDao.count());
			for (ResourceHistoryTable history : myResourceHistoryTableDao.findAll()) {

@@ -141,6 +146,11 @@
			createPatient(withActiveTrue());
		}

		// Move resource text to compressed storage, which we don't write to anymore but legacy
		// data may exist that was previously stored there, so we're simulating that.
		List<ResourceHistoryTable> allHistoryEntities = runInTransaction(() -> myResourceHistoryTableDao.findAll());
		allHistoryEntities.forEach(t->relocateResourceTextToCompressedColumn(t.getResourceId(), t.getVersion()));

		runInTransaction(()->{
			assertEquals(20, myResourceHistoryTableDao.count());
			for (ResourceHistoryTable history : myResourceHistoryTableDao.findAll()) {

@@ -149,8 +159,6 @@
			}
		});

		myStorageSettings.setInlineResourceTextBelowSize(10000);

		// execute
		JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
		startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);
@@ -38,11 +38,9 @@ public class ResourceProviderInvalidDataR4Test extends BaseResourceProviderR4Tes
		// Manually set the value to be an invalid decimal number
		runInTransaction(() -> {
			ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id, 1);
			byte[] bytesCompressed = resVer.getResource();
			String resourceText = GZipUtil.decompress(bytesCompressed);
			String resourceText = resVer.getResourceTextVc();
			resourceText = resourceText.replace("100", "-.100");
			bytesCompressed = GZipUtil.compress(resourceText);
			resVer.setResource(bytesCompressed);
			resVer.setResourceTextVc(resourceText);
			myResourceHistoryTableDao.save(resVer);
		});
@ -54,6 +54,7 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
|
|||
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
|
||||
import ca.uhn.fhir.util.ClasspathUtil;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import ca.uhn.fhir.util.TestUtil;
|
||||
import ca.uhn.fhir.util.UrlUtil;
|
||||
import com.google.common.base.Charsets;
|
||||
import com.google.common.collect.Lists;
|
||||
|
@ -6723,7 +6724,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
|
|||
|
||||
// Update Patient after delay
|
||||
int delayInMs = 1000;
|
||||
TimeUnit.MILLISECONDS.sleep(delayInMs);
|
||||
TestUtil.sleepAtLeast(delayInMs + 100);
|
||||
patient.getNameFirstRep().addGiven("Bob");
|
||||
myClient.update().resource(patient).execute();
|
||||
|
||||
|
|
|
@@ -397,6 +397,11 @@ public class GiantTransactionPerfTest {
			throw new UnsupportedOperationException();
		}

		@Override
		public void updateNonInlinedContents(byte[] theText, long thePid) {
			throw new UnsupportedOperationException();
		}

		@Nonnull
		@Override
		public List<ResourceHistoryTable> findAll() {
@@ -34,6 +34,20 @@
			<scope>test</scope>
		</dependency>

		<!-- Needed for Testcontainers -->
		<dependency>
			<groupId>junit</groupId>
			<artifactId>junit</artifactId>
			<version>4.13.2</version>
			<scope>provided</scope>
			<exclusions>
				<exclusion>
					<groupId>org.hamcrest</groupId>
					<artifactId>hamcrest-core</artifactId>
				</exclusion>
			</exclusions>
		</dependency>

	</dependencies>

	<build>
@@ -0,0 +1,148 @@
package ca.uhn.fhir.jpa.dao.r5.database;

import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.embedded.JpaEmbeddedDatabase;
import ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc;
import ca.uhn.fhir.jpa.migrate.MigrationTaskList;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.jpa.test.config.TestR5Config;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.util.VersionEnum;
import jakarta.persistence.EntityManagerFactory;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r5.model.Patient;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.envers.repository.support.EnversRevisionRepositoryFactoryBean;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;

import javax.sql.DataSource;
import java.util.Properties;
import java.util.Set;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

@ExtendWith(SpringExtension.class)
@EnableJpaRepositories(repositoryFactoryBeanClass = EnversRevisionRepositoryFactoryBean.class)
@ContextConfiguration(classes = {BaseDatabaseVerificationIT.TestConfig.class})
public abstract class BaseDatabaseVerificationIT {
	private static final Logger ourLog = LoggerFactory.getLogger(BaseDatabaseVerificationIT.class);
	private static final String MIGRATION_TABLENAME = "MIGRATIONS";

	@Autowired
	EntityManagerFactory myEntityManagerFactory;

	@Autowired
	JpaEmbeddedDatabase myJpaEmbeddedDatabase;

	@Autowired
	IFhirResourceDao<Patient> myPatientDao;


	@ParameterizedTest
	@ValueSource(ints = {10, 100000})
	public void testCreateRead(int theSize) {
		String name = StringUtils.leftPad("", theSize, "a");

		Patient patient = new Patient();
		patient.setActive(true);
		patient.addName().setFamily(name);
		IIdType id = myPatientDao.create(patient, new SystemRequestDetails()).getId();

		Patient actual = myPatientDao.read(id, new SystemRequestDetails());
		assertEquals(name, actual.getName().get(0).getFamily());
	}


	@Test
	public void testDelete() {
		Patient patient = new Patient();
		patient.setActive(true);
		IIdType id = myPatientDao.create(patient, new SystemRequestDetails()).getId().toUnqualifiedVersionless();

		myPatientDao.delete(id, new SystemRequestDetails());

		assertThrows(ResourceGoneException.class, () -> myPatientDao.read(id, new SystemRequestDetails()));
	}


	@Configuration
	public static class TestConfig extends TestR5Config {

		@Autowired
		private JpaDatabaseContextConfigParamObject myJpaDatabaseContextConfigParamObject;

		@Override
		@Bean
		public DataSource dataSource() {
			DataSource dataSource = myJpaDatabaseContextConfigParamObject.getJpaEmbeddedDatabase().getDataSource();

			HapiMigrationDao hapiMigrationDao = new HapiMigrationDao(dataSource, myJpaDatabaseContextConfigParamObject.getJpaEmbeddedDatabase().getDriverType(), MIGRATION_TABLENAME);
			HapiMigrationStorageSvc hapiMigrationStorageSvc = new HapiMigrationStorageSvc(hapiMigrationDao);

			MigrationTaskList tasks = new HapiFhirJpaMigrationTasks(Set.of()).getAllTasks(VersionEnum.values());

			SchemaMigrator schemaMigrator = new SchemaMigrator(
				"HAPI FHIR", MIGRATION_TABLENAME, dataSource, new Properties(), tasks, hapiMigrationStorageSvc);
			schemaMigrator.setDriverType(myJpaDatabaseContextConfigParamObject.getJpaEmbeddedDatabase().getDriverType());

			ourLog.info("About to run migration...");
			schemaMigrator.createMigrationTableIfRequired();
			schemaMigrator.migrate();
			ourLog.info("Migration complete");


			return dataSource;
		}

		@Bean
		public JpaEmbeddedDatabase jpaEmbeddedDatabase(JpaDatabaseContextConfigParamObject theJpaDatabaseContextConfigParamObject) {
			return theJpaDatabaseContextConfigParamObject.getJpaEmbeddedDatabase();
		}

		@Override
		protected Properties jpaProperties() {
			Properties retVal = super.jpaProperties();
			retVal.put("hibernate.hbm2ddl.auto", "none");
			retVal.put("hibernate.dialect", myJpaDatabaseContextConfigParamObject.getDialect());
			return retVal;
		}

	}

	public static class JpaDatabaseContextConfigParamObject {
		private JpaEmbeddedDatabase myJpaEmbeddedDatabase;
		private String myDialect;

		public JpaDatabaseContextConfigParamObject(JpaEmbeddedDatabase theJpaEmbeddedDatabase, String theDialect) {
			myJpaEmbeddedDatabase = theJpaEmbeddedDatabase;
			myDialect = theDialect;
		}

		public JpaEmbeddedDatabase getJpaEmbeddedDatabase() {
			return myJpaEmbeddedDatabase;
		}

		public String getDialect() {
			return myDialect;
		}
	}


}
@@ -0,0 +1,27 @@
package ca.uhn.fhir.jpa.dao.r5.database;

import ca.uhn.fhir.jpa.embedded.MsSqlEmbeddedDatabase;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirSQLServerDialect;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;

@ContextConfiguration(classes = {
	DatabaseVerificationWithMsSqlIT.TestConfig.class
})
public class DatabaseVerificationWithMsSqlIT extends BaseDatabaseVerificationIT {

	@Configuration
	public static class TestConfig {
		@Bean
		public JpaDatabaseContextConfigParamObject jpaDatabaseParamObject() {
			return new JpaDatabaseContextConfigParamObject(
				new MsSqlEmbeddedDatabase(),
				HapiFhirSQLServerDialect.class.getName()
			);
		}
	}


}
@@ -0,0 +1,26 @@
package ca.uhn.fhir.jpa.dao.r5.database;

import ca.uhn.fhir.jpa.embedded.OracleEmbeddedDatabase;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirOracleDialect;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;

@ContextConfiguration(classes = {
	DatabaseVerificationWithOracleIT.TestConfig.class
})
public class DatabaseVerificationWithOracleIT extends BaseDatabaseVerificationIT {

	@Configuration
	public static class TestConfig {
		@Bean
		public JpaDatabaseContextConfigParamObject jpaDatabaseParamObject(){
			return new JpaDatabaseContextConfigParamObject(
				new OracleEmbeddedDatabase(),
				HapiFhirOracleDialect.class.getName()
			);
		}
	}


}
@@ -0,0 +1,26 @@
package ca.uhn.fhir.jpa.dao.r5.database;

import ca.uhn.fhir.jpa.embedded.PostgresEmbeddedDatabase;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;

@ContextConfiguration(classes = {
	DatabaseVerificationWithPostgresIT.TestConfig.class
})
public class DatabaseVerificationWithPostgresIT extends BaseDatabaseVerificationIT {

	@Configuration
	public static class TestConfig {
		@Bean
		public JpaDatabaseContextConfigParamObject jpaDatabaseParamObject() {
			return new JpaDatabaseContextConfigParamObject(
				new PostgresEmbeddedDatabase(),
				HapiFhirPostgresDialect.class.getName()
			);
		}
	}


}
@@ -131,20 +131,14 @@
		<dependency>
			<groupId>org.testcontainers</groupId>
			<artifactId>postgresql</artifactId>
			<version>1.17.6</version>
			<scope>compile</scope>
		</dependency>
		<dependency>
			<groupId>org.testcontainers</groupId>
			<artifactId>mssqlserver</artifactId>
			<version>1.17.6</version>
			<scope>compile</scope>
		</dependency>
		<dependency>
			<groupId>org.testcontainers</groupId>
			<artifactId>oracle-xe</artifactId>
			<version>1.17.6</version>
			<scope>compile</scope>
		</dependency>
		<dependency>
			<groupId>org.postgresql</groupId>
@@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.embedded;

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import jakarta.annotation.PreDestroy;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -53,6 +54,7 @@ public abstract class JpaEmbeddedDatabase {
	private JdbcTemplate myJdbcTemplate;
	private Connection myConnection;

	@PreDestroy
	public abstract void stop();

	public abstract void disableConstraints();

@@ -116,7 +118,7 @@ public abstract class JpaEmbeddedDatabase {
		for (String sql : theStatements) {
			if (!StringUtils.isBlank(sql)) {
				statement.addBatch(sql);
				ourLog.info("Added to batch: {}", sql);
				ourLog.debug("Added to batch: {}", sql);
			}
		}
		statement.executeBatch();
@@ -40,6 +40,7 @@ import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor;
import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
import ca.uhn.fhir.jpa.dao.GZipUtil;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;

@@ -82,6 +83,7 @@ import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.packages.IPackageInstallerSvc;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
import ca.uhn.fhir.jpa.provider.JpaSystemProvider;

@@ -663,6 +665,14 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
		return myTxManager;
	}

	protected void relocateResourceTextToCompressedColumn(Long theResourcePid, Long theVersion) {
		runInTransaction(()->{
			ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theResourcePid, theVersion);
			byte[] contents = GZipUtil.compress(historyEntity.getResourceTextVc());
			myResourceHistoryTableDao.updateNonInlinedContents(contents, historyEntity.getId());
		});
	}

	protected ValidationResult validateWithResult(IBaseResource theResource) {
		FhirValidator validatorModule = myFhirContext.newValidator();
		FhirInstanceValidator instanceValidator = new FhirInstanceValidator(myValidationSupport);
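Tests that need legacy-style data can simulate it with the helper above; the reindex tests in this change use it exactly like this (shown here for reference):

	// Push every existing history row's text into the legacy compressed RES_TEXT column
	List<ResourceHistoryTable> allHistoryEntities = runInTransaction(() -> myResourceHistoryTableDao.findAll());
	allHistoryEntities.forEach(t -> relocateResourceTextToCompressedColumn(t.getResourceId(), t.getVersion()));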
@@ -186,7 +186,7 @@ public class TestR5Config {
		return retVal;
	}

	private Properties jpaProperties() {
	protected Properties jpaProperties() {
		Properties extraProperties = new Properties();
		extraProperties.put("hibernate.format_sql", "false");
		extraProperties.put("hibernate.show_sql", "false");
@@ -0,0 +1,66 @@
/*-
 * #%L
 * HAPI FHIR Server - SQL Migration
 * %%
 * Copyright (C) 2014 - 2023 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.migrate;

import ca.uhn.fhir.i18n.Msg;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;

import java.util.List;
import java.util.Optional;

/**
 * Utility methods to be used by migrator functionality that needs to invoke JDBC directly.
 */
public class MigrationJdbcUtils {
	private static final Logger ourLog = LoggerFactory.getLogger(MigrationJdbcUtils.class);

	public static boolean queryForSingleBooleanResultMultipleThrowsException(
			String theSql, JdbcTemplate theJdbcTemplate) {
		final RowMapper<Boolean> booleanRowMapper = (theResultSet, theRowNumber) -> theResultSet.getBoolean(1);
		return queryForSingle(theSql, theJdbcTemplate, booleanRowMapper).orElse(false);
	}

	private static <T> Optional<T> queryForSingle(
			String theSql, JdbcTemplate theJdbcTemplate, RowMapper<T> theRowMapper) {
		final List<T> results = queryForMultiple(theSql, theJdbcTemplate, theRowMapper);

		if (results.isEmpty()) {
			return Optional.empty();
		}

		if (results.size() > 1) {
			// Presumably other callers may want different behaviour but in this case more than one result should be
			// considered a hard failure distinct from an empty result, which is one expected outcome.
			throw new IllegalArgumentException(Msg.code(2474)
					+ String.format(
							"Failure due to query returning more than one result: %s for SQL: [%s].", results, theSql));
		}

		return Optional.ofNullable(results.get(0));
	}

	private static <T> List<T> queryForMultiple(
			String theSql, JdbcTemplate theJdbcTemplate, RowMapper<T> theRowMapper) {
		return theJdbcTemplate.query(theSql, theRowMapper);
	}
}
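A brief usage sketch for the helper above (the SQL text and the way the JdbcTemplate is obtained are illustrative assumptions, not part of this diff): a migration task can turn a single-row SELECT into a go/no-go decision, for example a PostgreSQL collation probe of the kind this release uses to decide whether extra indexes are needed.

	// Hypothetical caller: evaluates to true only when the database default collation is 'C'
	JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
	boolean collationIsC = MigrationJdbcUtils.queryForSingleBooleanResultMultipleThrowsException(
			"SELECT datcollate = 'C' FROM pg_database WHERE datname = current_database()", jdbcTemplate);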
@@ -250,6 +250,8 @@ public abstract class BaseTask {
		return getConnectionProperties().newJdbcTemplate();
	}

	private final List<ExecuteTaskPrecondition> myPreconditions = new ArrayList<>();

	public void execute() throws SQLException {
		if (myDoNothing) {
			ourLog.info("Skipping stubbed task: {}", getDescription());

@@ -257,7 +259,17 @@
		}
		if (!myOnlyAppliesToPlatforms.isEmpty()) {
			if (!myOnlyAppliesToPlatforms.contains(getDriverType())) {
				ourLog.debug("Skipping task {} as it does not apply to {}", getDescription(), getDriverType());
				ourLog.info("Skipping task {} as it does not apply to {}", getDescription(), getDriverType());
				return;
			}
		}

		for (ExecuteTaskPrecondition precondition : myPreconditions) {
			ourLog.debug("precondition to evaluate: {}", precondition);
			if (!precondition.getPreconditionRunner().get()) {
				ourLog.info(
						"Skipping task since one of the preconditions was not met: {}",
						precondition.getPreconditionReason());
				return;
			}
		}

@@ -305,6 +317,10 @@
		return this;
	}

	public void addPrecondition(ExecuteTaskPrecondition thePrecondition) {
		myPreconditions.add(thePrecondition);
	}

	@Override
	public final int hashCode() {
		HashCodeBuilder builder = new HashCodeBuilder();
@@ -34,7 +34,7 @@ public enum ColumnTypeEnum {

	/**
	 * Unlimited length text, with a column definition containing the annotation:
	 * <code>@JdbcTypeCode(SqlTypes.LONG32VARCHAR)</code>
	 * <code>@Column(length=Integer.MAX_VALUE)</code>
	 */
	TEXT,
	BIG_DECIMAL;

@@ -62,7 +62,7 @@ public final class ColumnTypeToDriverTypeToSqlType {
		setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.MYSQL_5_7, "double precision");
		setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.MSSQL_2012, "double precision");
		setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.ORACLE_12C, "double precision");
		setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.POSTGRES_9_4, "float8");
		setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.POSTGRES_9_4, "double precision");

		setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.H2_EMBEDDED, "bigint");
		setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.DERBY_EMBEDDED, "bigint");

@@ -123,7 +123,7 @@ public final class ColumnTypeToDriverTypeToSqlType {
				"oid"); // the PG driver will write oid into a `text` column
		setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.MSSQL_2012, "varchar(MAX)");

		setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.H2_EMBEDDED, "character large object");
		setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.H2_EMBEDDED, "clob");
		setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.DERBY_EMBEDDED, "clob");
		setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.MARIADB_10_1, "longtext");
		setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.MYSQL_5_7, "longtext");
@@ -0,0 +1,74 @@
/*-
 * #%L
 * HAPI FHIR Server - SQL Migration
 * %%
 * Copyright (C) 2014 - 2023 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.migrate.taskdef;

import java.util.Objects;
import java.util.StringJoiner;
import java.util.function.Supplier;

/**
 * Contains a pre-built precondition to evaluate once {@link BaseTask#execute()} is called.
 * <p/>
 * Includes both a {@link Supplier} containing the logic to determine if the precondition evaluates to true or false and
 * a reason String to output to the logs if the precondition evaluates to false and halts execution of the task.
 */
public class ExecuteTaskPrecondition {
	private final Supplier<Boolean> myPreconditionRunner;
	private final String myPreconditionReason;

	public ExecuteTaskPrecondition(Supplier<Boolean> thePreconditionRunner, String thePreconditionReason) {
		myPreconditionRunner = thePreconditionRunner;
		myPreconditionReason = thePreconditionReason;
	}

	public Supplier<Boolean> getPreconditionRunner() {
		return myPreconditionRunner;
	}

	public String getPreconditionReason() {
		return myPreconditionReason;
	}

	@Override
	public boolean equals(Object theO) {
		if (this == theO) {
			return true;
		}
		if (theO == null || getClass() != theO.getClass()) {
			return false;
		}
		ExecuteTaskPrecondition that = (ExecuteTaskPrecondition) theO;
		return Objects.equals(myPreconditionRunner, that.myPreconditionRunner)
				&& Objects.equals(myPreconditionReason, that.myPreconditionReason);
	}

	@Override
	public int hashCode() {
		return Objects.hash(myPreconditionRunner, myPreconditionReason);
	}

	@Override
	public String toString() {
		return new StringJoiner(", ", ExecuteTaskPrecondition.class.getSimpleName() + "[", "]")
				.add("myPreconditionRunner=" + myPreconditionRunner)
				.add("myPreconditionReason='" + myPreconditionReason + "'")
				.toString();
	}
}
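A minimal wiring sketch for the class above (an assumed caller, not part of this diff): a precondition can be attached directly to any BaseTask via addPrecondition, although the Builder.onlyIf(...) method added in the next file is the intended entry point.

	// Hypothetical: skip the task unless the supplier returns true
	theTask.addPrecondition(new ExecuteTaskPrecondition(
			() -> MigrationJdbcUtils.queryForSingleBooleanResultMultipleThrowsException(theSql, theTask.newJdbcTemplate()),
			"Skipping task because the precondition SQL evaluated to false"));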
@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.migrate.tasks.api;

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.MigrationJdbcUtils;
import ca.uhn.fhir.jpa.migrate.taskdef.AddColumnTask;
import ca.uhn.fhir.jpa.migrate.taskdef.AddForeignKeyTask;
import ca.uhn.fhir.jpa.migrate.taskdef.AddIdGeneratorTask;

@@ -36,6 +37,7 @@ import ca.uhn.fhir.jpa.migrate.taskdef.DropIdGeneratorTask;
import ca.uhn.fhir.jpa.migrate.taskdef.DropIndexTask;
import ca.uhn.fhir.jpa.migrate.taskdef.DropTableTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ExecuteRawSqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ExecuteTaskPrecondition;
import ca.uhn.fhir.jpa.migrate.taskdef.InitializeSchemaTask;
import ca.uhn.fhir.jpa.migrate.taskdef.MigratePostgresTextClobToBinaryClobTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ModifyColumnTask;

@@ -44,6 +46,8 @@ import ca.uhn.fhir.jpa.migrate.taskdef.RenameColumnTask;
import ca.uhn.fhir.jpa.migrate.taskdef.RenameIndexTask;
import org.apache.commons.lang3.Validate;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.Collections;

@@ -54,6 +58,7 @@ import java.util.Set;
import java.util.stream.Collectors;

public class Builder {
	private static final Logger ourLog = LoggerFactory.getLogger(Builder.class);

	private final String myRelease;
	private final BaseMigrationTasks.IAcceptsTasks mySink;

@@ -571,6 +576,40 @@ public class Builder {
			return this;
		}

		/**
		 * Introduce precondition checking logic into the execution of the enclosed task. This conditional logic will
		 * be implemented by running an SQL SELECT (including CTEs) to obtain a boolean indicating whether a certain
		 * condition has been met.
		 * One example is to check for a specific collation on a column to decide whether to create a new index.
		 * <p/>
		 * This method may be called multiple times to add multiple preconditions. Any precondition that evaluates to
		 * false will stop execution of the task, irrespective of any or all other preconditions evaluating to true.
		 *
		 * @param theSql The SELECT or CTE used to determine if the precondition is valid.
		 * @param reason A String to indicate the text that is logged if the precondition is not met.
		 * @return The BuilderCompleteTask in order to chain further method calls on this builder.
		 */
		public BuilderCompleteTask onlyIf(@Language("SQL") String theSql, String reason) {
			if (!theSql.toUpperCase().startsWith("WITH")
					&& !theSql.toUpperCase().startsWith("SELECT")) {
				throw new IllegalArgumentException(Msg.code(2455)
						+ String.format(
								"Only SELECT statements (including CTEs) are allowed here. Please check your SQL: [%s]",
								theSql));
			}
			ourLog.info("SQL to evaluate: {}", theSql);

			myTask.addPrecondition(new ExecuteTaskPrecondition(
					() -> {
						ourLog.info("Checking precondition for SQL: {}", theSql);
						return MigrationJdbcUtils.queryForSingleBooleanResultMultipleThrowsException(
								theSql, myTask.newJdbcTemplate());
					},
					reason));

			return this;
		}

		public BuilderCompleteTask runEvenDuringSchemaInitialization() {
			myTask.setRunDuringSchemaInitialization(true);
			return this;
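A usage sketch modelled on the migration unit tests elsewhere in this change (the version, task ID, and SQL strings are illustrative only):

	BaseMigrationTasks<VersionEnum> tasks = new BaseMigrationTasks<>();
	tasks.forVersion(VersionEnum.V4_0_0)
			.executeRawSql("2024.02", "INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (123, 'abc')")
			.onlyIf("SELECT TRUE", "Skipping the insert because the precondition evaluated to false");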
@ -7,8 +7,10 @@ import ca.uhn.fhir.jpa.migrate.JdbcUtils;
|
|||
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
|
||||
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
|
||||
import org.apache.commons.dbcp2.BasicDataSource;
|
||||
import org.h2.Driver;
|
||||
import org.intellij.lang.annotations.Language;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.params.provider.Arguments;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.jdbc.core.ColumnMapRowMapper;
|
||||
|
@ -25,8 +27,53 @@ import java.util.stream.Stream;
|
|||
public abstract class BaseTest {
|
||||
|
||||
private static final String DATABASE_NAME = "DATABASE";
|
||||
static final String H2 = "H2";
|
||||
static final String DERBY = "Derby";
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(BaseTest.class);
|
||||
private static int ourDatabaseUrl = 0;
|
||||
private static final Supplier<TestDatabaseDetails> TEST_DATABASE_DETAILS_DERBY_SUPPLIER = new Supplier<>() {
|
||||
@Override
|
||||
public TestDatabaseDetails get() {
|
||||
ourLog.info("Derby: {}", DriverTypeEnum.DERBY_EMBEDDED.getDriverClassName());
|
||||
|
||||
String url = "jdbc:derby:memory:" + DATABASE_NAME + ourDatabaseUrl++ + ";create=true";
|
||||
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "SA", "SA");
|
||||
BasicDataSource dataSource = new BasicDataSource();
|
||||
dataSource.setUrl(url);
|
||||
dataSource.setUsername("SA");
|
||||
dataSource.setPassword("SA");
|
||||
dataSource.setDriverClassName(DriverTypeEnum.DERBY_EMBEDDED.getDriverClassName());
|
||||
HapiMigrator migrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.DERBY_EMBEDDED);
|
||||
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return DERBY;
|
||||
}
|
||||
};
|
||||
|
||||
private static final Supplier<TestDatabaseDetails> TEST_DATABASE_DETAILS_H2_SUPPLIER = new Supplier<>() {
|
||||
@Override
|
||||
public TestDatabaseDetails get() {
|
||||
ourLog.info("H2: {}", Driver.class);
|
||||
String url = "jdbc:h2:mem:" + DATABASE_NAME + ourDatabaseUrl++;
|
||||
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "SA", "SA");
|
||||
BasicDataSource dataSource = new BasicDataSource();
|
||||
dataSource.setUrl(url);
|
||||
dataSource.setUsername("SA");
|
||||
dataSource.setPassword("SA");
|
||||
dataSource.setDriverClassName(DriverTypeEnum.H2_EMBEDDED.getDriverClassName());
|
||||
HapiMigrator migrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.H2_EMBEDDED);
|
||||
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return H2;
|
||||
}
|
||||
};
|
||||
|
||||
private BasicDataSource myDataSource;
|
||||
private String myUrl;
|
||||
private HapiMigrator myMigrator;
|
||||
|
@ -34,54 +81,28 @@ public abstract class BaseTest {
|
|||
protected HapiMigrationDao myHapiMigrationDao;
|
||||
protected HapiMigrationStorageSvc myHapiMigrationStorageSvc;
|
||||
|
||||
public static Stream<Arguments> dataWithEvaluationResults() {
|
||||
return Stream.of(
|
||||
Arguments.of(TEST_DATABASE_DETAILS_H2_SUPPLIER, List.of(true, true), true),
|
||||
Arguments.of(TEST_DATABASE_DETAILS_H2_SUPPLIER, List.of(false, true), false),
|
||||
Arguments.of(TEST_DATABASE_DETAILS_H2_SUPPLIER, List.of(true, false), false),
|
||||
Arguments.of(TEST_DATABASE_DETAILS_H2_SUPPLIER, List.of(false, false), false),
|
||||
Arguments.of(TEST_DATABASE_DETAILS_DERBY_SUPPLIER, List.of(true, true), true),
|
||||
Arguments.of(TEST_DATABASE_DETAILS_DERBY_SUPPLIER, List.of(false, true), false),
|
||||
Arguments.of(TEST_DATABASE_DETAILS_DERBY_SUPPLIER, List.of(true, false), false),
|
||||
Arguments.of(TEST_DATABASE_DETAILS_DERBY_SUPPLIER, List.of(false, false), false)
|
||||
);
|
||||
}
|
||||
|
||||
public static Stream<Supplier<TestDatabaseDetails>> data() {
|
||||
|
||||
ArrayList<Supplier<TestDatabaseDetails>> retVal = new ArrayList<>();
|
||||
|
||||
// H2
|
||||
retVal.add(new Supplier<TestDatabaseDetails>() {
|
||||
@Override
|
||||
public TestDatabaseDetails get() {
|
||||
ourLog.info("H2: {}", org.h2.Driver.class.toString());
|
||||
String url = "jdbc:h2:mem:" + DATABASE_NAME + ourDatabaseUrl++;
|
||||
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "SA", "SA");
|
||||
BasicDataSource dataSource = new BasicDataSource();
|
||||
dataSource.setUrl(url);
|
||||
dataSource.setUsername("SA");
|
||||
dataSource.setPassword("SA");
|
||||
dataSource.setDriverClassName(DriverTypeEnum.H2_EMBEDDED.getDriverClassName());
|
||||
HapiMigrator migrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.H2_EMBEDDED);
|
||||
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "H2";
|
||||
}
|
||||
});
|
||||
retVal.add(TEST_DATABASE_DETAILS_H2_SUPPLIER);
|
||||
|
||||
// Derby
|
||||
retVal.add(new Supplier<TestDatabaseDetails>() {
|
||||
@Override
|
||||
public TestDatabaseDetails get() {
|
||||
ourLog.info("Derby: {}", DriverTypeEnum.DERBY_EMBEDDED.getDriverClassName());
|
||||
|
||||
String url = "jdbc:derby:memory:" + DATABASE_NAME + ourDatabaseUrl++ + ";create=true";
|
||||
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "SA", "SA");
|
||||
BasicDataSource dataSource = new BasicDataSource();
|
||||
dataSource.setUrl(url);
|
||||
dataSource.setUsername("SA");
|
||||
dataSource.setPassword("SA");
|
||||
dataSource.setDriverClassName(DriverTypeEnum.DERBY_EMBEDDED.getDriverClassName());
|
||||
HapiMigrator migrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.DERBY_EMBEDDED);
|
||||
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Derby";
|
||||
}
|
||||
});
|
||||
retVal.add(TEST_DATABASE_DETAILS_DERBY_SUPPLIER);
|
||||
|
||||
return retVal.stream();
|
||||
}
|
||||
|
|
|
@ -2,9 +2,12 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
|
||||
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
|
||||
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
|
||||
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
|
||||
import ca.uhn.fhir.util.VersionEnum;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.MethodSource;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
@ -12,9 +15,11 @@ import java.util.Map;
|
|||
import java.util.function.Supplier;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
|
||||
public class ExecuteRawSqlTaskTest extends BaseTest {
|
||||
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(ExecuteRawSqlTaskTest.class);
|
||||
|
||||
@ParameterizedTest(name = "{index}: {0}")
|
||||
@MethodSource("data")
|
||||
|
@ -135,4 +140,100 @@ public class ExecuteRawSqlTaskTest extends BaseTest {
|
|||
|
||||
assertEquals(0, output.size());
|
||||
}
|
||||
|
||||
@ParameterizedTest()
|
||||
@MethodSource("dataWithEvaluationResults")
|
||||
public void testExecuteRawSqlTaskWithPrecondition(Supplier<TestDatabaseDetails> theTestDatabaseDetails, List<Boolean> thePreconditionOutcomes, boolean theIsExecutionExpected) {
|
||||
before(theTestDatabaseDetails);
|
||||
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
|
||||
|
||||
final List<Map<String, Object>> outputPreMigrate = executeQuery("SELECT PID,TEXTCOL FROM SOMETABLE");
|
||||
|
||||
assertTrue(outputPreMigrate.isEmpty());
|
||||
|
||||
final String someFakeUpdateSql = "INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (123, 'abc')";
|
||||
final String someReason = "I dont feel like it!";
|
||||
|
||||
final BaseMigrationTasks<VersionEnum> tasks = new BaseMigrationTasks<>();
|
||||
|
||||
final Builder.BuilderCompleteTask builderCompleteTask = tasks.forVersion(VersionEnum.V4_0_0)
|
||||
.executeRawSql("2024.02", someFakeUpdateSql);
|
||||
|
||||
for (boolean preconditionOutcome: thePreconditionOutcomes) {
|
||||
final String someFakeSelectSql =
|
||||
String.format("SELECT %s %s", preconditionOutcome,
|
||||
(BaseTest.DERBY.equals(theTestDatabaseDetails.toString())) ? "FROM SYSIBM.SYSDUMMY1" : "");
|
||||
builderCompleteTask.onlyIf(someFakeSelectSql, someReason);
|
||||
}
|
||||
|
||||
getMigrator().addTasks(tasks.getTaskList(VersionEnum.V0_1, VersionEnum.V4_0_0));
|
||||
getMigrator().migrate();
|
||||
|
||||
final List<Map<String, Object>> outputPostMigrate = executeQuery("SELECT PID,TEXTCOL FROM SOMETABLE");
|
||||
|
||||
if (theIsExecutionExpected) {
|
||||
assertEquals(1, outputPostMigrate.size());
|
||||
assertEquals(123L, outputPostMigrate.get(0).get("PID"));
|
||||
assertEquals("abc", outputPostMigrate.get(0).get("TEXTCOL"));
|
||||
} else {
|
||||
assertTrue(outputPreMigrate.isEmpty());
|
||||
}
|
||||
}
|
||||
|
||||
@ParameterizedTest()
|
||||
@MethodSource("data")
|
||||
public void testExecuteRawSqlTaskWithPreconditionInvalidPreconditionSql(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
|
||||
before(theTestDatabaseDetails);
|
||||
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
|
||||
|
||||
final List<Map<String, Object>> outputPreMigrate = executeQuery("SELECT PID,TEXTCOL FROM SOMETABLE");
|
||||
|
||||
assertTrue(outputPreMigrate.isEmpty());
|
||||
|
||||
final String someFakeUpdateSql = "INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (123, 'abc')";
|
||||
final String someFakeSelectSql = "UPDATE SOMETABLE SET PID = 1";
|
||||
final String someReason = "I dont feel like it!";
|
||||
|
||||
try {
|
||||
final BaseMigrationTasks<VersionEnum> tasks = new BaseMigrationTasks<>();
|
||||
tasks.forVersion(VersionEnum.V4_0_0)
|
||||
.executeRawSql("2024.02", someFakeUpdateSql)
|
||||
.onlyIf(someFakeSelectSql, someReason);
|
||||
|
||||
fail();
|
||||
} catch (IllegalArgumentException exception) {
|
||||
assertEquals("HAPI-2455: Only SELECT statements (including CTEs) are allowed here. Please check your SQL: [UPDATE SOMETABLE SET PID = 1]", exception.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@ParameterizedTest()
|
||||
@MethodSource("data")
|
||||
public void testExecuteRawSqlTaskWithPreconditionPreconditionSqlReturnsMultiple(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
|
||||
before(theTestDatabaseDetails);
|
||||
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
|
||||
executeSql("INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (123, 'abc')");
|
||||
executeSql("INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (456, 'def')");
|
||||
|
||||
final List<Map<String, Object>> outputPreMigrate = executeQuery("SELECT PID,TEXTCOL FROM SOMETABLE");
|
||||
|
||||
assertEquals(2, outputPreMigrate.size());
|
||||
|
||||
final String someFakeUpdateSql = "INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (789, 'xyz')";
|
||||
final String someFakeSelectSql = "SELECT PID != 0 FROM SOMETABLE";
|
||||
final String someReason = "I dont feel like it!";
|
||||
|
||||
final BaseMigrationTasks<VersionEnum> tasks = new BaseMigrationTasks<>();
|
||||
|
||||
final Builder.BuilderCompleteTask builderCompleteTask = tasks.forVersion(VersionEnum.V4_0_0)
|
||||
.executeRawSql("2024.02", someFakeUpdateSql);
|
||||
builderCompleteTask.onlyIf(someFakeSelectSql, someReason);
|
||||
|
||||
getMigrator().addTasks(tasks.getTaskList(VersionEnum.V0_1, VersionEnum.V4_0_0));
|
||||
try {
|
||||
getMigrator().migrate();
|
||||
fail();
|
||||
} catch (IllegalArgumentException exception) {
|
||||
assertEquals("HAPI-2474: Failure due to query returning more than one result: [true, true] for SQL: [SELECT PID != 0 FROM SOMETABLE].", exception.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -267,14 +267,6 @@ public class JpaStorageSettings extends StorageSettings {
	 * @since 5.6.0
	 */
	private boolean myAdvancedHSearchIndexing = false;
	/**
	 * If set to a positive number, any resources with a character length at or below the given number
	 * of characters will be stored inline in the <code>HFJ_RES_VER</code> table instead of using a
	 * separate LOB column.
	 *
	 * @since 5.7.0
	 */
	private int myInlineResourceTextBelowSize = 0;

	/**
	 * @since 5.7.0

@@ -381,25 +373,21 @@
	}

	/**
	 * If set to a positive number, any resources with a character length at or below the given number
	 * of characters will be stored inline in the <code>HFJ_RES_VER</code> table instead of using a
	 * separate LOB column.
	 *
	 * @since 5.7.0
	 * @deprecated This setting no longer does anything as of HAPI FHIR 7.0.0
	 */
	@Deprecated
	public int getInlineResourceTextBelowSize() {
		return myInlineResourceTextBelowSize;
		return 0;
	}

	/**
	 * If set to a positive number, any resources with a character length at or below the given number
	 * of characters will be stored inline in the <code>HFJ_RES_VER</code> table instead of using a
	 * separate LOB column.
	 *
	 * @since 5.7.0
	 * @deprecated This setting no longer does anything as of HAPI FHIR 7.0.0
	 */
	@Deprecated
	public void setInlineResourceTextBelowSize(int theInlineResourceTextBelowSize) {
		myInlineResourceTextBelowSize = theInlineResourceTextBelowSize;
		// ignored
	}

	/**
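In practice the deprecated accessors now behave as follows (an illustration only, matching the code above):

	JpaStorageSettings settings = new JpaStorageSettings();
	settings.setInlineResourceTextBelowSize(4000); // deprecated: the value is ignored as of HAPI FHIR 7.0.0
	int threshold = settings.getInlineResourceTextBelowSize(); // deprecated: always returns 0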
@@ -361,9 +361,6 @@ public class JpaModelScannerAndVerifier {
			if (!theIsView && column.length() == 255) {
				throw new IllegalStateException(Msg.code(1626) + "Field does not have an explicit maximum length specified: " + field);
			}
			if (column.length() > MAX_COL_LENGTH) {
				throw new IllegalStateException(Msg.code(1627) + "Field is too long: " + field);
			}
		}

		Size size = theAnnotatedElement.getAnnotation(Size.class);
|
|
|
@ -184,14 +184,6 @@
        <dependency>
            <groupId>org.apache.maven</groupId>
            <artifactId>maven-plugin-api</artifactId>
            <!--
            <exclusions>
                <exclusion>
                    <groupId>org.eclipse.sisu</groupId>
                    <artifactId>org.eclipse.sisu.plexus</artifactId>
                </exclusion>
            </exclusions>
            -->
        </dependency>
        <dependency>
            <groupId>org.apache.maven.plugin-tools</groupId>
@ -1,6 +1,7 @@
package ca.uhn.fhir.tinder.ddl;

import ca.uhn.fhir.jpa.util.ISequenceValueMassager;
import ca.uhn.fhir.util.IoUtil;
import jakarta.annotation.Nonnull;
import jakarta.persistence.Entity;
import jakarta.persistence.MappedSuperclass;
@ -29,6 +30,7 @@ import org.springframework.core.io.ResourceLoader;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.type.filter.AnnotationTypeFilter;

import java.io.Closeable;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
@ -125,19 +127,8 @@ public class DdlGeneratorHibernate61 {

            writeContentsToFile(nextDialect.getAppendFile(), classLoader, outputFile);
        }
    }

    private static void writeContentsToFile(String prependFile, ClassLoader classLoader, File outputFile)
            throws MojoFailureException {
        if (isNotBlank(prependFile)) {
            ResourceLoader loader = new DefaultResourceLoader(classLoader);
            Resource resource = loader.getResource(prependFile);
            try (Writer w = new FileWriter(outputFile, true)) {
                w.append(resource.getContentAsString(StandardCharsets.UTF_8));
            } catch (IOException e) {
                throw new MojoFailureException("Failed to write to file " + outputFile + ": " + e.getMessage(), e);
            }
        }
        IoUtil.closeQuietly(connectionProvider);
    }

    public void setProject(MavenProject theProject) {
@ -204,18 +195,64 @@ public class DdlGeneratorHibernate61 {
     * here. The schema export doesn't actually touch this DB, so it doesn't
     * matter that it doesn't correlate to the specified dialect.
     */
    private static class FakeConnectionConnectionProvider extends UserSuppliedConnectionProviderImpl {
    private static class FakeConnectionConnectionProvider extends UserSuppliedConnectionProviderImpl
            implements Closeable {
        private static final long serialVersionUID = 4147495169899817244L;
        private Connection connection;

        @Override
        public Connection getConnection() throws SQLException {
            ourLog.trace("Using internal driver: {}", org.h2.Driver.class);
            return DriverManager.getConnection("jdbc:h2:mem:tmp", "sa", "sa");
        public FakeConnectionConnectionProvider() {
            try {
                connection = DriverManager.getConnection("jdbc:h2:mem:tmp", "sa", "sa");
            } catch (SQLException e) {
                connection = null;
                return;
            }

            /*
             * The Oracle Dialect tries to query for any existing sequences, so we need to supply
             * a fake empty table to answer that query.
             */
            try {
                connection.setAutoCommit(true);
                connection
                        .prepareStatement("create table all_sequences (PID bigint not null, primary key (PID))")
                        .execute();
            } catch (SQLException e) {
                ourLog.error("Failed to create sequences table", e);
            }
        }

        @Override
        public void closeConnection(Connection conn) throws SQLException {
            conn.close();
        public Connection getConnection() {
            ourLog.trace("Using internal driver: {}", org.h2.Driver.class);
            return connection;
        }

        @Override
        public void closeConnection(Connection conn) {
            // ignore
        }

        @Override
        public void close() throws IOException {
            try {
                connection.close();
            } catch (SQLException e) {
                throw new IOException(e);
            }
        }
    }

    private static void writeContentsToFile(String prependFile, ClassLoader classLoader, File outputFile)
            throws MojoFailureException {
        if (isNotBlank(prependFile)) {
            ResourceLoader loader = new DefaultResourceLoader(classLoader);
            Resource resource = loader.getResource(prependFile);
            try (Writer w = new FileWriter(outputFile, true)) {
                w.append(resource.getContentAsString(StandardCharsets.UTF_8));
            } catch (IOException e) {
                throw new MojoFailureException("Failed to write to file " + outputFile + ": " + e.getMessage(), e);
            }
        }
    }
}
@ -28,13 +28,13 @@ public class GenerateDdlMojo extends AbstractMojo {
    private static final Logger ourLog = LoggerFactory.getLogger(GenerateDdlMojo.class);

    @Parameter
    private List<String> packageNames;
    List<String> packageNames;

    @Parameter
    private List<Dialect> dialects;
    List<Dialect> dialects;

    @Parameter
    private String outputDirectory;
    String outputDirectory;

    @Parameter(defaultValue = "${project}", readonly = true)
    private transient MavenProject project;
@ -70,18 +70,20 @@ public class GenerateDdlMojo extends AbstractMojo {

    public static void main(String[] args) throws MojoExecutionException, MojoFailureException {
        /*
         * Note, to execute this, add the following snippet to this module's POM. The whole project won't work with
         * Note, to execute this for real entities, add the following snippet to this module's POM. The whole project won't work with
         * that added, but you can add it temporarily in order to debug this in IJ:
         *    <dependency>
         *       <groupId>ca.uhn.hapi.fhir</groupId>
         *       <artifactId>hapi-fhir-jpaserver-model</artifactId>
         *       <version>${project.version}</version>
         *    </dependency>
         *
         * Alternately, there is a unit test with fake entities that also runs this class.
         */
        GenerateDdlMojo m = new GenerateDdlMojo();
        m.packageNames = List.of("ca.uhn.fhir.jpa.model.entity");
        m.outputDirectory = "hapi-tinder-plugin/target";
        m.dialects = List.of(new Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect", "h2.sql"));
        m.dialects = List.of(new Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect", "postgres.sql"));
        m.execute();
    }

@ -0,0 +1,46 @@
package ca.uhn.fhir.tinder.ddl;

import org.apache.commons.io.FileUtils;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.junit.jupiter.api.Test;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Locale;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;

class GenerateDdlMojoTest {

    @Test
    public void testGenerateSequences() throws MojoExecutionException, MojoFailureException, IOException {

        GenerateDdlMojo m = new GenerateDdlMojo();
        m.packageNames = List.of("ca.uhn.fhir.tinder.ddl.test");
        m.outputDirectory = "target/generate-ddl-plugin-test/";
        m.dialects = List.of(
                new GenerateDdlMojo.Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect", "h2.sql"),
                new GenerateDdlMojo.Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect", "postgres.sql"),
                new GenerateDdlMojo.Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirOracleDialect", "oracle.sql"),
                new GenerateDdlMojo.Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirSQLServerDialect", "sqlserver.sql")
        );
        m.execute();

        verifySequence("sqlserver.sql");
        verifySequence("oracle.sql");
        verifySequence("postgres.sql");
        verifySequence("h2.sql");

    }

    private static void verifySequence(String fileName) throws IOException {
        String contents = FileUtils.readFileToString(new File("target/generate-ddl-plugin-test/" + fileName), StandardCharsets.UTF_8).toUpperCase(Locale.ROOT);
        assertThat(fileName, contents, containsString("CREATE SEQUENCE"));
    }


}
@ -0,0 +1,25 @@
package ca.uhn.fhir.tinder.ddl.test;

import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;

@Table()
@Entity()
public class ExampleEntity {

    @Id
    @SequenceGenerator(name = "SEQ_RESOURCE_HISTORY_ID", sequenceName = "SEQ_RESOURCE_HISTORY_ID")
    @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_RESOURCE_HISTORY_ID")
    @Column(name = "PID")
    private Long myId;

    @Column(name = "RES_ID", nullable = false, updatable = false, insertable = false)
    private Long myResourceId;


}

pom.xml
@ -2188,7 +2188,21 @@
            <groupId>org.testcontainers</groupId>
            <artifactId>testcontainers</artifactId>
            <version>${testcontainers_version}</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.testcontainers</groupId>
            <artifactId>postgresql</artifactId>
            <version>${testcontainers_version}</version>
        </dependency>
        <dependency>
            <groupId>org.testcontainers</groupId>
            <artifactId>mssqlserver</artifactId>
            <version>${testcontainers_version}</version>
        </dependency>
        <dependency>
            <groupId>org.testcontainers</groupId>
            <artifactId>oracle-xe</artifactId>
            <version>${testcontainers_version}</version>
        </dependency>
        <dependency>
            <groupId>org.testcontainers</groupId>
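The postgresql, mssqlserver, and oracle-xe testcontainers modules added here support container-backed database tests. A minimal hypothetical sketch (not part of this commit) of starting a throwaway Postgres instance with the standard testcontainers API; the class name and image tag are arbitrary choices:

        // Hypothetical sketch using the added org.testcontainers:postgresql module.
        import org.testcontainers.containers.PostgreSQLContainer;

        class PostgresSmokeTest {
            void runAgainstThrowawayPostgres() {
                try (PostgreSQLContainer<?> postgres = new PostgreSQLContainer<>("postgres:15")) {
                    postgres.start();
                    String jdbcUrl = postgres.getJdbcUrl();     // JDBC coordinates of the container
                    String username = postgres.getUsername();
                    String password = postgres.getPassword();
                    // ...open a JDBC connection here and run the generated DDL against it...
                }
            }
        }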
@ -2343,7 +2357,7 @@
                    <maxmem>2000m</maxmem>
                    <compilerArgs>
                        <arg>-XDcompilePolicy=simple</arg>
                        <arg>-Xplugin:ErrorProne -Xep:MissingSummary:OFF</arg>
                        <arg>-Xplugin:ErrorProne -Xep:MissingSummary:OFF -XepExcludedPaths:.*/src/test/java/.*</arg>
                        <arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED</arg>
                        <arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED</arg>
                        <arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED</arg>