Fix JPA nullable primitive columns (#5926)

* Fix JPA nullable primitive columns

* Add changelog

* Spotless

* License headers

* Migration fix

* Spotless

* Cleanup

* Cleanup

* Add task skipping

* Update docs

* Cleanup

* Spotless

* Address review comments

* Test fix

* HAPI FHIR version bump
James Agnew 2024-06-18 13:05:29 -04:00 committed by GitHub
parent ec0021cd40
commit 47d6e357b6
111 changed files with 723 additions and 354 deletions

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -4,7 +4,7 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-fhir-bom</artifactId>
-  <version>7.3.7-SNAPSHOT</version>
+  <version>7.3.8-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>HAPI FHIR BOM</name>
@@ -12,7 +12,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

View File

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -26,8 +26,6 @@ import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.lang3.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;

 import java.util.Arrays;
 import java.util.List;
@@ -40,12 +38,12 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
  * NB since 2019-12-05: This class is kind of weirdly named now, since it can either use Flyway or not use Flyway
  */
 public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends BaseCommand {
-    private static final Logger ourLog = LoggerFactory.getLogger(BaseFlywayMigrateDatabaseCommand.class);

     public static final String MIGRATE_DATABASE = "migrate-database";
     public static final String NO_COLUMN_SHRINK = "no-column-shrink";
-    public static final String STRICT_ORDER = "strict-order";
     public static final String SKIP_VERSIONS = "skip-versions";
+    public static final String ENABLE_HEAVYWEIGHT_MIGRATIONS = "enable-heavyweight-migrations";

     private Set<String> myFlags;
     private String myMigrationTableName;
@@ -100,6 +98,12 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
                 SKIP_VERSIONS,
                 "Versions",
                 "A comma separated list of schema versions to skip. E.g. 4_1_0.20191214.2,4_1_0.20191214.4");
+        addOptionalOption(
+                retVal,
+                null,
+                ENABLE_HEAVYWEIGHT_MIGRATIONS,
+                false,
+                "If this flag is set, additional migration tasks will be executed that are considered unnecessary to execute on a database with a significant amount of data loaded. This option is not generally necessary.");

         return retVal;
     }
@@ -125,6 +129,7 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
         boolean dryRun = theCommandLine.hasOption("r");
         boolean noColumnShrink = theCommandLine.hasOption(BaseFlywayMigrateDatabaseCommand.NO_COLUMN_SHRINK);
+        boolean runHeavyweight = theCommandLine.hasOption(ENABLE_HEAVYWEIGHT_MIGRATIONS);

         String flags = theCommandLine.getOptionValue("x");
         myFlags = Arrays.stream(defaultString(flags).split(","))
@@ -139,6 +144,7 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
         migrator.createMigrationTableIfRequired();
         migrator.setDryRun(dryRun);
+        migrator.setRunHeavyweightSkippableTasks(runHeavyweight);
         migrator.setNoColumnShrink(noColumnShrink);
         String skipVersions = theCommandLine.getOptionValue(BaseFlywayMigrateDatabaseCommand.SKIP_VERSIONS);
         addTasks(migrator, skipVersions);
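
For reference, the new option is opt-in: heavyweight tasks only run when the flag is passed explicitly on the command line. A hypothetical invocation might look like the following sketch (only --enable-heavyweight-migrations comes from this diff; the connection arguments are illustrative):

    hapi-fhir-cli migrate-database -d H2_EMBEDDED -u "jdbc:h2:file:./target/database/h2" -n sa -p sa --enable-heavyweight-migrations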

View File

@@ -6,7 +6,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir-cli</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

View File

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 5926
+title: "A number of columns in the JPA schema use primitive types (and therefore can never have a null value) but aren't marked as non-null. These columns are now explicitly marked as non-nullable."

View File

@@ -0,0 +1,7 @@
+---
+type: fix
+issue: 5926
+title: "A regression in HAPI FHIR 6.4.0 meant that the JPA server schema migrator ran all tasks
+  even when the database was initially empty and the schema was being initialized by script.
+  This did not produce any incorrect results, but did impact the amount of time taken to initialize
+  an empty database. This has been corrected."

View File

@@ -11,7 +11,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -119,13 +119,13 @@ public class Batch2JobInstanceEntity implements Serializable {
     @Column(name = "WORK_CHUNKS_PURGED", nullable = false)
     private boolean myWorkChunksPurged;

-    @Column(name = "PROGRESS_PCT")
+    @Column(name = "PROGRESS_PCT", nullable = false)
     private double myProgress;

     @Column(name = "ERROR_MSG", length = ERROR_MSG_MAX_LENGTH, nullable = true)
     private String myErrorMessage;

-    @Column(name = "ERROR_COUNT")
+    @Column(name = "ERROR_COUNT", nullable = false)
     private int myErrorCount;

     @Column(name = "EST_REMAINING", length = TIME_REMAINING_LENGTH, nullable = true)

View File

@@ -138,6 +138,10 @@ public class Batch2WorkChunkEntity implements Serializable {
     /**
      * The number of times the work chunk has had its state set back to POLL_WAITING.
+     * <p>
+     * TODO: Note that this column was added in 7.2.0, so it is nullable in order to
+     * account for existing rows that were added before the column was added. In
+     * the future we should make this non-null.
      */
     @Column(name = "POLL_ATTEMPTS", nullable = true)
     private Integer myPollAttempts;
@@ -145,7 +149,9 @@
     /**
      * Default constructor for Hibernate.
      */
-    public Batch2WorkChunkEntity() {}
+    public Batch2WorkChunkEntity() {
+        myPollAttempts = 0;
+    }

     /**
      * Projection constructor for no-data path.
@@ -184,7 +190,7 @@
         myRecordsProcessed = theRecordsProcessed;
         myWarningMessage = theWarningMessage;
         myNextPollTime = theNextPollTime;
-        myPollAttempts = thePollAttempts;
+        myPollAttempts = thePollAttempts != null ? thePollAttempts : 0;
     }

     public static Batch2WorkChunkEntity fromWorkChunk(WorkChunk theWorkChunk) {

View File

@@ -58,7 +58,7 @@ public class HapiFhirEnversRevision implements Serializable {
     @SequenceGenerator(name = "SEQ_HFJ_REVINFO", sequenceName = "SEQ_HFJ_REVINFO")
     @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_HFJ_REVINFO")
     @RevisionNumber
-    @Column(name = "REV")
+    @Column(name = "REV", nullable = false)
     private long myRev;

     @RevisionTimestamp

View File

@@ -117,11 +117,12 @@ public class TermConcept implements Serializable {
     @ManyToOne(fetch = FetchType.LAZY)
     @JoinColumn(
             name = "CODESYSTEM_PID",
+            nullable = false,
             referencedColumnName = "PID",
             foreignKey = @ForeignKey(name = "FK_CONCEPT_PID_CS_PID"))
     private TermCodeSystemVersion myCodeSystem;

-    @Column(name = "CODESYSTEM_PID", insertable = false, updatable = false)
+    @Column(name = "CODESYSTEM_PID", insertable = false, updatable = false, nullable = false)
     @GenericField(name = "myCodeSystemVersionPid")
     private long myCodeSystemVersionPid;

View File

@@ -25,7 +25,6 @@ import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
 import ca.uhn.fhir.jpa.entity.Search;
 import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
 import ca.uhn.fhir.jpa.migrate.taskdef.ArbitrarySqlTask;
-import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
 import ca.uhn.fhir.jpa.migrate.taskdef.CalculateHashesTask;
 import ca.uhn.fhir.jpa.migrate.taskdef.CalculateOrdinalDatesTask;
 import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeEnum;
@@ -33,6 +32,7 @@ import ca.uhn.fhir.jpa.migrate.taskdef.ForceIdMigrationCopyTask;
 import ca.uhn.fhir.jpa.migrate.taskdef.ForceIdMigrationFixTask;
 import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
 import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
+import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
 import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
@@ -135,6 +135,121 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
                     .toColumn("RES_ID")
                     .references("HFJ_RESOURCE", "RES_ID");
         }
+
+        /*
+         * Make a bunch of columns non-nullable. Note that we won't actually apply this migration
+         * on the live system as it would take a loooooong time to execute these on heavily loaded
+         * databases.
+         */
+        // Skipping numbers 20240601.1 and 20240601.2 as they were found to not
+        // be needed during code review.
+        version.onTable("HFJ_RESOURCE")
+                .modifyColumn("20240601.3", "SP_HAS_LINKS")
+                .nonNullable()
+                .withType(ColumnTypeEnum.BOOLEAN)
+                .heavyweightSkipByDefault()
+                .failureAllowed();
+        version.onTable("HFJ_RESOURCE")
+                .modifyColumn("20240601.4", "SP_COORDS_PRESENT")
+                .nonNullable()
+                .withType(ColumnTypeEnum.BOOLEAN)
+                .heavyweightSkipByDefault()
+                .failureAllowed();
+        version.onTable("HFJ_RESOURCE")
+                .modifyColumn("20240601.5", "SP_DATE_PRESENT")
+                .nonNullable()
+                .withType(ColumnTypeEnum.BOOLEAN)
+                .heavyweightSkipByDefault()
+                .failureAllowed();
+        version.onTable("HFJ_RESOURCE")
+                .modifyColumn("20240601.6", "SP_NUMBER_PRESENT")
+                .nonNullable()
+                .withType(ColumnTypeEnum.BOOLEAN)
+                .heavyweightSkipByDefault()
+                .failureAllowed();
+        version.onTable("HFJ_RESOURCE")
+                .modifyColumn("20240601.7", "SP_QUANTITY_PRESENT")
+                .nonNullable()
+                .withType(ColumnTypeEnum.BOOLEAN)
+                .heavyweightSkipByDefault()
+                .failureAllowed();
+        version.onTable("HFJ_RESOURCE")
+                .modifyColumn("20240601.8", "SP_QUANTITY_NRML_PRESENT")
+                .nonNullable()
+                .withType(ColumnTypeEnum.BOOLEAN)
+                .heavyweightSkipByDefault()
+                .failureAllowed();
+        version.onTable("HFJ_RESOURCE")
+                .modifyColumn("20240601.9", "SP_STRING_PRESENT")
+                .nonNullable()
+                .withType(ColumnTypeEnum.BOOLEAN)
+                .heavyweightSkipByDefault()
+                .failureAllowed();
+        version.onTable("HFJ_RESOURCE")
+                .modifyColumn("20240601.10", "SP_TOKEN_PRESENT")
+                .nonNullable()
+                .withType(ColumnTypeEnum.BOOLEAN)
+                .heavyweightSkipByDefault()
+                .failureAllowed();
+        version.onTable("HFJ_RESOURCE")
+                .modifyColumn("20240601.11", "SP_URI_PRESENT")
+                .nonNullable()
+                .withType(ColumnTypeEnum.BOOLEAN)
+                .heavyweightSkipByDefault()
+                .failureAllowed();
+        version.onTable("HFJ_RESOURCE")
+                .modifyColumn("20240601.12", "RES_VER")
+                .nonNullable()
+                .withType(ColumnTypeEnum.LONG)
+                .heavyweightSkipByDefault()
+                .failureAllowed();
+        version.onTable("TRM_CONCEPT")
+                .modifyColumn("20240601.13", "CODESYSTEM_PID")
+                .nonNullable()
+                .withType(ColumnTypeEnum.LONG)
+                .heavyweightSkipByDefault()
+                .failureAllowed();
+        version.onTable("BT2_JOB_INSTANCE")
+                .modifyColumn("20240601.14", "PROGRESS_PCT")
+                .nonNullable()
+                .withType(ColumnTypeEnum.DOUBLE)
+                .heavyweightSkipByDefault()
+                .failureAllowed();
+        version.onTable("BT2_JOB_INSTANCE")
+                .modifyColumn("20240601.15", "ERROR_COUNT")
+                .nonNullable()
+                .withType(ColumnTypeEnum.INT)
+                .heavyweightSkipByDefault()
+                .failureAllowed();
+        version.onTable("HFJ_BINARY_STORAGE_BLOB")
+                .modifyColumn("20240601.16", "BLOB_SIZE")
+                .nonNullable()
+                .withType(ColumnTypeEnum.LONG)
+                .heavyweightSkipByDefault()
+                .failureAllowed();
+
+        /*
+         * Add RES_ID to two indexes on HFJ_RES_VER which support history operations.
+         * This makes server and type level _history work properly on large databases
+         * on postgres. These are both marked as heavyweightSkipByDefault because the
+         * necessary reindexing would be very expensive for a rarely used FHIR feature.
+         */
+        version.onTable("HFJ_RES_VER")
+                .dropIndex("20240601.17", "IDX_RESVER_TYPE_DATE")
+                .heavyweightSkipByDefault();
+        version.onTable("HFJ_RES_VER")
+                .addIndex("20240601.18", "IDX_RESVER_TYPE_DATE")
+                .unique(false)
+                .withColumns("RES_TYPE", "RES_UPDATED", "RES_ID")
+                .heavyweightSkipByDefault();
+        version.onTable("HFJ_RES_VER")
+                .dropIndex("20240601.19", "IDX_RESVER_DATE")
+                .heavyweightSkipByDefault();
+        version.onTable("HFJ_RES_VER")
+                .addIndex("20240601.20", "IDX_RESVER_DATE")
+                .unique(false)
+                .withColumns("RES_UPDATED", "RES_ID")
+                .heavyweightSkipByDefault();
     }

     protected void init720() {
@@ -162,15 +277,15 @@
         binaryStorageBlobTable
                 .renameColumn("20240404.1", "BLOB_ID", "CONTENT_ID")
                 .getLastAddedTask()
-                .ifPresent(BaseTask::doNothing);
+                .ifPresent(t -> t.addFlag(TaskFlagEnum.DO_NOTHING));

         binaryStorageBlobTable
                 .renameColumn("20240404.2", "BLOB_SIZE", "CONTENT_SIZE")
                 .getLastAddedTask()
-                .ifPresent(BaseTask::doNothing);
+                .ifPresent(t -> t.addFlag(TaskFlagEnum.DO_NOTHING));

         binaryStorageBlobTable
                 .renameColumn("20240404.3", "BLOB_HASH", "CONTENT_HASH")
                 .getLastAddedTask()
-                .ifPresent(BaseTask::doNothing);
+                .ifPresent(t -> t.addFlag(TaskFlagEnum.DO_NOTHING));

         binaryStorageBlobTable
                 .modifyColumn("20240404.4", "BLOB_DATA")
@@ -262,7 +377,8 @@
         // Move forced_id constraints to hfj_resource and the new fhir_id column
         // Note: we leave the HFJ_FORCED_ID.IDX_FORCEDID_TYPE_FID index in place to support old writers for a while.
-        version.addTask(new ForceIdMigrationCopyTask(version.getRelease(), "20231018.1").setDoNothing(true));
+        version.addTask(
+                new ForceIdMigrationCopyTask(version.getRelease(), "20231018.1").addFlag(TaskFlagEnum.DO_NOTHING));

         Builder.BuilderWithTableName hfjResource = version.onTable("HFJ_RESOURCE");
         // commented out to make numeric space for the fix task below.
@@ -331,7 +447,8 @@
         }

         // This fix was bad for MSSQL, it has been set to do nothing.
-        version.addTask(new ForceIdMigrationFixTask(version.getRelease(), "20231213.1").setDoNothing(true));
+        version.addTask(
+                new ForceIdMigrationFixTask(version.getRelease(), "20231213.1").addFlag(TaskFlagEnum.DO_NOTHING));

         // This fix will work for MSSQL or Oracle.
         version.addTask(new ForceIdMigrationFixTask(version.getRelease(), "20231222.1"));
@@ -814,8 +931,8 @@
         version.onTable("HFJ_RES_VER")
                 .modifyColumn("20230421.1", "RES_TEXT_VC")
                 .nullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.TEXT);
+                .withType(ColumnTypeEnum.TEXT)
+                .failureAllowed();

         {
             // add hash_norm to res_id to speed up joins on a second string.
@@ -1751,8 +1868,8 @@
         version.onTable("HFJ_RES_LINK")
                 .modifyColumn("20210505.1", "SRC_PATH")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 500);
+                .withType(ColumnTypeEnum.STRING, 500)
+                .failureAllowed();
     }

     private void init530() {
@@ -1813,8 +1930,8 @@
         quantityTable
                 .modifyColumn("20210116.1", "SP_VALUE")
                 .nullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.DOUBLE);
+                .withType(ColumnTypeEnum.DOUBLE)
+                .failureAllowed();

         // HFJ_RES_LINK
         version.onTable("HFJ_RES_LINK")
@@ -2011,8 +2128,8 @@
         version.onTable("HFJ_RES_VER")
                 .modifyColumn("20200220.1", "RES_ID")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.LONG);
+                .withType(ColumnTypeEnum.LONG)
+                .failureAllowed();

         //
         // Drop unused column
@@ -2168,38 +2285,38 @@
         version.onTable("HFJ_SPIDX_STRING")
                 .modifyColumn("20200420.36", "SP_MISSING")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.BOOLEAN);
+                .withType(ColumnTypeEnum.BOOLEAN)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_COORDS")
                 .modifyColumn("20200420.37", "SP_MISSING")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.BOOLEAN);
+                .withType(ColumnTypeEnum.BOOLEAN)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_NUMBER")
                 .modifyColumn("20200420.38", "SP_MISSING")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.BOOLEAN);
+                .withType(ColumnTypeEnum.BOOLEAN)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_TOKEN")
                 .modifyColumn("20200420.39", "SP_MISSING")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.BOOLEAN);
+                .withType(ColumnTypeEnum.BOOLEAN)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_DATE")
                 .modifyColumn("20200420.40", "SP_MISSING")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.BOOLEAN);
+                .withType(ColumnTypeEnum.BOOLEAN)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_URI")
                 .modifyColumn("20200420.41", "SP_MISSING")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.BOOLEAN);
+                .withType(ColumnTypeEnum.BOOLEAN)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_QUANTITY")
                 .modifyColumn("20200420.42", "SP_MISSING")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.BOOLEAN);
+                .withType(ColumnTypeEnum.BOOLEAN)
+                .failureAllowed();

         // Add support for integer comparisons during day-precision date search.
         Builder.BuilderWithTableName spidxDate = version.onTable("HFJ_SPIDX_DATE");
@@ -2309,38 +2426,38 @@
         version.onTable("HFJ_SPIDX_NUMBER")
                 .modifyColumn("20190920.1", "RES_ID")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.LONG);
+                .withType(ColumnTypeEnum.LONG)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_COORDS")
                 .modifyColumn("20190920.2", "RES_ID")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.LONG);
+                .withType(ColumnTypeEnum.LONG)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_TOKEN")
                 .modifyColumn("20190920.3", "RES_ID")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.LONG);
+                .withType(ColumnTypeEnum.LONG)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_STRING")
                 .modifyColumn("20190920.4", "RES_ID")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.LONG);
+                .withType(ColumnTypeEnum.LONG)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_DATE")
                 .modifyColumn("20190920.5", "RES_ID")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.LONG);
+                .withType(ColumnTypeEnum.LONG)
+                .failureAllowed();
        version.onTable("HFJ_SPIDX_QUANTITY")
                 .modifyColumn("20190920.6", "RES_ID")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.LONG);
+                .withType(ColumnTypeEnum.LONG)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_URI")
                 .modifyColumn("20190920.7", "RES_ID")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.LONG);
+                .withType(ColumnTypeEnum.LONG)
+                .failureAllowed();

         // HFJ_SEARCH
         version.onTable("HFJ_SEARCH")
@@ -2469,33 +2586,33 @@
         version.onTable("HFJ_RESOURCE")
                 .modifyColumn("20191002.1", "RES_TYPE")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 40);
+                .withType(ColumnTypeEnum.STRING, 40)
+                .failureAllowed();
         version.onTable("HFJ_RES_VER")
                 .modifyColumn("20191002.2", "RES_TYPE")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 40);
+                .withType(ColumnTypeEnum.STRING, 40)
+                .failureAllowed();
         version.onTable("HFJ_HISTORY_TAG")
                 .modifyColumn("20191002.3", "RES_TYPE")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 40);
+                .withType(ColumnTypeEnum.STRING, 40)
+                .failureAllowed();
         version.onTable("HFJ_RES_LINK")
                 .modifyColumn("20191002.4", "SOURCE_RESOURCE_TYPE")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 40);
+                .withType(ColumnTypeEnum.STRING, 40)
+                .failureAllowed();
         version.onTable("HFJ_RES_LINK")
                 .modifyColumn("20191002.5", "TARGET_RESOURCE_TYPE")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 40);
+                .withType(ColumnTypeEnum.STRING, 40)
+                .failureAllowed();
         version.onTable("HFJ_RES_TAG")
                 .modifyColumn("20191002.6", "RES_TYPE")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 40);
+                .withType(ColumnTypeEnum.STRING, 40)
+                .failureAllowed();

         // TermConceptDesignation
         version.startSectionWithMessage("Processing table: TRM_CONCEPT_DESIG");
@@ -2765,18 +2882,18 @@
         version.onTable("HFJ_SPIDX_COORDS")
                 .modifyColumn("20190814.9", "RES_TYPE")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 100);
+                .withType(ColumnTypeEnum.STRING, 100)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_DATE")
                 .modifyColumn("20190814.10", "RES_TYPE")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 100);
+                .withType(ColumnTypeEnum.STRING, 100)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_STRING")
                 .modifyColumn("20190814.11", "RES_TYPE")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 100);
+                .withType(ColumnTypeEnum.STRING, 100)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_STRING")
                 .addColumn("20190814.12", "HASH_IDENTITY")
                 .nullable()
@@ -2788,50 +2905,50 @@
         version.onTable("HFJ_SPIDX_COORDS")
                 .modifyColumn("20190814.14", "RES_TYPE")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 100);
+                .withType(ColumnTypeEnum.STRING, 100)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_QUANTITY")
                 .modifyColumn("20190814.15", "RES_TYPE")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 100);
+                .withType(ColumnTypeEnum.STRING, 100)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("20190814.16", "HASH_UNITS_AND_VALPREFIX");
         version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("20190814.17", "HASH_VALPREFIX");
         version.onTable("HFJ_SPIDX_NUMBER")
                 .modifyColumn("20190814.18", "RES_TYPE")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 100);
+                .withType(ColumnTypeEnum.STRING, 100)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_TOKEN")
                 .modifyColumn("20190814.19", "RES_TYPE")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 100);
+                .withType(ColumnTypeEnum.STRING, 100)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_URI")
                 .modifyColumn("20190814.20", "RES_TYPE")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 100);
+                .withType(ColumnTypeEnum.STRING, 100)
+                .failureAllowed();
         version.onTable("HFJ_SPIDX_URI")
                 .modifyColumn("20190814.21", "SP_URI")
                 .nullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 254);
+                .withType(ColumnTypeEnum.STRING, 254)
+                .failureAllowed();
         version.onTable("TRM_CODESYSTEM")
                 .modifyColumn("20190814.22", "CODE_SYSTEM_URI")
                 .nonNullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 200);
+                .withType(ColumnTypeEnum.STRING, 200)
+                .failureAllowed();
         version.onTable("TRM_CODESYSTEM")
                 .modifyColumn("20190814.23", "CS_NAME")
                 .nullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 200);
+                .withType(ColumnTypeEnum.STRING, 200)
+                .failureAllowed();
         version.onTable("TRM_CODESYSTEM_VER")
                 .modifyColumn("20190814.24", "CS_VERSION_ID")
                 .nullable()
-                .failureAllowed()
-                .withType(ColumnTypeEnum.STRING, 200);
+                .withType(ColumnTypeEnum.STRING, 200)
+                .failureAllowed();
     }

     private void init360() { // 20180918 - 20181112
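
Taken together with the CLI change above, the intended flow is: every 20240601.x task is flagged heavyweightSkipByDefault(), and the migrator only executes such tasks when setRunHeavyweightSkippableTasks(true) has been called. A rough sketch of that wiring, with the HapiMigrator construction assumed for illustration (only the setter and createMigrationTableIfRequired() appear in this diff):

    // Assumed setup; constructor arguments are illustrative, not taken from this diff
    HapiMigrator migrator = new HapiMigrator(migrationTableName, dataSource, driverType);
    migrator.createMigrationTableIfRequired();
    migrator.setRunHeavyweightSkippableTasks(true); // opt in to the heavyweight 20240601.x tasks
    migrator.migrate(); // assumed entry point that executes the task list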

View File

@@ -524,8 +524,12 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
             String theParamTypeName) {
         Parameters.ParametersParameterComponent retVal =
                 super.addIndexValue(theAction, theParent, theParam, theParamTypeName);
-        retVal.addPart().setName("Latitude").setValue(new DecimalType(theParam.getLatitude()));
-        retVal.addPart().setName("Longitude").setValue(new DecimalType(theParam.getLongitude()));
+        if (theParam.getLatitude() != null) {
+            retVal.addPart().setName("Latitude").setValue(new DecimalType(theParam.getLatitude()));
+        }
+        if (theParam.getLongitude() != null) {
+            retVal.addPart().setName("Longitude").setValue(new DecimalType(theParam.getLongitude()));
+        }
         return retVal;
     }
 }

View File

@@ -6,7 +6,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -3,7 +3,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -3,7 +3,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -6,7 +6,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -45,7 +45,7 @@ public class BinaryStorageEntity {
     @Column(name = "RESOURCE_ID", length = 100, nullable = false)
     private String myResourceId;

-    @Column(name = "BLOB_SIZE", nullable = true)
+    @Column(name = "BLOB_SIZE", nullable = false)
     private long mySize;

     @Column(name = "CONTENT_TYPE", nullable = false, length = 100)

View File

@@ -60,9 +60,9 @@ import java.util.Collection;
             columnNames = {"RES_ID", "RES_VER"})
         },
         indexes = {
-            @Index(name = "IDX_RESVER_TYPE_DATE", columnList = "RES_TYPE,RES_UPDATED"),
+            @Index(name = "IDX_RESVER_TYPE_DATE", columnList = "RES_TYPE,RES_UPDATED,RES_ID"),
             @Index(name = "IDX_RESVER_ID_DATE", columnList = "RES_ID,RES_UPDATED"),
-            @Index(name = "IDX_RESVER_DATE", columnList = "RES_UPDATED")
+            @Index(name = "IDX_RESVER_DATE", columnList = "RES_UPDATED,RES_ID")
         })
 public class ResourceHistoryTable extends BaseHasResource implements Serializable {
     public static final String IDX_RESVER_ID_VER = "IDX_RESVER_ID_VER";

View File

@@ -135,12 +135,14 @@ public class ResourceIndexedComboStringUnique extends BasePartitionable
         myIndexString = theIndexString;
     }

+    @Override
     public ResourceTable getResource() {
         return myResource;
     }

+    @Override
     public void setResource(ResourceTable theResource) {
-        Validate.notNull(theResource);
+        Validate.notNull(theResource, "theResource must not be null");
         myResource = theResource;
     }

View File

@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.model.entity;

 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.model.api.IQueryParameterType;
+import jakarta.annotation.Nullable;
 import jakarta.persistence.Column;
 import jakarta.persistence.Embeddable;
 import jakarta.persistence.Entity;
@@ -56,13 +57,11 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
     private static final long serialVersionUID = 1L;

-    @Column(name = "SP_LATITUDE")
-    // @FullTextField
-    public double myLatitude;
+    @Column(name = "SP_LATITUDE", nullable = true)
+    public Double myLatitude;

-    @Column(name = "SP_LONGITUDE")
-    // @FullTextField
-    public double myLongitude;
+    @Column(name = "SP_LONGITUDE", nullable = true)
+    public Double myLongitude;

     @Id
     @SequenceGenerator(name = "SEQ_SPIDX_COORDS", sequenceName = "SEQ_SPIDX_COORDS")
@@ -162,7 +161,8 @@
         myId = theId;
     }

-    public double getLatitude() {
+    @Nullable
+    public Double getLatitude() {
         return myLatitude;
     }
@@ -171,7 +171,8 @@
         return this;
     }

-    public double getLongitude() {
+    @Nullable
+    public Double getLongitude() {
         return myLongitude;
     }

View File

@@ -142,7 +142,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
     @OptimisticLock(excluded = true)
     private String myHashSha256;

-    @Column(name = "SP_HAS_LINKS")
+    @Column(name = "SP_HAS_LINKS", nullable = false)
     @OptimisticLock(excluded = true)
     private boolean myHasLinks;
@@ -203,7 +203,7 @@
     @OptimisticLock(excluded = true)
     private Collection<ResourceIndexedSearchParamCoords> myParamsCoords;

-    @Column(name = "SP_COORDS_PRESENT")
+    @Column(name = "SP_COORDS_PRESENT", nullable = false)
     @OptimisticLock(excluded = true)
     private boolean myParamsCoordsPopulated;
@@ -215,7 +215,7 @@
     @OptimisticLock(excluded = true)
     private Collection<ResourceIndexedSearchParamDate> myParamsDate;

-    @Column(name = "SP_DATE_PRESENT")
+    @Column(name = "SP_DATE_PRESENT", nullable = false)
     @OptimisticLock(excluded = true)
     private boolean myParamsDatePopulated;
@@ -227,7 +227,7 @@
             orphanRemoval = false)
     private Collection<ResourceIndexedSearchParamNumber> myParamsNumber;

-    @Column(name = "SP_NUMBER_PRESENT")
+    @Column(name = "SP_NUMBER_PRESENT", nullable = false)
     @OptimisticLock(excluded = true)
     private boolean myParamsNumberPopulated;
@@ -239,7 +239,7 @@
     @OptimisticLock(excluded = true)
     private Collection<ResourceIndexedSearchParamQuantity> myParamsQuantity;

-    @Column(name = "SP_QUANTITY_PRESENT")
+    @Column(name = "SP_QUANTITY_PRESENT", nullable = false)
     @OptimisticLock(excluded = true)
     private boolean myParamsQuantityPopulated;
@@ -260,7 +260,7 @@
      * NOTE : use Boolean class instead of boolean primitive, in order to set the existing rows to null
      * since 5.3.0
      */
-    @Column(name = "SP_QUANTITY_NRML_PRESENT")
+    @Column(name = "SP_QUANTITY_NRML_PRESENT", nullable = false)
     @OptimisticLock(excluded = true)
     private Boolean myParamsQuantityNormalizedPopulated = Boolean.FALSE;
@@ -272,7 +272,7 @@
     @OptimisticLock(excluded = true)
     private Collection<ResourceIndexedSearchParamString> myParamsString;

-    @Column(name = "SP_STRING_PRESENT")
+    @Column(name = "SP_STRING_PRESENT", nullable = false)
     @OptimisticLock(excluded = true)
     private boolean myParamsStringPopulated;
@@ -284,7 +284,7 @@
     @OptimisticLock(excluded = true)
     private Collection<ResourceIndexedSearchParamToken> myParamsToken;

-    @Column(name = "SP_TOKEN_PRESENT")
+    @Column(name = "SP_TOKEN_PRESENT", nullable = false)
     @OptimisticLock(excluded = true)
     private boolean myParamsTokenPopulated;
@@ -296,7 +296,7 @@
     @OptimisticLock(excluded = true)
     private Collection<ResourceIndexedSearchParamUri> myParamsUri;

-    @Column(name = "SP_URI_PRESENT")
+    @Column(name = "SP_URI_PRESENT", nullable = false)
     @OptimisticLock(excluded = true)
     private boolean myParamsUriPopulated;
@@ -404,7 +404,7 @@
     private Boolean mySearchUrlPresent = false;

     @Version
-    @Column(name = "RES_VER")
+    @Column(name = "RES_VER", nullable = false)
     private long myVersion;

     @OneToMany(mappedBy = "myResourceTable", fetch = FetchType.LAZY)

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -6,7 +6,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -6,7 +6,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -6,7 +6,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -6,7 +6,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -6,7 +6,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -6,7 +6,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

View File

@@ -7,7 +7,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -7,7 +7,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -7,7 +7,7 @@
   <parent>
     <artifactId>hapi-fhir-serviceloaders</artifactId>
     <groupId>ca.uhn.hapi.fhir</groupId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

View File

@@ -7,7 +7,7 @@
   <parent>
     <artifactId>hapi-fhir-serviceloaders</artifactId>
     <groupId>ca.uhn.hapi.fhir</groupId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
@@ -21,7 +21,7 @@
     <dependency>
       <groupId>ca.uhn.hapi.fhir</groupId>
       <artifactId>hapi-fhir-caching-api</artifactId>
-      <version>7.3.7-SNAPSHOT</version>
+      <version>7.3.8-SNAPSHOT</version>
     </dependency>
     <dependency>

View File

@@ -7,7 +7,7 @@
   <parent>
     <artifactId>hapi-fhir-serviceloaders</artifactId>
     <groupId>ca.uhn.hapi.fhir</groupId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

View File

@@ -7,7 +7,7 @@
   <parent>
     <artifactId>hapi-fhir</artifactId>
     <groupId>ca.uhn.hapi.fhir</groupId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <artifactId>hapi-deployable-pom</artifactId>
     <groupId>ca.uhn.hapi.fhir</groupId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.7-SNAPSHOT</version>
+    <version>7.3.8-SNAPSHOT</version>
     <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId> <artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>7.3.7-SNAPSHOT</version> <version>7.3.8-SNAPSHOT</version>
</parent> </parent>
<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId> <artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId> <artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>7.3.7-SNAPSHOT</version> <version>7.3.8-SNAPSHOT</version>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId> <artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>7.3.7-SNAPSHOT</version> <version>7.3.8-SNAPSHOT</version>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot</artifactId> <artifactId>hapi-fhir-spring-boot</artifactId>
<version>7.3.7-SNAPSHOT</version> <version>7.3.8-SNAPSHOT</version>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version> <version>7.3.8-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId> <artifactId>hapi-fhir</artifactId>
<version>7.3.7-SNAPSHOT</version> <version>7.3.8-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version> <version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -23,6 +23,7 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao; import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask; import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.InitializeSchemaTask; import ca.uhn.fhir.jpa.migrate.taskdef.InitializeSchemaTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import ca.uhn.fhir.system.HapiSystemProperties; import ca.uhn.fhir.system.HapiSystemProperties;
import ca.uhn.fhir.util.StopWatch; import ca.uhn.fhir.util.StopWatch;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
@ -44,6 +45,7 @@ public class HapiMigrator {
private static final Logger ourLog = LoggerFactory.getLogger(HapiMigrator.class); private static final Logger ourLog = LoggerFactory.getLogger(HapiMigrator.class);
private final MigrationTaskList myTaskList = new MigrationTaskList(); private final MigrationTaskList myTaskList = new MigrationTaskList();
private boolean myDryRun; private boolean myDryRun;
private boolean myRunHeavyweightSkippableTasks;
private boolean myNoColumnShrink; private boolean myNoColumnShrink;
private final DriverTypeEnum myDriverType; private final DriverTypeEnum myDriverType;
private final DataSource myDataSource; private final DataSource myDataSource;
@ -69,6 +71,24 @@ public class HapiMigrator {
myDryRun = theDryRun; myDryRun = theDryRun;
} }
/**
* Should we run the tasks marked with {@link ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum#HEAVYWEIGHT_SKIP_BY_DEFAULT}?
*
* @since 7.4.0
*/
public boolean isRunHeavyweightSkippableTasks() {
return myRunHeavyweightSkippableTasks;
}
/**
* Should we run the tasks marked with {@link ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum#HEAVYWEIGHT_SKIP_BY_DEFAULT}?
*
* @since 7.4.0
*/
public void setRunHeavyweightSkippableTasks(boolean theRunHeavyweightSkippableTasks) {
myRunHeavyweightSkippableTasks = theRunHeavyweightSkippableTasks;
}
public boolean isNoColumnShrink() { public boolean isNoColumnShrink() {
return myNoColumnShrink; return myNoColumnShrink;
} }
@ -131,14 +151,27 @@ public class HapiMigrator {
try (DriverTypeEnum.ConnectionProperties connectionProperties = try (DriverTypeEnum.ConnectionProperties connectionProperties =
getDriverType().newConnectionProperties(getDataSource())) { getDriverType().newConnectionProperties(getDataSource())) {
newTaskList.forEach(next -> { if (!isRunHeavyweightSkippableTasks()) {
newTaskList.removeIf(BaseTask::isHeavyweightSkippableTask);
}
boolean initializedSchema = false;
for (BaseTask next : newTaskList) {
if (initializedSchema && !next.hasFlag(TaskFlagEnum.RUN_DURING_SCHEMA_INITIALIZATION)) {
ourLog.info("Skipping task {} because schema is being initialized", next.getMigrationVersion());
recordTaskAsCompletedIfNotDryRun(next, 0L, true);
continue;
}
next.setDriverType(getDriverType()); next.setDriverType(getDriverType());
next.setDryRun(isDryRun()); next.setDryRun(isDryRun());
next.setNoColumnShrink(isNoColumnShrink()); next.setNoColumnShrink(isNoColumnShrink());
next.setConnectionProperties(connectionProperties); next.setConnectionProperties(connectionProperties);
executeTask(next, retval); executeTask(next, retval);
});
initializedSchema |= next.initializedSchema();
}
} }
} catch (Exception e) { } catch (Exception e) {
ourLog.error("Migration failed", e); ourLog.error("Migration failed", e);
@ -167,13 +200,13 @@ public class HapiMigrator {
} }
preExecute(theTask); preExecute(theTask);
theTask.execute(); theTask.execute();
postExecute(theTask, sw, true); recordTaskAsCompletedIfNotDryRun(theTask, sw.getMillis(), true);
theMigrationResult.changes += theTask.getChangesCount(); theMigrationResult.changes += theTask.getChangesCount();
theMigrationResult.executedStatements.addAll(theTask.getExecutedStatements()); theMigrationResult.executedStatements.addAll(theTask.getExecutedStatements());
theMigrationResult.succeededTasks.add(theTask); theMigrationResult.succeededTasks.add(theTask);
} catch (SQLException | HapiMigrationException e) { } catch (SQLException | HapiMigrationException e) {
theMigrationResult.failedTasks.add(theTask); theMigrationResult.failedTasks.add(theTask);
postExecute(theTask, sw, false); recordTaskAsCompletedIfNotDryRun(theTask, sw.getMillis(), false);
String description = theTask.getDescription(); String description = theTask.getDescription();
if (isBlank(description)) { if (isBlank(description)) {
description = theTask.getClass().getSimpleName(); description = theTask.getClass().getSimpleName();
@ -187,9 +220,9 @@ public class HapiMigrator {
myCallbacks.forEach(action -> action.preExecution(theTask)); myCallbacks.forEach(action -> action.preExecution(theTask));
} }
private void postExecute(BaseTask theNext, StopWatch theStopWatch, boolean theSuccess) { private void recordTaskAsCompletedIfNotDryRun(BaseTask theNext, long theExecutionMillis, boolean theSuccess) {
if (!theNext.isDryRun()) { if (!theNext.isDryRun()) {
myHapiMigrationStorageSvc.saveTask(theNext, Math.toIntExact(theStopWatch.getMillis()), theSuccess); myHapiMigrationStorageSvc.saveTask(theNext, Math.toIntExact(theExecutionMillis), theSuccess);
} }
} }
@ -211,7 +244,7 @@ public class HapiMigrator {
} }
public void setCallbacks(@Nonnull List<IHapiMigrationCallback> theCallbacks) { public void setCallbacks(@Nonnull List<IHapiMigrationCallback> theCallbacks) {
Validate.notNull(theCallbacks); Validate.notNull(theCallbacks, "theCallbacks must not be null");
myCallbacks = theCallbacks; myCallbacks = theCallbacks;
} }
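
The two behavioural changes above, the heavyweight-task switch and the skip-after-schema-initialization loop, are the core of this file. A minimal usage sketch follows, assuming only the constructor and methods visible in this diff; the H2 connection settings and the BasicDataSource choice are hypothetical:

import org.apache.commons.dbcp2.BasicDataSource;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
import ca.uhn.fhir.jpa.migrate.MigrationResult;

public class MigratorUsageSketch {
	public static void main(String[] args) {
		// Hypothetical in-memory H2 data source; any pooled DataSource would do.
		BasicDataSource dataSource = new BasicDataSource();
		dataSource.setDriverClassName("org.h2.Driver");
		dataSource.setUrl("jdbc:h2:mem:migration_sketch");

		HapiMigrator migrator = new HapiMigrator("TEST_MIGRATOR_TABLE", dataSource, DriverTypeEnum.H2_EMBEDDED);
		migrator.createMigrationTableIfRequired();

		// Opt in to tasks flagged HEAVYWEIGHT_SKIP_BY_DEFAULT; when this is left
		// false (the default), migrate() removes them from the task list up front.
		migrator.setRunHeavyweightSkippableTasks(true);

		MigrationResult result = migrator.migrate();
		System.out.println(result.succeededTasks.size() + " tasks succeeded");
	}
}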

View File

@ -29,6 +29,7 @@ import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.function.Consumer; import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.stream.Collectors; import java.util.stream.Collectors;
public class MigrationTaskList implements Iterable<BaseTask> { public class MigrationTaskList implements Iterable<BaseTask> {
@ -95,4 +96,12 @@ public class MigrationTaskList implements Iterable<BaseTask> {
.reduce((first, second) -> second) .reduce((first, second) -> second)
.orElse(null); .orElse(null);
} }
public void removeIf(Predicate<BaseTask> theFilter) {
myTasks.removeIf(theFilter);
}
public BaseTask[] toTaskArray() {
return myTasks.toArray(new BaseTask[0]);
}
} }
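
The new removeIf and toTaskArray helpers back the two call sites added in this commit: HapiMigrator filters out heavyweight tasks, and the tests re-feed a list into a fresh migrator. A brief sketch of both, assuming a populated MigrationTaskList:

import ca.uhn.fhir.jpa.migrate.MigrationTaskList;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;

class TaskListSketch {
	static BaseTask[] withoutHeavyweight(MigrationTaskList taskList) {
		// Drop heavyweight-skippable tasks, as HapiMigrator.migrate() now does by default:
		taskList.removeIf(BaseTask::isHeavyweightSkippableTask);
		// Materialize what remains, e.g. to feed a fresh migrator as the tests do:
		return taskList.toTaskArray();
	}
}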

View File

@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.migrate; package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask; import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@ -49,7 +50,7 @@ public class MigrationTaskSkipper {
for (BaseTask task : theTasks) { for (BaseTask task : theTasks) {
if (skippedVersionSet.contains(task.getMigrationVersion())) { if (skippedVersionSet.contains(task.getMigrationVersion())) {
ourLog.info("Will skip {}: {}", task.getMigrationVersion(), task.getDescription()); ourLog.info("Will skip {}: {}", task.getMigrationVersion(), task.getDescription());
task.setDoNothing(true); task.addFlag(TaskFlagEnum.DO_NOTHING);
} }
} }
} }

View File

@ -35,12 +35,6 @@ public class AddColumnTask extends BaseTableColumnTypeTask {
return new AddColumnTask(null, null, ColumnNameCase.ALL_LOWER, theColumnDriverMappingOverrides); return new AddColumnTask(null, null, ColumnNameCase.ALL_LOWER, theColumnDriverMappingOverrides);
} }
public AddColumnTask() {
this(null, null);
setDryRun(true);
myCheckForExistingTables = false;
}
public AddColumnTask(String theProductVersion, String theSchemaVersion) { public AddColumnTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion); super(theProductVersion, theSchemaVersion);
} }
@ -84,6 +78,7 @@ public class AddColumnTask extends BaseTableColumnTypeTask {
break; break;
case DERBY_EMBEDDED: case DERBY_EMBEDDED:
case POSTGRES_9_4: case POSTGRES_9_4:
case COCKROACHDB_21_1:
sql = "alter table " + getTableName() + " add column " + getColumnName() + " " + typeStatement; sql = "alter table " + getTableName() + " add column " + getColumnName() + " " + typeStatement;
break; break;
case MSSQL_2012: case MSSQL_2012:

View File

@ -56,7 +56,7 @@ public class AddTableByColumnTask extends BaseTableTask {
this(theProductVersion, theSchemaVersion, null); this(theProductVersion, theSchemaVersion, null);
} }
private AddTableByColumnTask( public AddTableByColumnTask(
String theProductVersion, String theSchemaVersion, Comparator<AddColumnTask> theColumnSortingRules) { String theProductVersion, String theSchemaVersion, Comparator<AddColumnTask> theColumnSortingRules) {
super(theProductVersion, theSchemaVersion); super(theProductVersion, theSchemaVersion);
myColumnSortingRules = theColumnSortingRules; myColumnSortingRules = theColumnSortingRules;

View File

@ -46,6 +46,9 @@ public class ArbitrarySqlTask extends BaseTask {
private String myExecuteOnlyIfTableExists; private String myExecuteOnlyIfTableExists;
private List<TableAndColumn> myConditionalOnExistenceOf = new ArrayList<>(); private List<TableAndColumn> myConditionalOnExistenceOf = new ArrayList<>();
/**
* Constructor
*/
public ArbitrarySqlTask(VersionEnum theRelease, String theVersion, String theTableName, String theDescription) { public ArbitrarySqlTask(VersionEnum theRelease, String theVersion, String theTableName, String theDescription) {
super(theRelease.toString(), theVersion); super(theRelease.toString(), theVersion);
myTableName = theTableName; myTableName = theTableName;

View File

@ -22,10 +22,14 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrationException; import ca.uhn.fhir.jpa.migrate.HapiMigrationException;
import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import ca.uhn.fhir.system.HapiSystemProperties; import ca.uhn.fhir.system.HapiSystemProperties;
import jakarta.annotation.Nonnull;
import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.flywaydb.core.api.MigrationVersion; import org.flywaydb.core.api.MigrationVersion;
import org.intellij.lang.annotations.Language; import org.intellij.lang.annotations.Language;
import org.slf4j.Logger; import org.slf4j.Logger;
@ -38,6 +42,7 @@ import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
@ -51,11 +56,42 @@ public abstract class BaseTask {
private static final Pattern versionPattern = Pattern.compile(MIGRATION_VERSION_PATTERN); private static final Pattern versionPattern = Pattern.compile(MIGRATION_VERSION_PATTERN);
private final String myProductVersion; private final String myProductVersion;
private final String mySchemaVersion; private final String mySchemaVersion;
private final List<ExecuteTaskPrecondition> myPreconditions = new ArrayList<>();
private final EnumSet<TaskFlagEnum> myFlags = EnumSet.noneOf(TaskFlagEnum.class);
private final List<ExecutedStatement> myExecutedStatements = new ArrayList<>();
/**
* Whether to check for existing tables
* before generating SQL
*/
protected boolean myCheckForExistingTables = true;
/**
* Whether to generate the SQL in a 'readable format'
*/
protected boolean myPrettyPrint = false;
private DriverTypeEnum.ConnectionProperties myConnectionProperties; private DriverTypeEnum.ConnectionProperties myConnectionProperties;
private DriverTypeEnum myDriverType; private DriverTypeEnum myDriverType;
private String myDescription; private String myDescription;
private Integer myChangesCount = 0; private Integer myChangesCount = 0;
private boolean myDryRun; private boolean myDryRun;
private boolean myTransactional = true;
private Set<DriverTypeEnum> myOnlyAppliesToPlatforms = new HashSet<>();
private boolean myNoColumnShrink;
protected BaseTask(String theProductVersion, String theSchemaVersion) {
myProductVersion = theProductVersion;
mySchemaVersion = theSchemaVersion;
}
/**
* Adds a flag if it's not already present; otherwise, this call is ignored.
*
* @param theFlag The flag, must not be null
*/
public BaseTask addFlag(@Nonnull TaskFlagEnum theFlag) {
myFlags.add(theFlag);
return this;
}
/** /**
* Some migrations can not be run in a transaction. * Some migrations can not be run in a transaction.
@ -65,48 +101,12 @@ public abstract class BaseTask {
myTransactional = theTransactional; myTransactional = theTransactional;
} }
private boolean myTransactional = true;
private boolean myDoNothing;
private List<ExecutedStatement> myExecutedStatements = new ArrayList<>();
private Set<DriverTypeEnum> myOnlyAppliesToPlatforms = new HashSet<>();
private boolean myNoColumnShrink;
private boolean myFailureAllowed;
private boolean myRunDuringSchemaInitialization;
/**
* Whether or not to check for existing tables
* before generating SQL
*/
protected boolean myCheckForExistingTables = true;
/**
* Whether or not to generate the SQL in a 'readable format'
*/
protected boolean myPrettyPrint = false;
protected BaseTask(String theProductVersion, String theSchemaVersion) {
myProductVersion = theProductVersion;
mySchemaVersion = theSchemaVersion;
}
public boolean isRunDuringSchemaInitialization() {
return myRunDuringSchemaInitialization;
}
public void setPrettyPrint(boolean thePrettyPrint) { public void setPrettyPrint(boolean thePrettyPrint) {
myPrettyPrint = thePrettyPrint; myPrettyPrint = thePrettyPrint;
} }
/**
* Should this task run even if we're doing the very first initialization of an empty schema. By
* default we skip most tasks during that pass, since they just take up time and the
* schema should be fully initialized by the {@link InitializeSchemaTask}
*/
public void setRunDuringSchemaInitialization(boolean theRunDuringSchemaInitialization) {
myRunDuringSchemaInitialization = theRunDuringSchemaInitialization;
}
public void setOnlyAppliesToPlatforms(Set<DriverTypeEnum> theOnlyAppliesToPlatforms) { public void setOnlyAppliesToPlatforms(Set<DriverTypeEnum> theOnlyAppliesToPlatforms) {
Validate.notNull(theOnlyAppliesToPlatforms); Validate.notNull(theOnlyAppliesToPlatforms, "theOnlyAppliesToPlatforms must not be null");
myOnlyAppliesToPlatforms = theOnlyAppliesToPlatforms; myOnlyAppliesToPlatforms = theOnlyAppliesToPlatforms;
} }
@ -188,7 +188,7 @@ public abstract class BaseTask {
private Integer doExecuteSqlList(List<String> theSqlStatements) { private Integer doExecuteSqlList(List<String> theSqlStatements) {
int changesCount = 0; int changesCount = 0;
for (String nextSql : theSqlStatements) { for (@Language("SQL") String nextSql : theSqlStatements) {
changesCount += doExecuteSql(nextSql); changesCount += doExecuteSql(nextSql);
} }
@ -206,7 +206,7 @@ public abstract class BaseTask {
} }
return changesCount; return changesCount;
} catch (DataAccessException e) { } catch (DataAccessException e) {
if (myFailureAllowed) { if (myFlags.contains(TaskFlagEnum.FAILURE_ALLOWED)) {
ourLog.info("Task {} did not exit successfully, but task is allowed to fail", getMigrationVersion()); ourLog.info("Task {} did not exit successfully, but task is allowed to fail", getMigrationVersion());
ourLog.debug("Error was: {}", e.getMessage(), e); ourLog.debug("Error was: {}", e.getMessage(), e);
return 0; return 0;
@ -219,7 +219,7 @@ public abstract class BaseTask {
protected void captureExecutedStatement( protected void captureExecutedStatement(
String theTableName, @Language("SQL") String theSql, Object... theArguments) { String theTableName, @Language("SQL") String theSql, Object... theArguments) {
myExecutedStatements.add(new ExecutedStatement(theTableName, theSql, theArguments)); myExecutedStatements.add(new ExecutedStatement(mySchemaVersion, theTableName, theSql, theArguments));
} }
public DriverTypeEnum.ConnectionProperties getConnectionProperties() { public DriverTypeEnum.ConnectionProperties getConnectionProperties() {
@ -250,10 +250,8 @@ public abstract class BaseTask {
return getConnectionProperties().newJdbcTemplate(); return getConnectionProperties().newJdbcTemplate();
} }
private final List<ExecuteTaskPrecondition> myPreconditions = new ArrayList<>();
public void execute() throws SQLException { public void execute() throws SQLException {
if (myDoNothing) { if (myFlags.contains(TaskFlagEnum.DO_NOTHING)) {
ourLog.info("Skipping stubbed task: {}", getDescription()); ourLog.info("Skipping stubbed task: {}", getDescription());
return; return;
} }
@ -278,14 +276,6 @@ public abstract class BaseTask {
protected abstract void doExecute() throws SQLException; protected abstract void doExecute() throws SQLException;
protected boolean isFailureAllowed() {
return myFailureAllowed;
}
public void setFailureAllowed(boolean theFailureAllowed) {
myFailureAllowed = theFailureAllowed;
}
public String getMigrationVersion() { public String getMigrationVersion() {
String releasePart = myProductVersion; String releasePart = myProductVersion;
if (releasePart.startsWith("V")) { if (releasePart.startsWith("V")) {
@ -296,6 +286,7 @@ public abstract class BaseTask {
return migrationVersion.getVersion(); return migrationVersion.getVersion();
} }
@SuppressWarnings("StringConcatenationArgumentToLogCall")
protected void logInfo(Logger theLog, String theFormattedMessage, Object... theArguments) { protected void logInfo(Logger theLog, String theFormattedMessage, Object... theArguments) {
theLog.info(getMigrationVersion() + ": " + theFormattedMessage, theArguments); theLog.info(getMigrationVersion() + ": " + theFormattedMessage, theArguments);
} }
@ -308,23 +299,6 @@ public abstract class BaseTask {
} }
} }
public void doNothing() {
setDoNothing(true);
}
public void failureAllowed() {
setFailureAllowed(true);
}
public boolean isDoNothing() {
return myDoNothing;
}
public BaseTask setDoNothing(boolean theDoNothing) {
myDoNothing = theDoNothing;
return this;
}
public void addPrecondition(ExecuteTaskPrecondition thePrecondition) { public void addPrecondition(ExecuteTaskPrecondition thePrecondition) {
myPreconditions.add(thePrecondition); myPreconditions.add(thePrecondition);
} }
@ -343,7 +317,6 @@ public abstract class BaseTask {
if (theObject == null || getClass().equals(theObject.getClass()) == false) { if (theObject == null || getClass().equals(theObject.getClass()) == false) {
return false; return false;
} }
@SuppressWarnings("unchecked")
BaseTask otherObject = (BaseTask) theObject; BaseTask otherObject = (BaseTask) theObject;
EqualsBuilder b = new EqualsBuilder(); EqualsBuilder b = new EqualsBuilder();
@ -357,17 +330,35 @@ public abstract class BaseTask {
return false; return false;
} }
public boolean isDoNothing() {
return myFlags.contains(TaskFlagEnum.DO_NOTHING);
}
public boolean isHeavyweightSkippableTask() {
return myFlags.contains(TaskFlagEnum.HEAVYWEIGHT_SKIP_BY_DEFAULT);
}
public boolean hasFlag(TaskFlagEnum theFlag) {
return myFlags.contains(theFlag);
}
public static class ExecutedStatement { public static class ExecutedStatement {
private final String mySql; private final String mySql;
private final List<Object> myArguments; private final List<Object> myArguments;
private final String myTableName; private final String myTableName;
private final String mySchemaVersion;
public ExecutedStatement(String theDescription, String theSql, Object[] theArguments) { public ExecutedStatement(String theSchemaVersion, String theDescription, String theSql, Object[] theArguments) {
mySchemaVersion = theSchemaVersion;
myTableName = theDescription; myTableName = theDescription;
mySql = theSql; mySql = theSql;
myArguments = theArguments != null ? Arrays.asList(theArguments) : Collections.emptyList(); myArguments = theArguments != null ? Arrays.asList(theArguments) : Collections.emptyList();
} }
public String getSchemaVersion() {
return mySchemaVersion;
}
public String getTableName() { public String getTableName() {
return myTableName; return myTableName;
} }
@ -379,5 +370,14 @@ public abstract class BaseTask {
public List<Object> getArguments() { public List<Object> getArguments() {
return myArguments; return myArguments;
} }
@Override
public String toString() {
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
.append("tableName", myTableName)
.append("sql", mySql)
.append("arguments", myArguments)
.toString();
}
} }
} }
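
After this refactor, the scattered booleans (myDoNothing, myFailureAllowed, myRunDuringSchemaInitialization) live in a single EnumSet, with addFlag/hasFlag as the uniform interface. A sketch of the new calling pattern, restricted to methods visible in this diff; the table and column names are placeholders:

import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.ModifyColumnTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;

class FlagSketch {
	static ModifyColumnTask buildTask() {
		ModifyColumnTask task = new ModifyColumnTask("1", "1");
		task.setTableName("SOMETABLE");
		task.setColumnName("TEXTCOL");
		task.setColumnType(ColumnTypeEnum.LONG);
		task.setNullable(true);
		// Replaces the removed task.setFailureAllowed(true):
		task.addFlag(TaskFlagEnum.FAILURE_ALLOWED);
		// Flags are queried uniformly, or via the retained convenience accessors:
		boolean allowed = task.hasFlag(TaskFlagEnum.FAILURE_ALLOWED);
		boolean stubbed = task.isDoNothing();
		return task;
	}
}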

View File

@ -113,6 +113,13 @@ public class ForceIdMigrationFixTask extends BaseTask {
switch (getDriverType()) { switch (getDriverType()) {
case MSSQL_2012: case MSSQL_2012:
return " where (fhir_id is null or DATALENGTH(fhir_id) > LEN(fhir_id)) "; return " where (fhir_id is null or DATALENGTH(fhir_id) > LEN(fhir_id)) ";
case H2_EMBEDDED:
case DERBY_EMBEDDED:
case MARIADB_10_1:
case MYSQL_5_7:
case POSTGRES_9_4:
case ORACLE_12C:
case COCKROACHDB_21_1:
default: default:
return " where (fhir_id is null or fhir_id <> trim(fhir_id)) "; return " where (fhir_id is null or fhir_id <> trim(fhir_id)) ";
} }

View File

@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.JdbcUtils; import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.tasks.api.ISchemaInitializationProvider; import ca.uhn.fhir.jpa.migrate.tasks.api.ISchemaInitializationProvider;
import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.slf4j.Logger; import org.slf4j.Logger;
@ -44,11 +45,7 @@ public class InitializeSchemaTask extends BaseTask {
super(theProductVersion, theSchemaVersion); super(theProductVersion, theSchemaVersion);
mySchemaInitializationProvider = theSchemaInitializationProvider; mySchemaInitializationProvider = theSchemaInitializationProvider;
setDescription(DESCRIPTION_PREFIX + mySchemaInitializationProvider.getSchemaDescription()); setDescription(DESCRIPTION_PREFIX + mySchemaInitializationProvider.getSchemaDescription());
} addFlag(TaskFlagEnum.RUN_DURING_SCHEMA_INITIALIZATION);
@Override
public boolean isRunDuringSchemaInitialization() {
return true;
} }
@Override @Override
@ -102,12 +99,12 @@ public class InitializeSchemaTask extends BaseTask {
@Override @Override
protected void generateEquals(EqualsBuilder theBuilder, BaseTask theOtherObject) { protected void generateEquals(EqualsBuilder theBuilder, BaseTask theOtherObject) {
InitializeSchemaTask otherObject = (InitializeSchemaTask) theOtherObject; InitializeSchemaTask otherObject = (InitializeSchemaTask) theOtherObject;
theBuilder.append(mySchemaInitializationProvider, otherObject.mySchemaInitializationProvider); theBuilder.append(getSchemaInitializationProvider(), otherObject.getSchemaInitializationProvider());
} }
@Override @Override
protected void generateHashCode(HashCodeBuilder theBuilder) { protected void generateHashCode(HashCodeBuilder theBuilder) {
theBuilder.append(mySchemaInitializationProvider); theBuilder.append(getSchemaInitializationProvider());
} }
public ISchemaInitializationProvider getSchemaInitializationProvider() { public ISchemaInitializationProvider getSchemaInitializationProvider() {

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.JdbcUtils; import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.intellij.lang.annotations.Language; import org.intellij.lang.annotations.Language;
import org.slf4j.Logger; import org.slf4j.Logger;
@ -159,8 +160,8 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask {
throw new IllegalStateException(Msg.code(67) + "Dont know how to handle " + getDriverType()); throw new IllegalStateException(Msg.code(67) + "Dont know how to handle " + getDriverType());
} }
if (!isFailureAllowed() && isShrinkOnly) { if (isShrinkOnly) {
setFailureAllowed(true); addFlag(TaskFlagEnum.FAILURE_ALLOWED);
} }
logInfo(ourLog, "Updating column {} on table {} to type {}", getColumnName(), getTableName(), type); logInfo(ourLog, "Updating column {} on table {} to type {}", getColumnName(), getTableName(), type);

View File

@ -99,6 +99,7 @@ public class RenameTableTask extends BaseTableTask {
return retVal; return retVal;
} }
@Override
protected void generateHashCode(HashCodeBuilder theBuilder) { protected void generateHashCode(HashCodeBuilder theBuilder) {
super.generateHashCode(theBuilder); super.generateHashCode(theBuilder);
theBuilder.append(myOldTableName); theBuilder.append(myOldTableName);

View File

@ -23,6 +23,7 @@ import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.tasks.api.ISchemaInitializationProvider; import ca.uhn.fhir.jpa.migrate.tasks.api.ISchemaInitializationProvider;
import ca.uhn.fhir.util.ClasspathUtil;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets; import com.google.common.base.Charsets;
import jakarta.annotation.Nonnull; import jakarta.annotation.Nonnull;
@ -71,11 +72,7 @@ public class SchemaInitializationProvider implements ISchemaInitializationProvid
String initScript = mySchemaFileClassPath + "/" + getInitScript(theDriverType); String initScript = mySchemaFileClassPath + "/" + getInitScript(theDriverType);
try { try {
InputStream sqlFileInputStream = SchemaInitializationProvider.class.getResourceAsStream(initScript); InputStream sqlFileInputStream = ClasspathUtil.loadResourceAsStream(initScript);
if (sqlFileInputStream == null) {
throw new ConfigurationException(
Msg.code(49) + "Schema initialization script " + initScript + " not found on classpath");
}
// Assumes no escaped semicolons... // Assumes no escaped semicolons...
String sqlString = IOUtils.toString(sqlFileInputStream, Charsets.UTF_8); String sqlString = IOUtils.toString(sqlFileInputStream, Charsets.UTF_8);
parseSqlFileIntoIndividualStatements(theDriverType, retval, sqlString); parseSqlFileIntoIndividualStatements(theDriverType, retval, sqlString);
@ -91,7 +88,7 @@ public class SchemaInitializationProvider implements ISchemaInitializationProvid
String sqlString = theSqlString.replaceAll("--.*", ""); String sqlString = theSqlString.replaceAll("--.*", "");
String sqlStringNoComments = preProcessSqlString(theDriverType, sqlString); String sqlStringNoComments = preProcessSqlString(theDriverType, sqlString);
String[] statements = sqlStringNoComments.split("\\;"); String[] statements = sqlStringNoComments.split(";");
for (String statement : statements) { for (String statement : statements) {
String cleanedStatement = preProcessSqlStatement(theDriverType, statement); String cleanedStatement = preProcessSqlStatement(theDriverType, statement);
if (!isBlank(cleanedStatement)) { if (!isBlank(cleanedStatement)) {
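
The hunk above ends mid-method, but the full splitting approach is visible: strip "--" line comments, split on semicolons (assumed unescaped, per the comment in the code), and drop blank fragments. A standalone sketch of just that parsing step, omitting the per-driver pre-processing hooks the real method also applies:

import java.util.ArrayList;
import java.util.List;

class SqlScriptSplitterSketch {
	// Mirrors the logic above; assumes no semicolons inside quoted literals.
	static List<String> splitStatements(String sqlScript) {
		List<String> statements = new ArrayList<>();
		String noComments = sqlScript.replaceAll("--.*", "");
		for (String statement : noComments.split(";")) {
			String trimmed = statement.trim();
			if (!trimmed.isEmpty()) {
				statements.add(trimmed);
			}
		}
		return statements;
	}
}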

View File

@ -85,18 +85,17 @@ public class Builder {
} }
public BuilderCompleteTask executeRawSql(String theVersion, @Language("SQL") String theSql) { public BuilderCompleteTask executeRawSql(String theVersion, @Language("SQL") String theSql) {
ExecuteRawSqlTask task = executeRawSqlOptional(false, theVersion, theSql); ExecuteRawSqlTask task = executeRawSqlOptional(theVersion, theSql);
return new BuilderCompleteTask(task); return new BuilderCompleteTask(task);
} }
public void executeRawSqlStub(String theVersion, @Language("SQL") String theSql) { public void executeRawSqlStub(String theVersion, @Language("SQL") String theSql) {
executeRawSqlOptional(true, theVersion, theSql); BuilderCompleteTask task = executeRawSql(theVersion, theSql);
task.withFlag(TaskFlagEnum.DO_NOTHING);
} }
private ExecuteRawSqlTask executeRawSqlOptional( private ExecuteRawSqlTask executeRawSqlOptional(String theVersion, @Language("SQL") String theSql) {
boolean theDoNothing, String theVersion, @Language("SQL") String theSql) {
ExecuteRawSqlTask task = new ExecuteRawSqlTask(myRelease, theVersion).addSql(theSql); ExecuteRawSqlTask task = new ExecuteRawSqlTask(myRelease, theVersion).addSql(theSql);
task.setDoNothing(theDoNothing);
mySink.addTask(task); mySink.addTask(task);
return task; return task;
} }
@ -172,10 +171,10 @@ public class Builder {
addTask(task); addTask(task);
} }
public DropIdGeneratorTask dropIdGenerator(String theVersion, String theIdGeneratorName) { public BuilderCompleteTask dropIdGenerator(String theVersion, String theIdGeneratorName) {
DropIdGeneratorTask task = new DropIdGeneratorTask(myRelease, theVersion, theIdGeneratorName); DropIdGeneratorTask task = new DropIdGeneratorTask(myRelease, theVersion, theIdGeneratorName);
addTask(task); addTask(task);
return task; return new BuilderCompleteTask(task);
} }
public void addNop(String theVersion) { public void addNop(String theVersion) {
@ -199,7 +198,7 @@ public class Builder {
} }
public BuilderCompleteTask dropIndex(String theVersion, String theIndexName) { public BuilderCompleteTask dropIndex(String theVersion, String theIndexName) {
BaseTask task = dropIndexOptional(false, theVersion, theIndexName); BaseTask task = dropIndexOptional(theVersion, theIndexName);
return new BuilderCompleteTask(task); return new BuilderCompleteTask(task);
} }
@ -207,20 +206,20 @@ public class Builder {
* Drop index without taking write lock on PG, Oracle, MSSQL. * Drop index without taking write lock on PG, Oracle, MSSQL.
*/ */
public BuilderCompleteTask dropIndexOnline(String theVersion, String theIndexName) { public BuilderCompleteTask dropIndexOnline(String theVersion, String theIndexName) {
DropIndexTask task = dropIndexOptional(false, theVersion, theIndexName); DropIndexTask task = dropIndexOptional(theVersion, theIndexName);
task.setOnline(true); task.setOnline(true);
return new BuilderCompleteTask(task); return new BuilderCompleteTask(task);
} }
public void dropIndexStub(String theVersion, String theIndexName) { public void dropIndexStub(String theVersion, String theIndexName) {
dropIndexOptional(true, theVersion, theIndexName); DropIndexTask task = dropIndexOptional(theVersion, theIndexName);
task.addFlag(TaskFlagEnum.DO_NOTHING);
} }
private DropIndexTask dropIndexOptional(boolean theDoNothing, String theVersion, String theIndexName) { private DropIndexTask dropIndexOptional(String theVersion, String theIndexName) {
DropIndexTask task = new DropIndexTask(myRelease, theVersion); DropIndexTask task = new DropIndexTask(myRelease, theVersion);
task.setIndexName(theIndexName); task.setIndexName(theIndexName);
task.setTableName(myTableName); task.setTableName(myTableName);
task.setDoNothing(theDoNothing);
addTask(task); addTask(task);
return task; return task;
} }
@ -230,24 +229,24 @@ public class Builder {
*/ */
@Deprecated @Deprecated
public void renameIndex(String theVersion, String theOldIndexName, String theNewIndexName) { public void renameIndex(String theVersion, String theOldIndexName, String theNewIndexName) {
renameIndexOptional(false, theVersion, theOldIndexName, theNewIndexName); renameIndexOptional(theVersion, theOldIndexName, theNewIndexName);
} }
/** /**
* @deprecated Do not rename indexes - It is too hard to figure out what happened if something goes wrong * @deprecated Do not rename indexes - It is too hard to figure out what happened if something goes wrong
*/ */
public void renameIndexStub(String theVersion, String theOldIndexName, String theNewIndexName) { public void renameIndexStub(String theVersion, String theOldIndexName, String theNewIndexName) {
renameIndexOptional(true, theVersion, theOldIndexName, theNewIndexName); RenameIndexTask task = renameIndexOptional(theVersion, theOldIndexName, theNewIndexName);
task.addFlag(TaskFlagEnum.DO_NOTHING);
} }
private void renameIndexOptional( private RenameIndexTask renameIndexOptional(String theVersion, String theOldIndexName, String theNewIndexName) {
boolean theDoNothing, String theVersion, String theOldIndexName, String theNewIndexName) {
RenameIndexTask task = new RenameIndexTask(myRelease, theVersion); RenameIndexTask task = new RenameIndexTask(myRelease, theVersion);
task.setOldIndexName(theOldIndexName); task.setOldIndexName(theOldIndexName);
task.setNewIndexName(theNewIndexName); task.setNewIndexName(theNewIndexName);
task.setTableName(myTableName); task.setTableName(myTableName);
task.setDoNothing(theDoNothing);
addTask(task); addTask(task);
return task;
} }
public void dropThisTable(String theVersion) { public void dropThisTable(String theVersion) {
@ -388,27 +387,22 @@ public class Builder {
} }
public void withColumnsStub(String... theColumnNames) { public void withColumnsStub(String... theColumnNames) {
withColumnsOptional(true, theColumnNames); BuilderCompleteTask task = withColumns(theColumnNames);
task.withFlag(TaskFlagEnum.DO_NOTHING);
} }
public BuilderCompleteTask withColumns(String... theColumnNames) { public BuilderCompleteTask withColumns(String... theColumnNames) {
BaseTask task = withColumnsOptional(false, theColumnNames);
return new BuilderCompleteTask(task);
}
private AddIndexTask withColumnsOptional(boolean theDoNothing, String... theColumnNames) {
AddIndexTask task = new AddIndexTask(myRelease, myVersion); AddIndexTask task = new AddIndexTask(myRelease, myVersion);
task.setTableName(myTableName); task.setTableName(myTableName);
task.setIndexName(myIndexName); task.setIndexName(myIndexName);
task.setUnique(myUnique); task.setUnique(myUnique);
task.setColumns(theColumnNames); task.setColumns(theColumnNames);
task.setDoNothing(theDoNothing);
task.setOnline(myOnline); task.setOnline(myOnline);
if (myIncludeColumns != null) { if (myIncludeColumns != null) {
task.setIncludeColumns(myIncludeColumns); task.setIncludeColumns(myIncludeColumns);
} }
addTask(task); addTask(task);
return task; return new BuilderCompleteTask(task);
} }
public BuilderAddIndexUnique includeColumns(String... theIncludeColumns) { public BuilderAddIndexUnique includeColumns(String... theIncludeColumns) {
@ -453,18 +447,17 @@ public class Builder {
public class BuilderModifyColumnWithNameAndNullable { public class BuilderModifyColumnWithNameAndNullable {
private final String myVersion; private final String myVersion;
private final boolean myNullable; private final boolean myNullable;
private boolean myFailureAllowed;
public BuilderModifyColumnWithNameAndNullable(String theVersion, boolean theNullable) { public BuilderModifyColumnWithNameAndNullable(String theVersion, boolean theNullable) {
myVersion = theVersion; myVersion = theVersion;
myNullable = theNullable; myNullable = theNullable;
} }
public void withType(ColumnTypeEnum theColumnType) { public BuilderCompleteTask withType(ColumnTypeEnum theColumnType) {
withType(theColumnType, null); return withType(theColumnType, null);
} }
public void withType(ColumnTypeEnum theColumnType, Integer theLength) { public BuilderCompleteTask withType(ColumnTypeEnum theColumnType, Integer theLength) {
if (theColumnType == ColumnTypeEnum.STRING) { if (theColumnType == ColumnTypeEnum.STRING) {
if (theLength == null || theLength == 0) { if (theLength == null || theLength == 0) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
@ -478,6 +471,7 @@ public class Builder {
} }
ModifyColumnTask task = new ModifyColumnTask(myRelease, myVersion); ModifyColumnTask task = new ModifyColumnTask(myRelease, myVersion);
task.setColumnName(myColumnName); task.setColumnName(myColumnName);
task.setTableName(myTableName); task.setTableName(myTableName);
if (theLength != null) { if (theLength != null) {
@ -485,13 +479,8 @@ public class Builder {
} }
task.setNullable(myNullable); task.setNullable(myNullable);
task.setColumnType(theColumnType); task.setColumnType(theColumnType);
task.setFailureAllowed(myFailureAllowed);
addTask(task); addTask(task);
} return new BuilderCompleteTask(task);
public BuilderModifyColumnWithNameAndNullable failureAllowed() {
myFailureAllowed = true;
return this;
} }
} }
} }
@ -596,12 +585,12 @@ public class Builder {
} }
public BuilderCompleteTask failureAllowed() { public BuilderCompleteTask failureAllowed() {
myTask.setFailureAllowed(true); myTask.addFlag(TaskFlagEnum.FAILURE_ALLOWED);
return this; return this;
} }
public BuilderCompleteTask doNothing() { public BuilderCompleteTask doNothing() {
myTask.setDoNothing(true); myTask.addFlag(TaskFlagEnum.DO_NOTHING);
return this; return this;
} }
@ -646,7 +635,7 @@ public class Builder {
} }
public BuilderCompleteTask runEvenDuringSchemaInitialization() { public BuilderCompleteTask runEvenDuringSchemaInitialization() {
myTask.setRunDuringSchemaInitialization(true); myTask.addFlag(TaskFlagEnum.RUN_DURING_SCHEMA_INITIALIZATION);
return this; return this;
} }
@ -654,6 +643,16 @@ public class Builder {
myTask.setTransactional(theFlag); myTask.setTransactional(theFlag);
return this; return this;
} }
public BuilderCompleteTask heavyweightSkipByDefault() {
myTask.addFlag(TaskFlagEnum.HEAVYWEIGHT_SKIP_BY_DEFAULT);
return this;
}
public BuilderCompleteTask withFlag(TaskFlagEnum theFlag) {
myTask.addFlag(theFlag);
return this;
}
} }
public class BuilderAddTableRawSql { public class BuilderAddTableRawSql {
@ -707,9 +706,8 @@ public class Builder {
} }
} }
public BuilderAddTableByColumns failureAllowed() { public BuilderCompleteTask withFlags() {
myTask.setFailureAllowed(true); return new BuilderCompleteTask(myTask);
return this;
} }
} }
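
With every builder entry point now returning (or reachable from) a BuilderCompleteTask, flags hang off the same fluent chain everywhere. A sketch of a migration definition built only from calls that appear in this commit; forVersion(...) is the test helper made static further down, so its use outside the tests is an assumption:

import ca.uhn.fhir.jpa.migrate.MigrationTaskList;
import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeEnum;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;

class BuilderSketch {
	static MigrationTaskList defineTasks() {
		MigrationTaskList taskList = new MigrationTaskList();
		Builder version = HapiMigrationStorageSvcTest.forVersion(taskList);

		// A raw-SQL task that may fail without aborting the migration:
		version.executeRawSql("1", "create index IDX_FOO on SOMETABLE(FOO)").failureAllowed();

		// A table whose creation should also run during full schema initialization:
		Builder.BuilderAddTableByColumns table = version.addTableByColumns("2", "SCHEMA_INIT", "PID");
		table.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG);
		table.withFlags().runEvenDuringSchemaInitialization();

		// A heavyweight cleanup that live migrations skip unless explicitly enabled:
		version.executeRawSql("3", "update SOMETABLE set FOO = trim(FOO)").heavyweightSkipByDefault();

		return taskList;
	}
}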

View File

@ -0,0 +1,65 @@
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.migrate.tasks.api;
import ca.uhn.fhir.jpa.migrate.taskdef.InitializeSchemaTask;
public enum TaskFlagEnum {
/**
* This task should be skipped by default on live system migrations. Use this flag
* in cases where the benefit of applying a migration doesn't justify the effort, and
* only where skipping the task will not leave the system in an unusable state.
* <p>
* This flag is intended for situations where a change to the schema would be useful
* to add to the default initialized schema, but would not be worth applying
* in-place to large databases due to the outsized effort it would take.
* </p>
* <p>
* USE THIS FLAG WITH CAUTION: It means that the migrator unit tests will execute the
* task, but live migrations probably won't. So think long and hard before you set it!
* </p>
*/
HEAVYWEIGHT_SKIP_BY_DEFAULT,
/**
* Should this task run even if we're doing the very first initialization of an empty schema? By
* default, we skip most tasks during that pass, since they just take up time and the
* schema should be fully initialized by the {@link InitializeSchemaTask}.
*/
RUN_DURING_SCHEMA_INITIALIZATION,
/**
* This task is allowed to fail, and failure won't abort the migration. Upon any
* detected failures, the task will still be recorded as having been successfully
* complected. A failure in this context means that the SQL statement(s) being
* executed returned a non-successful response, or timed out.
*/
FAILURE_ALLOWED,
/**
* This task should not actually do anything, and will always be recorded as
* a success. Use this flag for tasks that are no longer needed, e.g. because
* they are superseded by later tasks or because they turned out to be a bad
* idea.
*/
DO_NOTHING
}
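
In practical terms, each flag replaces one of the per-task boolean setters removed elsewhere in this commit. A hedged one-liner per flag, using the addFlag API added to BaseTask above; the effects noted in the comments are taken from the call sites in this diff:

import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;

class FlagSemanticsSketch {
	static void annotate(BaseTask task) {
		task.addFlag(TaskFlagEnum.HEAVYWEIGHT_SKIP_BY_DEFAULT);      // dropped from the run unless setRunHeavyweightSkippableTasks(true)
		task.addFlag(TaskFlagEnum.RUN_DURING_SCHEMA_INITIALIZATION); // still runs right after an InitializeSchemaTask builds the schema
		task.addFlag(TaskFlagEnum.FAILURE_ALLOWED);                  // SQL failures are logged; task still recorded as complete
		task.addFlag(TaskFlagEnum.DO_NOTHING);                       // execute() logs "Skipping stubbed task" and returns
	}
}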

View File

@ -128,7 +128,7 @@ class HapiMigrationStorageSvcTest extends BaseMigrationTest {
return taskList; return taskList;
} }
public Builder forVersion(MigrationTaskList theTaskList) { public static Builder forVersion(MigrationTaskList theTaskList) {
BaseMigrationTasks.IAcceptsTasks sink = theTask -> { BaseMigrationTasks.IAcceptsTasks sink = theTask -> {
theTask.validate(); theTask.validate();
theTaskList.add(theTask); theTaskList.add(theTask);

View File

@ -3,7 +3,11 @@ package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao; import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask; import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.NopTask; import ca.uhn.fhir.jpa.migrate.taskdef.NopTask;
import ca.uhn.fhir.jpa.migrate.tasks.SchemaInitializationProvider;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import ca.uhn.test.concurrency.IPointcutLatch; import ca.uhn.test.concurrency.IPointcutLatch;
import ca.uhn.test.concurrency.PointcutLatch; import ca.uhn.test.concurrency.PointcutLatch;
import jakarta.annotation.Nonnull; import jakarta.annotation.Nonnull;
@ -13,6 +17,8 @@ import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.JdbcTemplate;
@ -23,12 +29,15 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
import java.util.concurrent.Future; import java.util.concurrent.Future;
import java.util.stream.Collectors;
import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assertions.fail;
@SuppressWarnings("SqlDialectInspection")
class HapiMigratorIT { class HapiMigratorIT {
private static final Logger ourLog = LoggerFactory.getLogger(HapiMigratorIT.class); private static final Logger ourLog = LoggerFactory.getLogger(HapiMigratorIT.class);
private static final String MIGRATION_TABLENAME = "TEST_MIGRATOR_TABLE"; private static final String MIGRATION_TABLENAME = "TEST_MIGRATOR_TABLE";
@ -42,6 +51,7 @@ class HapiMigratorIT {
HapiMigrator migrator = buildMigrator(); HapiMigrator migrator = buildMigrator();
migrator.createMigrationTableIfRequired(); migrator.createMigrationTableIfRequired();
Integer count = myJdbcTemplate.queryForObject("SELECT COUNT(*) FROM " + MIGRATION_TABLENAME, Integer.class); Integer count = myJdbcTemplate.queryForObject("SELECT COUNT(*) FROM " + MIGRATION_TABLENAME, Integer.class);
assertNotNull(count);
assertTrue(count > 0); assertTrue(count > 0);
HapiMigrationDao migrationDao = new HapiMigrationDao(myDataSource, DriverTypeEnum.H2_EMBEDDED, MIGRATION_TABLENAME); HapiMigrationDao migrationDao = new HapiMigrationDao(myDataSource, DriverTypeEnum.H2_EMBEDDED, MIGRATION_TABLENAME);
myMigrationStorageSvc = new HapiMigrationStorageSvc(migrationDao); myMigrationStorageSvc = new HapiMigrationStorageSvc(migrationDao);
@ -56,6 +66,62 @@ class HapiMigratorIT {
System.clearProperty(HapiMigrationLock.CLEAR_LOCK_TABLE_WITH_DESCRIPTION); System.clearProperty(HapiMigrationLock.CLEAR_LOCK_TABLE_WITH_DESCRIPTION);
} }
@Test
public void testInitializeSchema() {
SchemaInitializationProvider schemaInitProvider = new SchemaInitializationProvider(
"A schema",
"/hapi-migrator-it-init-schema",
"HFJ_RES_REINDEX_JOB",
true
);
MigrationTaskList taskList = new MigrationTaskList();
Builder version = HapiMigrationStorageSvcTest.forVersion(taskList);
version.initializeSchema("1", schemaInitProvider);
Builder.BuilderAddTableByColumns nonSchemaInit = version.addTableByColumns("2", "NON_SCHEMA_INIT", "PID");
nonSchemaInit.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG);
Builder.BuilderAddTableByColumns schemaInit = version.addTableByColumns("3", "SCHEMA_INIT", "PID");
schemaInit.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG);
schemaInit.withFlags().runEvenDuringSchemaInitialization();
HapiMigrator migrator;
MigrationResult outcome;
/*
* Run the migrator for the first time. This should execute 2 tasks: the initial
* schema initialization, and the task set to run even during initialization. Task
* 2 should not run.
*/
migrator = buildMigrator(taskList.toTaskArray());
outcome = migrator.migrate();
assertThat(toTaskVersionList(outcome)).as(toTaskStatementDescriptions(outcome)).containsExactly("1", "3");
/*
* Run again - Nothing should happen since we've already finished the migration
*/
migrator = buildMigrator(taskList.toTaskArray());
outcome = migrator.migrate();
assertThat(toTaskVersionList(outcome)).as(toTaskStatementDescriptions(outcome)).isEmpty();
/*
* Add another pair of tasks - Both should run
*/
Builder.BuilderAddTableByColumns nonSchemaInit2 = version.addTableByColumns("4", "NON_SCHEMA_INIT_2", "PID");
nonSchemaInit2.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG);
Builder.BuilderAddTableByColumns schemaInit2 = version.addTableByColumns("5", "SCHEMA_INIT_2", "PID");
schemaInit2.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG);
schemaInit2.withFlags().runEvenDuringSchemaInitialization();
migrator = buildMigrator(taskList.toTaskArray());
outcome = migrator.migrate();
assertThat(toTaskVersionList(outcome)).as(toTaskStatementDescriptions(outcome)).containsExactly("4", "5");
}
@Test @Test
void test_onecall_noblock() throws InterruptedException, ExecutionException { void test_onecall_noblock() throws InterruptedException, ExecutionException {
@ -65,7 +131,7 @@ class HapiMigratorIT {
HapiMigrator migrator = buildMigrator(latchMigrationTask); HapiMigrator migrator = buildMigrator(latchMigrationTask);
latchMigrationTask.setExpectedCount(1); latchMigrationTask.setExpectedCount(1);
Future<MigrationResult> future = executor.submit(() -> migrator.migrate()); Future<MigrationResult> future = executor.submit(migrator::migrate);
latchMigrationTask.awaitExpected(); latchMigrationTask.awaitExpected();
latchMigrationTask.release("1"); latchMigrationTask.release("1");
@ -91,7 +157,7 @@ class HapiMigratorIT {
// is released, the first one will have already run so there will be nothing to do // is released, the first one will have already run so there will be nothing to do
latchMigrationTask1.setExpectedCount(1); latchMigrationTask1.setExpectedCount(1);
Future<MigrationResult> future1 = executor.submit(() -> migrator1.migrate()); Future<MigrationResult> future1 = executor.submit(migrator1::migrate);
latchMigrationTask1.awaitExpected(); latchMigrationTask1.awaitExpected();
// We wait until the first migration is in the middle of executing the migration task before we start the second one // We wait until the first migration is in the middle of executing the migration task before we start the second one
@ -101,7 +167,7 @@ class HapiMigratorIT {
latchMigrationTask1.release("1"); latchMigrationTask1.release("1");
latchMigrationTask2.setExpectedCount(1); latchMigrationTask2.setExpectedCount(1);
Future<MigrationResult> future2 = executor.submit(() -> migrator2.migrate()); Future<MigrationResult> future2 = executor.submit(migrator2::migrate);
latchMigrationTask2.awaitExpected(); latchMigrationTask2.awaitExpected();
// This second call shouldn't be necessary, but it will help the test fail faster with a clearer error // This second call shouldn't be necessary, but it will help the test fail faster with a clearer error
@ -127,7 +193,7 @@ class HapiMigratorIT {
{ {
latchMigrationTask.setExpectedCount(1); latchMigrationTask.setExpectedCount(1);
Future<MigrationResult> future = executor.submit(() -> migrator.migrate()); Future<MigrationResult> future = executor.submit(migrator::migrate);
latchMigrationTask.awaitExpected(); latchMigrationTask.awaitExpected();
assertEquals(1, countLockRecords()); assertEquals(1, countLockRecords());
latchMigrationTask.release("1"); latchMigrationTask.release("1");
@ -138,7 +204,7 @@ class HapiMigratorIT {
} }
{ {
Future<MigrationResult> future = executor.submit(() -> migrator.migrate()); Future<MigrationResult> future = executor.submit(migrator::migrate);
MigrationResult result = future.get(); MigrationResult result = future.get();
assertEquals(0, countLockRecords()); assertEquals(0, countLockRecords());
@ -147,7 +213,6 @@ class HapiMigratorIT {
} }
@Test @Test
void test_oldLockFails_block() { void test_oldLockFails_block() {
HapiMigrationLock.setMaxRetryAttempts(0); HapiMigrationLock.setMaxRetryAttempts(0);
@ -176,7 +241,31 @@ class HapiMigratorIT {
assertThat(result.succeededTasks).hasSize(1); assertThat(result.succeededTasks).hasSize(1);
} }
@ParameterizedTest
@ValueSource(booleans = {true, false})
void test_HeavyweightSkippable(boolean theShouldRun) throws InterruptedException, ExecutionException {
ExecutorService executor = Executors.newSingleThreadExecutor();
LatchMigrationTask latchMigrationTask = new LatchMigrationTask("only", "1");
latchMigrationTask.addFlag(TaskFlagEnum.HEAVYWEIGHT_SKIP_BY_DEFAULT);
HapiMigrator migrator = buildMigrator(latchMigrationTask);
int expectedInvocations = 0;
if (theShouldRun) {
migrator.setRunHeavyweightSkippableTasks(true);
expectedInvocations = 1;
}
latchMigrationTask.setExpectedCount(expectedInvocations);
Future<MigrationResult> future = executor.submit(migrator::migrate);
latchMigrationTask.awaitExpected();
latchMigrationTask.release("1");
MigrationResult result = future.get();
assertThat(result.succeededTasks).hasSize(expectedInvocations);
}
@SuppressWarnings("DataFlowIssue")
private int countLockRecords() { private int countLockRecords() {
return myJdbcTemplate.queryForObject("SELECT COUNT(*) FROM " + MIGRATION_TABLENAME + " WHERE \"installed_rank\" = " + HapiMigrationLock.LOCK_PID, Integer.class); return myJdbcTemplate.queryForObject("SELECT COUNT(*) FROM " + MIGRATION_TABLENAME + " WHERE \"installed_rank\" = " + HapiMigrationLock.LOCK_PID, Integer.class);
} }
@ -195,8 +284,15 @@ class HapiMigratorIT {
return new HapiMigrator(MIGRATION_TABLENAME, myDataSource, DriverTypeEnum.H2_EMBEDDED); return new HapiMigrator(MIGRATION_TABLENAME, myDataSource, DriverTypeEnum.H2_EMBEDDED);
} }
private static @Nonnull String toTaskStatementDescriptions(MigrationResult outcome) {
return "Statements:\n * " + outcome.executedStatements.stream().map(BaseTask.ExecutedStatement::toString).collect(Collectors.joining("\n * "));
}
private class LatchMigrationTask extends BaseTask implements IPointcutLatch { private static @Nonnull List<String> toTaskVersionList(MigrationResult outcome) {
return outcome.executedStatements.stream().map(BaseTask.ExecutedStatement::getSchemaVersion).toList();
}
private static class LatchMigrationTask extends BaseTask implements IPointcutLatch {
private final PointcutLatch myLatch; private final PointcutLatch myLatch;
private final PointcutLatch myWaitLatch; private final PointcutLatch myWaitLatch;

View File

@ -108,7 +108,7 @@ public class AddTableByColumnTaskTest extends BaseTest {
return theTask1.getColumnName().compareTo(theTask2.getColumnName()); return theTask1.getColumnName().compareTo(theTask2.getColumnName());
}; };
final AddTableByColumnTask addTableByColumnTask = new AddTableByColumnTask(comparator); final AddTableByColumnTask addTableByColumnTask = new AddTableByColumnTask("1", "1", comparator);
addTableByColumnTask.setTableName(tableName); addTableByColumnTask.setTableName(tableName);
addTableByColumnTask.setDriverType(driverType); addTableByColumnTask.setDriverType(driverType);
addTableByColumnTask.setPkColumns(Collections.singletonList(columnNameId)); addTableByColumnTask.setPkColumns(Collections.singletonList(columnNameId));

View File

@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrationException;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
+import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import jakarta.annotation.Nonnull;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
@@ -17,6 +18,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

+@SuppressWarnings("SqlDialectInspection")
public class ModifyColumnTest extends BaseTest {

@ParameterizedTest(name = "{index}: {0}")
@@ -232,14 +234,11 @@ public class ModifyColumnTest extends BaseTest {
@SuppressWarnings("EnumSwitchStatementWhichMissesCases")
@Nonnull
private JdbcUtils.ColumnType getLongColumnType(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
-switch (theTestDatabaseDetails.get().getDriverType()) {
-case H2_EMBEDDED:
-return new JdbcUtils.ColumnType(ColumnTypeEnum.LONG, 64);
-case DERBY_EMBEDDED:
-return new JdbcUtils.ColumnType(ColumnTypeEnum.LONG, 19);
-default:
-throw new UnsupportedOperationException();
-}
+return switch (theTestDatabaseDetails.get().getDriverType()) {
+case H2_EMBEDDED -> new JdbcUtils.ColumnType(ColumnTypeEnum.LONG, 64);
+case DERBY_EMBEDDED -> new JdbcUtils.ColumnType(ColumnTypeEnum.LONG, 19);
+default -> throw new UnsupportedOperationException();
+};
}

@ParameterizedTest(name = "{index}: {0}")
@@ -270,11 +269,11 @@ public class ModifyColumnTest extends BaseTest {
executeSql("insert into SOMETABLE (TEXTCOL) values ('HELLO')");

ModifyColumnTask task = new ModifyColumnTask("1", "1");
+task.addFlag(TaskFlagEnum.FAILURE_ALLOWED);
task.setTableName("SOMETABLE");
task.setColumnName("TEXTCOL");
task.setColumnType(ColumnTypeEnum.LONG);
task.setNullable(true);
-task.setFailureAllowed(true);
getMigrator().addTask(task);
getMigrator().migrate();
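
For clarity, the change above swaps the removed setFailureAllowed(true) setter for the flag-based API. A minimal sketch of the resulting contract, assuming only the calls visible in this diff:

// A task flagged FAILURE_ALLOWED records its failure without
// aborting the migration as a whole.
ModifyColumnTask task = new ModifyColumnTask("1", "1");
task.addFlag(TaskFlagEnum.FAILURE_ALLOWED); // replaces task.setFailureAllowed(true)
task.setTableName("SOMETABLE");
task.setColumnName("TEXTCOL");
task.setColumnType(ColumnTypeEnum.LONG);
task.setNullable(true);
getMigrator().addTask(task);
getMigrator().migrate(); // completes even if the ALTER COLUMN statement fails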

@@ -0,0 +1,6 @@
+create table HFJ_RES_REINDEX_JOB (
+PID bigint not null,
+RES_TYPE varchar(100),
+primary key (PID)
+);

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -187,7 +187,8 @@ public class JpaModelScannerAndVerifier {
scan(nextField, theNames, theIsSuperClass, isView);
Id id = nextField.getAnnotation(Id.class);
-if (id != null) {
+boolean isId = id != null;
+if (isId) {
Validate.isTrue(!foundId, "Multiple fields annotated with @Id");
foundId = true;
@@ -241,12 +242,24 @@ public class JpaModelScannerAndVerifier {
int columnLength = 16;
String columnName = null;
+boolean nullable = false;
if (hasColumn) {
-columnName = nextField.getAnnotation(Column.class).name();
-columnLength = nextField.getAnnotation(Column.class).length();
+Column column = nextField.getAnnotation(Column.class);
+columnName = column.name();
+columnLength = column.length();
+nullable = column.nullable();
}
if (hasJoinColumn) {
-columnName = nextField.getAnnotation(JoinColumn.class).name();
+JoinColumn joinColumn = nextField.getAnnotation(JoinColumn.class);
+columnName = joinColumn.name();
+nullable = joinColumn.nullable();
+}
+if (isId) {
+nullable = false;
+}
+if (nullable && !isView) {
+Validate.isTrue(!nextField.getType().isPrimitive(), "Field [%s] has a nullable primitive type: %s", nextField.getName(), nextField.getType());
}
if (columnName != null) {
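
The new check rejects entity fields that declare a nullable column over a Java primitive, since a NULL read from such a column cannot be represented in the field and fails when the entity is hydrated. A minimal sketch of the rejected and accepted shapes (hypothetical entity and column names, for illustration only):

@Entity
public class ExampleEntity { // hypothetical entity
	@Id
	private Long myId;

	// Rejected by the verifier: a nullable column backed by a primitive,
	// so a database NULL has no representation in the field.
	@Column(name = "MY_COUNT", nullable = true)
	private int myCount;

	// Accepted: the boxed wrapper can hold NULL.
	@Column(name = "MY_COUNT_FIXED", nullable = true)
	private Integer myCountFixed;
}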

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>7.3.7-SNAPSHOT</version>
+<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

Some files were not shown because too many files have changed in this diff.