4976 adding indexes to foreign keys (#4980)

* adding test and initial migrations

* indexing and verifying foreign keys

* connection with resources

* fixing migration for oracle

* adding change log

* updates and cleanup

* cleanup

* fixing broken tests

* remove unique constraints

* debug code

* cleanup

---------

Co-authored-by: leif stawnyczy <leifstawnyczy@leifs-MacBook-Pro.local>
Co-authored-by: leif stawnyczy <leifstawnyczy@leifs-mbp.home>
This commit is contained in:
TipzCM 2023-06-14 09:20:53 -04:00 committed by GitHub
parent 49c3a18c0a
commit c87ff96d5c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
27 changed files with 500 additions and 48 deletions

View File

@ -110,15 +110,16 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
.filter(StringUtils::isNotBlank)
.collect(Collectors.toSet());
DriverTypeEnum.ConnectionProperties connectionProperties = driverType.newConnectionProperties(url, username, password);
HapiMigrator migrator = new HapiMigrator(myMigrationTableName, connectionProperties.getDataSource(), driverType);
try (DriverTypeEnum.ConnectionProperties connectionProperties = driverType.newConnectionProperties(url, username, password)) {
HapiMigrator migrator = new HapiMigrator(myMigrationTableName, connectionProperties.getDataSource(), driverType);
migrator.createMigrationTableIfRequired();
migrator.setDryRun(dryRun);
migrator.setNoColumnShrink(noColumnShrink);
String skipVersions = theCommandLine.getOptionValue(BaseFlywayMigrateDatabaseCommand.SKIP_VERSIONS);
addTasks(migrator, skipVersions);
migrator.migrate();
migrator.createMigrationTableIfRequired();
migrator.setDryRun(dryRun);
migrator.setNoColumnShrink(noColumnShrink);
String skipVersions = theCommandLine.getOptionValue(BaseFlywayMigrateDatabaseCommand.SKIP_VERSIONS);
addTasks(migrator, skipVersions);
migrator.migrate();
}
}
protected abstract void addTasks(HapiMigrator theMigrator, String theSkippedVersions);

View File

@ -181,8 +181,6 @@ public class HapiFlywayMigrateDatabaseCommandTest {
// Verify that foreign key FK_SEARCHRES_RES on HFJ_SEARCH_RESULT still exists
foreignKeys = JdbcUtils.getForeignKeys(connectionProperties, "HFJ_RESOURCE", "HFJ_SEARCH_RESULT");
assertTrue(foreignKeys.contains("FK_SEARCHRES_RES"));
}
@Test

View File

@ -0,0 +1,11 @@
---
type: fix
issue: 4976
title: "Added a test to verify that all Foreign Key constraints are explicitly
indexed as well.
Added indexes to a number of tables that declared foreign keys
that weren't explicitly indexed.
This should not only make many of these queries (including $mdm-clear operations)
much faster, but will also prevent deadlocks in Oracle and other databases that require
foreign keys to be indexed.
"

View File

@ -57,7 +57,8 @@ import java.util.Date;
}, indexes = {
@Index(name = "IDX_EMPI_MATCH_TGT_VER", columnList = "MATCH_RESULT, TARGET_PID, VERSION"),
// v---- this one
@Index(name = "IDX_EMPI_GR_TGT", columnList = "GOLDEN_RESOURCE_PID, TARGET_PID")
@Index(name = "IDX_EMPI_GR_TGT", columnList = "GOLDEN_RESOURCE_PID, TARGET_PID"),
@Index(name = "FK_EMPI_LINK_TARGET", columnList = "TARGET_PID")
})
@Audited
// This is the table name generated by default by envers, but we set it explicitly for clarity

View File

@ -36,6 +36,9 @@ import static org.apache.commons.lang3.StringUtils.length;
//@formatter:off
@Table(name = "TRM_CODESYSTEM", uniqueConstraints = {
@UniqueConstraint(name = "IDX_CS_CODESYSTEM", columnNames = {"CODE_SYSTEM_URI"})
}, indexes = {
@Index(name = "FK_TRMCODESYSTEM_RES", columnList = "RES_ID"),
@Index(name = "FK_TRMCODESYSTEM_CURVER", columnList = "CURRENT_VERSION_PID")
})
@Entity()
//@formatter:on

View File

@ -33,6 +33,7 @@ import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
@ -50,6 +51,10 @@ import static org.apache.commons.lang3.StringUtils.length;
// Note, we used to have a constraint named IDX_CSV_RESOURCEPID_AND_VER (don't reuse this)
uniqueConstraints = {
@UniqueConstraint(name = TermCodeSystemVersion.IDX_CODESYSTEM_AND_VER, columnNames = {"CODESYSTEM_PID", "CS_VERSION_ID"})
},
indexes = {
@Index(name = "FK_CODESYSVER_RES_ID", columnList = "RES_ID"),
@Index(name = "FK_CODESYSVER_CS_ID", columnList = "CODESYSTEM_PID")
})
@Entity()
public class TermCodeSystemVersion implements Serializable {

View File

@ -44,7 +44,8 @@ import static org.apache.commons.lang3.StringUtils.length;
@Entity
@Table(name = "TRM_CONCEPT_DESIG", uniqueConstraints = { }, indexes = {
// must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically
@Index(name = "FK_CONCEPTDESIG_CONCEPT", columnList = "CONCEPT_PID", unique = false)
@Index(name = "FK_CONCEPTDESIG_CONCEPT", columnList = "CONCEPT_PID", unique = false),
@Index(name = "FK_CONCEPTDESIG_CSV", columnList = "CS_VER_PID")
})
public class TermConceptDesignation implements Serializable {
private static final long serialVersionUID = 1L;

View File

@ -35,6 +35,9 @@ import static org.apache.commons.lang3.StringUtils.length;
@Entity
@Table(name = "TRM_CONCEPT_MAP", uniqueConstraints = {
@UniqueConstraint(name = "IDX_CONCEPT_MAP_URL", columnNames = {"URL", "VER"})
}, indexes = {
// must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically
@Index(name = "FK_TRMCONCEPTMAP_RES", columnList = "RES_ID")
})
public class TermConceptMap implements Serializable {
private static final long serialVersionUID = 1L;

View File

@ -32,7 +32,10 @@ import java.util.List;
import static org.apache.commons.lang3.StringUtils.length;
@Entity
@Table(name = "TRM_CONCEPT_MAP_GROUP")
@Table(name = "TRM_CONCEPT_MAP_GROUP",
indexes = {
@Index(name = "FK_TCMGROUP_CONCEPTMAP", columnList = "CONCEPT_MAP_PID")
})
public class TermConceptMapGroup implements Serializable {
private static final long serialVersionUID = 1L;

View File

@ -36,7 +36,8 @@ import static org.apache.commons.lang3.StringUtils.length;
@Entity
@Table(name = "TRM_CONCEPT_MAP_GRP_ELEMENT", indexes = {
@Index(name = "IDX_CNCPT_MAP_GRP_CD", columnList = "SOURCE_CODE")
@Index(name = "IDX_CNCPT_MAP_GRP_CD", columnList = "SOURCE_CODE"),
@Index(name = "FK_TCMGELEMENT_GROUP", columnList = "CONCEPT_MAP_GROUP_PID")
})
public class TermConceptMapGroupElement implements Serializable {
private static final long serialVersionUID = 1L;

View File

@ -34,7 +34,8 @@ import static org.apache.commons.lang3.StringUtils.length;
@Entity
@Table(name = "TRM_CONCEPT_MAP_GRP_ELM_TGT", indexes = {
@Index(name = "IDX_CNCPT_MP_GRP_ELM_TGT_CD", columnList = "TARGET_CODE")
@Index(name = "IDX_CNCPT_MP_GRP_ELM_TGT_CD", columnList = "TARGET_CODE"),
@Index(name = "FK_TCMGETARGET_ELEMENT", columnList = "CONCEPT_MAP_GRP_ELM_PID")
})
public class TermConceptMapGroupElementTarget implements Serializable {
private static final long serialVersionUID = 1L;

View File

@ -41,7 +41,8 @@ import java.io.Serializable;
@Table(name = "TRM_CONCEPT_PC_LINK", indexes = {
// must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically
@Index(name = "FK_TERM_CONCEPTPC_CHILD", columnList = "CHILD_PID", unique = false),
@Index(name = "FK_TERM_CONCEPTPC_PARENT", columnList = "PARENT_PID", unique = false)
@Index(name = "FK_TERM_CONCEPTPC_PARENT", columnList = "PARENT_PID", unique = false),
@Index(name = "FK_TERM_CONCEPTPC_CS", columnList = "CODESYSTEM_PID")
})
public class TermConceptParentChildLink implements Serializable {
private static final long serialVersionUID = 1L;

View File

@ -54,7 +54,8 @@ import static org.apache.commons.lang3.StringUtils.length;
@Entity
@Table(name = "TRM_CONCEPT_PROPERTY", uniqueConstraints = { }, indexes = {
// must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically
@Index(name = "FK_CONCEPTPROP_CONCEPT", columnList = "CONCEPT_PID", unique = false)
@Index(name = "FK_CONCEPTPROP_CONCEPT", columnList = "CONCEPT_PID", unique = false),
@Index(name = "FK_CONCEPTPROP_CSV", columnList = "CS_VER_PID")
})
public class TermConceptProperty implements Serializable {
public static final int MAX_PROPTYPE_ENUM_LENGTH = 6;

View File

@ -36,6 +36,7 @@ import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
@ -54,8 +55,13 @@ import static org.apache.commons.lang3.StringUtils.left;
import static org.apache.commons.lang3.StringUtils.length;
@Table(name = "TRM_VALUESET", uniqueConstraints = {
@UniqueConstraint(name = "IDX_VALUESET_URL", columnNames = {"URL", "VER"})
})
@UniqueConstraint(name = "IDX_VALUESET_URL", columnNames = {"URL", "VER"})
},
indexes = {
// must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically
@Index(name = "FK_TRMVALUESET_RES", columnList = "RES_ID" )
}
)
@Entity()
public class TermValueSet implements Serializable {
public static final int MAX_EXPANSION_STATUS_LENGTH = 50;

View File

@ -34,7 +34,8 @@ import static org.apache.commons.lang3.StringUtils.length;
@Table(name = "TRM_VALUESET_C_DESIGNATION", indexes = {
// must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically
@Index(name = "FK_TRM_VALUESET_CONCEPT_PID", columnList = "VALUESET_CONCEPT_PID", unique = false)
@Index(name = "FK_TRM_VALUESET_CONCEPT_PID", columnList = "VALUESET_CONCEPT_PID", unique = false),
@Index(name = "FK_TRM_VSCD_VS_PID", columnList = "VALUESET_PID")
})
@Entity()
public class TermValueSetConceptDesignation implements Serializable {

View File

@ -169,6 +169,114 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.addColumn("20230524.2", "WARNING_MSG")
.nullable()
.type(ColumnTypeEnum.CLOB);
// adding indexes to foreign keys
// this makes our table scans more efficient,
// but it also makes us more stable
// Oracle does not like unindexed foreign keys
version.onTable("NPM_PACKAGE_VER")
.addIndex("20230609.3", "FK_NPM_PKV_PKG")
.unique(false)
.withColumns("PACKAGE_PID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("NPM_PACKAGE_VER")
.addIndex("20230609.4", "FK_NPM_PKV_RESID")
.unique(false)
.withColumns("BINARY_RES_ID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("NPM_PACKAGE_VER_RES")
.addIndex("20230609.5", "FK_NPM_PACKVERRES_PACKVER")
.unique(false)
.withColumns("PACKVER_PID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("NPM_PACKAGE_VER_RES")
.addIndex("20230609.6", "FK_NPM_PKVR_RESID")
.unique(false)
.withColumns("BINARY_RES_ID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("MPI_LINK")
.addIndex("20230609.7", "FK_EMPI_LINK_TARGET")
.unique(false)
.withColumns("TARGET_PID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("TRM_CODESYSTEM")
.addIndex("20230609.8", "FK_TRMCODESYSTEM_RES")
.unique(false)
.withColumns("RES_ID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("TRM_CODESYSTEM")
.addIndex("20230609.9", "FK_TRMCODESYSTEM_CURVER")
.unique(false)
.withColumns("CURRENT_VERSION_PID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("TRM_CODESYSTEM_VER")
.addIndex("20230609.10", "FK_CODESYSVER_RES_ID")
.unique(false)
.withColumns("RES_ID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("TRM_CODESYSTEM_VER")
.addIndex("20230609.11", "FK_CODESYSVER_CS_ID")
.unique(false)
.withColumns("CODESYSTEM_PID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("TRM_CONCEPT_PC_LINK")
.addIndex("20230609.12", "FK_TERM_CONCEPTPC_CS")
.unique(false)
.withColumns("CODESYSTEM_PID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("TRM_CONCEPT_PROPERTY")
.addIndex("20230609.13", "FK_CONCEPTPROP_CSV")
.unique(false)
.withColumns("CS_VER_PID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("TRM_VALUESET")
.addIndex("20230609.14", "FK_TRMVALUESET_RES")
.unique(false)
.withColumns("RES_ID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("TRM_VALUESET_C_DESIGNATION")
.addIndex("20230609.15", "FK_TRM_VSCD_VS_PID")
.unique(false)
.withColumns("VALUESET_PID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("TRM_CONCEPT_MAP")
.addIndex("20230609.17", "FK_TRMCONCEPTMAP_RES")
.unique(false)
.withColumns("RES_ID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("TRM_CONCEPT_DESIG")
.addIndex("20230609.18", "FK_CONCEPTDESIG_CSV")
.unique(false)
.withColumns("CS_VER_PID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("TRM_CONCEPT_MAP_GROUP")
.addIndex("20230609.19", "FK_TCMGROUP_CONCEPTMAP")
.unique(false)
.withColumns("CONCEPT_MAP_PID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
.addIndex("20230609.20", "FK_TCMGELEMENT_GROUP")
.unique(false)
.withColumns("CONCEPT_MAP_GROUP_PID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
.addIndex("20230609.21", "FK_TCMGETARGET_ELEMENT")
.unique(false)
.withColumns("CONCEPT_MAP_GRP_ELM_PID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
}
protected void init660() {

View File

@ -48,7 +48,9 @@ import java.util.List;
@Entity()
@Table(name = "NPM_PACKAGE_VER", uniqueConstraints = {
}, indexes = {
@Index(name = "IDX_PACKVER", columnList = "PACKAGE_ID,VERSION_ID", unique = true)
@Index(name = "IDX_PACKVER", columnList = "PACKAGE_ID,VERSION_ID", unique = true),
@Index(name = "FK_NPM_PKV_PKG", columnList = "PACKAGE_PID"),
@Index(name = "FK_NPM_PKV_RESID", columnList = "BINARY_RES_ID")
})
public class NpmPackageVersionEntity {

View File

@ -45,7 +45,9 @@ import java.util.Date;
@Entity()
@Table(name = "NPM_PACKAGE_VER_RES", uniqueConstraints = {
}, indexes = {
@Index(name = "IDX_PACKVERRES_URL", columnList = "CANONICAL_URL")
@Index(name = "IDX_PACKVERRES_URL", columnList = "CANONICAL_URL"),
@Index(name = "FK_NPM_PACKVERRES_PACKVER", columnList = "PACKVER_PID"),
@Index(name = "FK_NPM_PKVR_RESID", columnList = "BINARY_RES_ID")
})
public class NpmPackageVersionResourceEntity {

View File

@ -38,8 +38,8 @@ import java.io.Serializable;
@Entity
@Table(name = "HFJ_HISTORY_TAG", uniqueConstraints = {
@UniqueConstraint(name = "IDX_RESHISTTAG_TAGID", columnNames = {"RES_VER_PID", "TAG_ID"}),
}, indexes = {
@Index(name = "IDX_RESHISTTAG_RESID", columnList="RES_ID")
}, indexes = {
@Index(name = "IDX_RESHISTTAG_RESID", columnList = "RES_ID")
})
public class ResourceHistoryTag extends BaseTag implements Serializable {

View File

@ -0,0 +1,35 @@
package ca.uhn.fhir.jpa.embedded;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
/**
 * Test helper that loads versioned SQL resources (schema DDL and seed data)
 * from the classpath and applies them to an embedded database.
 * Resource paths follow the layout {@code migration/releases/<version>/{schema|data}/<driver>.sql}.
 */
public class DatabaseInitializerHelper {
	private static final Logger ourLog = LoggerFactory.getLogger(DatabaseInitializerHelper.class);

	/**
	 * Executes the driver-specific schema creation script for the first tested
	 * release against the given embedded database.
	 *
	 * @param theDatabase the embedded database to initialize; its driver type selects the SQL file
	 */
	public void initializePersistenceSchema(JpaEmbeddedDatabase theDatabase) {
		String fileName = String.format("migration/releases/%s/schema/%s.sql", HapiEmbeddedDatabasesExtension.FIRST_TESTED_VERSION, theDatabase.getDriverType());
		String sql = getSqlFromResourceFile(fileName);
		theDatabase.executeSqlAsBatch(sql);
	}

	/**
	 * Executes the driver-specific test-data insertion script for the first tested
	 * release against the given embedded database.
	 *
	 * @param theDatabase the embedded database to populate; its driver type selects the SQL file
	 */
	public void insertPersistenceTestData(JpaEmbeddedDatabase theDatabase) {
		String fileName = String.format("migration/releases/%s/data/%s.sql", HapiEmbeddedDatabasesExtension.FIRST_TESTED_VERSION, theDatabase.getDriverType());
		String sql = getSqlFromResourceFile(fileName);
		theDatabase.insertTestData(sql);
	}

	/**
	 * Reads the full contents of a classpath resource as a string.
	 *
	 * @param theFileName classpath-relative path of the SQL file to load
	 * @return the file contents
	 * @throws RuntimeException if the resource is missing or cannot be read
	 */
	public String getSqlFromResourceFile(String theFileName) {
		try {
			ourLog.info("Loading file: {}", theFileName);
			final URL resource = this.getClass().getClassLoader().getResource(theFileName);
			if (resource == null) {
				// fail with a clear message instead of an opaque NPE from resource.toURI()
				throw new IllegalStateException("Resource not found on classpath: " + theFileName);
			}
			return Files.readString(Paths.get(resource.toURI()));
		} catch (Exception e) {
			throw new RuntimeException("Error loading file: " + theFileName, e);
		}
	}
}

View File

@ -48,6 +48,8 @@ public class HapiEmbeddedDatabasesExtension implements AfterAllCallback {
private final Set<JpaEmbeddedDatabase> myEmbeddedDatabases = new HashSet<>();
private final DatabaseInitializerHelper myDatabaseInitializerHelper = new DatabaseInitializerHelper();
public HapiEmbeddedDatabasesExtension() {
myEmbeddedDatabases.add(new H2EmbeddedDatabase());
myEmbeddedDatabases.add(new PostgresEmbeddedDatabase());
@ -90,17 +92,11 @@ public class HapiEmbeddedDatabasesExtension implements AfterAllCallback {
}
public void initializePersistenceSchema(DriverTypeEnum theDriverType) {
JpaEmbeddedDatabase embeddedDatabase = getEmbeddedDatabase(theDriverType);
String fileName = String.format("migration/releases/%s/schema/%s.sql", FIRST_TESTED_VERSION, embeddedDatabase.getDriverType());
String sql = getSqlFromResourceFile(fileName);
embeddedDatabase.executeSqlAsBatch(sql);
myDatabaseInitializerHelper.initializePersistenceSchema(getEmbeddedDatabase(theDriverType));
}
public void insertPersistenceTestData(DriverTypeEnum theDriverType) {
JpaEmbeddedDatabase embeddedDatabase = getEmbeddedDatabase(theDriverType);
String fileName = String.format("migration/releases/%s/data/%s.sql", FIRST_TESTED_VERSION, embeddedDatabase.getDriverType());
String sql = getSqlFromResourceFile(fileName);
embeddedDatabase.insertTestData(sql);
myDatabaseInitializerHelper.insertPersistenceTestData(getEmbeddedDatabase(theDriverType));
}
public String getSqlFromResourceFile(String theFileName) {

View File

@ -0,0 +1,228 @@
package ca.uhn.fhir.jpa.embedded;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import org.intellij.lang.annotations.Language;
import org.postgresql.jdbc.PgArray;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collection;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * This test class is used to verify all foreign key constraints
 * are indexed as well.
 * --
 * We use postgres to do this, because H2 automatically adds
 * indexes to fk constraints. But Oracle, Postgres, etc do not.
 * And Oracle will have deadlocks if there are unindexed FKs
 * --
 * It should be noted that because of this restriction,
 * Indexes on columns that are foreign keys must have the
 * exact same name as their FK constraint name.
 * ---
 * If while running this test, you get the error
 * "Could not find a valid docker environment"
 * and you have docker running, the issue
 * is permissions.
 * See <a href="https://stackoverflow.com/questions/61108655/test-container-test-cases-are-failing-due-to-could-not-find-a-valid-docker-envi">...</a>
 */
public class HapiForeignKeyIndexHelper {

	// Postgres catalog query that reports foreign key constraints lacking a usable
	// supporting index (filtered to issue = 'no index' in the final SELECT).
	@Language("SQL")
	private static final String FK_QUERY = """
		WITH fk_actions ( code, action ) AS (
		    VALUES ( 'a', 'error' ),
		        ( 'r', 'restrict' ),
		        ( 'c', 'cascade' ),
		        ( 'n', 'set null' ),
		        ( 'd', 'set default' )
		),
		fk_list AS (
		    SELECT pg_constraint.oid as fkoid, conrelid, confrelid as parentid,
		        conname, relname, nspname,
		        fk_actions_update.action as update_action,
		        fk_actions_delete.action as delete_action,
		        conkey as key_cols
		    FROM pg_constraint
		        JOIN pg_class ON conrelid = pg_class.oid
		        JOIN pg_namespace ON pg_class.relnamespace = pg_namespace.oid
		        JOIN fk_actions AS fk_actions_update ON confupdtype = fk_actions_update.code
		        JOIN fk_actions AS fk_actions_delete ON confdeltype = fk_actions_delete.code
		    WHERE contype = 'f'
		    -- unique keys are indexed by default; so exclude any UNIQUE column constraints
		    AND not (conkey = any(select conkey from pg_constraint where contype = 'u'))
		),
		fk_attributes AS (
		    SELECT fkoid, conrelid, attname, attnum
		    FROM fk_list
		        JOIN pg_attribute
		            ON conrelid = attrelid
		            AND attnum = ANY( key_cols )
		    ORDER BY fkoid, attnum
		),
		fk_cols_list AS (
		    SELECT fkoid, array_agg(attname) as cols_list
		    FROM fk_attributes
		    GROUP BY fkoid
		),
		index_list AS (
		    SELECT indexrelid as indexid,
		        pg_class.relname as indexname,
		        indrelid,
		        indkey,
		        indpred is not null as has_predicate,
		        pg_get_indexdef(indexrelid) as indexdef
		    FROM pg_index
		        JOIN pg_class ON indexrelid = pg_class.oid
		    WHERE indisvalid
		),
		fk_index_match AS (
		    SELECT fk_list.*,
		        indexid,
		        indexname,
		        indkey::int[] as indexatts,
		        has_predicate,
		        indexdef,
		        array_length(key_cols, 1) as fk_colcount,
		        array_length(indkey,1) as index_colcount,
		        round(pg_relation_size(conrelid)/(1024^2)::numeric) as table_mb,
		        cols_list
		    FROM fk_list
		        JOIN fk_cols_list USING (fkoid)
		        LEFT OUTER JOIN index_list
		            ON conrelid = indrelid
		            AND (indkey::int2[])[0:(array_length(key_cols,1) -1)] @> key_cols
		),
		fk_perfect_match AS (
		    SELECT fkoid
		    FROM fk_index_match
		    WHERE (index_colcount - 1) <= fk_colcount
		        AND NOT has_predicate
		        AND indexdef LIKE '%USING btree%'
		),
		fk_index_check AS (
		    SELECT 'no index' as issue, *, 1 as issue_sort
		    FROM fk_index_match
		    WHERE indexid IS NULL
		    UNION ALL
		    SELECT 'questionable index' as issue, *, 2
		    FROM fk_index_match
		    WHERE indexid IS NOT NULL
		        AND fkoid NOT IN (
		            SELECT fkoid
		            FROM fk_perfect_match)
		),
		parent_table_stats AS (
		    SELECT fkoid, tabstats.relname as parent_name,
		        (n_tup_ins + n_tup_upd + n_tup_del + n_tup_hot_upd) as parent_writes,
		        round(pg_relation_size(parentid)/(1024^2)::numeric) as parent_mb
		    FROM pg_stat_user_tables AS tabstats
		        JOIN fk_list
		            ON relid = parentid
		),
		fk_table_stats AS (
		    SELECT fkoid,
		        (n_tup_ins + n_tup_upd + n_tup_del + n_tup_hot_upd) as writes,
		        seq_scan as table_scans
		    FROM pg_stat_user_tables AS tabstats
		        JOIN fk_list
		            ON relid = conrelid
		)
		SELECT nspname as schema_name,
		    relname as table_name,
		    conname as fk_name,
		    issue,
		    table_mb,
		    writes,
		    table_scans,
		    parent_name,
		    parent_mb,
		    parent_writes,
		    cols_list,
		    indexdef
		FROM fk_index_check
		    JOIN parent_table_stats USING (fkoid)
		    JOIN fk_table_stats USING (fkoid)
		WHERE issue = 'no index'
		ORDER BY issue_sort, table_mb DESC, table_name, fk_name;
		""";

	// result-set column labels produced by FK_QUERY
	private static final String TABLE_NAME = "table_name";
	private static final String FK_NAME = "fk_name";
	private static final String PARENT_TABLE_NAME = "parent_name";
	private static final String COLS_LIST = "cols_list";
	private static final String ISSUE = "issue";

	private static final Logger ourLog = LoggerFactory.getLogger(HapiForeignKeyIndexHelper.class);

	// table name (upper case) -> column names that are allowed to remain unindexed
	protected static final Multimap<String, String> ourTableToColumnsWhitelist = HashMultimap.create();

	private static final String MESSAGE = "\nUnindexed foreign key detected!\nTable: %s, Column: %s, FKIndex Name: %s, Parent Table: %s, Issue: %s";

	public HapiForeignKeyIndexHelper() {
		// NOTE(review): overridable method invoked from the constructor; subclasses
		// overriding populateWhiteList() must not depend on their own constructor state.
		populateWhiteList();
	}

	/**
	 * This method populates a whitelist of table name -> column name
	 * for foreign key constraints that do not have proper indexes.
	 * --
	 * Any whitelisted table:column pairing should be documented why it
	 * doesn't require indexing, or be provided an explicit index.
	 */
	protected void populateWhiteList() {
		// HFJ_BLK_EXPORT_COLFILE - deprecated table
		ourTableToColumnsWhitelist.put("HFJ_BLK_EXPORT_COLFILE", "COLLECTION_PID");
		// HFJ_BLK_EXPORT_COLLECTION - deprecated table
		ourTableToColumnsWhitelist.put("HFJ_BLK_EXPORT_COLLECTION", "JOB_PID");

		// TODO - LS - entries below here require further investigation
		// MPI_LINK_AUD - autogenerated table
		ourTableToColumnsWhitelist.put("MPI_LINK_AUD", "REV");
	}

	/**
	 * Runs {@link #FK_QUERY} against the given datasource (the query reads
	 * Postgres pg_* catalogs, so this only works on Postgres) and asserts that
	 * every foreign key column it reports as unindexed is explicitly whitelisted.
	 *
	 * @param theDataSource datasource pointing at the schema to verify
	 * @throws SQLException on any database access error
	 */
	public void ensureAllForeignKeysAreIndexed(DataSource theDataSource) throws SQLException {
		try (Connection connection = theDataSource.getConnection()) {
			// close the ResultSet deterministically along with the Statement
			try (Statement statement = connection.createStatement();
					ResultSet results = statement.executeQuery(FK_QUERY)) {
				while (results.next()) {
					// cols_list is a Postgres array column; PgArray unwraps it to String[]
					PgArray postgresArray = (PgArray) results.getArray(COLS_LIST);
					String[] columns = (String[]) postgresArray.getArray();
					String tableName = results.getString(TABLE_NAME);
					String fkConstraintName = results.getString(FK_NAME);
					String parentTableName = results.getString(PARENT_TABLE_NAME);
					String issue = results.getString(ISSUE);

					Collection<String> whitelistColumns = ourTableToColumnsWhitelist.get(tableName.toUpperCase());
					for (String col : columns) {
						boolean isWhitelisted = whitelistColumns.contains(col.toUpperCase());
						if (!isWhitelisted) {
							// log the full detail before failing so the build output names the offender
							ourLog.error(String.format(MESSAGE,
								tableName,
								col,
								fkConstraintName,
								parentTableName,
								issue));
						}
						assertTrue(isWhitelisted,
							String.format("Unindexed foreign key detected! Table.column: %s.%s.", tableName, col)
						);
					}
				}
			}
		}
	}
}

View File

@ -10,7 +10,6 @@ import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.system.HapiSystemProperties;
import ca.uhn.fhir.util.VersionEnum;
import org.apache.commons.dbcp2.BasicDataSource;
import org.h2.jdbcx.JdbcDataSource;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
@ -46,7 +45,7 @@ public class HapiSchemaMigrationTest {
@ParameterizedTest
@ArgumentsSource(HapiEmbeddedDatabasesExtension.DatabaseVendorProvider.class)
public void testMigration(DriverTypeEnum theDriverType) {
public void testMigration(DriverTypeEnum theDriverType) throws SQLException {
// ensure all migrations are run
HapiSystemProperties.disableUnitTestMode();
@ -55,7 +54,8 @@ public class HapiSchemaMigrationTest {
myEmbeddedServersExtension.initializePersistenceSchema(theDriverType);
myEmbeddedServersExtension.insertPersistenceTestData(theDriverType);
DataSource dataSource = myEmbeddedServersExtension.getDataSource(theDriverType);
JpaEmbeddedDatabase database = myEmbeddedServersExtension.getEmbeddedDatabase(theDriverType);
DataSource dataSource = database.getDataSource();
HapiMigrationDao hapiMigrationDao = new HapiMigrationDao(dataSource, theDriverType, HAPI_FHIR_MIGRATION_TABLENAME);
HapiMigrationStorageSvc hapiMigrationStorageSvc = new HapiMigrationStorageSvc(hapiMigrationDao);
@ -72,6 +72,16 @@ public class HapiSchemaMigrationTest {
schemaMigrator.setDriverType(theDriverType);
schemaMigrator.createMigrationTableIfRequired();
schemaMigrator.migrate();
if (theDriverType == DriverTypeEnum.POSTGRES_9_4) {
// we only run this for postgres because:
// 1 we only really need to check one db
// 2 H2 automatically adds indexes to foreign keys (and so cannot be used)
// 3 Oracle doesn't run on everyone's machine (and is difficult to do so)
// 4 Postgres is generally the fastest/least terrible relational db supported
new HapiForeignKeyIndexHelper()
.ensureAllForeignKeysAreIndexed(dataSource);
}
}

View File

@ -60,6 +60,10 @@
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
</dependency>
<dependency>
<groupId>com.oracle.database.jdbc</groupId>
<artifactId>ojdbc11</artifactId>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>

View File

@ -19,8 +19,8 @@
*/
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.lang3.Validate;

View File

@ -24,9 +24,10 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.ClasspathUtil;
import com.google.common.base.Ascii;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.collect.Multimap;
import com.google.common.reflect.ClassPath;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
@ -61,6 +62,7 @@ import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@ -70,6 +72,7 @@ import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* This class is only used at build-time. It scans the various Hibernate entity classes
@ -80,16 +83,26 @@ public class JpaModelScannerAndVerifier {
public static final int MAX_COL_LENGTH = 4000;
private static final int MAX_LENGTH = 30;
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(JpaModelScannerAndVerifier.class);
// Exceptions set because H2 sets indexes for FKs automatically so this index had to be called as the target FK field
// it is indexing to avoid SchemaMigrationTest to complain about the extra index (which doesn't exist in H2)
private static final Set<String> duplicateNameValidationExceptionList = Sets.newHashSet(
"FK_CONCEPTPROP_CONCEPT",
"FK_CONCEPTDESIG_CONCEPT",
"FK_TERM_CONCEPTPC_CHILD",
"FK_TERM_CONCEPTPC_PARENT",
"FK_TRM_VALUESET_CONCEPT_PID",
"FK_SEARCHINC_SEARCH"
);
/**
* We will keep track of all the index names and the columns they
* refer to.
* ---
* H2 automatically adds Indexes to ForeignKey constraints.
* This *does not happen* in other databases.
* ---
* But because we should be indexing foreign keys, we have to explicitly
* add an index to a foreign key.
* But if we give it a new name, SchemaMigrationTest will complain about an extra
* index that doesn't exist in H2.
* ---
* tl;dr
* Due to the quirks of supported DBs, we must have index names that duplicate their
* foreign key constraint names.
* So we'll be keeping a list of them here.
*/
private static final Multimap<String, String> ourIndexNameToColumn = HashMultimap.create();
private static Set<String> ourReservedWords;
public JpaModelScannerAndVerifier() {
super();
@ -283,6 +296,12 @@ public class JpaModelScannerAndVerifier {
assertNotADuplicateName(nextConstraint.name(), theNames);
Validate.isTrue(nextConstraint.name().startsWith("IDX_") || nextConstraint.name().startsWith("FK_"),
nextConstraint.name() + " must start with IDX_ or FK_ (last one when indexing a FK column)");
// add the index names to the collection of allowable duplicate fk names
String[] cols = nextConstraint.columnList().split(",");
for (String col : cols) {
ourIndexNameToColumn.put(nextConstraint.name(), col);
}
}
}
@ -306,7 +325,13 @@ public class JpaModelScannerAndVerifier {
Validate.isTrue(fk.name().startsWith("FK_") || legacySPHibernateFKNames.contains(fk.name()),
"Foreign key " + fk.name() + " on " + theAnnotatedElement + " must start with FK_");
if (!duplicateNameValidationExceptionList.contains(fk.name())) {
if (ourIndexNameToColumn.containsKey(fk.name())) {
// this foreign key has the same name as an existing index
// let's make sure it's on the same column
Collection<String> columns = ourIndexNameToColumn.get(fk.name());
assertTrue(columns.contains(columnName), String.format("Foreign key %s duplicates index name, but column %s is not part of the index!", fk.name(), columnName));
} else {
// verify it's not a duplicate
assertNotADuplicateName(fk.name(), theNames);
}
}

View File

@ -1951,6 +1951,11 @@
<artifactId>postgresql</artifactId>
<version>42.5.1</version>
</dependency>
<dependency>
<groupId>com.oracle.database.jdbc</groupId>
<artifactId>ojdbc11</artifactId>
<version>21.5.0.0</version>
</dependency>
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz</artifactId>