6034 poor index generation on sql server (#6035)

* Changelog

* Remove uniqueness

* spotless

* Add migrations

* spotless

* make some space

* Handle fancier upgrade

* spotless

* Fix up where clause generation to be a bit more readable

* spotless
Tadgh 2024-06-21 17:36:00 -07:00 committed by GitHub
parent f767058239
commit bce0313458
6 changed files with 107 additions and 12 deletions


@ -0,0 +1,4 @@
---
type: fix
issue: 6034
title: "Two indexes introduced in HAPI-FHIR 6.6.0, `IDX_SP_URI_HASH_IDENTITY_V2` and `IDX_SP_URI_HASH_URI_V2` were previously created as unique indexes. This has caused issues on SQL Server due to the way that a filtered index is created. The unique clause was not necessary to this index, and has been removed."


@ -32,6 +32,7 @@ import ca.uhn.fhir.jpa.migrate.taskdef.ForceIdMigrationCopyTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ForceIdMigrationFixTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
import ca.uhn.fhir.jpa.migrate.tasks.api.ColumnAndNullable;
import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
@ -348,6 +349,37 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
}
{
// Note that these recreate two indexes from a previous 6.6.0 migration. The original migration
// created them as unique indexes, which causes SQL Server to create filtered indexes. See
// https://www.sqlshack.com/introduction-to-sql-server-filtered-indexes/
// The practical effect is that any row whose nullable indexed column is actually null is omitted
// from the index entirely. Removing the uniqueness constraint works around this problem.
Builder.BuilderWithTableName uriTable = version.onTable("HFJ_SPIDX_URI");
uriTable.dropIndex("20240620.10", "IDX_SP_URI_HASH_URI_V2");
uriTable.dropIndex("20240620.20", "IDX_SP_URI_HASH_IDENTITY_V2");
uriTable.addIndex("20240620.30", "IDX_SP_URI_HASH_URI_V2")
.unique(false)
.online(true)
.withPossibleNullableColumns(
new ColumnAndNullable("HASH_URI", true),
new ColumnAndNullable("RES_ID", false),
new ColumnAndNullable("PARTITION_ID", true));
uriTable.addIndex("20240620.40", "IDX_SP_URI_HASH_IDENTITY_V2")
.unique(false)
.online(true)
.withPossibleNullableColumns(
new ColumnAndNullable("HASH_IDENTITY", true),
new ColumnAndNullable("SP_URI", true),
new ColumnAndNullable("RES_ID", false),
new ColumnAndNullable("PARTITION_ID", true));
}
}
protected void init720() {
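
As a small illustration of the problem described in the comment in the migration above, here is a minimal, self-contained sketch (not HAPI FHIR code; the sample values are made up): a non-partitioned row typically carries a NULL PARTITION_ID, so it never enters an index filtered on PARTITION_ID IS NOT NULL, while a plain index still covers it.

// Minimal sketch, not HAPI FHIR code. Evaluates the filtered-index predicate for a
// hypothetical HFJ_SPIDX_URI row with a NULL PARTITION_ID.
public class FilteredIndexRowDemo {
	public static void main(String[] args) {
		Long hashUri = -54321L;       // hypothetical hash value
		Integer partitionId = null;   // non-partitioned rows typically have a NULL partition id

		// Mirrors a filter of the form: WHERE HASH_URI IS NOT NULL AND PARTITION_ID IS NOT NULL
		boolean inFilteredIndex = hashUri != null && partitionId != null;
		boolean inPlainIndex = true;  // a plain index covers every row

		System.out.println("filtered index contains row: " + inFilteredIndex); // false
		System.out.println("plain index contains row:    " + inPlainIndex);    // true
	}
}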


@ -54,12 +54,9 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
name = "HFJ_SPIDX_URI",
indexes = {
// for queries
@Index(name = "IDX_SP_URI_HASH_URI_V2", columnList = "HASH_URI,RES_ID,PARTITION_ID", unique = true),
@Index(name = "IDX_SP_URI_HASH_URI_V2", columnList = "HASH_URI,RES_ID,PARTITION_ID"),
// for sorting
@Index(
name = "IDX_SP_URI_HASH_IDENTITY_V2",
columnList = "HASH_IDENTITY,SP_URI,RES_ID,PARTITION_ID",
unique = true),
@Index(name = "IDX_SP_URI_HASH_IDENTITY_V2", columnList = "HASH_IDENTITY,SP_URI,RES_ID,PARTITION_ID"),
// for index create/delete
@Index(name = "IDX_SP_URI_COORDS", columnList = "RES_ID")
})


@ -34,6 +34,7 @@ import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.stream.Collectors;
public class AddIndexTask extends BaseTableTask {
@ -41,6 +42,7 @@ public class AddIndexTask extends BaseTableTask {
private String myIndexName;
private List<String> myColumns;
private List<String> myNullableColumns;
private Boolean myUnique;
private List<String> myIncludeColumns = Collections.emptyList();
/** Should the operation avoid taking a lock on the table */
@ -64,6 +66,14 @@ public class AddIndexTask extends BaseTableTask {
myUnique = theUnique;
}
public List<String> getNullableColumns() {
return myNullableColumns;
}
public void setNullableColumns(List<String> theNullableColumns) {
this.myNullableColumns = theNullableColumns;
}
@Override
public void validate() {
super.validate();
@ -171,14 +181,15 @@ public class AddIndexTask extends BaseTableTask {
@Nonnull
private String buildMSSqlNotNullWhereClause() {
String mssqlWhereClause;
mssqlWhereClause = " WHERE (";
for (int i = 0; i < myColumns.size(); i++) {
mssqlWhereClause += myColumns.get(i) + " IS NOT NULL ";
if (i < myColumns.size() - 1) {
mssqlWhereClause += "AND ";
}
String mssqlWhereClause = "";
if (myNullableColumns == null || myNullableColumns.isEmpty()) {
return mssqlWhereClause;
}
mssqlWhereClause = " WHERE (";
mssqlWhereClause += myNullableColumns.stream()
.map(column -> column + " IS NOT NULL ")
.collect(Collectors.joining("AND "));
mssqlWhereClause += ")";
return mssqlWhereClause;
}
@ -187,6 +198,10 @@ public class AddIndexTask extends BaseTableTask {
setColumns(Arrays.asList(theColumns));
}
public void setNullableColumns(String... theColumns) {
setNullableColumns(Arrays.asList(theColumns));
}
public void setIncludeColumns(String... theIncludeColumns) {
setIncludeColumns(Arrays.asList(theIncludeColumns));
}


@ -405,6 +405,31 @@ public class Builder {
return new BuilderCompleteTask(task);
}
/**
* This is strictly needed for SQL Server, which creates filtered indexes over nullable columns, so we have to build a tail clause that matches what the SQL Server Hibernate dialect does.
*/
public BuilderCompleteTask withPossibleNullableColumns(ColumnAndNullable... theColumns) {
String[] columnNames = Arrays.stream(theColumns)
.map(ColumnAndNullable::getColumnName)
.toArray(String[]::new);
String[] nullableColumnNames = Arrays.stream(theColumns)
.filter(ColumnAndNullable::isNullable)
.map(ColumnAndNullable::getColumnName)
.toArray(String[]::new);
AddIndexTask task = new AddIndexTask(myRelease, myVersion);
task.setTableName(myTableName);
task.setIndexName(myIndexName);
task.setUnique(myUnique);
task.setColumns(columnNames);
task.setNullableColumns(nullableColumnNames);
task.setOnline(myOnline);
if (myIncludeColumns != null) {
task.setIncludeColumns(myIncludeColumns);
}
addTask(task);
return new BuilderCompleteTask(task);
}
public BuilderAddIndexUnique includeColumns(String... theIncludeColumns) {
myIncludeColumns = theIncludeColumns;
return this;
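
To make the column handling in withPossibleNullableColumns concrete, here is a minimal sketch (not HAPI FHIR code; the record type is local to this example) of how the IDX_SP_URI_HASH_IDENTITY_V2 declaration splits into the full column list and the nullable-only list that feeds the SQL Server WHERE tail:

import java.util.Arrays;

// Standalone sketch of the split performed by withPossibleNullableColumns above.
public class ColumnSplitDemo {
	record Col(String name, boolean nullable) {}

	public static void main(String[] args) {
		Col[] cols = {
				new Col("HASH_IDENTITY", true),
				new Col("SP_URI", true),
				new Col("RES_ID", false),
				new Col("PARTITION_ID", true)
		};

		String[] columnNames =
				Arrays.stream(cols).map(Col::name).toArray(String[]::new);
		String[] nullableColumnNames = Arrays.stream(cols)
				.filter(Col::nullable)
				.map(Col::name)
				.toArray(String[]::new);

		// [HASH_IDENTITY, SP_URI, RES_ID, PARTITION_ID]
		System.out.println(Arrays.toString(columnNames));
		// [HASH_IDENTITY, SP_URI, PARTITION_ID]
		System.out.println(Arrays.toString(nullableColumnNames));
	}
}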


@ -0,0 +1,22 @@
package ca.uhn.fhir.jpa.migrate.tasks.api;
/**
* Simple data class holding a column name and whether that column was nullable at the time this migration was written.
*/
public class ColumnAndNullable {
private final String myColumnName;
private final boolean myNullable;
public ColumnAndNullable(String theColumnName, boolean theNullable) {
myColumnName = theColumnName;
myNullable = theNullable;
}
public String getColumnName() {
return myColumnName;
}
public boolean isNullable() {
return myNullable;
}
}