miscellaneous code cleanups

Gavin King 2022-11-04 20:49:03 +01:00
parent 0d2aa57b5d
commit 3d9bf07ac8
4 changed files with 201 additions and 242 deletions


@ -17,6 +17,7 @@ import java.util.List;
import org.hibernate.HibernateException;
import org.hibernate.MappingException;
import org.hibernate.Remove;
import org.hibernate.boot.model.relational.Exportable;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.dialect.Dialect;
@ -52,17 +53,15 @@ public abstract class Constraint implements Exportable, Serializable {
public static String generateName(String prefix, Table table, Column... columns) {
// Use a concatenation that guarantees uniqueness, even if identical names
// exist between all table and column identifiers.
StringBuilder sb = new StringBuilder( "table`" + table.getName() + "`" );
final StringBuilder sb = new StringBuilder( "table`" + table.getName() + "`" );
// Ensure a consistent ordering of columns, regardless of the order
// they were bound.
// Clone the list, as sometimes a set of order-dependent Column
// bindings are given.
Column[] alphabeticalColumns = columns.clone();
Arrays.sort( alphabeticalColumns, ColumnComparator.INSTANCE );
final Column[] alphabeticalColumns = columns.clone();
Arrays.sort( alphabeticalColumns, Comparator.comparing( Column::getName ) );
for ( Column column : alphabeticalColumns ) {
String columnName = column == null ? "" : column.getName();
final String columnName = column == null ? "" : column.getName();
sb.append( "column`" ).append( columnName ).append( "`" );
}
return prefix + hashedName( sb.toString() );
@ -74,17 +73,14 @@ public abstract class Constraint implements Exportable, Serializable {
* @return String The generated name
*/
public static String generateName(String prefix, Table table, List<Column> columns) {
//N.B. legacy APIs are involved: can't trust that the columns List is actually
//containing Column instances - the generic type isn't consistently enforced.
ArrayList<Column> defensive = new ArrayList<>( columns.size() );
for ( Object o : columns ) {
if ( o instanceof Column ) {
defensive.add( (Column) o );
}
// else: others might be Formula instances.
// They don't need to be part of the name generation.
}
return generateName( prefix, table, defensive.toArray( new Column[0] ) );
// N.B. legacy APIs are involved: we can't trust that the columns List actually
// contains Column instances - the generic type isn't consistently enforced.
// So some elements might be Formula instances, but they don't need to be part
// of the name generation.
final Column[] defensive = columns.stream()
.filter( (Object thing) -> thing instanceof Column )
.toArray( Column[]::new );
return generateName( prefix, table, defensive );
}
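For context, a minimal usage sketch of the two generateName() overloads above; the table and column names are hypothetical and the snippet is not part of this commit (it assumes the usual org.hibernate.mapping constructors and setters):
// Hypothetical names: both overloads end up hashing the same canonical string,
// "table`person`column`first_name`column`last_name`", because the columns are sorted by name first.
final Table person = new Table( "orm" );
person.setName( "person" );
final Column firstName = new Column( "first_name" );
final Column lastName = new Column( "last_name" );
// the binding order of the columns does not matter; the result is "UK_" plus a short hashed suffix
final String ukName = Constraint.generateName( "UK_", person, lastName, firstName );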
/**
@ -93,17 +89,16 @@ public abstract class Constraint implements Exportable, Serializable {
* that the length of the name will always be smaller than the 30
* character identifier restriction enforced by a few dialects.
*
* @param s
* The name to be hashed.
* @param name The name to be hashed.
* @return String The hashed name.
*/
public static String hashedName(String s) {
public static String hashedName(String name) {
try {
MessageDigest md = MessageDigest.getInstance( "MD5" );
final MessageDigest md = MessageDigest.getInstance( "MD5" );
md.reset();
md.update( s.getBytes() );
byte[] digest = md.digest();
BigInteger bigInt = new BigInteger( 1, digest );
md.update( name.getBytes() );
final byte[] digest = md.digest();
final BigInteger bigInt = new BigInteger( 1, digest );
// By converting to base 35 (full alphanumeric), we guarantee
// that the length of the name will always be smaller than the 30
// character identifier restriction enforced by a few dialects.
@ -114,14 +109,6 @@ public abstract class Constraint implements Exportable, Serializable {
}
}
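A quick worked check of the length claim in the comment above, illustrative only and not part of this commit:
// An MD5 digest is 128 bits and each base-35 digit carries log2(35) ~ 5.13 bits, so the
// rendered hash needs at most ceil( 128 / 5.13 ) = 25 characters, comfortably under the
// 30-character limit even after a prefix such as "UK_" or "FK_" is added.
final int digitsNeeded = (int) Math.ceil( 128 / ( Math.log( 35 ) / Math.log( 2 ) ) ); // 25
assert "FK_".length() + digitsNeeded <= 30;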
private static class ColumnComparator implements Comparator<Column> {
public static ColumnComparator INSTANCE = new ColumnComparator();
public int compare(Column col1, Column col2) {
return col1.getName().compareTo( col2.getName() );
}
}
public void addColumn(Column column) {
if ( !columns.contains( column ) ) {
columns.add( column );
@ -151,7 +138,7 @@ public abstract class Constraint implements Exportable, Serializable {
}
public Column getColumn(int i) {
return columns.get( i );
return columns.get( i );
}
@Deprecated(since = "6.0")
@ -175,7 +162,10 @@ public abstract class Constraint implements Exportable, Serializable {
return columns;
}
@Deprecated(since="6.2")
/**
* @deprecated this method is no longer called
*/
@Deprecated(since="6.2") @Remove
public abstract String sqlConstraintString(
SqlStringGenerationContext context,
String constraintName,


@ -88,18 +88,12 @@ public class DenormalizedTable extends Table {
@Override @Deprecated
public Iterator<Column> getColumnIterator() {
return new JoinedIterator<>(
includedTable.getColumnIterator(),
super.getColumnIterator()
);
return new JoinedIterator<>( includedTable.getColumnIterator(), super.getColumnIterator() );
}
@Override
public Collection<Column> getColumns() {
return new JoinedList<>(
new ArrayList<>( includedTable.getColumns() ),
new ArrayList<>( super.getColumns() )
);
return new JoinedList<>( new ArrayList<>( includedTable.getColumns() ), new ArrayList<>( super.getColumns() ) );
}
@Override
@ -112,7 +106,7 @@ public class DenormalizedTable extends Table {
return includedTable.getPrimaryKey();
}
@Override
@Override @Deprecated
public Iterator<UniqueKey> getUniqueKeyIterator() {
if ( !includedTable.isPhysicalTable() ) {
for ( UniqueKey uniqueKey : includedTable.getUniqueKeys().values() ) {
@ -122,7 +116,7 @@ public class DenormalizedTable extends Table {
return getUniqueKeys().values().iterator();
}
@Override
@Override @Deprecated
public Iterator<Index> getIndexIterator() {
final List<Index> indexes = new ArrayList<>();
for ( Index parentIndex : includedTable.getIndexes().values() ) {
@ -132,10 +126,7 @@ public class DenormalizedTable extends Table {
index.addColumns( parentIndex.getColumns() );
indexes.add( index );
}
return new JoinedIterator<>(
indexes.iterator(),
super.getIndexIterator()
);
return new JoinedIterator<>( indexes.iterator(), super.getIndexIterator() );
}
public Table getIncludedTable() {


@ -20,6 +20,7 @@ import java.util.function.Function;
import org.hibernate.HibernateException;
import org.hibernate.MappingException;
import org.hibernate.Remove;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.ContributableDatabaseObject;
@ -70,6 +71,7 @@ public class Table implements Serializable, ContributableDatabaseObject {
private List<Function<SqlStringGenerationContext, InitCommand>> initCommandProducers;
@Deprecated(since="6.2") @Remove
public Table() {
this( "orm" );
}
@ -123,10 +125,9 @@ public class Table implements Serializable, ContributableDatabaseObject {
}
public String getQualifiedName(SqlStringGenerationContext context) {
if ( subselect != null ) {
return "( " + subselect + " )";
}
return context.format( new QualifiedTableName( catalog, schema, name ) );
return subselect != null
? "( " + subselect + " )"
: context.format( new QualifiedTableName( catalog, schema, name ) );
}
/**
@ -135,7 +136,7 @@ public class Table implements Serializable, ContributableDatabaseObject {
*/
@Deprecated
public static String qualify(String catalog, String schema, String table) {
StringBuilder qualifiedName = new StringBuilder();
final StringBuilder qualifiedName = new StringBuilder();
if ( catalog != null ) {
qualifiedName.append( catalog ).append( '.' );
}
@ -232,8 +233,10 @@ public class Table implements Serializable, ContributableDatabaseObject {
if ( column == null ) {
return null;
}
final Column myColumn = columns.get( column.getCanonicalName() );
return column.equals( myColumn ) ? myColumn : null;
else {
final Column existing = columns.get( column.getCanonicalName() );
return column.equals( existing ) ? existing : null;
}
}
public Column getColumn(Identifier name) {
@ -255,8 +258,8 @@ public class Table implements Serializable, ContributableDatabaseObject {
final Column old = getColumn( column );
if ( old == null ) {
if ( primaryKey != null ) {
for ( Column c : primaryKey.getColumns() ) {
if ( c.getCanonicalName().equals( column.getCanonicalName() ) ) {
for ( Column pkColumn : primaryKey.getColumns() ) {
if ( pkColumn.getCanonicalName().equals( column.getCanonicalName() ) ) {
column.setNullable( false );
if ( log.isDebugEnabled() ) {
log.debugf(
@ -356,7 +359,7 @@ public class Table implements Serializable, ContributableDatabaseObject {
// condition 1 : check against other unique keys
for ( UniqueKey otherUniqueKey : uniqueKeys.values() ) {
// make sure its not the same unique key
// make sure it's not the same unique key
if ( uniqueKeyEntry.getValue() == otherUniqueKey ) {
continue;
}
@ -387,7 +390,7 @@ public class Table implements Serializable, ContributableDatabaseObject {
return false;
}
return primaryKey.getColumns().containsAll( uniqueKey.getColumns() )
&& uniqueKey.getColumns().containsAll( primaryKey.getColumns() );
&& uniqueKey.getColumns().containsAll( primaryKey.getColumns() );
}
@Override
@ -406,16 +409,17 @@ public class Table implements Serializable, ContributableDatabaseObject {
}
public boolean equals(Table table) {
if (null == table) {
if ( null == table ) {
return false;
}
if (this == table) {
else if ( this == table ) {
return true;
}
return Identifier.areEqual( name, table.name )
else {
return Identifier.areEqual( name, table.name )
&& Identifier.areEqual( schema, table.schema )
&& Identifier.areEqual( catalog, table.catalog );
}
}
public Iterator<String> sqlAlterStrings(
@ -425,11 +429,11 @@ public class Table implements Serializable, ContributableDatabaseObject {
SqlStringGenerationContext sqlStringGenerationContext) throws HibernateException {
final String tableName = sqlStringGenerationContext.format( new QualifiedTableName( catalog, schema, name ) );
StringBuilder root = new StringBuilder( dialect.getAlterTableString( tableName ) )
final StringBuilder root = new StringBuilder( dialect.getAlterTableString( tableName ) )
.append( ' ' )
.append( dialect.getAddColumnString() );
List<String> results = new ArrayList<>();
final List<String> results = new ArrayList<>();
for ( Column column : getColumns() ) {
final ColumnInformation columnInfo = tableInfo.getColumn(
@ -438,11 +442,11 @@ public class Table implements Serializable, ContributableDatabaseObject {
if ( columnInfo == null ) {
// the column doesn't exist at all.
StringBuilder alter = new StringBuilder( root.toString() )
final StringBuilder alter = new StringBuilder( root.toString() )
.append( ' ' )
.append( column.getQuotedName( dialect ) );
String columnType = column.getSqlType(
final String columnType = column.getSqlType(
metadata.getDatabase().getTypeConfiguration(),
dialect,
metadata
@ -451,12 +455,12 @@ public class Table implements Serializable, ContributableDatabaseObject {
alter.append( ' ' ).append(columnType);
}
String defaultValue = column.getDefaultValue();
final String defaultValue = column.getDefaultValue();
if ( defaultValue != null ) {
alter.append( " default " ).append( defaultValue );
}
String generatedAs = column.getGeneratedAs();
final String generatedAs = column.getGeneratedAs();
if ( generatedAs != null ) {
alter.append( dialect.generatedAs( generatedAs ) );
}
@ -476,12 +480,12 @@ public class Table implements Serializable, ContributableDatabaseObject {
.getColumnDefinitionUniquenessFragment( column, sqlStringGenerationContext ) );
}
String checkConstraint = column.checkConstraint();
final String checkConstraint = column.checkConstraint();
if ( checkConstraint != null && dialect.supportsColumnCheck() ) {
alter.append( checkConstraint );
}
String columnComment = column.getComment();
final String columnComment = column.getComment();
if ( columnComment != null ) {
alter.append( dialect.getColumnComment( columnComment ) );
}
@ -513,16 +517,13 @@ public class Table implements Serializable, ContributableDatabaseObject {
}
public Index getOrCreateIndex(String indexName) {
Index index = indexes.get( indexName );
if ( index == null ) {
index = new Index();
index.setName( indexName );
index.setTable( this );
indexes.put( indexName, index );
}
return index;
}
@ -550,11 +551,11 @@ public class Table implements Serializable, ContributableDatabaseObject {
public UniqueKey createUniqueKey(List<Column> keyColumns) {
String keyName = Constraint.generateName( "UK_", this, keyColumns );
UniqueKey uk = getOrCreateUniqueKey( keyName );
UniqueKey uniqueKey = getOrCreateUniqueKey( keyName );
for (Column keyColumn : keyColumns) {
uk.addColumn( keyColumn );
uniqueKey.addColumn( keyColumn );
}
return uk;
return uniqueKey;
}
public UniqueKey getUniqueKey(String keyName) {
@ -562,15 +563,14 @@ public class Table implements Serializable, ContributableDatabaseObject {
}
public UniqueKey getOrCreateUniqueKey(String keyName) {
UniqueKey uk = uniqueKeys.get( keyName );
if ( uk == null ) {
uk = new UniqueKey();
uk.setName( keyName );
uk.setTable( this );
uniqueKeys.put( keyName, uk );
UniqueKey uniqueKey = uniqueKeys.get( keyName );
if ( uniqueKey == null ) {
uniqueKey = new UniqueKey();
uniqueKey.setName( keyName );
uniqueKey.setTable( this );
uniqueKeys.put( keyName, uniqueKey );
}
return uk;
return uniqueKey;
}
public void createForeignKeys() {
@ -588,31 +588,31 @@ public class Table implements Serializable, ContributableDatabaseObject {
List<Column> referencedColumns) {
final ForeignKeyKey key = new ForeignKeyKey( keyColumns, referencedEntityName, referencedColumns );
ForeignKey fk = foreignKeys.get( key );
if ( fk == null ) {
fk = new ForeignKey();
fk.setTable( this );
fk.setReferencedEntityName( referencedEntityName );
fk.setKeyDefinition( keyDefinition );
ForeignKey foreignKey = foreignKeys.get( key );
if ( foreignKey == null ) {
foreignKey = new ForeignKey();
foreignKey.setTable( this );
foreignKey.setReferencedEntityName( referencedEntityName );
foreignKey.setKeyDefinition( keyDefinition );
for (Column keyColumn : keyColumns) {
fk.addColumn( keyColumn );
foreignKey.addColumn( keyColumn );
}
if ( referencedColumns != null ) {
fk.addReferencedColumns( referencedColumns );
foreignKey.addReferencedColumns( referencedColumns );
}
// NOTE : if the name is null, we will generate an implicit name during second pass processing
// after we know the referenced table name (which might not be resolved yet).
fk.setName( keyName );
foreignKey.setName( keyName );
foreignKeys.put( key, fk );
foreignKeys.put( key, foreignKey );
}
if ( keyName != null ) {
fk.setName( keyName );
foreignKey.setName( keyName );
}
return fk;
return foreignKey;
}
@ -651,7 +651,8 @@ public class Table implements Serializable, ContributableDatabaseObject {
}
public String toString() {
StringBuilder buf = new StringBuilder().append( getClass().getSimpleName() )
final StringBuilder buf = new StringBuilder()
.append( getClass().getSimpleName() )
.append( '(' );
if ( getCatalog() != null ) {
buf.append( getCatalog() ).append( "." );
@ -718,11 +719,7 @@ public class Table implements Serializable, ContributableDatabaseObject {
@Override
public String getExportIdentifier() {
return Table.qualify(
render( catalog ),
render( schema ),
name.render()
);
return Table.qualify( render( catalog ), render( schema ), name.render() );
}
private String render(Identifier identifier) {
@ -739,12 +736,9 @@ public class Table implements Serializable, ContributableDatabaseObject {
Objects.requireNonNull( referencedClassName );
this.referencedClassName = referencedClassName;
this.columns = columns.toArray( EMPTY_COLUMN_ARRAY );
if ( referencedColumns != null ) {
this.referencedColumns = referencedColumns.toArray( EMPTY_COLUMN_ARRAY );
}
else {
this.referencedColumns = EMPTY_COLUMN_ARRAY;
}
this.referencedColumns = referencedColumns != null
? referencedColumns.toArray( EMPTY_COLUMN_ARRAY )
: EMPTY_COLUMN_ARRAY;
}
public int hashCode() {
@ -753,7 +747,9 @@ public class Table implements Serializable, ContributableDatabaseObject {
public boolean equals(Object other) {
ForeignKeyKey fkk = (ForeignKeyKey) other;
return fkk != null && Arrays.equals( fkk.columns, columns ) && Arrays.equals( fkk.referencedColumns, referencedColumns );
return fkk != null
&& Arrays.equals( fkk.columns, columns )
&& Arrays.equals( fkk.referencedColumns, referencedColumns );
}
@Override
@ -785,7 +781,7 @@ public class Table implements Serializable, ContributableDatabaseObject {
return Collections.emptyList();
}
else {
List<InitCommand> initCommands = new ArrayList<>();
final List<InitCommand> initCommands = new ArrayList<>();
for ( Function<SqlStringGenerationContext, InitCommand> producer : initCommandProducers ) {
initCommands.add( producer.apply( context ) );
}


@ -11,8 +11,6 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import org.hibernate.boot.Metadata;
@ -24,10 +22,8 @@ import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.Sequence;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.boot.model.relational.internal.SqlStringGenerationContextImpl;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.config.spi.StandardConverters;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.engine.jdbc.internal.FormatStyle;
import org.hibernate.engine.jdbc.internal.Formatter;
@ -40,9 +36,8 @@ import org.hibernate.mapping.Table;
import org.hibernate.mapping.UniqueKey;
import org.hibernate.resource.transaction.spi.DdlTransactionIsolator;
import org.hibernate.tool.schema.UniqueConstraintSchemaUpdateStrategy;
import org.hibernate.tool.schema.extract.spi.ColumnInformation;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.ForeignKeyInformation;
import org.hibernate.tool.schema.extract.spi.ForeignKeyInformation.ColumnReferenceMapping;
import org.hibernate.tool.schema.extract.spi.IndexInformation;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.SequenceInformation;
@ -61,6 +56,10 @@ import org.hibernate.tool.schema.spi.TargetDescriptor;
import org.jboss.logging.Logger;
import static org.hibernate.cfg.AvailableSettings.UNIQUE_CONSTRAINT_SCHEMA_UPDATE_STRATEGY;
import static org.hibernate.engine.config.spi.StandardConverters.STRING;
import static org.hibernate.internal.util.StringHelper.isEmpty;
import static org.hibernate.tool.schema.UniqueConstraintSchemaUpdateStrategy.DROP_RECREATE_QUIETLY;
import static org.hibernate.tool.schema.UniqueConstraintSchemaUpdateStrategy.SKIP;
/**
* @author Steve Ebersole
@ -70,20 +69,15 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
protected HibernateSchemaManagementTool tool;
protected SchemaFilter schemaFilter;
public AbstractSchemaMigrator(
HibernateSchemaManagementTool tool,
SchemaFilter schemaFilter) {
this.tool = tool;
this.schemaFilter = schemaFilter == null
? DefaultSchemaFilter.INSTANCE
: schemaFilter;
}
private UniqueConstraintSchemaUpdateStrategy uniqueConstraintStrategy;
public AbstractSchemaMigrator(HibernateSchemaManagementTool tool, SchemaFilter schemaFilter) {
this.tool = tool;
this.schemaFilter = schemaFilter == null ? DefaultSchemaFilter.INSTANCE : schemaFilter;
}
/**
* For testing...
* For testing.
*/
public void setUniqueConstraintStrategy(UniqueConstraintSchemaUpdateStrategy uniqueConstraintStrategy) {
this.uniqueConstraintStrategy = uniqueConstraintStrategy;
@ -95,11 +89,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
TargetDescriptor targetDescriptor) {
SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.fromConfigurationMap(
tool.getServiceRegistry().getService( JdbcEnvironment.class ),
metadata.getDatabase(),
options.getConfigurationValues()
);
final SqlStringGenerationContext sqlGenerationContext = sqlGenerationContext( metadata, options );
if ( !targetDescriptor.getTargetTypes().isEmpty() ) {
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final DdlTransactionIsolator ddlTransactionIsolator = tool.getDdlTransactionIsolator( jdbcContext );
@ -107,7 +97,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
final DatabaseInformation databaseInformation = Helper.buildDatabaseInformation(
tool.getServiceRegistry(),
ddlTransactionIsolator,
sqlStringGenerationContext,
sqlGenerationContext,
tool
);
@ -123,8 +113,15 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
}
try {
performMigration( metadata, databaseInformation, options, contributableInclusionFilter, jdbcContext.getDialect(),
sqlStringGenerationContext, targets );
performMigration(
metadata,
databaseInformation,
options,
contributableInclusionFilter,
jdbcContext.getDialect(),
sqlGenerationContext,
targets
);
}
finally {
for ( GenerationTarget target : targets ) {
@ -152,6 +149,14 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
}
}
private SqlStringGenerationContext sqlGenerationContext(Metadata metadata, ExecutionOptions options) {
return SqlStringGenerationContextImpl.fromConfigurationMap(
tool.getServiceRegistry().getService( JdbcEnvironment.class ),
metadata.getDatabase(),
options.getConfigurationValues()
);
}
protected abstract NameSpaceTablesInformation performTablesMigration(
Metadata metadata,
DatabaseInformation existingDatabase,
@ -164,7 +169,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
boolean tryToCreateSchemas,
Set<Identifier> exportedCatalogs,
Namespace namespace,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext sqlGenerationContext,
GenerationTarget[] targets);
private void performMigration(
@ -173,7 +178,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext sqlGenerationContext,
GenerationTarget... targets) {
final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() );
final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter();
@ -188,7 +193,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
applySqlStrings(
true,
dialect.getAuxiliaryDatabaseObjectExporter()
.getSqlDropStrings( auxiliaryDatabaseObject, metadata, sqlStringGenerationContext ),
.getSqlDropStrings( auxiliaryDatabaseObject, metadata, sqlGenerationContext ),
formatter,
options,
targets
@ -198,10 +203,11 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
// Create before-table AuxiliaryDatabaseObjects
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) {
if ( !auxiliaryDatabaseObject.beforeTablesOnCreation() && auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
if ( !auxiliaryDatabaseObject.beforeTablesOnCreation()
&& auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
applySqlStrings(
true,
auxiliaryDatabaseObject.sqlCreateStrings( sqlStringGenerationContext ),
auxiliaryDatabaseObject.sqlCreateStrings( sqlGenerationContext ),
formatter,
options,
targets
@ -220,7 +226,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
}
}
final Map<Namespace, NameSpaceTablesInformation> tablesInformation = new HashMap<>();
Set<Identifier> exportedCatalogs = new HashSet<>();
final Set<Identifier> exportedCatalogs = new HashSet<>();
for ( Namespace namespace : database.getNamespaces() ) {
final NameSpaceTablesInformation nameSpaceTablesInformation = performTablesMigration(
metadata,
@ -234,28 +240,25 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
tryToCreateSchemas,
exportedCatalogs,
namespace,
sqlStringGenerationContext, targets
sqlGenerationContext, targets
);
tablesInformation.put( namespace, nameSpaceTablesInformation );
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
for ( Sequence sequence : namespace.getSequences() ) {
if ( ! contributableInclusionFilter.matches( sequence ) ) {
continue;
}
checkExportIdentifier( sequence, exportIdentifiers );
final SequenceInformation sequenceInformation = existingDatabase.getSequenceInformation( sequence.getName() );
if ( sequenceInformation == null ) {
applySqlStrings(
false,
dialect.getSequenceExporter().getSqlCreateStrings(
sequence,
metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
if ( contributableInclusionFilter.matches( sequence ) ) {
checkExportIdentifier( sequence, exportIdentifiers );
final SequenceInformation sequenceInformation =
existingDatabase.getSequenceInformation( sequence.getName() );
if ( sequenceInformation == null ) {
applySqlStrings(
false,
dialect.getSequenceExporter()
.getSqlCreateStrings( sequence, metadata, sqlGenerationContext ),
formatter,
options,
targets
);
}
}
}
}
@ -276,7 +279,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
final TableInformation tableInformation = nameSpaceTablesInformation.getTableInformation( table );
if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
applyForeignKeys( table, tableInformation, dialect, metadata, formatter, options,
sqlStringGenerationContext, targets );
sqlGenerationContext, targets );
}
}
}
@ -287,7 +290,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
if ( auxiliaryDatabaseObject.beforeTablesOnCreation() && auxiliaryDatabaseObject.appliesToDialect( dialect )) {
applySqlStrings(
true,
auxiliaryDatabaseObject.sqlCreateStrings( sqlStringGenerationContext ),
auxiliaryDatabaseObject.sqlCreateStrings( sqlGenerationContext ),
formatter,
options,
targets
@ -302,11 +305,11 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext sqlGenerationContext,
GenerationTarget... targets) {
applySqlStrings(
false,
dialect.getTableExporter().getSqlCreateStrings( table, metadata, sqlStringGenerationContext ),
dialect.getTableExporter().getSqlCreateStrings( table, metadata, sqlGenerationContext ),
formatter,
options,
targets
@ -320,7 +323,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext sqlGenerationContext,
GenerationTarget... targets) {
applySqlStrings(
false,
@ -328,7 +331,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
dialect,
metadata,
tableInformation,
sqlStringGenerationContext
sqlGenerationContext
),
formatter,
options,
@ -343,12 +346,11 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext sqlGenerationContext,
GenerationTarget... targets) {
final Exporter<Index> exporter = dialect.getIndexExporter();
for ( Index index : table.getIndexes().values() ) {
if ( !StringHelper.isEmpty( index.getName() ) ) {
if ( !isEmpty( index.getName() ) ) {
IndexInformation existingIndex = null;
if ( tableInformation != null ) {
existingIndex = findMatchingIndex( index, tableInformation );
@ -356,7 +358,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
if ( existingIndex == null ) {
applySqlStrings(
false,
exporter.getSqlCreateStrings( index, metadata, sqlStringGenerationContext ),
exporter.getSqlCreateStrings( index, metadata, sqlGenerationContext ),
formatter,
options,
targets
@ -377,15 +379,14 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext sqlGenerationContext,
GenerationTarget... targets) {
if ( uniqueConstraintStrategy == null ) {
uniqueConstraintStrategy = determineUniqueConstraintSchemaUpdateStrategy( metadata );
uniqueConstraintStrategy = determineUniqueConstraintSchemaUpdateStrategy();
}
if ( uniqueConstraintStrategy != UniqueConstraintSchemaUpdateStrategy.SKIP ) {
if ( uniqueConstraintStrategy != SKIP ) {
final Exporter<Constraint> exporter = dialect.getUniqueKeyExporter();
for ( UniqueKey uniqueKey : table.getUniqueKeys().values() ) {
// Skip if index already exists. Most of the time, this
// won't work since most Dialects use Constraints. However,
@ -395,10 +396,10 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
indexInfo = tableInfo.getIndex( Identifier.toIdentifier( uniqueKey.getName() ) );
}
if ( indexInfo == null ) {
if ( uniqueConstraintStrategy == UniqueConstraintSchemaUpdateStrategy.DROP_RECREATE_QUIETLY ) {
if ( uniqueConstraintStrategy == DROP_RECREATE_QUIETLY ) {
applySqlStrings(
true,
exporter.getSqlDropStrings( uniqueKey, metadata, sqlStringGenerationContext ),
exporter.getSqlDropStrings( uniqueKey, metadata, sqlGenerationContext ),
formatter,
options,
targets
@ -407,7 +408,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
applySqlStrings(
true,
exporter.getSqlCreateStrings( uniqueKey, metadata, sqlStringGenerationContext ),
exporter.getSqlCreateStrings( uniqueKey, metadata, sqlGenerationContext ),
formatter,
options,
targets
@ -417,14 +418,10 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
}
}
private UniqueConstraintSchemaUpdateStrategy determineUniqueConstraintSchemaUpdateStrategy(Metadata metadata) {
final ConfigurationService cfgService = ((MetadataImplementor) metadata).getMetadataBuildingOptions()
.getServiceRegistry()
.getService( ConfigurationService.class );
return UniqueConstraintSchemaUpdateStrategy.interpret(
cfgService.getSetting( UNIQUE_CONSTRAINT_SCHEMA_UPDATE_STRATEGY, StandardConverters.STRING )
);
private UniqueConstraintSchemaUpdateStrategy determineUniqueConstraintSchemaUpdateStrategy() {
final String updateStrategy = tool.getServiceRegistry().getService( ConfigurationService.class )
.getSetting( UNIQUE_CONSTRAINT_SCHEMA_UPDATE_STRATEGY, STRING );
return UniqueConstraintSchemaUpdateStrategy.interpret( updateStrategy );
}
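For reference, a hedged sketch of how the setting consumed above is typically supplied; the settings map is hypothetical, while the key constant and the enum values are the ones already imported at the top of this file:
// Hypothetical bootstrap snippet; SKIP and DROP_RECREATE_QUIETLY are the values this class acts on.
final Map<String, Object> settings = new HashMap<>();
settings.put( UNIQUE_CONSTRAINT_SCHEMA_UPDATE_STRATEGY, "DROP_RECREATE_QUIETLY" );
// UniqueConstraintSchemaUpdateStrategy.interpret() maps the configured string back to the enum constant
final UniqueConstraintSchemaUpdateStrategy strategy = UniqueConstraintSchemaUpdateStrategy.interpret( "DROP_RECREATE_QUIETLY" );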
protected void applyForeignKeys(
@ -434,33 +431,24 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext sqlGenerationContext,
GenerationTarget... targets) {
if ( dialect.hasAlterTable() ) {
final Exporter<ForeignKey> exporter = dialect.getForeignKeyExporter();
for ( ForeignKey foreignKey : table.getForeignKeys().values() ) {
if ( foreignKey.isPhysicalConstraint() && foreignKey.isCreationEnabled() ) {
boolean existingForeignKeyFound = false;
if ( tableInformation != null ) {
existingForeignKeyFound = checkForExistingForeignKey(
foreignKey,
tableInformation
);
}
if ( !existingForeignKeyFound ) {
// todo : shouldn't we just drop+recreate if FK exists?
// this follows the existing code from legacy SchemaUpdate which just skipped
// in old SchemaUpdate code, this was the trigger to "create"
applySqlStrings(
false,
exporter.getSqlCreateStrings( foreignKey, metadata, sqlStringGenerationContext ),
formatter,
options,
targets
);
}
if ( foreignKey.isPhysicalConstraint()
&& foreignKey.isCreationEnabled()
&& ( tableInformation == null || !checkForExistingForeignKey( foreignKey, tableInformation ) ) ) {
// todo : shouldn't we just drop+recreate if the FK already exists?
// this follows the legacy SchemaUpdate code, which simply skipped keys that already exist;
// in the old SchemaUpdate code, the absence of a matching key was the trigger to "create"
applySqlStrings(
false,
exporter.getSqlCreateStrings( foreignKey, metadata, sqlGenerationContext ),
formatter,
options,
targets
);
}
}
}
@ -478,42 +466,37 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
if ( foreignKey.getName() == null || tableInformation == null ) {
return false;
}
final String referencingColumn = foreignKey.getColumn( 0 ).getName();
final String referencedTable = foreignKey.getReferencedTable().getName();
/*
* Find existing keys based on referencing column and referencedTable. "referencedColumnName" is not checked
* because that always is the primary key of the "referencedTable".
*/
if (equivalentForeignKeyExistsInDatabase(tableInformation, referencingColumn, referencedTable)) {
return true;
else {
final String referencingColumn = foreignKey.getColumn( 0 ).getName();
final String referencedTable = foreignKey.getReferencedTable().getName();
// Find existing keys based on referencing column and referencedTable. "referencedColumnName"
// is not checked because that always is the primary key of the "referencedTable".
return equivalentForeignKeyExistsInDatabase( tableInformation, referencingColumn, referencedTable )
// And finally just compare the name of the key. If a key with the same name exists we
// assume the function is also the same...
|| tableInformation.getForeignKey( Identifier.toIdentifier( foreignKey.getName() ) ) != null;
}
// And at the end just compare the name of the key. If a key with the same name exists we assume the function is
// also the same...
return tableInformation.getForeignKey( Identifier.toIdentifier( foreignKey.getName() ) ) != null;
}
boolean equivalentForeignKeyExistsInDatabase(TableInformation tableInformation, String referencingColumn, String referencedTable) {
Predicate<ColumnReferenceMapping> mappingPredicate = m -> {
String existingReferencingColumn = m.getReferencingColumnMetadata().getColumnIdentifier().getText();
String existingReferencedTable = m.getReferencedColumnMetadata().getContainingTableInformation().getName().getTableName().getCanonicalName();
return referencingColumn.equalsIgnoreCase( existingReferencingColumn ) && referencedTable.equalsIgnoreCase( existingReferencedTable );
};
Stream<ForeignKeyInformation> keyStream = StreamSupport.stream( tableInformation.getForeignKeys().spliterator(), false );
Stream<ColumnReferenceMapping> mappingStream = keyStream.flatMap( k -> StreamSupport.stream( k.getColumnReferenceMappings().spliterator(), false ) );
return mappingStream.anyMatch( mappingPredicate );
return StreamSupport.stream( tableInformation.getForeignKeys().spliterator(), false )
.flatMap( foreignKeyInformation -> StreamSupport.stream( foreignKeyInformation.getColumnReferenceMappings().spliterator(), false ) )
.anyMatch( columnReferenceMapping -> {
final ColumnInformation referencingColumnMetadata = columnReferenceMapping.getReferencingColumnMetadata();
final ColumnInformation referencedColumnMetadata = columnReferenceMapping.getReferencedColumnMetadata();
final String existingReferencingColumn = referencingColumnMetadata.getColumnIdentifier().getText();
final String existingReferencedTable =
referencedColumnMetadata.getContainingTableInformation().getName().getTableName().getCanonicalName();
return referencingColumn.equalsIgnoreCase( existingReferencingColumn )
&& referencedTable.equalsIgnoreCase( existingReferencedTable );
} );
}
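To make the matching rule above concrete, an illustrative reading with hypothetical table and column names (not part of this commit):
// A mapped foreign key from orders.customer_id to the customer table counts as already present when
// the database reports any foreign key whose referencing column is customer_id and whose referenced
// table is customer (the referenced column is not compared, since it is always the primary key of
// customer), or when a constraint with the same name already exists.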
protected void checkExportIdentifier(Exportable exportable, Set<String> exportIdentifiers) {
final String exportIdentifier = exportable.getExportIdentifier();
if ( exportIdentifiers.contains( exportIdentifier ) ) {
throw new SchemaManagementException(
String.format(
"Export identifier [%s] encountered more than once",
exportIdentifier
)
String.format("Export identifier [%s] encountered more than once", exportIdentifier )
);
}
exportIdentifiers.add( exportIdentifier );
@ -526,8 +509,8 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
ExecutionOptions options,
GenerationTarget... targets) {
if ( sqlStrings != null ) {
for ( String sqlString : sqlStrings ) {
applySqlString( quiet, sqlString, formatter, options, targets );
for ( String sql : sqlStrings ) {
applySqlString( quiet, sql, formatter, options, targets );
}
}
}
@ -574,15 +557,15 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
private static void applySqlString(
boolean quiet,
String sqlString,
String sql,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( !StringHelper.isEmpty( sqlString ) ) {
String sqlStringFormatted = formatter.format( sqlString );
if ( !isEmpty( sql ) ) {
final String formattedSql = formatter.format( sql );
for ( GenerationTarget target : targets ) {
try {
target.accept( sqlStringFormatted );
target.accept( formattedSql );
}
catch (CommandAcceptanceException e) {
if ( !quiet ) {
@ -602,8 +585,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
GenerationTarget... targets) {
if ( sqlStrings != null ) {
while ( sqlStrings.hasNext() ) {
final String sqlString = sqlStrings.next();
applySqlString( quiet, sqlString, formatter, options, targets );
applySqlString( quiet, sqlStrings.next(), formatter, options, targets );
}
}
}