From 782d2c9707276b5415f4c8fc5286856494b29a43 Mon Sep 17 00:00:00 2001
From: Gavin
Date: Mon, 19 Dec 2022 01:07:28 +0100
Subject: [PATCH] refactor SchemaCreatorImpl, SchemaDropperImpl into bite-sized methods

---
 .../internal/DatabaseInformationImpl.java    |  22 +-
 .../internal/ExtractionContextImpl.java      |  12 +-
 .../internal/AbstractSchemaMigrator.java     |   3 +-
 .../internal/AbstractSchemaValidator.java    |   6 +-
 .../internal/GroupedSchemaMigratorImpl.java  |  14 +-
 .../tool/schema/internal/Helper.java         |   6 +-
 .../HibernateSchemaManagementTool.java       |   4 +-
 .../IndividuallySchemaMigratorImpl.java      |  14 +-
 .../schema/internal/SchemaCreatorImpl.java   | 816 +++++++++++-------
 .../schema/internal/SchemaDropperImpl.java   | 521 +++++------
 .../schema/internal/SchemaTruncatorImpl.java |  24 +-
 .../internal/StandardTableMigrator.java      |  10 +-
 .../tool/schema/internal/TableMigrator.java  |   2 +-
 .../exec/ImprovedExtractionContextImpl.java  |  12 +-
 .../tool/schema/spi/ExtractionTool.java      |   3 +-
 15 files changed, 825 insertions(+), 644 deletions(-)

diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/DatabaseInformationImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/DatabaseInformationImpl.java
index f2ad3077ad..53fa175a89 100644
--- a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/DatabaseInformationImpl.java
+++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/DatabaseInformationImpl.java
@@ -32,7 +32,7 @@ import org.hibernate.tool.schema.spi.SchemaManagementTool;
 public class DatabaseInformationImpl
 		implements DatabaseInformation, ExtractionContext.DatabaseObjectAccess {
 	private final JdbcEnvironment jdbcEnvironment;
-	private final SqlStringGenerationContext sqlStringGenerationContext;
+	private final SqlStringGenerationContext context;
 	private final ExtractionContext extractionContext;
 	private final InformationExtractor extractor;

@@ -41,15 +41,15 @@ public class DatabaseInformationImpl
 	public DatabaseInformationImpl(
 			ServiceRegistry serviceRegistry,
 			JdbcEnvironment jdbcEnvironment,
-			SqlStringGenerationContext sqlStringGenerationContext,
+			SqlStringGenerationContext context,
 			DdlTransactionIsolator ddlTransactionIsolator,
 			SchemaManagementTool tool) throws SQLException {
 		this.jdbcEnvironment = jdbcEnvironment;
-		this.sqlStringGenerationContext = sqlStringGenerationContext;
+		this.context = context;
 		this.extractionContext = tool.getExtractionTool().createExtractionContext(
 				serviceRegistry,
 				jdbcEnvironment,
-				sqlStringGenerationContext,
+				context,
 				ddlTransactionIsolator,
 				this
 		);
@@ -80,13 +80,13 @@ public class DatabaseInformationImpl

 	@Override
 	public boolean catalogExists(Identifier catalog) {
-		return extractor.catalogExists( sqlStringGenerationContext.catalogWithDefault( catalog ) );
+		return extractor.catalogExists( context.catalogWithDefault( catalog ) );
 	}

 	@Override
 	public boolean schemaExists(Namespace.Name namespace) {
-		return extractor.schemaExists( sqlStringGenerationContext.catalogWithDefault( namespace.getCatalog() ),
-				sqlStringGenerationContext.schemaWithDefault( namespace.getSchema() ) );
+		return extractor.schemaExists( context.catalogWithDefault( namespace.getCatalog() ),
+				context.schemaWithDefault( namespace.getSchema() ) );
 	}

 	@Override
@@ -111,16 +111,16 @@ public class DatabaseInformationImpl
 		}
 		return extractor.getTable(
-				sqlStringGenerationContext.catalogWithDefault( tableName.getCatalogName() ),
-				sqlStringGenerationContext.schemaWithDefault( tableName.getSchemaName() ),
+
context.catalogWithDefault( tableName.getCatalogName() ), + context.schemaWithDefault( tableName.getSchemaName() ), tableName.getTableName() ); } @Override public NameSpaceTablesInformation getTablesInformation(Namespace namespace) { - return extractor.getTables( sqlStringGenerationContext.catalogWithDefault( namespace.getPhysicalName().getCatalog() ), - sqlStringGenerationContext.schemaWithDefault( namespace.getPhysicalName().getSchema() ) ); + return extractor.getTables( context.catalogWithDefault( namespace.getPhysicalName().getCatalog() ), + context.schemaWithDefault( namespace.getPhysicalName().getSchema() ) ); } @Override diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/ExtractionContextImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/ExtractionContextImpl.java index 2dd8d53653..d75063444a 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/ExtractionContextImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/ExtractionContextImpl.java @@ -23,7 +23,7 @@ import org.hibernate.tool.schema.extract.spi.ExtractionContext; public class ExtractionContextImpl implements ExtractionContext { private final ServiceRegistry serviceRegistry; private final JdbcEnvironment jdbcEnvironment; - private final SqlStringGenerationContext sqlStringGenerationContext; + private final SqlStringGenerationContext context; private final JdbcConnectionAccess jdbcConnectionAccess; private final DatabaseObjectAccess registeredTableAccess; @@ -33,12 +33,12 @@ public class ExtractionContextImpl implements ExtractionContext { public ExtractionContextImpl( ServiceRegistry serviceRegistry, JdbcEnvironment jdbcEnvironment, - SqlStringGenerationContext sqlStringGenerationContext, + SqlStringGenerationContext context, JdbcConnectionAccess jdbcConnectionAccess, DatabaseObjectAccess registeredTableAccess) { this.serviceRegistry = serviceRegistry; this.jdbcEnvironment = jdbcEnvironment; - this.sqlStringGenerationContext = sqlStringGenerationContext; + this.context = context; this.jdbcConnectionAccess = jdbcConnectionAccess; this.registeredTableAccess = registeredTableAccess; } @@ -55,7 +55,7 @@ public class ExtractionContextImpl implements ExtractionContext { @Override public SqlStringGenerationContext getSqlStringGenerationContext() { - return sqlStringGenerationContext; + return context; } @Override @@ -86,12 +86,12 @@ public class ExtractionContextImpl implements ExtractionContext { @Override public Identifier getDefaultCatalog() { - return sqlStringGenerationContext.getDefaultCatalog(); + return context.getDefaultCatalog(); } @Override public Identifier getDefaultSchema() { - return sqlStringGenerationContext.getDefaultSchema(); + return context.getDefaultSchema(); } @Override diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/AbstractSchemaMigrator.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/AbstractSchemaMigrator.java index 2161e1ea28..2d3d33c2d1 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/AbstractSchemaMigrator.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/AbstractSchemaMigrator.java @@ -8,7 +8,6 @@ package org.hibernate.tool.schema.internal; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.stream.StreamSupport; @@ -62,6 +61,8 @@ import static 
org.hibernate.tool.schema.UniqueConstraintSchemaUpdateStrategy.DRO import static org.hibernate.tool.schema.UniqueConstraintSchemaUpdateStrategy.SKIP; /** + * Base implementation of {@link SchemaMigrator}. + * * @author Steve Ebersole */ public abstract class AbstractSchemaMigrator implements SchemaMigrator { diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/AbstractSchemaValidator.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/AbstractSchemaValidator.java index bcda2305be..85a35f31bd 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/AbstractSchemaValidator.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/AbstractSchemaValidator.java @@ -34,6 +34,8 @@ import org.hibernate.type.descriptor.JdbcTypeNameMapper; import org.jboss.logging.Logger; /** + * Base implementation of {@link SchemaValidator}. + * * @author Steve Ebersole */ public abstract class AbstractSchemaValidator implements SchemaValidator { @@ -54,7 +56,7 @@ public abstract class AbstractSchemaValidator implements SchemaValidator { Metadata metadata, ExecutionOptions options, ContributableMatcher contributableInclusionFilter) { - SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.fromConfigurationMap( + SqlStringGenerationContext context = SqlStringGenerationContextImpl.fromConfigurationMap( tool.getServiceRegistry().getService( JdbcEnvironment.class ), metadata.getDatabase(), options.getConfigurationValues() @@ -65,7 +67,7 @@ public abstract class AbstractSchemaValidator implements SchemaValidator { final DatabaseInformation databaseInformation = Helper.buildDatabaseInformation( tool.getServiceRegistry(), isolator, - sqlStringGenerationContext, + context, tool ); diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/GroupedSchemaMigratorImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/GroupedSchemaMigratorImpl.java index 17f2052547..f3bba93db9 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/GroupedSchemaMigratorImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/GroupedSchemaMigratorImpl.java @@ -24,10 +24,10 @@ import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.SchemaFilter; /** - * @author Andrea Boriero - * * This implementation executes a single {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call * to retrieve all the database table in order to determine if all the {@link jakarta.persistence.Entity} have a mapped database tables. 
+ * + * @author Andrea Boriero */ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator { @@ -50,7 +50,7 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator { boolean tryToCreateSchemas, Set exportedCatalogs, Namespace namespace, - SqlStringGenerationContext sqlStringGenerationContext, + SqlStringGenerationContext context, GenerationTarget[] targets) { final NameSpaceTablesInformation tablesInformation = new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() ); @@ -76,12 +76,12 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator { checkExportIdentifier( table, exportIdentifiers ); final TableInformation tableInformation = tables.getTableInformation( table ); if ( tableInformation == null ) { - createTable( table, dialect, metadata, formatter, options, sqlStringGenerationContext, targets ); + createTable( table, dialect, metadata, formatter, options, context, targets ); } else if ( tableInformation.isPhysicalTable() ) { tablesInformation.addTableInformation( tableInformation ); migrateTable( table, tableInformation, dialect, metadata, formatter, options, - sqlStringGenerationContext, targets ); + context, targets ); } } } @@ -93,9 +93,9 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator { final TableInformation tableInformation = tablesInformation.getTableInformation( table ); if ( tableInformation == null || tableInformation.isPhysicalTable() ) { applyIndexes( table, tableInformation, dialect, metadata, formatter, options, - sqlStringGenerationContext, targets ); + context, targets ); applyUniqueKeys( table, tableInformation, dialect, metadata, formatter, options, - sqlStringGenerationContext, targets ); + context, targets ); } } } diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/Helper.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/Helper.java index a1e68a22f8..cc82da1840 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/Helper.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/Helper.java @@ -48,7 +48,7 @@ public class Helper { private static final Pattern COMMA_PATTERN = Pattern.compile( "\\s*,\\s*" ); public static ScriptSourceInput interpretScriptSourceSetting( - Object scriptSourceSetting, + Object scriptSourceSetting, //Reader or String URL ClassLoaderService classLoaderService, String charsetName ) { if ( scriptSourceSetting instanceof Reader ) { @@ -169,14 +169,14 @@ public class Helper { public static DatabaseInformation buildDatabaseInformation( ServiceRegistry serviceRegistry, DdlTransactionIsolator ddlTransactionIsolator, - SqlStringGenerationContext sqlStringGenerationContext, + SqlStringGenerationContext context, SchemaManagementTool tool) { final JdbcEnvironment jdbcEnvironment = serviceRegistry.getService( JdbcEnvironment.class ); try { return new DatabaseInformationImpl( serviceRegistry, jdbcEnvironment, - sqlStringGenerationContext, + context, ddlTransactionIsolator, tool ); diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/HibernateSchemaManagementTool.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/HibernateSchemaManagementTool.java index c8347538db..2c407012a1 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/HibernateSchemaManagementTool.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/HibernateSchemaManagementTool.java @@ -449,13 +449,13 @@ 
public class HibernateSchemaManagementTool implements SchemaManagementTool, Serv public ExtractionContext createExtractionContext( ServiceRegistry serviceRegistry, JdbcEnvironment jdbcEnvironment, - SqlStringGenerationContext sqlStringGenerationContext, + SqlStringGenerationContext context, DdlTransactionIsolator ddlTransactionIsolator, ExtractionContext.DatabaseObjectAccess databaseObjectAccess) { return new ImprovedExtractionContextImpl( serviceRegistry, jdbcEnvironment, - sqlStringGenerationContext, + context, ddlTransactionIsolator, databaseObjectAccess ); diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/IndividuallySchemaMigratorImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/IndividuallySchemaMigratorImpl.java index 9b0a7df14c..ec4791cd7a 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/IndividuallySchemaMigratorImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/IndividuallySchemaMigratorImpl.java @@ -24,10 +24,10 @@ import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.SchemaFilter; /** - * @author Andrea Boriero - * * This implementation executes one {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call * for each {@link jakarta.persistence.Entity} in order to determine if a corresponding database table exists. + * + * @author Andrea Boriero */ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator { @@ -50,7 +50,7 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator { boolean tryToCreateSchemas, Set exportedCatalogs, Namespace namespace, - SqlStringGenerationContext sqlStringGenerationContext, + SqlStringGenerationContext context, GenerationTarget[] targets) { final NameSpaceTablesInformation tablesInformation = new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() ); @@ -74,12 +74,12 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator { checkExportIdentifier( table, exportIdentifiers ); final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() ); if ( tableInformation == null ) { - createTable( table, dialect, metadata, formatter, options, sqlStringGenerationContext, targets ); + createTable( table, dialect, metadata, formatter, options, context, targets ); } else if ( tableInformation.isPhysicalTable() ) { tablesInformation.addTableInformation( tableInformation ); migrateTable( table, tableInformation, dialect, metadata, formatter, options, - sqlStringGenerationContext, targets ); + context, targets ); } } } @@ -91,9 +91,9 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator { final TableInformation tableInformation = tablesInformation.getTableInformation( table ); if ( tableInformation == null || tableInformation.isPhysicalTable() ) { applyIndexes( table, tableInformation, dialect, metadata, formatter, options, - sqlStringGenerationContext, targets ); + context, targets ); applyUniqueKeys( table, tableInformation, dialect, metadata, formatter, options, - sqlStringGenerationContext, targets ); + context, targets ); } } } diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaCreatorImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaCreatorImpl.java index 02a6ab62d7..96a0556bba 100644 --- 
a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaCreatorImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaCreatorImpl.java @@ -27,7 +27,6 @@ import org.hibernate.boot.model.relational.SqlStringGenerationContext; import org.hibernate.boot.model.relational.internal.SqlStringGenerationContextImpl; import org.hibernate.boot.registry.classloading.spi.ClassLoaderService; import org.hibernate.boot.spi.MetadataImplementor; -import org.hibernate.cfg.AvailableSettings; import org.hibernate.dialect.Dialect; import org.hibernate.engine.config.spi.ConfigurationService; import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment; @@ -35,9 +34,6 @@ import org.hibernate.engine.jdbc.internal.FormatStyle; import org.hibernate.engine.jdbc.internal.Formatter; import org.hibernate.internal.CoreLogging; import org.hibernate.internal.CoreMessageLogger; -import org.hibernate.internal.util.StringHelper; -import org.hibernate.internal.util.collections.CollectionHelper; -import org.hibernate.internal.util.config.ConfigurationHelper; import org.hibernate.mapping.ForeignKey; import org.hibernate.mapping.Index; import org.hibernate.mapping.Table; @@ -64,13 +60,17 @@ import org.hibernate.tool.schema.spi.SqlScriptCommandExtractor; import org.hibernate.tool.schema.spi.TargetDescriptor; import static org.hibernate.cfg.AvailableSettings.HBM2DDL_CHARSET_NAME; +import static org.hibernate.cfg.AvailableSettings.HBM2DDL_IMPORT_FILES; import static org.hibernate.cfg.AvailableSettings.HBM2DDL_LOAD_SCRIPT_SOURCE; import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE; +import static org.hibernate.internal.util.StringHelper.isNotEmpty; +import static org.hibernate.internal.util.collections.CollectionHelper.setOfSize; +import static org.hibernate.internal.util.config.ConfigurationHelper.getString; +import static org.hibernate.tool.schema.internal.Helper.interpretFormattingEnabled; import static org.hibernate.tool.schema.internal.Helper.interpretScriptSourceSetting; /** - * This is functionally nothing more than the creation script from the older SchemaExport class (plus some - * additional stuff in the script). + * Basic implementation of {@link SchemaCreator}. * * @author Steve Ebersole */ @@ -113,19 +113,18 @@ public class SchemaCreatorImpl implements SchemaCreator { ContributableMatcher contributableInclusionFilter, SourceDescriptor sourceDescriptor, TargetDescriptor targetDescriptor) { - if ( targetDescriptor.getTargetTypes().isEmpty() ) { - return; + if ( !targetDescriptor.getTargetTypes().isEmpty() ) { + final Map configuration = options.getConfigurationValues(); + final JdbcContext jdbcContext = tool.resolveJdbcContext( configuration ); + doCreation( + metadata, + jdbcContext.getDialect(), + options, + contributableInclusionFilter, + sourceDescriptor, + tool.buildGenerationTargets( targetDescriptor, jdbcContext, configuration, true ) + ); } - - final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() ); - final GenerationTarget[] targets = tool.buildGenerationTargets( - targetDescriptor, - jdbcContext, - options.getConfigurationValues(), - true - ); - - doCreation( metadata, jdbcContext.getDialect(), options, contributableInclusionFilter, sourceDescriptor, targets ); } @Internal @@ -162,34 +161,38 @@ public class SchemaCreatorImpl implements SchemaCreator { ContributableMatcher contributableInclusionFilter, SourceDescriptor sourceDescriptor, GenerationTarget... 
targets) { - final SqlScriptCommandExtractor commandExtractor = tool.getServiceRegistry().getService( SqlScriptCommandExtractor.class ); - - final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() ); + final SqlScriptCommandExtractor commandExtractor = getCommandExtractor(); + final boolean format = interpretFormattingEnabled( options.getConfigurationValues() ); final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter(); switch ( sourceDescriptor.getSourceType() ) { - case SCRIPT: { + case SCRIPT: createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); break; - } - case METADATA: { + case METADATA: createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets ); break; - } - case METADATA_THEN_SCRIPT: { + case METADATA_THEN_SCRIPT: createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets ); createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); break; - } - case SCRIPT_THEN_METADATA: { + case SCRIPT_THEN_METADATA: createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets ); - } + break; } applyImportSources( options, commandExtractor, format, dialect, targets ); } + private SqlScriptCommandExtractor getCommandExtractor() { + return tool.getServiceRegistry().getService( SqlScriptCommandExtractor.class ); + } + + private ClassLoaderService getClassLoaderService() { + return tool.getServiceRegistry().getService( ClassLoaderService.class ); + } + public void createFromScript( ScriptSourceInput scriptSourceInput, SqlScriptCommandExtractor commandExtractor, @@ -200,9 +203,8 @@ public class SchemaCreatorImpl implements SchemaCreator { final List commands = scriptSourceInput.extract( reader -> commandExtractor.extractCommands( reader, dialect ) ); - - for ( int i = 0; i < commands.size(); i++ ) { - applySqlString( commands.get( i ), formatter, options, targets ); + for ( String command : commands ) { + applySqlString( command, formatter, options, targets ); } } @@ -223,6 +225,15 @@ public class SchemaCreatorImpl implements SchemaCreator { ); } + private static SqlStringGenerationContext createSqlStringGenerationContext(ExecutionOptions options, Metadata metadata) { + final Database database = metadata.getDatabase(); + return SqlStringGenerationContextImpl.fromConfigurationMap( + database.getJdbcEnvironment(), + database, + options.getConfigurationValues() + ); + } + @Internal public void createFromMetadata( Metadata metadata, @@ -231,256 +242,315 @@ public class SchemaCreatorImpl implements SchemaCreator { Dialect dialect, Formatter formatter, GenerationTarget... 
targets) { - boolean tryToCreateCatalogs = false; - boolean tryToCreateSchemas = false; - if ( options.shouldManageNamespaces() ) { - if ( dialect.canCreateSchema() ) { - tryToCreateSchemas = true; - } - if ( dialect.canCreateCatalog() ) { - tryToCreateCatalogs = true; - } - } - - final Database database = metadata.getDatabase(); - final JdbcEnvironment jdbcEnvironment = database.getJdbcEnvironment(); - SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.fromConfigurationMap( - jdbcEnvironment, database, options.getConfigurationValues() ); - - final Set exportIdentifiers = CollectionHelper.setOfSize( 50 ); - - // first, create each catalog/schema - if ( tryToCreateCatalogs || tryToCreateSchemas ) { - Set exportedCatalogs = new HashSet<>(); - for ( Namespace namespace : database.getNamespaces() ) { - - if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) { - continue; - } - - if ( tryToCreateCatalogs ) { - final Identifier catalogLogicalName = namespace.getName().getCatalog(); - final Identifier catalogPhysicalName = - sqlStringGenerationContext.catalogWithDefault( namespace.getPhysicalName().getCatalog() ); - - if ( catalogPhysicalName != null && !exportedCatalogs.contains( catalogLogicalName ) ) { - applySqlStrings( - dialect.getCreateCatalogCommand( catalogPhysicalName.render( dialect ) ), - formatter, - options, - targets - ); - exportedCatalogs.add( catalogLogicalName ); - } - } - - final Identifier schemaPhysicalName = - sqlStringGenerationContext.schemaWithDefault( namespace.getPhysicalName().getSchema() ); - if ( tryToCreateSchemas && schemaPhysicalName != null ) { - applySqlStrings( - dialect.getCreateSchemaCommand( schemaPhysicalName.render( dialect ) ), - formatter, - options, - targets - ); - } - } - } + final SqlStringGenerationContext context = createSqlStringGenerationContext( options, metadata ); + final Set exportIdentifiers = setOfSize( 50 ); + createSchemasAndCatalogs( metadata, options, dialect, formatter, context, targets ); // next, create all UDTs - for ( Namespace namespace : database.getNamespaces() ) { - - if ( !options.getSchemaFilter().includeNamespace( namespace ) ) { - continue; - } - - for ( UserDefinedType userDefinedType : namespace.getDependencyOrderedUserDefinedTypes() ) { - applySqlStrings( - dialect.getUserDefinedTypeExporter().getSqlCreateStrings( - userDefinedType, - metadata, - sqlStringGenerationContext - ), - formatter, - options, - targets - ); - } - } - + createUserDefinedTypes( metadata, options, dialect, formatter, context, targets ); // next, create all "before table" auxiliary objects - for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) { - if ( !auxiliaryDatabaseObject.beforeTablesOnCreation() ) { - continue; - } - - if ( auxiliaryDatabaseObject.appliesToDialect( dialect ) ) { - checkExportIdentifier( auxiliaryDatabaseObject, exportIdentifiers ); - applySqlStrings( - dialect.getAuxiliaryDatabaseObjectExporter().getSqlCreateStrings( - auxiliaryDatabaseObject, - metadata, - sqlStringGenerationContext - ), - formatter, - options, - targets - ); - } - } - + createAuxiliaryObjectsBeforeTables( metadata, options, dialect, formatter, context, exportIdentifiers, targets ); // then, create all schema objects (tables, sequences, constraints, etc) in each schema - for ( Namespace namespace : database.getNamespaces() ) { - - if ( ! 
options.getSchemaFilter().includeNamespace( namespace ) ) { - continue; - } - - // sequences - for ( Sequence sequence : namespace.getSequences() ) { - if ( ! options.getSchemaFilter().includeSequence( sequence ) ) { - continue; - } - - if ( ! contributableInclusionMatcher.matches( sequence ) ) { - continue; - } - - checkExportIdentifier( sequence, exportIdentifiers ); - - applySqlStrings( - dialect.getSequenceExporter().getSqlCreateStrings( - sequence, - metadata, - sqlStringGenerationContext - ), -// dialect.getCreateSequenceStrings( -// jdbcEnvironment.getQualifiedObjectNameFormatter().format( sequence.getName(), dialect ), -// sequence.getInitialValue(), -// sequence.getIncrementSize() -// ), - formatter, - options, - targets - ); - } - - // tables - for ( Table table : namespace.getTables() ) { - if ( !table.isPhysicalTable() ){ - continue; - } - - if ( ! options.getSchemaFilter().includeTable( table ) ) { - continue; - } - - if ( ! contributableInclusionMatcher.matches( table ) ) { - continue; - } - - checkExportIdentifier( table, exportIdentifiers ); - - applySqlStrings( - dialect.getTableExporter().getSqlCreateStrings( table, metadata, sqlStringGenerationContext ), - formatter, - options, - targets - ); - - } - - for ( Table table : namespace.getTables() ) { - if ( !table.isPhysicalTable() ){ - continue; - } - if ( ! options.getSchemaFilter().includeTable( table ) ) { - continue; - } - - if ( ! contributableInclusionMatcher.matches( table ) ) { - continue; - } - - // indexes - for ( Index index : table.getIndexes().values() ) { - checkExportIdentifier( index, exportIdentifiers ); - applySqlStrings( - dialect.getIndexExporter().getSqlCreateStrings( index, metadata, - sqlStringGenerationContext - ), - formatter, - options, - targets - ); - } - - // unique keys - for ( UniqueKey uniqueKey : table.getUniqueKeys().values() ) { - checkExportIdentifier( uniqueKey, exportIdentifiers ); - applySqlStrings( - dialect.getUniqueKeyExporter().getSqlCreateStrings( uniqueKey, metadata, - sqlStringGenerationContext - ), - formatter, - options, - targets - ); - } - } - } - - //NOTE : Foreign keys must be created *after* all tables of all namespaces for cross namespace fks. see HHH-10420 - for ( Namespace namespace : database.getNamespaces() ) { - // NOTE : Foreign keys must be created *after* unique keys for numerous DBs. See HHH-8390 - - if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) { - continue; - } - - for ( Table table : namespace.getTables() ) { - if ( ! options.getSchemaFilter().includeTable( table ) ) { - continue; - } - - if ( ! 
contributableInclusionMatcher.matches( table ) ) { - continue; - } - - // foreign keys - for ( ForeignKey foreignKey : table.getForeignKeys().values() ) { - applySqlStrings( - dialect.getForeignKeyExporter().getSqlCreateStrings( foreignKey, metadata, - sqlStringGenerationContext - ), - formatter, - options, - targets - ); - } - } - } - + createSequencesTablesConstraints( + metadata, + options, + contributableInclusionMatcher, + dialect, + formatter, + context, + exportIdentifiers, + targets + ); + // foreign keys must be created after all tables of all namespaces for cross-namespace constraints (see HHH-10420) + createForeignKeys( metadata, options, contributableInclusionMatcher, dialect, formatter, context, targets ); // next, create all "after table" auxiliary objects - for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) { + createAuxiliaryObjectsAfterTables( metadata, options, dialect, formatter, context, exportIdentifiers, targets ); + // and finally add all init commands + executeInitCommands(metadata, options, formatter, targets); + } + + private static void executeInitCommands(Metadata metadata, ExecutionOptions options, Formatter formatter, GenerationTarget[] targets) { + for ( InitCommand initCommand : metadata.getDatabase().getInitCommands() ) { + // todo: this should alo probably use the DML formatter... + applySqlStrings( initCommand.getInitCommands(), formatter, options, targets); + } + } + + private static void createAuxiliaryObjectsAfterTables( + Metadata metadata, + ExecutionOptions options, + Dialect dialect, + Formatter formatter, + SqlStringGenerationContext context, + Set exportIdentifiers, + GenerationTarget[] targets) { + for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : metadata.getDatabase().getAuxiliaryDatabaseObjects() ) { if ( auxiliaryDatabaseObject.appliesToDialect( dialect ) && !auxiliaryDatabaseObject.beforeTablesOnCreation() ) { checkExportIdentifier( auxiliaryDatabaseObject, exportIdentifiers ); applySqlStrings( - dialect.getAuxiliaryDatabaseObjectExporter().getSqlCreateStrings( auxiliaryDatabaseObject, metadata, - sqlStringGenerationContext - ), + dialect.getAuxiliaryDatabaseObjectExporter() + .getSqlCreateStrings( auxiliaryDatabaseObject, metadata, context ), formatter, options, targets ); } } + } - // and finally add all init commands - for ( InitCommand initCommand : database.getInitCommands() ) { - // todo: this should alo probably use the DML formatter... 
- applySqlStrings( initCommand.getInitCommands(), formatter, options, targets ); + private static void createForeignKeys( + Metadata metadata, + ExecutionOptions options, + ContributableMatcher contributableInclusionMatcher, + Dialect dialect, + Formatter formatter, + SqlStringGenerationContext context, + GenerationTarget[] targets) { + for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) { + // foreign keys must be created after unique keys for numerous DBs (see HHH-8390) + if ( options.getSchemaFilter().includeNamespace( namespace ) ) { + for ( Table table : namespace.getTables() ) { + if ( options.getSchemaFilter().includeTable( table ) + && contributableInclusionMatcher.matches( table ) ) { + // foreign keys + for ( ForeignKey foreignKey : table.getForeignKeys().values() ) { + applySqlStrings( + dialect.getForeignKeyExporter().getSqlCreateStrings( foreignKey, metadata, context ), + formatter, + options, + targets + ); + } + } + } + } + } + } + + private static void createSequencesTablesConstraints( + Metadata metadata, + ExecutionOptions options, + ContributableMatcher contributableInclusionMatcher, + Dialect dialect, + Formatter formatter, + SqlStringGenerationContext context, + Set exportIdentifiers, + GenerationTarget[] targets) { + for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) { + if ( options.getSchemaFilter().includeNamespace( namespace ) ) { + // sequences + createSequences( + metadata, + options, + contributableInclusionMatcher, + dialect, + formatter, + context, + exportIdentifiers, + targets, + namespace + ); + // tables + createTables( + metadata, + options, + contributableInclusionMatcher, + dialect, + formatter, + context, + exportIdentifiers, + targets, + namespace + ); + createTableConstraints( + metadata, + options, + contributableInclusionMatcher, + dialect, + formatter, + context, + exportIdentifiers, + targets, + namespace + ); + } + } + } + + private static void createTableConstraints( + Metadata metadata, + ExecutionOptions options, + ContributableMatcher contributableInclusionMatcher, + Dialect dialect, + Formatter formatter, + SqlStringGenerationContext context, + Set exportIdentifiers, + GenerationTarget[] targets, + Namespace namespace) { + for ( Table table : namespace.getTables() ) { + if ( table.isPhysicalTable() + && options.getSchemaFilter().includeTable( table ) + && contributableInclusionMatcher.matches( table ) ) { + // indexes + for ( Index index : table.getIndexes().values() ) { + checkExportIdentifier( index, exportIdentifiers ); + applySqlStrings( + dialect.getIndexExporter().getSqlCreateStrings( index, metadata, context ), + formatter, + options, + targets + ); + } + // unique keys + for ( UniqueKey uniqueKey : table.getUniqueKeys().values() ) { + checkExportIdentifier( uniqueKey, exportIdentifiers ); + applySqlStrings( + dialect.getUniqueKeyExporter().getSqlCreateStrings( uniqueKey, metadata, context ), + formatter, + options, + targets + ); + } + } + } + } + + private static void createTables( + Metadata metadata, + ExecutionOptions options, + ContributableMatcher contributableInclusionMatcher, + Dialect dialect, + Formatter formatter, + SqlStringGenerationContext context, + Set exportIdentifiers, + GenerationTarget[] targets, + Namespace namespace) { + for ( Table table : namespace.getTables() ) { + if ( table.isPhysicalTable() + && options.getSchemaFilter().includeTable( table ) + && contributableInclusionMatcher.matches( table ) ) { + checkExportIdentifier( table, exportIdentifiers ); + applySqlStrings( + 
dialect.getTableExporter().getSqlCreateStrings( table, metadata, context ), + formatter, + options, + targets + ); + } + } + } + + private static void createSequences( + Metadata metadata, + ExecutionOptions options, + ContributableMatcher contributableInclusionMatcher, + Dialect dialect, + Formatter formatter, + SqlStringGenerationContext context, + Set exportIdentifiers, + GenerationTarget[] targets, + Namespace namespace) { + for ( Sequence sequence : namespace.getSequences() ) { + if ( options.getSchemaFilter().includeSequence( sequence ) + && contributableInclusionMatcher.matches( sequence ) ) { + checkExportIdentifier( sequence, exportIdentifiers); + applySqlStrings( + dialect.getSequenceExporter().getSqlCreateStrings( sequence, metadata, context ), + formatter, + options, + targets + ); + } + } + } + + private static void createAuxiliaryObjectsBeforeTables( + Metadata metadata, + ExecutionOptions options, + Dialect dialect, + Formatter formatter, + SqlStringGenerationContext context, + Set exportIdentifiers, + GenerationTarget[] targets) { + for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : metadata.getDatabase().getAuxiliaryDatabaseObjects() ) { + if ( auxiliaryDatabaseObject.beforeTablesOnCreation() + && auxiliaryDatabaseObject.appliesToDialect( dialect ) ) { + checkExportIdentifier( auxiliaryDatabaseObject, exportIdentifiers ); + applySqlStrings( + dialect.getAuxiliaryDatabaseObjectExporter() + .getSqlCreateStrings( auxiliaryDatabaseObject, metadata, context ), + formatter, + options, + targets + ); + } + } + } + + private static void createUserDefinedTypes( + Metadata metadata, + ExecutionOptions options, + Dialect dialect, + Formatter formatter, + SqlStringGenerationContext context, + GenerationTarget[] targets) { + for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) { + if ( options.getSchemaFilter().includeNamespace( namespace ) ) { + for ( UserDefinedType userDefinedType : namespace.getDependencyOrderedUserDefinedTypes() ) { + applySqlStrings( + dialect.getUserDefinedTypeExporter() + .getSqlCreateStrings( userDefinedType, metadata, context ), + formatter, + options, + targets + ); + } + } + } + } + + private static void createSchemasAndCatalogs( + Metadata metadata, + ExecutionOptions options, + Dialect dialect, + Formatter formatter, + SqlStringGenerationContext context, + GenerationTarget[] targets) { + final boolean tryToCreateCatalogs = options.shouldManageNamespaces() && dialect.canCreateCatalog(); + final boolean tryToCreateSchemas = options.shouldManageNamespaces() && dialect.canCreateSchema(); + // first, create each catalog/schema + if ( tryToCreateCatalogs || tryToCreateSchemas ) { + Set exportedCatalogs = new HashSet<>(); + for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) { + if ( options.getSchemaFilter().includeNamespace( namespace ) ) { + if ( tryToCreateCatalogs ) { + final Identifier catalogLogicalName = namespace.getName().getCatalog(); + final Identifier catalogPhysicalName = + context.catalogWithDefault( namespace.getPhysicalName().getCatalog() ); + if ( catalogPhysicalName != null && !exportedCatalogs.contains( catalogLogicalName ) ) { + applySqlStrings( + dialect.getCreateCatalogCommand( catalogPhysicalName.render( dialect ) ), + formatter, + options, + targets + ); + exportedCatalogs.add( catalogLogicalName ); + } + } + + final Identifier schemaPhysicalName = + context.schemaWithDefault( namespace.getPhysicalName().getSchema() ); + if ( tryToCreateSchemas && schemaPhysicalName != null ) { + applySqlStrings( + 
dialect.getCreateSchemaCommand( schemaPhysicalName.render( dialect ) ), + formatter, + options, + targets + ); + } + } + } } } @@ -497,12 +567,10 @@ public class SchemaCreatorImpl implements SchemaCreator { Formatter formatter, ExecutionOptions options, GenerationTarget... targets) { - if ( sqlStrings == null ) { - return; - } - - for ( String sqlString : sqlStrings ) { - applySqlString( sqlString, formatter, options, targets ); + if ( sqlStrings != null ) { + for ( String sqlString : sqlStrings ) { + applySqlString( sqlString, formatter, options, targets ); + } } } @@ -511,18 +579,16 @@ public class SchemaCreatorImpl implements SchemaCreator { Formatter formatter, ExecutionOptions options, GenerationTarget... targets) { - if ( StringHelper.isEmpty( sqlString ) ) { - return; - } - - try { - String sqlStringFormatted = formatter.format( sqlString ); - for ( GenerationTarget target : targets ) { - target.accept( sqlStringFormatted ); + if ( isNotEmpty( sqlString ) ) { + try { + final String sqlStringFormatted = formatter.format( sqlString ); + for ( GenerationTarget target : targets ) { + target.accept( sqlStringFormatted ); + } + } + catch (CommandAcceptanceException e) { + options.getExceptionHandler().handleException( e ); } - } - catch (CommandAcceptanceException e) { - options.getExceptionHandler().handleException( e ); } } @@ -532,75 +598,159 @@ public class SchemaCreatorImpl implements SchemaCreator { boolean format, Dialect dialect, GenerationTarget... targets) { - final ServiceRegistry serviceRegistry = tool.getServiceRegistry(); - final ClassLoaderService classLoaderService = serviceRegistry.getService( ClassLoaderService.class ); - // I have had problems applying the formatter to these imported statements. - // and legacy SchemaExport did not format them, so doing same here - //final Formatter formatter = format ? DDLFormatterImpl.INSTANCE : FormatStyle.NONE.getFormatter(); - final Formatter formatter = FormatStyle.NONE.getFormatter(); + final Formatter formatter = getImportScriptFormatter(format); - Object importScriptSetting = options.getConfigurationValues().get( HBM2DDL_LOAD_SCRIPT_SOURCE ); - if ( importScriptSetting == null ) { - importScriptSetting = options.getConfigurationValues().get( JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE ); - } - String charsetName = (String) options.getConfigurationValues().get( HBM2DDL_CHARSET_NAME ); - - boolean hasDefaultImportFileScriptBeenExecuted = false; - if ( importScriptSetting != null ) { - final ScriptSourceInput importScriptInput = interpretScriptSourceSetting( importScriptSetting, classLoaderService, charsetName ); - final URL defaultImportFileUrl = classLoaderService.locateResource( DEFAULT_IMPORT_FILE ); - if ( defaultImportFileUrl != null && importScriptInput.containsScript( defaultImportFileUrl ) ) { - hasDefaultImportFileScriptBeenExecuted = true; - } - final List commands = importScriptInput.extract( - reader -> commandExtractor.extractCommands( reader, dialect ) - ); - for ( int i = 0; i < commands.size(); i++ ) { - applySqlString( commands.get( i ), formatter, options, targets ); - } - } - - final String importFiles = ConfigurationHelper.getString( - AvailableSettings.HBM2DDL_IMPORT_FILES, - options.getConfigurationValues(), - hasDefaultImportFileScriptBeenExecuted ? 
"" : DEFAULT_IMPORT_FILE + boolean hasDefaultImportFileScriptBeenExecuted = applyImportScript( + options, + commandExtractor, + dialect, + formatter, + targets ); + applyImportFiles( + options, + commandExtractor, + dialect, + formatter, + hasDefaultImportFileScriptBeenExecuted ? "" : DEFAULT_IMPORT_FILE, + targets + ); + } - for ( String currentFile : importFiles.split( "," ) ) { - final String resourceName = currentFile.trim(); - if ( resourceName.isEmpty() ) { - //skip empty resource names - continue; - } - final ScriptSourceInput importScriptInput = interpretLegacyImportScriptSetting( resourceName, classLoaderService, charsetName ); - final List commands = importScriptInput.extract( - reader -> commandExtractor.extractCommands( reader, dialect ) + /** + * In principle, we should format the commands in the import script if the + * {@code format} parameter is {@code true}, and since it's supposed to be + * a list of DML statements, we should use the {@linkplain FormatStyle#BASIC + * basic DML formatter} to do that. However, in practice we don't really know + * much about what this file contains, and we have never formatted it in the + * past, so there's no compelling reason to start now. In fact, if we have + * lists of many {@code insert} statements on the same table, which is what + * we typically expect, it's probably better to not format. + */ + private static Formatter getImportScriptFormatter(boolean format) { +// return format ? FormatStyle.BASIC.getFormatter() : FormatStyle.NONE.getFormatter(); + return FormatStyle.NONE.getFormatter(); + } + + /** + * Handles import scripts specified using + * {@link org.hibernate.cfg.AvailableSettings#HBM2DDL_IMPORT_FILES}. + * + * @return {@code true} if the legacy {@linkplain #DEFAULT_IMPORT_FILE default import file} + * was one of the listed imported files that were executed + */ + private boolean applyImportScript( + ExecutionOptions options, + SqlScriptCommandExtractor commandExtractor, + Dialect dialect, + Formatter formatter, + GenerationTarget[] targets) { + final Object importScriptSetting = getImportScriptSetting( options ); + if ( importScriptSetting != null ) { + final ScriptSourceInput importScriptInput = + interpretScriptSourceSetting( importScriptSetting, getClassLoaderService(), getCharsetName( options ) ); + applyImportScript( + options, + commandExtractor, + dialect, + importScriptInput, + formatter, + targets ); - for ( int i = 0; i < commands.size(); i++ ) { - applySqlString( commands.get( i ), formatter, options, targets ); + return containsDefaultImportFile( importScriptInput ); + } + else { + return false; + } + } + + private boolean containsDefaultImportFile(ScriptSourceInput importScriptInput) { + final URL defaultImportFileUrl = getClassLoaderService().locateResource( DEFAULT_IMPORT_FILE ); + return defaultImportFileUrl != null && importScriptInput.containsScript(defaultImportFileUrl); + } + + /** + * Handles import scripts specified using + * {@link org.hibernate.cfg.AvailableSettings#JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE}. 
+ */ + private void applyImportFiles( + ExecutionOptions options, + SqlScriptCommandExtractor commandExtractor, + Dialect dialect, + Formatter formatter, + String defaultImportFile, + GenerationTarget[] targets) { + final String[] importFiles = + getString( HBM2DDL_IMPORT_FILES, options.getConfigurationValues(), defaultImportFile ) + .split( "," ); + final String charsetName = getCharsetName( options ); + final ClassLoaderService classLoaderService = getClassLoaderService(); + for ( String currentFile : importFiles ) { + final String resourceName = currentFile.trim(); + if ( !resourceName.isEmpty() ) { //skip empty resource names + applyImportScript( + options, + commandExtractor, + dialect, + interpretLegacyImportScriptSetting( resourceName, classLoaderService, charsetName ), + formatter, + targets + ); } } } + private static void applyImportScript( + ExecutionOptions options, + SqlScriptCommandExtractor commandExtractor, + Dialect dialect, + ScriptSourceInput importScriptInput, + Formatter formatter, + GenerationTarget[] targets) { + final List commands = importScriptInput.extract( + reader -> commandExtractor.extractCommands( reader, dialect ) + ); + for ( String command : commands ) { + applySqlString( command, formatter, options, targets ); + } + } + private ScriptSourceInput interpretLegacyImportScriptSetting( String resourceName, ClassLoaderService classLoaderService, String charsetName) { try { final URL resourceUrl = classLoaderService.locateResource( resourceName ); - if ( resourceUrl == null ) { - return ScriptSourceInputNonExistentImpl.INSTANCE; - } - else { - return new ScriptSourceInputFromUrl( resourceUrl, charsetName ); - } + return resourceUrl == null + ? ScriptSourceInputNonExistentImpl.INSTANCE + : new ScriptSourceInputFromUrl( resourceUrl, charsetName ); } catch (Exception e) { throw new SchemaManagementException( "Error resolving legacy import resource : " + resourceName, e ); } } + /** + * @see org.hibernate.cfg.AvailableSettings#HBM2DDL_CHARSET_NAME + */ + private static String getCharsetName(ExecutionOptions options) { + return (String) options.getConfigurationValues().get( HBM2DDL_CHARSET_NAME ); + } + + /** + * @see org.hibernate.cfg.AvailableSettings#JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE + * + * @return a {@link java.io.Reader} or a string URL + */ + private static Object getImportScriptSetting(ExecutionOptions options) { + final Map configuration = options.getConfigurationValues(); + final Object importScriptSetting = configuration.get( HBM2DDL_LOAD_SCRIPT_SOURCE ); + return importScriptSetting == null + ? configuration.get( JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE ) + : importScriptSetting; + } + /** * For testing... * @@ -650,7 +800,9 @@ public class SchemaCreatorImpl implements SchemaCreator { Metadata metadata, final boolean manageNamespaces, GenerationTarget... 
targets) { - final ServiceRegistry serviceRegistry = ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry(); + final ServiceRegistry serviceRegistry = + ( (MetadataImplementor) metadata ).getMetadataBuildingOptions() + .getServiceRegistry(); doCreation( metadata, serviceRegistry, diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaDropperImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaDropperImpl.java index da51c02309..24cac9739c 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaDropperImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaDropperImpl.java @@ -37,8 +37,6 @@ import org.hibernate.engine.jdbc.spi.SqlExceptionHelper; import org.hibernate.engine.jdbc.spi.SqlStatementLogger; import org.hibernate.internal.CoreLogging; import org.hibernate.internal.CoreMessageLogger; -import org.hibernate.internal.util.StringHelper; -import org.hibernate.internal.util.collections.CollectionHelper; import org.hibernate.mapping.ForeignKey; import org.hibernate.mapping.Table; import org.hibernate.mapping.UserDefinedType; @@ -65,9 +63,12 @@ import org.hibernate.tool.schema.spi.TargetDescriptor; import org.jboss.logging.Logger; +import static org.hibernate.internal.util.StringHelper.isNotEmpty; +import static org.hibernate.internal.util.collections.CollectionHelper.setOfSize; +import static org.hibernate.tool.schema.internal.Helper.interpretFormattingEnabled; + /** - * This is functionally nothing more than the creation script from the older SchemaExport class (plus some - * additional stuff in the script). + * Basic implementation of {@link SchemaDropper}. * * @author Steve Ebersole */ @@ -105,18 +106,21 @@ public class SchemaDropperImpl implements SchemaDropper { public void doDrop( Metadata metadata, ExecutionOptions options, - ContributableMatcher contributableInclusionFilter, + ContributableMatcher inclusionFilter, SourceDescriptor sourceDescriptor, TargetDescriptor targetDescriptor) { - - if ( targetDescriptor.getTargetTypes().isEmpty() ) { - return; + if ( !targetDescriptor.getTargetTypes().isEmpty() ) { + final Map configuration = options.getConfigurationValues(); + final JdbcContext jdbcContext = tool.resolveJdbcContext( configuration ); + doDrop( + metadata, + options, + inclusionFilter, + jdbcContext.getDialect(), + sourceDescriptor, + tool.buildGenerationTargets( targetDescriptor, jdbcContext, configuration, true ) + ); } - - final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() ); - final GenerationTarget[] targets = tool.buildGenerationTargets( targetDescriptor, jdbcContext, options.getConfigurationValues(), true ); - - doDrop( metadata, options, contributableInclusionFilter, jdbcContext.getDialect(), sourceDescriptor, targets ); } /** @@ -129,7 +133,7 @@ public class SchemaDropperImpl implements SchemaDropper { Dialect dialect, SourceDescriptor sourceDescriptor, GenerationTarget... targets) { - doDrop( metadata, options, (contributed) -> true, dialect, sourceDescriptor, targets ); + doDrop( metadata, options, contributed -> true, dialect, sourceDescriptor, targets ); } /** @@ -139,7 +143,7 @@ public class SchemaDropperImpl implements SchemaDropper { public void doDrop( Metadata metadata, ExecutionOptions options, - ContributableMatcher contributableInclusionFilter, + ContributableMatcher inclusionFilter, Dialect dialect, SourceDescriptor sourceDescriptor, GenerationTarget... 
targets) { @@ -148,7 +152,7 @@ public class SchemaDropperImpl implements SchemaDropper { } try { - performDrop( metadata, options, contributableInclusionFilter, dialect, sourceDescriptor, targets ); + performDrop( metadata, options, inclusionFilter, dialect, sourceDescriptor, targets ); } finally { for ( GenerationTarget target : targets ) { @@ -165,30 +169,36 @@ public class SchemaDropperImpl implements SchemaDropper { private void performDrop( Metadata metadata, ExecutionOptions options, - ContributableMatcher contributableInclusionFilter, + ContributableMatcher inclusionFilter, Dialect dialect, SourceDescriptor sourceDescriptor, GenerationTarget... targets) { - final SqlScriptCommandExtractor commandExtractor = tool.getServiceRegistry().getService( SqlScriptCommandExtractor.class ); - final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() ); + final SqlScriptCommandExtractor commandExtractor = getCommandExtractor(); + final boolean format = interpretFormattingEnabled( options.getConfigurationValues() ); final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter(); - if ( sourceDescriptor.getSourceType() == SourceType.SCRIPT ) { - dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); - } - else if ( sourceDescriptor.getSourceType() == SourceType.METADATA ) { - dropFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets ); - } - else if ( sourceDescriptor.getSourceType() == SourceType.METADATA_THEN_SCRIPT ) { - dropFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets ); - dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); - } - else { - dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); - dropFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets ); + switch ( sourceDescriptor.getSourceType() ) { + case SCRIPT: + dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); + break; + case METADATA: + dropFromMetadata( metadata, options, inclusionFilter, dialect, formatter, targets ); + break; + case METADATA_THEN_SCRIPT: + dropFromMetadata( metadata, options, inclusionFilter, dialect, formatter, targets ); + dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); + break; + case SCRIPT_THEN_METADATA: + dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); + dropFromMetadata( metadata, options, inclusionFilter, dialect, formatter, targets ); + break; } } + private SqlScriptCommandExtractor getCommandExtractor() { + return tool.getServiceRegistry().getService(SqlScriptCommandExtractor.class); + } + private void dropFromScript( ScriptSourceInput scriptSourceInput, SqlScriptCommandExtractor commandExtractor, @@ -199,175 +209,245 @@ public class SchemaDropperImpl implements SchemaDropper { final List commands = scriptSourceInput.extract( reader -> commandExtractor.extractCommands( reader, dialect ) ); - for ( int i = 0; i < commands.size(); i++ ) { - applySqlString( commands.get( i ), formatter, options, targets ); + for ( String command : commands ) { + applySqlString( command, formatter, options, targets ); } } + private static 
SqlStringGenerationContext createSqlStringGenerationContext(ExecutionOptions options, Metadata metadata) { + final Database database = metadata.getDatabase(); + return SqlStringGenerationContextImpl.fromConfigurationMap( + database.getJdbcEnvironment(), + database, + options.getConfigurationValues() + ); + } + private void dropFromMetadata( Metadata metadata, ExecutionOptions options, - ContributableMatcher contributableInclusionFilter, + ContributableMatcher inclusionFilter, Dialect dialect, Formatter formatter, GenerationTarget... targets) { - final Database database = metadata.getDatabase(); - SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.fromConfigurationMap( - metadata.getDatabase().getJdbcEnvironment(), database, options.getConfigurationValues()); - - boolean tryToDropCatalogs = false; - boolean tryToDropSchemas = false; - if ( options.shouldManageNamespaces() ) { - if ( dialect.canCreateSchema() ) { - tryToDropSchemas = true; - } - if ( dialect.canCreateCatalog() ) { - tryToDropCatalogs = true; - } - } - - final Set exportIdentifiers = CollectionHelper.setOfSize( 50 ); // NOTE : init commands are irrelevant for dropping... + final SqlStringGenerationContext context = createSqlStringGenerationContext( options, metadata ); // Reverse the list on drop to retain possible dependencies - final Collection reversedAuxiliaryDatabaseObjects = reverse( database.getAuxiliaryDatabaseObjects() ); + dropAuxiliaryObjectsBeforeTables( metadata, options, dialect, formatter, context, targets ); + dropConstraintsTablesSequences( + metadata, + options, + inclusionFilter, + dialect, + formatter, + context, + targets + ); + dropAuxiliaryObjectsAfterTables( metadata, options, dialect, formatter, context, targets ); + dropUserDefinedTypes( metadata, options, dialect, formatter, context, targets ); + dropSchemasAndCatalogs( metadata, options, dialect, formatter, targets ); + } - for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : reversedAuxiliaryDatabaseObjects ) { - if ( auxiliaryDatabaseObject.beforeTablesOnCreation() ) { - continue; - } - if ( !auxiliaryDatabaseObject.appliesToDialect( dialect ) ) { - continue; - } + private void dropConstraintsTablesSequences( + Metadata metadata, + ExecutionOptions options, + ContributableMatcher inclusionFilter, + Dialect dialect, + Formatter formatter, + SqlStringGenerationContext context, + GenerationTarget[] targets) { + final Set exportIdentifiers = setOfSize( 50 ); + for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) { + if ( options.getSchemaFilter().includeNamespace( namespace ) ) { - applySqlStrings( - dialect.getAuxiliaryDatabaseObjectExporter().getSqlDropStrings( auxiliaryDatabaseObject, metadata, - sqlStringGenerationContext - ), - formatter, - options, - targets - ); - } + // we need to drop all constraints/indexes prior to dropping the tables + applyConstraintDropping( + namespace, + metadata, + formatter, + options, + context, + inclusionFilter, + targets + ); - for ( Namespace namespace : database.getNamespaces() ) { + // now it's safe to drop the tables + dropTables( + metadata, + options, + inclusionFilter, + dialect, + formatter, + exportIdentifiers, + context, + namespace, + targets + ); - if ( ! 
-		for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : reversedAuxiliaryDatabaseObjects ) {
-			if ( auxiliaryDatabaseObject.beforeTablesOnCreation() ) {
-				continue;
-			}
-			if ( !auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
-				continue;
-			}
+	private void dropConstraintsTablesSequences(
+			Metadata metadata,
+			ExecutionOptions options,
+			ContributableMatcher inclusionFilter,
+			Dialect dialect,
+			Formatter formatter,
+			SqlStringGenerationContext context,
+			GenerationTarget[] targets) {
+		final Set<String> exportIdentifiers = setOfSize( 50 );
+		for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
+			if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
-			applySqlStrings(
-					dialect.getAuxiliaryDatabaseObjectExporter().getSqlDropStrings( auxiliaryDatabaseObject, metadata,
-							sqlStringGenerationContext
-					),
-					formatter,
-					options,
-					targets
-			);
-		}
+				// we need to drop all constraints/indexes prior to dropping the tables
+				applyConstraintDropping(
+						namespace,
+						metadata,
+						formatter,
+						options,
+						context,
+						inclusionFilter,
+						targets
+				);
-		for ( Namespace namespace : database.getNamespaces() ) {
+				// now it's safe to drop the tables
+				dropTables(
+						metadata,
+						options,
+						inclusionFilter,
+						dialect,
+						formatter,
+						exportIdentifiers,
+						context,
+						namespace,
+						targets
+				);
-			if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
-				continue;
-			}
-
-			// we need to drop all constraints/indexes prior to dropping the tables
-			applyConstraintDropping( namespace, metadata, formatter, options, sqlStringGenerationContext,
-					contributableInclusionFilter, targets );
-
-			// now it's safe to drop the tables
-			for ( Table table : namespace.getTables() ) {
-				if ( ! table.isPhysicalTable() ) {
-					continue;
-				}
-				if ( ! options.getSchemaFilter().includeTable( table ) ) {
-					continue;
-				}
-				if ( ! contributableInclusionFilter.matches( table ) ) {
-					continue;
-				}
-				checkExportIdentifier( table, exportIdentifiers );
-
-				applySqlStrings( dialect.getTableExporter().getSqlDropStrings( table, metadata,
-						sqlStringGenerationContext
-				), formatter, options,targets );
-			}
-
-			for ( Sequence sequence : namespace.getSequences() ) {
-				if ( ! options.getSchemaFilter().includeSequence( sequence ) ) {
-					continue;
-				}
-				if ( ! contributableInclusionFilter.matches( sequence ) ) {
-					continue;
-				}
-				checkExportIdentifier( sequence, exportIdentifiers );
-
-				applySqlStrings( dialect.getSequenceExporter().getSqlDropStrings( sequence, metadata,
-						sqlStringGenerationContext
-				), formatter, options, targets );
+				dropSequences(
+						metadata,
+						options,
+						inclusionFilter,
+						dialect,
+						formatter,
+						exportIdentifiers,
+						context,
+						namespace,
+						targets
+				);
 			}
 		}
+	}
-		for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : reversedAuxiliaryDatabaseObjects ) {
-			if ( !auxiliaryDatabaseObject.beforeTablesOnCreation() ) {
-				continue;
-			}
-			if ( !auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
-				continue;
-			}
-
-			applySqlStrings(
-					auxiliaryDatabaseObject.sqlDropStrings( sqlStringGenerationContext ),
-					formatter,
-					options,
-					targets
-			);
-		}
-
-		for ( Namespace namespace : database.getNamespaces() ) {
-
-			if ( !options.getSchemaFilter().includeNamespace( namespace ) ) {
-				continue;
-			}
-			final List<UserDefinedType> dependencyOrderedUserDefinedTypes = namespace.getDependencyOrderedUserDefinedTypes();
-			Collections.reverse( dependencyOrderedUserDefinedTypes );
-			for ( UserDefinedType userDefinedType : dependencyOrderedUserDefinedTypes ) {
+	private static void dropAuxiliaryObjectsBeforeTables(
+			Metadata metadata,
+			ExecutionOptions options,
+			Dialect dialect,
+			Formatter formatter,
+			SqlStringGenerationContext context,
+			GenerationTarget[] targets) {
+		for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject :
+				reverse( metadata.getDatabase().getAuxiliaryDatabaseObjects() ) ) {
+			if ( !auxiliaryDatabaseObject.beforeTablesOnCreation()
+					&& auxiliaryDatabaseObject.appliesToDialect(dialect) ) {
 				applySqlStrings(
-						dialect.getUserDefinedTypeExporter()
-								.getSqlDropStrings( userDefinedType, metadata, sqlStringGenerationContext ),
+						dialect.getAuxiliaryDatabaseObjectExporter()
+								.getSqlDropStrings( auxiliaryDatabaseObject, metadata, context ),
 						formatter,
 						options,
 						targets
 				);
 			}
 		}
+	}
-		if ( tryToDropCatalogs || tryToDropSchemas ) {
-			Set<Identifier> exportedCatalogs = new HashSet<>();
+	private static void dropAuxiliaryObjectsAfterTables(
+			Metadata metadata,
+			ExecutionOptions options,
+			Dialect dialect,
+			Formatter formatter,
+			SqlStringGenerationContext context,
+			GenerationTarget[] targets) {
+		for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject :
+				reverse( metadata.getDatabase().getAuxiliaryDatabaseObjects() ) ) {
+			if ( auxiliaryDatabaseObject.beforeTablesOnCreation()
+					&& auxiliaryDatabaseObject.appliesToDialect(dialect) ) {
+				applySqlStrings(
+						auxiliaryDatabaseObject.sqlDropStrings( context ),
+						formatter,
+						options,
+						targets
+				);
+			}
+		}
+	}
-
+	private static void dropSequences(
+			Metadata metadata,
+			ExecutionOptions options,
+			ContributableMatcher inclusionFilter,
+			Dialect dialect,
+			Formatter formatter,
+			Set<String> exportIdentifiers,
+			SqlStringGenerationContext context,
+			Namespace namespace,
+			GenerationTarget[] targets) {
+		for ( Sequence sequence : namespace.getSequences() ) {
+			if ( options.getSchemaFilter().includeSequence( sequence )
+					&& inclusionFilter.matches( sequence ) ) {
+				checkExportIdentifier( sequence, exportIdentifiers);
+				applySqlStrings(
+						dialect.getSequenceExporter().getSqlDropStrings( sequence, metadata, context ),
+						formatter,
+						options,
+						targets
+				);
+			}
+		}
+	}
-
+	private static void dropTables(
+			Metadata metadata,
+			ExecutionOptions options,
+			ContributableMatcher inclusionFilter,
+			Dialect dialect,
+			Formatter formatter,
+			Set<String> exportIdentifiers,
+			SqlStringGenerationContext context,
+			Namespace namespace,
+			GenerationTarget[] targets) {
+		for ( Table table : namespace.getTables() ) {
+			if ( table.isPhysicalTable()
+					&& options.getSchemaFilter().includeTable( table )
+					&& inclusionFilter.matches( table ) ) {
+				checkExportIdentifier( table, exportIdentifiers);
+				applySqlStrings(
+						dialect.getTableExporter().getSqlDropStrings( table, metadata, context),
+						formatter,
+						options,
+						targets
+				);
+			}
+		}
+	}
-
+	private static void dropUserDefinedTypes(
+			Metadata metadata,
+			ExecutionOptions options,
+			Dialect dialect,
+			Formatter formatter,
+			SqlStringGenerationContext context,
+			GenerationTarget[] targets) {
+		for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
+			if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
+				final List<UserDefinedType> dependencyOrderedUserDefinedTypes = namespace.getDependencyOrderedUserDefinedTypes();
+				Collections.reverse( dependencyOrderedUserDefinedTypes );
+				for ( UserDefinedType userDefinedType : dependencyOrderedUserDefinedTypes ) {
 					applySqlStrings(
-							dialect.getDropSchemaCommand(
-									namespace.getPhysicalName().getSchema().render( dialect )
-							),
+							dialect.getUserDefinedTypeExporter()
+									.getSqlDropStrings( userDefinedType, metadata, context ),
 							formatter,
 							options,
 							targets
 					);
 				}
-				if ( tryToDropCatalogs ) {
-					final Identifier catalogLogicalName = namespace.getName().getCatalog();
-					final Identifier catalogPhysicalName = namespace.getPhysicalName().getCatalog();
+			}
+		}
+	}
-					if ( catalogPhysicalName != null && !exportedCatalogs.contains( catalogLogicalName ) ) {
-						applySqlStrings(
-								dialect.getDropCatalogCommand(
-										catalogPhysicalName.render( dialect )
-								),
-								formatter,
-								options,
-								targets
-						);
-						exportedCatalogs.add( catalogLogicalName );
+	private static void dropSchemasAndCatalogs(
+			Metadata metadata,
+			ExecutionOptions options,
+			Dialect dialect,
+			Formatter formatter,
+			GenerationTarget[] targets) {
+		boolean tryToDropCatalogs = options.shouldManageNamespaces() && dialect.canCreateCatalog();
+		boolean tryToDropSchemas = options.shouldManageNamespaces() && dialect.canCreateSchema();
+		if ( tryToDropCatalogs || tryToDropSchemas) {
+			final Set<Identifier> exportedCatalogs = new HashSet<>();
+			for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
+				if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
+					if ( tryToDropSchemas && namespace.getPhysicalName().getSchema() != null ) {
+						final String schemaName = namespace.getPhysicalName().getSchema().render( dialect );
+						applySqlStrings( dialect.getDropSchemaCommand( schemaName ), formatter, options, targets);
+					}
+					if (tryToDropCatalogs) {
+						final Identifier catalogLogicalName = namespace.getName().getCatalog();
+						final Identifier catalogPhysicalName = namespace.getPhysicalName().getCatalog();
+						if ( catalogPhysicalName != null && !exportedCatalogs.contains( catalogLogicalName ) ) {
+							final String catalogName = catalogPhysicalName.render( dialect );
+							applySqlStrings( dialect.getDropCatalogCommand( catalogName ), formatter, options, targets );
+							exportedCatalogs.add( catalogLogicalName );
+						}
+					}
 				}
 			}
 		}
 	}
 
-	private Collection<AuxiliaryDatabaseObject> reverse(Collection<AuxiliaryDatabaseObject> auxiliaryDatabaseObjects) {
+	private static Collection<AuxiliaryDatabaseObject> reverse(Collection<AuxiliaryDatabaseObject> auxiliaryDatabaseObjects) {
 		final List<AuxiliaryDatabaseObject> list = new ArrayList<>( auxiliaryDatabaseObjects );
 		Collections.reverse( list );
 		return list;
@@ -378,35 +458,24 @@ public class SchemaDropperImpl implements SchemaDropper {
 			Metadata metadata,
 			Formatter formatter,
 			ExecutionOptions options,
-			SqlStringGenerationContext sqlStringGenerationContext,
-			ContributableMatcher contributableInclusionFilter,
+			SqlStringGenerationContext context,
+			ContributableMatcher inclusionFilter,
 			GenerationTarget... targets) {
 		final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
-
-		if ( !dialect.dropConstraints() ) {
-			return;
-		}
-
-		for ( Table table : namespace.getTables() ) {
-			if ( !table.isPhysicalTable() ) {
-				continue;
-			}
-			if ( ! options.getSchemaFilter().includeTable( table ) ) {
-				continue;
-			}
-			if ( ! contributableInclusionFilter.matches( table ) ) {
-				continue;
-			}
-
-			for ( ForeignKey foreignKey : table.getForeignKeys().values() ) {
-				applySqlStrings(
-						dialect.getForeignKeyExporter().getSqlDropStrings( foreignKey, metadata,
-								sqlStringGenerationContext
-						),
-						formatter,
-						options,
-						targets
-				);
+		if ( dialect.dropConstraints() ) {
+			for ( Table table : namespace.getTables() ) {
+				if ( table.isPhysicalTable()
+						&& options.getSchemaFilter().includeTable( table )
+						&& inclusionFilter.matches( table ) ) {
+					for ( ForeignKey foreignKey : table.getForeignKeys().values() ) {
+						applySqlStrings(
+								dialect.getForeignKeyExporter().getSqlDropStrings( foreignKey, metadata, context ),
+								formatter,
+								options,
+								targets
+						);
+					}
+				}
 			}
 		}
 	}
@@ -424,12 +493,10 @@ public class SchemaDropperImpl implements SchemaDropper {
 			Formatter formatter,
 			ExecutionOptions options,
 			GenerationTarget... targets) {
-		if ( sqlStrings == null ) {
-			return;
-		}
-
-		for ( String sqlString : sqlStrings ) {
-			applySqlString( sqlString, formatter, options, targets );
+		if ( sqlStrings != null ) {
+			for ( String sqlString : sqlStrings ) {
+				applySqlString( sqlString, formatter, options, targets );
+			}
 		}
 	}
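The applySqlStrings()/applySqlString() pair above and in the next hunk boils down to: skip null or empty input, format each command once, then hand it to every target, routing failures through an exception handler. A simplified, runnable sketch of that pattern follows; the functional types here stand in for GenerationTarget, Formatter and ExceptionHandler.

    // Illustrative sketch of the null-safe "format once, apply to each target" pattern.
    import java.util.List;
    import java.util.function.Consumer;
    import java.util.function.UnaryOperator;

    public class ApplySqlSketch {
        static void applySqlStrings(String[] sqlStrings, UnaryOperator<String> formatter,
                Consumer<Exception> exceptionHandler, List<Consumer<String>> targets) {
            if ( sqlStrings != null ) {
                for ( String sqlString : sqlStrings ) {
                    applySqlString( sqlString, formatter, exceptionHandler, targets );
                }
            }
        }

        static void applySqlString(String sqlString, UnaryOperator<String> formatter,
                Consumer<Exception> exceptionHandler, List<Consumer<String>> targets) {
            if ( sqlString != null && !sqlString.isEmpty() ) {
                final String formatted = formatter.apply( sqlString );
                for ( Consumer<String> target : targets ) {
                    try {
                        target.accept( formatted );
                    }
                    catch (RuntimeException e) {
                        // mirrors options.getExceptionHandler().handleException( e )
                        exceptionHandler.accept( e );
                    }
                }
            }
        }

        public static void main(String[] args) {
            applySqlStrings(
                    new String[] { "drop table t1", "", "drop table t2" },
                    sql -> sql + ";",
                    e -> System.err.println( "failed: " + e.getMessage() ),
                    List.of( System.out::println )
            );
        }
    }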
@@ -438,73 +505,28 @@ public class SchemaDropperImpl implements SchemaDropper {
 			Formatter formatter,
 			ExecutionOptions options,
 			GenerationTarget... targets) {
-		if ( StringHelper.isEmpty( sqlString ) ) {
-			return;
-		}
-
-		String sqlStringFormatted = formatter.format( sqlString );
-		for ( GenerationTarget target : targets ) {
-			try {
-				target.accept( sqlStringFormatted );
-			}
-			catch (CommandAcceptanceException e) {
-				options.getExceptionHandler().handleException( e );
+		if ( isNotEmpty( sqlString ) ) {
+			final String sqlStringFormatted = formatter.format( sqlString );
+			for ( GenerationTarget target : targets ) {
+				try {
+					target.accept( sqlStringFormatted );
+				}
+				catch (CommandAcceptanceException e) {
+					options.getExceptionHandler().handleException( e );
+				}
 			}
 		}
 	}
 
-	/**
-	 * For testing...
-	 *
-	 * @param metadata The metadata for which to generate the creation commands.
-	 *
-	 * @return The generation commands
-	 */
-	public List<String> generateDropCommands(Metadata metadata, final boolean manageNamespaces) {
-		final JournalingGenerationTarget target = new JournalingGenerationTarget();
-
-		final ServiceRegistry serviceRegistry = ( (MetadataImplementor) metadata ).getMetadataBuildingOptions()
-				.getServiceRegistry();
-		final Dialect dialect = serviceRegistry.getService( JdbcEnvironment.class ).getDialect();
-
-		final ExecutionOptions options = new ExecutionOptions() {
-			@Override
-			public boolean shouldManageNamespaces() {
-				return manageNamespaces;
-			}
-
-			@Override
-			public Map<String,Object> getConfigurationValues() {
-				return Collections.emptyMap();
-			}
-
-			@Override
-			public ExceptionHandler getExceptionHandler() {
-				return ExceptionHandlerHaltImpl.INSTANCE;
-			}
-
-			@Override
-			public SchemaFilter getSchemaFilter() {
-				return schemaFilter;
-			}
-		};
-
-		dropFromMetadata( metadata, options, (contributed) -> true, dialect, FormatStyle.NONE.getFormatter(), target );
-
-		return target.commands;
-	}
-
 	@Override
 	public DelayedDropAction buildDelayedAction(
 			Metadata metadata,
 			ExecutionOptions options,
-			ContributableMatcher contributableInclusionFilter,
+			ContributableMatcher inclusionFilter,
 			SourceDescriptor sourceDescriptor) {
 		final JournalingGenerationTarget target = new JournalingGenerationTarget();
-
 		final Dialect dialect = tool.getServiceRegistry().getService( JdbcEnvironment.class ).getDialect();
-		doDrop( metadata, options, contributableInclusionFilter, dialect, sourceDescriptor, target );
-
+		doDrop( metadata, options, inclusionFilter, dialect, sourceDescriptor, target );
 		return new DelayedDropActionImpl( target.commands, tool.getCustomDatabaseGenerationTarget() );
 	}
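buildDelayedAction() above journals the generated commands into a target instead of executing them, so the drop can be replayed later. A minimal sketch of that journaling idea follows; it is a simplified stand-in, not Hibernate's JournalingGenerationTarget or the GenerationTarget SPI.

    // Illustrative sketch of a "journaling" target: record each command for later replay.
    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Consumer;

    public class JournalingTargetSketch {
        static final class JournalingTarget implements Consumer<String> {
            final List<String> commands = new ArrayList<>();
            @Override
            public void accept(String command) {
                commands.add( command );
            }
        }

        public static void main(String[] args) {
            JournalingTarget target = new JournalingTarget();
            target.accept( "drop table person" );
            target.accept( "drop sequence person_seq" );
            // later, a delayed action replays the journal against a real database target
            target.commands.forEach( System.out::println );
        }
    }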
@@ -512,7 +534,9 @@ public class SchemaDropperImpl implements SchemaDropper {
 	 * For tests
 	 */
 	public void doDrop(Metadata metadata, boolean manageNamespaces, GenerationTarget... targets) {
-		final ServiceRegistry serviceRegistry = ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry();
+		final ServiceRegistry serviceRegistry =
+				( (MetadataImplementor) metadata ).getMetadataBuildingOptions()
+						.getServiceRegistry();
 		doDrop(
 				metadata,
 				serviceRegistry,
@@ -535,7 +559,8 @@ public class SchemaDropperImpl implements SchemaDropper {
 			final JdbcContext jdbcContext = tool.resolveJdbcContext( settings );
 			targets = new GenerationTarget[] {
 				new GenerationTargetToDatabase(
-						serviceRegistry.getService( TransactionCoordinatorBuilder.class ).buildDdlTransactionIsolator( jdbcContext ),
+						serviceRegistry.getService( TransactionCoordinatorBuilder.class )
+								.buildDdlTransactionIsolator( jdbcContext ),
 						true
 				)
 			};
diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaTruncatorImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaTruncatorImpl.java
index b70f6cead5..10e07e28c0 100644
--- a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaTruncatorImpl.java
+++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaTruncatorImpl.java
@@ -49,6 +49,8 @@ import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_LOAD_SCRIPT_SO
 import static org.hibernate.tool.schema.internal.Helper.interpretScriptSourceSetting;
 
 /**
+ * Basic implementation of {@link SchemaTruncator}.
+ *
  * @author Gavin King
  */
 public class SchemaTruncatorImpl implements SchemaTruncator {
@@ -119,7 +121,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
 			Formatter formatter,
 			GenerationTarget... targets) {
 		final Database database = metadata.getDatabase();
-		SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.fromConfigurationMap(
+		SqlStringGenerationContext context = SqlStringGenerationContextImpl.fromConfigurationMap(
 				metadata.getDatabase().getJdbcEnvironment(), database, options.getConfigurationValues() );
@@ -131,7 +133,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
 				continue;
 			}
 
-			disableConstraints( namespace, metadata, formatter, options, sqlStringGenerationContext,
+			disableConstraints( namespace, metadata, formatter, options, context,
 					contributableInclusionFilter, targets );
 			applySqlString( dialect.getTableCleaner().getSqlBeforeString(), formatter, options,targets );
@@ -151,7 +153,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
 				list.add( table );
 			}
 			applySqlStrings( dialect.getTableCleaner().getSqlTruncateStrings( list, metadata,
-					sqlStringGenerationContext
+					context
 			), formatter, options,targets );
 
 			//TODO: reset the sequences?
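The truncation hunk above collects the physical tables that pass the filters into a list and then asks the dialect's table cleaner for one batched truncate. A simplified sketch of that collect-then-batch step follows; the predicate and the SQL string are illustrative stand-ins for SchemaFilter and TableCleaner.

    // Illustrative sketch of the collect-then-batch truncate step.
    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Predicate;

    public class BatchTruncateSketch {
        public static void main(String[] args) {
            List<String> candidates = List.of( "person", "address", "audit_log" );
            Predicate<String> includeTable = table -> !table.startsWith( "audit_" );

            // collect the tables that pass the filter
            List<String> list = new ArrayList<>();
            for ( String table : candidates ) {
                if ( includeTable.test( table ) ) {
                    list.add( table );
                }
            }
            // one statement for the whole batch, in the spirit of getSqlTruncateStrings( list, ... );
            // the actual SQL is dialect-specific in Hibernate
            System.out.println( "truncate table " + String.join( ", ", list ) );
        }
    }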
@@ -165,12 +167,12 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
 //				checkExportIdentifier( sequence, exportIdentifiers );
 //
 //				applySqlStrings( dialect.getSequenceExporter().getSqlDropStrings( sequence, metadata,
-//						sqlStringGenerationContext
+//						context
 //				), formatter, options, targets );
 //			}
 
 			applySqlString( dialect.getTableCleaner().getSqlAfterString(), formatter, options,targets );
 
-			enableConstraints( namespace, metadata, formatter, options, sqlStringGenerationContext,
+			enableConstraints( namespace, metadata, formatter, options, context,
 					contributableInclusionFilter, targets );
 		}
@@ -184,7 +186,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
 			Metadata metadata,
 			Formatter formatter,
 			ExecutionOptions options,
-			SqlStringGenerationContext sqlStringGenerationContext,
+			SqlStringGenerationContext context,
 			ContributableMatcher contributableInclusionFilter,
 			GenerationTarget... targets) {
 		final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
@@ -204,7 +206,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
 				if ( dialect.canDisableConstraints() ) {
 					applySqlString( dialect.getTableCleaner().getSqlDisableConstraintString( foreignKey, metadata,
-							sqlStringGenerationContext
+							context
 					),
 							formatter,
 							options,
@@ -214,7 +216,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
 				else if ( !dialect.canBatchTruncate() ) {
 					applySqlStrings( dialect.getForeignKeyExporter().getSqlDropStrings( foreignKey, metadata,
-							sqlStringGenerationContext
+							context
 					),
 							formatter,
 							options,
@@ -230,7 +232,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
 			Metadata metadata,
 			Formatter formatter,
 			ExecutionOptions options,
-			SqlStringGenerationContext sqlStringGenerationContext,
+			SqlStringGenerationContext context,
 			ContributableMatcher contributableInclusionFilter,
 			GenerationTarget... targets) {
 		final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
@@ -250,7 +252,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
 				if ( dialect.canDisableConstraints() ) {
 					applySqlString( dialect.getTableCleaner().getSqlEnableConstraintString( foreignKey, metadata,
-							sqlStringGenerationContext
+							context
 					),
 							formatter,
 							options,
@@ -260,7 +262,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
 				else if ( !dialect.canBatchTruncate() ) {
 					applySqlStrings( dialect.getForeignKeyExporter().getSqlCreateStrings( foreignKey, metadata,
-							sqlStringGenerationContext
+							context
 					),
 							formatter,
 							options,
diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/StandardTableMigrator.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/StandardTableMigrator.java
index 63cfcf669e..bf55129656 100644
--- a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/StandardTableMigrator.java
+++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/StandardTableMigrator.java
@@ -50,8 +50,8 @@ public class StandardTableMigrator implements TableMigrator {
 			Table table,
 			Metadata metadata,
 			TableInformation tableInfo,
-			SqlStringGenerationContext sqlStringGenerationContext) {
-		return sqlAlterStrings( table, dialect, metadata, tableInfo, sqlStringGenerationContext )
+			SqlStringGenerationContext context) {
+		return sqlAlterStrings( table, dialect, metadata, tableInfo, context )
 				.toArray( EMPTY_STRING_ARRAY );
 	}
 
@@ -61,9 +61,9 @@ public class StandardTableMigrator implements TableMigrator {
 			Dialect dialect,
 			Metadata metadata,
 			TableInformation tableInformation,
-			SqlStringGenerationContext sqlStringGenerationContext) throws HibernateException {
+			SqlStringGenerationContext context) throws HibernateException {
 
-		final String tableName = sqlStringGenerationContext.format( new QualifiedTableName(
+		final String tableName = context.format( new QualifiedTableName(
 				Identifier.toIdentifier( table.getCatalog(), table.isCatalogQuoted() ),
 				Identifier.toIdentifier( table.getSchema(), table.isSchemaQuoted() ),
 				table.getNameIdentifier() )
@@ -80,7 +80,7 @@ public class StandardTableMigrator implements TableMigrator {
 			if ( columnInformation == null ) {
 				// the column doesn't exist at all.
 				final String addColumn = dialect.getAddColumnString() + ' '
-						+ getFullColumnDeclaration( column, table, metadata, dialect, sqlStringGenerationContext )
+						+ getFullColumnDeclaration( column, table, metadata, dialect, context )
 						+ dialect.getAddColumnSuffixString();
 				results.add( alterTable + addColumn );
 			}
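The StandardTableMigrator hunk above assembles an "add column" ALTER fragment for every mapped column that has no matching ColumnInformation. The sketch below restates that comparison with plain maps; the hard-coded column data and SQL fragments are illustrative stand-ins for TableInformation and the Dialect methods.

    // Illustrative sketch of the "column missing in the database -> emit add-column" step.
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    public class AddColumnSketch {
        public static void main(String[] args) {
            final String alterTable = "alter table person ";
            // columns currently present in the database, keyed by name (stand-in for TableInformation)
            final Map<String, String> existingColumns = Map.of( "id", "bigint", "name", "varchar(255)" );
            // columns required by the mapping, with their full declarations
            final Map<String, String> mappedColumns =
                    Map.of( "id", "id bigint", "name", "name varchar(255)", "nickname", "nickname varchar(255)" );

            final List<String> results = new ArrayList<>();
            for ( Map.Entry<String, String> column : mappedColumns.entrySet() ) {
                if ( !existingColumns.containsKey( column.getKey() ) ) {
                    // the column doesn't exist at all
                    final String addColumn = "add column" + ' ' + column.getValue();
                    results.add( alterTable + addColumn );
                }
            }
            results.forEach( System.out::println );
        }
    }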
diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/TableMigrator.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/TableMigrator.java
index eda268dfbe..cf0a9f4172 100644
--- a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/TableMigrator.java
+++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/TableMigrator.java
@@ -24,5 +24,5 @@ public interface TableMigrator {
 			Table table,
 			Metadata metadata,
 			TableInformation tableInfo,
-			SqlStringGenerationContext sqlStringGenerationContext);
+			SqlStringGenerationContext context);
 }
diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/exec/ImprovedExtractionContextImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/exec/ImprovedExtractionContextImpl.java
index 479419e0ab..a1a27d8898 100644
--- a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/exec/ImprovedExtractionContextImpl.java
+++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/exec/ImprovedExtractionContextImpl.java
@@ -23,7 +23,7 @@ import org.hibernate.tool.schema.extract.spi.ExtractionContext;
 public class ImprovedExtractionContextImpl implements ExtractionContext {
 	private final ServiceRegistry serviceRegistry;
 	private final JdbcEnvironment jdbcEnvironment;
-	private final SqlStringGenerationContext sqlStringGenerationContext;
+	private final SqlStringGenerationContext context;
 	private final DdlTransactionIsolator ddlTransactionIsolator;
 	private final DatabaseObjectAccess databaseObjectAccess;
@@ -33,12 +33,12 @@ public class ImprovedExtractionContextImpl implements ExtractionContext {
 	public ImprovedExtractionContextImpl(
 			ServiceRegistry serviceRegistry,
 			JdbcEnvironment jdbcEnvironment,
-			SqlStringGenerationContext sqlStringGenerationContext,
+			SqlStringGenerationContext context,
 			DdlTransactionIsolator ddlTransactionIsolator,
 			DatabaseObjectAccess databaseObjectAccess) {
 		this.serviceRegistry = serviceRegistry;
 		this.jdbcEnvironment = jdbcEnvironment;
-		this.sqlStringGenerationContext = sqlStringGenerationContext;
+		this.context = context;
 		this.ddlTransactionIsolator = ddlTransactionIsolator;
 		this.databaseObjectAccess = databaseObjectAccess;
 	}
@@ -55,7 +55,7 @@ public class ImprovedExtractionContextImpl implements ExtractionContext {
 	@Override
 	public SqlStringGenerationContext getSqlStringGenerationContext() {
-		return sqlStringGenerationContext;
+		return context;
 	}
 
 	@Override
@@ -81,12 +81,12 @@ public class ImprovedExtractionContextImpl implements ExtractionContext {
 	@Override
 	public Identifier getDefaultCatalog() {
-		return sqlStringGenerationContext.getDefaultCatalog();
+		return context.getDefaultCatalog();
 	}
 
 	@Override
 	public Identifier getDefaultSchema() {
-		return sqlStringGenerationContext.getDefaultSchema();
+		return context.getDefaultSchema();
 	}
 
 	@Override
diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/spi/ExtractionTool.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/spi/ExtractionTool.java
index 6a38c73620..c41aeff2d6 100644
--- a/hibernate-core/src/main/java/org/hibernate/tool/schema/spi/ExtractionTool.java
+++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/spi/ExtractionTool.java
@@ -7,7 +7,6 @@ package org.hibernate.tool.schema.spi;
 
 import org.hibernate.Incubating;
-import org.hibernate.boot.model.naming.Identifier;
 import org.hibernate.boot.model.relational.SqlStringGenerationContext;
 import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
 import org.hibernate.resource.transaction.spi.DdlTransactionIsolator;
@@ -27,7 +26,7 @@ public interface ExtractionTool {
 	ExtractionContext createExtractionContext(
 			ServiceRegistry serviceRegistry,
 			JdbcEnvironment jdbcEnvironment,
-			SqlStringGenerationContext sqlStringGenerationContext,
+			SqlStringGenerationContext context,
 			DdlTransactionIsolator ddlTransactionIsolator,
 			ExtractionContext.DatabaseObjectAccess databaseObjectAccess);