refactor SchemaCreatorImpl, SchemaDropperImpl into bite-sized methods

This commit is contained in:
Gavin 2022-12-19 01:07:28 +01:00 committed by Gavin King
parent 32790456b7
commit 782d2c9707
15 changed files with 825 additions and 644 deletions

View File

@ -32,7 +32,7 @@ import org.hibernate.tool.schema.spi.SchemaManagementTool;
public class DatabaseInformationImpl public class DatabaseInformationImpl
implements DatabaseInformation, ExtractionContext.DatabaseObjectAccess { implements DatabaseInformation, ExtractionContext.DatabaseObjectAccess {
private final JdbcEnvironment jdbcEnvironment; private final JdbcEnvironment jdbcEnvironment;
private final SqlStringGenerationContext sqlStringGenerationContext; private final SqlStringGenerationContext context;
private final ExtractionContext extractionContext; private final ExtractionContext extractionContext;
private final InformationExtractor extractor; private final InformationExtractor extractor;
@ -41,15 +41,15 @@ public class DatabaseInformationImpl
public DatabaseInformationImpl( public DatabaseInformationImpl(
ServiceRegistry serviceRegistry, ServiceRegistry serviceRegistry,
JdbcEnvironment jdbcEnvironment, JdbcEnvironment jdbcEnvironment,
SqlStringGenerationContext sqlStringGenerationContext, SqlStringGenerationContext context,
DdlTransactionIsolator ddlTransactionIsolator, DdlTransactionIsolator ddlTransactionIsolator,
SchemaManagementTool tool) throws SQLException { SchemaManagementTool tool) throws SQLException {
this.jdbcEnvironment = jdbcEnvironment; this.jdbcEnvironment = jdbcEnvironment;
this.sqlStringGenerationContext = sqlStringGenerationContext; this.context = context;
this.extractionContext = tool.getExtractionTool().createExtractionContext( this.extractionContext = tool.getExtractionTool().createExtractionContext(
serviceRegistry, serviceRegistry,
jdbcEnvironment, jdbcEnvironment,
sqlStringGenerationContext, context,
ddlTransactionIsolator, ddlTransactionIsolator,
this this
); );
@ -80,13 +80,13 @@ public class DatabaseInformationImpl
@Override @Override
public boolean catalogExists(Identifier catalog) { public boolean catalogExists(Identifier catalog) {
return extractor.catalogExists( sqlStringGenerationContext.catalogWithDefault( catalog ) ); return extractor.catalogExists( context.catalogWithDefault( catalog ) );
} }
@Override @Override
public boolean schemaExists(Namespace.Name namespace) { public boolean schemaExists(Namespace.Name namespace) {
return extractor.schemaExists( sqlStringGenerationContext.catalogWithDefault( namespace.getCatalog() ), return extractor.schemaExists( context.catalogWithDefault( namespace.getCatalog() ),
sqlStringGenerationContext.schemaWithDefault( namespace.getSchema() ) ); context.schemaWithDefault( namespace.getSchema() ) );
} }
@Override @Override
@ -111,16 +111,16 @@ public class DatabaseInformationImpl
} }
return extractor.getTable( return extractor.getTable(
sqlStringGenerationContext.catalogWithDefault( tableName.getCatalogName() ), context.catalogWithDefault( tableName.getCatalogName() ),
sqlStringGenerationContext.schemaWithDefault( tableName.getSchemaName() ), context.schemaWithDefault( tableName.getSchemaName() ),
tableName.getTableName() tableName.getTableName()
); );
} }
@Override @Override
public NameSpaceTablesInformation getTablesInformation(Namespace namespace) { public NameSpaceTablesInformation getTablesInformation(Namespace namespace) {
return extractor.getTables( sqlStringGenerationContext.catalogWithDefault( namespace.getPhysicalName().getCatalog() ), return extractor.getTables( context.catalogWithDefault( namespace.getPhysicalName().getCatalog() ),
sqlStringGenerationContext.schemaWithDefault( namespace.getPhysicalName().getSchema() ) ); context.schemaWithDefault( namespace.getPhysicalName().getSchema() ) );
} }
@Override @Override

View File

@ -23,7 +23,7 @@ import org.hibernate.tool.schema.extract.spi.ExtractionContext;
public class ExtractionContextImpl implements ExtractionContext { public class ExtractionContextImpl implements ExtractionContext {
private final ServiceRegistry serviceRegistry; private final ServiceRegistry serviceRegistry;
private final JdbcEnvironment jdbcEnvironment; private final JdbcEnvironment jdbcEnvironment;
private final SqlStringGenerationContext sqlStringGenerationContext; private final SqlStringGenerationContext context;
private final JdbcConnectionAccess jdbcConnectionAccess; private final JdbcConnectionAccess jdbcConnectionAccess;
private final DatabaseObjectAccess registeredTableAccess; private final DatabaseObjectAccess registeredTableAccess;
@ -33,12 +33,12 @@ public class ExtractionContextImpl implements ExtractionContext {
public ExtractionContextImpl( public ExtractionContextImpl(
ServiceRegistry serviceRegistry, ServiceRegistry serviceRegistry,
JdbcEnvironment jdbcEnvironment, JdbcEnvironment jdbcEnvironment,
SqlStringGenerationContext sqlStringGenerationContext, SqlStringGenerationContext context,
JdbcConnectionAccess jdbcConnectionAccess, JdbcConnectionAccess jdbcConnectionAccess,
DatabaseObjectAccess registeredTableAccess) { DatabaseObjectAccess registeredTableAccess) {
this.serviceRegistry = serviceRegistry; this.serviceRegistry = serviceRegistry;
this.jdbcEnvironment = jdbcEnvironment; this.jdbcEnvironment = jdbcEnvironment;
this.sqlStringGenerationContext = sqlStringGenerationContext; this.context = context;
this.jdbcConnectionAccess = jdbcConnectionAccess; this.jdbcConnectionAccess = jdbcConnectionAccess;
this.registeredTableAccess = registeredTableAccess; this.registeredTableAccess = registeredTableAccess;
} }
@ -55,7 +55,7 @@ public class ExtractionContextImpl implements ExtractionContext {
@Override @Override
public SqlStringGenerationContext getSqlStringGenerationContext() { public SqlStringGenerationContext getSqlStringGenerationContext() {
return sqlStringGenerationContext; return context;
} }
@Override @Override
@ -86,12 +86,12 @@ public class ExtractionContextImpl implements ExtractionContext {
@Override @Override
public Identifier getDefaultCatalog() { public Identifier getDefaultCatalog() {
return sqlStringGenerationContext.getDefaultCatalog(); return context.getDefaultCatalog();
} }
@Override @Override
public Identifier getDefaultSchema() { public Identifier getDefaultSchema() {
return sqlStringGenerationContext.getDefaultSchema(); return context.getDefaultSchema();
} }
@Override @Override

View File

@ -8,7 +8,6 @@ package org.hibernate.tool.schema.internal;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.Iterator;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.stream.StreamSupport; import java.util.stream.StreamSupport;
@ -62,6 +61,8 @@ import static org.hibernate.tool.schema.UniqueConstraintSchemaUpdateStrategy.DRO
import static org.hibernate.tool.schema.UniqueConstraintSchemaUpdateStrategy.SKIP; import static org.hibernate.tool.schema.UniqueConstraintSchemaUpdateStrategy.SKIP;
/** /**
* Base implementation of {@link SchemaMigrator}.
*
* @author Steve Ebersole * @author Steve Ebersole
*/ */
public abstract class AbstractSchemaMigrator implements SchemaMigrator { public abstract class AbstractSchemaMigrator implements SchemaMigrator {

View File

@ -34,6 +34,8 @@ import org.hibernate.type.descriptor.JdbcTypeNameMapper;
import org.jboss.logging.Logger; import org.jboss.logging.Logger;
/** /**
* Base implementation of {@link SchemaValidator}.
*
* @author Steve Ebersole * @author Steve Ebersole
*/ */
public abstract class AbstractSchemaValidator implements SchemaValidator { public abstract class AbstractSchemaValidator implements SchemaValidator {
@ -54,7 +56,7 @@ public abstract class AbstractSchemaValidator implements SchemaValidator {
Metadata metadata, Metadata metadata,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter) { ContributableMatcher contributableInclusionFilter) {
SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.fromConfigurationMap( SqlStringGenerationContext context = SqlStringGenerationContextImpl.fromConfigurationMap(
tool.getServiceRegistry().getService( JdbcEnvironment.class ), tool.getServiceRegistry().getService( JdbcEnvironment.class ),
metadata.getDatabase(), metadata.getDatabase(),
options.getConfigurationValues() options.getConfigurationValues()
@ -65,7 +67,7 @@ public abstract class AbstractSchemaValidator implements SchemaValidator {
final DatabaseInformation databaseInformation = Helper.buildDatabaseInformation( final DatabaseInformation databaseInformation = Helper.buildDatabaseInformation(
tool.getServiceRegistry(), tool.getServiceRegistry(),
isolator, isolator,
sqlStringGenerationContext, context,
tool tool
); );

View File

@ -24,10 +24,10 @@ import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter; import org.hibernate.tool.schema.spi.SchemaFilter;
/** /**
* @author Andrea Boriero
*
* This implementation executes a single {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call * This implementation executes a single {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call
* to retrieve all the database table in order to determine if all the {@link jakarta.persistence.Entity} have a mapped database tables. * to retrieve all the database table in order to determine if all the {@link jakarta.persistence.Entity} have a mapped database tables.
*
* @author Andrea Boriero
*/ */
public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator { public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
@ -50,7 +50,7 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
boolean tryToCreateSchemas, boolean tryToCreateSchemas,
Set<Identifier> exportedCatalogs, Set<Identifier> exportedCatalogs,
Namespace namespace, Namespace namespace,
SqlStringGenerationContext sqlStringGenerationContext, SqlStringGenerationContext context,
GenerationTarget[] targets) { GenerationTarget[] targets) {
final NameSpaceTablesInformation tablesInformation = final NameSpaceTablesInformation tablesInformation =
new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() ); new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() );
@ -76,12 +76,12 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
checkExportIdentifier( table, exportIdentifiers ); checkExportIdentifier( table, exportIdentifiers );
final TableInformation tableInformation = tables.getTableInformation( table ); final TableInformation tableInformation = tables.getTableInformation( table );
if ( tableInformation == null ) { if ( tableInformation == null ) {
createTable( table, dialect, metadata, formatter, options, sqlStringGenerationContext, targets ); createTable( table, dialect, metadata, formatter, options, context, targets );
} }
else if ( tableInformation.isPhysicalTable() ) { else if ( tableInformation.isPhysicalTable() ) {
tablesInformation.addTableInformation( tableInformation ); tablesInformation.addTableInformation( tableInformation );
migrateTable( table, tableInformation, dialect, metadata, formatter, options, migrateTable( table, tableInformation, dialect, metadata, formatter, options,
sqlStringGenerationContext, targets ); context, targets );
} }
} }
} }
@ -93,9 +93,9 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
final TableInformation tableInformation = tablesInformation.getTableInformation( table ); final TableInformation tableInformation = tablesInformation.getTableInformation( table );
if ( tableInformation == null || tableInformation.isPhysicalTable() ) { if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
applyIndexes( table, tableInformation, dialect, metadata, formatter, options, applyIndexes( table, tableInformation, dialect, metadata, formatter, options,
sqlStringGenerationContext, targets ); context, targets );
applyUniqueKeys( table, tableInformation, dialect, metadata, formatter, options, applyUniqueKeys( table, tableInformation, dialect, metadata, formatter, options,
sqlStringGenerationContext, targets ); context, targets );
} }
} }
} }

View File

@ -48,7 +48,7 @@ public class Helper {
private static final Pattern COMMA_PATTERN = Pattern.compile( "\\s*,\\s*" ); private static final Pattern COMMA_PATTERN = Pattern.compile( "\\s*,\\s*" );
public static ScriptSourceInput interpretScriptSourceSetting( public static ScriptSourceInput interpretScriptSourceSetting(
Object scriptSourceSetting, Object scriptSourceSetting, //Reader or String URL
ClassLoaderService classLoaderService, ClassLoaderService classLoaderService,
String charsetName ) { String charsetName ) {
if ( scriptSourceSetting instanceof Reader ) { if ( scriptSourceSetting instanceof Reader ) {
@ -169,14 +169,14 @@ public class Helper {
public static DatabaseInformation buildDatabaseInformation( public static DatabaseInformation buildDatabaseInformation(
ServiceRegistry serviceRegistry, ServiceRegistry serviceRegistry,
DdlTransactionIsolator ddlTransactionIsolator, DdlTransactionIsolator ddlTransactionIsolator,
SqlStringGenerationContext sqlStringGenerationContext, SqlStringGenerationContext context,
SchemaManagementTool tool) { SchemaManagementTool tool) {
final JdbcEnvironment jdbcEnvironment = serviceRegistry.getService( JdbcEnvironment.class ); final JdbcEnvironment jdbcEnvironment = serviceRegistry.getService( JdbcEnvironment.class );
try { try {
return new DatabaseInformationImpl( return new DatabaseInformationImpl(
serviceRegistry, serviceRegistry,
jdbcEnvironment, jdbcEnvironment,
sqlStringGenerationContext, context,
ddlTransactionIsolator, ddlTransactionIsolator,
tool tool
); );

View File

@ -449,13 +449,13 @@ public class HibernateSchemaManagementTool implements SchemaManagementTool, Serv
public ExtractionContext createExtractionContext( public ExtractionContext createExtractionContext(
ServiceRegistry serviceRegistry, ServiceRegistry serviceRegistry,
JdbcEnvironment jdbcEnvironment, JdbcEnvironment jdbcEnvironment,
SqlStringGenerationContext sqlStringGenerationContext, SqlStringGenerationContext context,
DdlTransactionIsolator ddlTransactionIsolator, DdlTransactionIsolator ddlTransactionIsolator,
ExtractionContext.DatabaseObjectAccess databaseObjectAccess) { ExtractionContext.DatabaseObjectAccess databaseObjectAccess) {
return new ImprovedExtractionContextImpl( return new ImprovedExtractionContextImpl(
serviceRegistry, serviceRegistry,
jdbcEnvironment, jdbcEnvironment,
sqlStringGenerationContext, context,
ddlTransactionIsolator, ddlTransactionIsolator,
databaseObjectAccess databaseObjectAccess
); );

View File

@ -24,10 +24,10 @@ import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter; import org.hibernate.tool.schema.spi.SchemaFilter;
/** /**
* @author Andrea Boriero
*
* This implementation executes one {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call * This implementation executes one {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call
* for each {@link jakarta.persistence.Entity} in order to determine if a corresponding database table exists. * for each {@link jakarta.persistence.Entity} in order to determine if a corresponding database table exists.
*
* @author Andrea Boriero
*/ */
public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator { public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
@ -50,7 +50,7 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
boolean tryToCreateSchemas, boolean tryToCreateSchemas,
Set<Identifier> exportedCatalogs, Set<Identifier> exportedCatalogs,
Namespace namespace, Namespace namespace,
SqlStringGenerationContext sqlStringGenerationContext, SqlStringGenerationContext context,
GenerationTarget[] targets) { GenerationTarget[] targets) {
final NameSpaceTablesInformation tablesInformation = final NameSpaceTablesInformation tablesInformation =
new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() ); new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() );
@ -74,12 +74,12 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
checkExportIdentifier( table, exportIdentifiers ); checkExportIdentifier( table, exportIdentifiers );
final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() ); final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() );
if ( tableInformation == null ) { if ( tableInformation == null ) {
createTable( table, dialect, metadata, formatter, options, sqlStringGenerationContext, targets ); createTable( table, dialect, metadata, formatter, options, context, targets );
} }
else if ( tableInformation.isPhysicalTable() ) { else if ( tableInformation.isPhysicalTable() ) {
tablesInformation.addTableInformation( tableInformation ); tablesInformation.addTableInformation( tableInformation );
migrateTable( table, tableInformation, dialect, metadata, formatter, options, migrateTable( table, tableInformation, dialect, metadata, formatter, options,
sqlStringGenerationContext, targets ); context, targets );
} }
} }
} }
@ -91,9 +91,9 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
final TableInformation tableInformation = tablesInformation.getTableInformation( table ); final TableInformation tableInformation = tablesInformation.getTableInformation( table );
if ( tableInformation == null || tableInformation.isPhysicalTable() ) { if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
applyIndexes( table, tableInformation, dialect, metadata, formatter, options, applyIndexes( table, tableInformation, dialect, metadata, formatter, options,
sqlStringGenerationContext, targets ); context, targets );
applyUniqueKeys( table, tableInformation, dialect, metadata, formatter, options, applyUniqueKeys( table, tableInformation, dialect, metadata, formatter, options,
sqlStringGenerationContext, targets ); context, targets );
} }
} }
} }

View File

@ -27,7 +27,6 @@ import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.boot.model.relational.internal.SqlStringGenerationContextImpl; import org.hibernate.boot.model.relational.internal.SqlStringGenerationContextImpl;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService; import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.spi.MetadataImplementor; import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.Dialect; import org.hibernate.dialect.Dialect;
import org.hibernate.engine.config.spi.ConfigurationService; import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment; import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
@ -35,9 +34,6 @@ import org.hibernate.engine.jdbc.internal.FormatStyle;
import org.hibernate.engine.jdbc.internal.Formatter; import org.hibernate.engine.jdbc.internal.Formatter;
import org.hibernate.internal.CoreLogging; import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger; import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.mapping.ForeignKey; import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.Index; import org.hibernate.mapping.Index;
import org.hibernate.mapping.Table; import org.hibernate.mapping.Table;
@ -64,13 +60,17 @@ import org.hibernate.tool.schema.spi.SqlScriptCommandExtractor;
import org.hibernate.tool.schema.spi.TargetDescriptor; import org.hibernate.tool.schema.spi.TargetDescriptor;
import static org.hibernate.cfg.AvailableSettings.HBM2DDL_CHARSET_NAME; import static org.hibernate.cfg.AvailableSettings.HBM2DDL_CHARSET_NAME;
import static org.hibernate.cfg.AvailableSettings.HBM2DDL_IMPORT_FILES;
import static org.hibernate.cfg.AvailableSettings.HBM2DDL_LOAD_SCRIPT_SOURCE; import static org.hibernate.cfg.AvailableSettings.HBM2DDL_LOAD_SCRIPT_SOURCE;
import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE; import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE;
import static org.hibernate.internal.util.StringHelper.isNotEmpty;
import static org.hibernate.internal.util.collections.CollectionHelper.setOfSize;
import static org.hibernate.internal.util.config.ConfigurationHelper.getString;
import static org.hibernate.tool.schema.internal.Helper.interpretFormattingEnabled;
import static org.hibernate.tool.schema.internal.Helper.interpretScriptSourceSetting; import static org.hibernate.tool.schema.internal.Helper.interpretScriptSourceSetting;
/** /**
* This is functionally nothing more than the creation script from the older SchemaExport class (plus some * Basic implementation of {@link SchemaCreator}.
* additional stuff in the script).
* *
* @author Steve Ebersole * @author Steve Ebersole
*/ */
@ -113,19 +113,18 @@ public class SchemaCreatorImpl implements SchemaCreator {
ContributableMatcher contributableInclusionFilter, ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor, SourceDescriptor sourceDescriptor,
TargetDescriptor targetDescriptor) { TargetDescriptor targetDescriptor) {
if ( targetDescriptor.getTargetTypes().isEmpty() ) { if ( !targetDescriptor.getTargetTypes().isEmpty() ) {
return; final Map<String, Object> configuration = options.getConfigurationValues();
final JdbcContext jdbcContext = tool.resolveJdbcContext( configuration );
doCreation(
metadata,
jdbcContext.getDialect(),
options,
contributableInclusionFilter,
sourceDescriptor,
tool.buildGenerationTargets( targetDescriptor, jdbcContext, configuration, true )
);
} }
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final GenerationTarget[] targets = tool.buildGenerationTargets(
targetDescriptor,
jdbcContext,
options.getConfigurationValues(),
true
);
doCreation( metadata, jdbcContext.getDialect(), options, contributableInclusionFilter, sourceDescriptor, targets );
} }
@Internal @Internal
@ -162,34 +161,38 @@ public class SchemaCreatorImpl implements SchemaCreator {
ContributableMatcher contributableInclusionFilter, ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor, SourceDescriptor sourceDescriptor,
GenerationTarget... targets) { GenerationTarget... targets) {
final SqlScriptCommandExtractor commandExtractor = tool.getServiceRegistry().getService( SqlScriptCommandExtractor.class ); final SqlScriptCommandExtractor commandExtractor = getCommandExtractor();
final boolean format = interpretFormattingEnabled( options.getConfigurationValues() );
final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() );
final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter(); final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter();
switch ( sourceDescriptor.getSourceType() ) { switch ( sourceDescriptor.getSourceType() ) {
case SCRIPT: { case SCRIPT:
createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
break; break;
} case METADATA:
case METADATA: {
createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets ); createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
break; break;
} case METADATA_THEN_SCRIPT:
case METADATA_THEN_SCRIPT: {
createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets ); createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
break; break;
} case SCRIPT_THEN_METADATA:
case SCRIPT_THEN_METADATA: {
createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets ); createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
} break;
} }
applyImportSources( options, commandExtractor, format, dialect, targets ); applyImportSources( options, commandExtractor, format, dialect, targets );
} }
private SqlScriptCommandExtractor getCommandExtractor() {
return tool.getServiceRegistry().getService( SqlScriptCommandExtractor.class );
}
private ClassLoaderService getClassLoaderService() {
return tool.getServiceRegistry().getService( ClassLoaderService.class );
}
public void createFromScript( public void createFromScript(
ScriptSourceInput scriptSourceInput, ScriptSourceInput scriptSourceInput,
SqlScriptCommandExtractor commandExtractor, SqlScriptCommandExtractor commandExtractor,
@ -200,9 +203,8 @@ public class SchemaCreatorImpl implements SchemaCreator {
final List<String> commands = scriptSourceInput.extract( final List<String> commands = scriptSourceInput.extract(
reader -> commandExtractor.extractCommands( reader, dialect ) reader -> commandExtractor.extractCommands( reader, dialect )
); );
for ( String command : commands ) {
for ( int i = 0; i < commands.size(); i++ ) { applySqlString( command, formatter, options, targets );
applySqlString( commands.get( i ), formatter, options, targets );
} }
} }
@ -223,6 +225,15 @@ public class SchemaCreatorImpl implements SchemaCreator {
); );
} }
private static SqlStringGenerationContext createSqlStringGenerationContext(ExecutionOptions options, Metadata metadata) {
final Database database = metadata.getDatabase();
return SqlStringGenerationContextImpl.fromConfigurationMap(
database.getJdbcEnvironment(),
database,
options.getConfigurationValues()
);
}
@Internal @Internal
public void createFromMetadata( public void createFromMetadata(
Metadata metadata, Metadata metadata,
@ -231,256 +242,315 @@ public class SchemaCreatorImpl implements SchemaCreator {
Dialect dialect, Dialect dialect,
Formatter formatter, Formatter formatter,
GenerationTarget... targets) { GenerationTarget... targets) {
boolean tryToCreateCatalogs = false; final SqlStringGenerationContext context = createSqlStringGenerationContext( options, metadata );
boolean tryToCreateSchemas = false; final Set<String> exportIdentifiers = setOfSize( 50 );
if ( options.shouldManageNamespaces() ) {
if ( dialect.canCreateSchema() ) {
tryToCreateSchemas = true;
}
if ( dialect.canCreateCatalog() ) {
tryToCreateCatalogs = true;
}
}
final Database database = metadata.getDatabase();
final JdbcEnvironment jdbcEnvironment = database.getJdbcEnvironment();
SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.fromConfigurationMap(
jdbcEnvironment, database, options.getConfigurationValues() );
final Set<String> exportIdentifiers = CollectionHelper.setOfSize( 50 );
// first, create each catalog/schema
if ( tryToCreateCatalogs || tryToCreateSchemas ) {
Set<Identifier> exportedCatalogs = new HashSet<>();
for ( Namespace namespace : database.getNamespaces() ) {
if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
if ( tryToCreateCatalogs ) {
final Identifier catalogLogicalName = namespace.getName().getCatalog();
final Identifier catalogPhysicalName =
sqlStringGenerationContext.catalogWithDefault( namespace.getPhysicalName().getCatalog() );
if ( catalogPhysicalName != null && !exportedCatalogs.contains( catalogLogicalName ) ) {
applySqlStrings(
dialect.getCreateCatalogCommand( catalogPhysicalName.render( dialect ) ),
formatter,
options,
targets
);
exportedCatalogs.add( catalogLogicalName );
}
}
final Identifier schemaPhysicalName =
sqlStringGenerationContext.schemaWithDefault( namespace.getPhysicalName().getSchema() );
if ( tryToCreateSchemas && schemaPhysicalName != null ) {
applySqlStrings(
dialect.getCreateSchemaCommand( schemaPhysicalName.render( dialect ) ),
formatter,
options,
targets
);
}
}
}
createSchemasAndCatalogs( metadata, options, dialect, formatter, context, targets );
// next, create all UDTs // next, create all UDTs
for ( Namespace namespace : database.getNamespaces() ) { createUserDefinedTypes( metadata, options, dialect, formatter, context, targets );
if ( !options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
for ( UserDefinedType userDefinedType : namespace.getDependencyOrderedUserDefinedTypes() ) {
applySqlStrings(
dialect.getUserDefinedTypeExporter().getSqlCreateStrings(
userDefinedType,
metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
}
}
// next, create all "before table" auxiliary objects // next, create all "before table" auxiliary objects
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) { createAuxiliaryObjectsBeforeTables( metadata, options, dialect, formatter, context, exportIdentifiers, targets );
if ( !auxiliaryDatabaseObject.beforeTablesOnCreation() ) {
continue;
}
if ( auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
checkExportIdentifier( auxiliaryDatabaseObject, exportIdentifiers );
applySqlStrings(
dialect.getAuxiliaryDatabaseObjectExporter().getSqlCreateStrings(
auxiliaryDatabaseObject,
metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
}
}
// then, create all schema objects (tables, sequences, constraints, etc) in each schema // then, create all schema objects (tables, sequences, constraints, etc) in each schema
for ( Namespace namespace : database.getNamespaces() ) { createSequencesTablesConstraints(
metadata,
if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) { options,
continue; contributableInclusionMatcher,
} dialect,
formatter,
// sequences context,
for ( Sequence sequence : namespace.getSequences() ) { exportIdentifiers,
if ( ! options.getSchemaFilter().includeSequence( sequence ) ) { targets
continue; );
} // foreign keys must be created after all tables of all namespaces for cross-namespace constraints (see HHH-10420)
createForeignKeys( metadata, options, contributableInclusionMatcher, dialect, formatter, context, targets );
if ( ! contributableInclusionMatcher.matches( sequence ) ) {
continue;
}
checkExportIdentifier( sequence, exportIdentifiers );
applySqlStrings(
dialect.getSequenceExporter().getSqlCreateStrings(
sequence,
metadata,
sqlStringGenerationContext
),
// dialect.getCreateSequenceStrings(
// jdbcEnvironment.getQualifiedObjectNameFormatter().format( sequence.getName(), dialect ),
// sequence.getInitialValue(),
// sequence.getIncrementSize()
// ),
formatter,
options,
targets
);
}
// tables
for ( Table table : namespace.getTables() ) {
if ( !table.isPhysicalTable() ){
continue;
}
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionMatcher.matches( table ) ) {
continue;
}
checkExportIdentifier( table, exportIdentifiers );
applySqlStrings(
dialect.getTableExporter().getSqlCreateStrings( table, metadata, sqlStringGenerationContext ),
formatter,
options,
targets
);
}
for ( Table table : namespace.getTables() ) {
if ( !table.isPhysicalTable() ){
continue;
}
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionMatcher.matches( table ) ) {
continue;
}
// indexes
for ( Index index : table.getIndexes().values() ) {
checkExportIdentifier( index, exportIdentifiers );
applySqlStrings(
dialect.getIndexExporter().getSqlCreateStrings( index, metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
}
// unique keys
for ( UniqueKey uniqueKey : table.getUniqueKeys().values() ) {
checkExportIdentifier( uniqueKey, exportIdentifiers );
applySqlStrings(
dialect.getUniqueKeyExporter().getSqlCreateStrings( uniqueKey, metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
}
}
}
//NOTE : Foreign keys must be created *after* all tables of all namespaces for cross namespace fks. see HHH-10420
for ( Namespace namespace : database.getNamespaces() ) {
// NOTE : Foreign keys must be created *after* unique keys for numerous DBs. See HHH-8390
if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
for ( Table table : namespace.getTables() ) {
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionMatcher.matches( table ) ) {
continue;
}
// foreign keys
for ( ForeignKey foreignKey : table.getForeignKeys().values() ) {
applySqlStrings(
dialect.getForeignKeyExporter().getSqlCreateStrings( foreignKey, metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
}
}
}
// next, create all "after table" auxiliary objects // next, create all "after table" auxiliary objects
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) { createAuxiliaryObjectsAfterTables( metadata, options, dialect, formatter, context, exportIdentifiers, targets );
// and finally add all init commands
executeInitCommands(metadata, options, formatter, targets);
}
private static void executeInitCommands(Metadata metadata, ExecutionOptions options, Formatter formatter, GenerationTarget[] targets) {
for ( InitCommand initCommand : metadata.getDatabase().getInitCommands() ) {
// todo: this should alo probably use the DML formatter...
applySqlStrings( initCommand.getInitCommands(), formatter, options, targets);
}
}
private static void createAuxiliaryObjectsAfterTables(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
Set<String> exportIdentifiers,
GenerationTarget[] targets) {
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : metadata.getDatabase().getAuxiliaryDatabaseObjects() ) {
if ( auxiliaryDatabaseObject.appliesToDialect( dialect ) if ( auxiliaryDatabaseObject.appliesToDialect( dialect )
&& !auxiliaryDatabaseObject.beforeTablesOnCreation() ) { && !auxiliaryDatabaseObject.beforeTablesOnCreation() ) {
checkExportIdentifier( auxiliaryDatabaseObject, exportIdentifiers ); checkExportIdentifier( auxiliaryDatabaseObject, exportIdentifiers );
applySqlStrings( applySqlStrings(
dialect.getAuxiliaryDatabaseObjectExporter().getSqlCreateStrings( auxiliaryDatabaseObject, metadata, dialect.getAuxiliaryDatabaseObjectExporter()
sqlStringGenerationContext .getSqlCreateStrings( auxiliaryDatabaseObject, metadata, context ),
),
formatter, formatter,
options, options,
targets targets
); );
} }
} }
}
// and finally add all init commands private static void createForeignKeys(
for ( InitCommand initCommand : database.getInitCommands() ) { Metadata metadata,
// todo: this should alo probably use the DML formatter... ExecutionOptions options,
applySqlStrings( initCommand.getInitCommands(), formatter, options, targets ); ContributableMatcher contributableInclusionMatcher,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
GenerationTarget[] targets) {
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
// foreign keys must be created after unique keys for numerous DBs (see HHH-8390)
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
for ( Table table : namespace.getTables() ) {
if ( options.getSchemaFilter().includeTable( table )
&& contributableInclusionMatcher.matches( table ) ) {
// foreign keys
for ( ForeignKey foreignKey : table.getForeignKeys().values() ) {
applySqlStrings(
dialect.getForeignKeyExporter().getSqlCreateStrings( foreignKey, metadata, context ),
formatter,
options,
targets
);
}
}
}
}
}
}
private static void createSequencesTablesConstraints(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionMatcher,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
Set<String> exportIdentifiers,
GenerationTarget[] targets) {
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
// sequences
createSequences(
metadata,
options,
contributableInclusionMatcher,
dialect,
formatter,
context,
exportIdentifiers,
targets,
namespace
);
// tables
createTables(
metadata,
options,
contributableInclusionMatcher,
dialect,
formatter,
context,
exportIdentifiers,
targets,
namespace
);
createTableConstraints(
metadata,
options,
contributableInclusionMatcher,
dialect,
formatter,
context,
exportIdentifiers,
targets,
namespace
);
}
}
}
private static void createTableConstraints(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionMatcher,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
Set<String> exportIdentifiers,
GenerationTarget[] targets,
Namespace namespace) {
for ( Table table : namespace.getTables() ) {
if ( table.isPhysicalTable()
&& options.getSchemaFilter().includeTable( table )
&& contributableInclusionMatcher.matches( table ) ) {
// indexes
for ( Index index : table.getIndexes().values() ) {
checkExportIdentifier( index, exportIdentifiers );
applySqlStrings(
dialect.getIndexExporter().getSqlCreateStrings( index, metadata, context ),
formatter,
options,
targets
);
}
// unique keys
for ( UniqueKey uniqueKey : table.getUniqueKeys().values() ) {
checkExportIdentifier( uniqueKey, exportIdentifiers );
applySqlStrings(
dialect.getUniqueKeyExporter().getSqlCreateStrings( uniqueKey, metadata, context ),
formatter,
options,
targets
);
}
}
}
}
private static void createTables(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionMatcher,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
Set<String> exportIdentifiers,
GenerationTarget[] targets,
Namespace namespace) {
for ( Table table : namespace.getTables() ) {
if ( table.isPhysicalTable()
&& options.getSchemaFilter().includeTable( table )
&& contributableInclusionMatcher.matches( table ) ) {
checkExportIdentifier( table, exportIdentifiers );
applySqlStrings(
dialect.getTableExporter().getSqlCreateStrings( table, metadata, context ),
formatter,
options,
targets
);
}
}
}
private static void createSequences(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionMatcher,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
Set<String> exportIdentifiers,
GenerationTarget[] targets,
Namespace namespace) {
for ( Sequence sequence : namespace.getSequences() ) {
if ( options.getSchemaFilter().includeSequence( sequence )
&& contributableInclusionMatcher.matches( sequence ) ) {
checkExportIdentifier( sequence, exportIdentifiers);
applySqlStrings(
dialect.getSequenceExporter().getSqlCreateStrings( sequence, metadata, context ),
formatter,
options,
targets
);
}
}
}
private static void createAuxiliaryObjectsBeforeTables(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
Set<String> exportIdentifiers,
GenerationTarget[] targets) {
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : metadata.getDatabase().getAuxiliaryDatabaseObjects() ) {
if ( auxiliaryDatabaseObject.beforeTablesOnCreation()
&& auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
checkExportIdentifier( auxiliaryDatabaseObject, exportIdentifiers );
applySqlStrings(
dialect.getAuxiliaryDatabaseObjectExporter()
.getSqlCreateStrings( auxiliaryDatabaseObject, metadata, context ),
formatter,
options,
targets
);
}
}
}
private static void createUserDefinedTypes(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
GenerationTarget[] targets) {
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
for ( UserDefinedType userDefinedType : namespace.getDependencyOrderedUserDefinedTypes() ) {
applySqlStrings(
dialect.getUserDefinedTypeExporter()
.getSqlCreateStrings( userDefinedType, metadata, context ),
formatter,
options,
targets
);
}
}
}
}
private static void createSchemasAndCatalogs(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
GenerationTarget[] targets) {
final boolean tryToCreateCatalogs = options.shouldManageNamespaces() && dialect.canCreateCatalog();
final boolean tryToCreateSchemas = options.shouldManageNamespaces() && dialect.canCreateSchema();
// first, create each catalog/schema
if ( tryToCreateCatalogs || tryToCreateSchemas ) {
Set<Identifier> exportedCatalogs = new HashSet<>();
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
if ( tryToCreateCatalogs ) {
final Identifier catalogLogicalName = namespace.getName().getCatalog();
final Identifier catalogPhysicalName =
context.catalogWithDefault( namespace.getPhysicalName().getCatalog() );
if ( catalogPhysicalName != null && !exportedCatalogs.contains( catalogLogicalName ) ) {
applySqlStrings(
dialect.getCreateCatalogCommand( catalogPhysicalName.render( dialect ) ),
formatter,
options,
targets
);
exportedCatalogs.add( catalogLogicalName );
}
}
final Identifier schemaPhysicalName =
context.schemaWithDefault( namespace.getPhysicalName().getSchema() );
if ( tryToCreateSchemas && schemaPhysicalName != null ) {
applySqlStrings(
dialect.getCreateSchemaCommand( schemaPhysicalName.render( dialect ) ),
formatter,
options,
targets
);
}
}
}
} }
} }
@ -497,12 +567,10 @@ public class SchemaCreatorImpl implements SchemaCreator {
Formatter formatter, Formatter formatter,
ExecutionOptions options, ExecutionOptions options,
GenerationTarget... targets) { GenerationTarget... targets) {
if ( sqlStrings == null ) { if ( sqlStrings != null ) {
return; for ( String sqlString : sqlStrings ) {
} applySqlString( sqlString, formatter, options, targets );
}
for ( String sqlString : sqlStrings ) {
applySqlString( sqlString, formatter, options, targets );
} }
} }
@ -511,18 +579,16 @@ public class SchemaCreatorImpl implements SchemaCreator {
Formatter formatter, Formatter formatter,
ExecutionOptions options, ExecutionOptions options,
GenerationTarget... targets) { GenerationTarget... targets) {
if ( StringHelper.isEmpty( sqlString ) ) { if ( isNotEmpty( sqlString ) ) {
return; try {
} final String sqlStringFormatted = formatter.format( sqlString );
for ( GenerationTarget target : targets ) {
try { target.accept( sqlStringFormatted );
String sqlStringFormatted = formatter.format( sqlString ); }
for ( GenerationTarget target : targets ) { }
target.accept( sqlStringFormatted ); catch (CommandAcceptanceException e) {
options.getExceptionHandler().handleException( e );
} }
}
catch (CommandAcceptanceException e) {
options.getExceptionHandler().handleException( e );
} }
} }
@ -532,75 +598,159 @@ public class SchemaCreatorImpl implements SchemaCreator {
boolean format, boolean format,
Dialect dialect, Dialect dialect,
GenerationTarget... targets) { GenerationTarget... targets) {
final ServiceRegistry serviceRegistry = tool.getServiceRegistry();
final ClassLoaderService classLoaderService = serviceRegistry.getService( ClassLoaderService.class );
// I have had problems applying the formatter to these imported statements. final Formatter formatter = getImportScriptFormatter(format);
// and legacy SchemaExport did not format them, so doing same here
//final Formatter formatter = format ? DDLFormatterImpl.INSTANCE : FormatStyle.NONE.getFormatter();
final Formatter formatter = FormatStyle.NONE.getFormatter();
Object importScriptSetting = options.getConfigurationValues().get( HBM2DDL_LOAD_SCRIPT_SOURCE ); boolean hasDefaultImportFileScriptBeenExecuted = applyImportScript(
if ( importScriptSetting == null ) { options,
importScriptSetting = options.getConfigurationValues().get( JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE ); commandExtractor,
} dialect,
String charsetName = (String) options.getConfigurationValues().get( HBM2DDL_CHARSET_NAME ); formatter,
targets
boolean hasDefaultImportFileScriptBeenExecuted = false;
if ( importScriptSetting != null ) {
final ScriptSourceInput importScriptInput = interpretScriptSourceSetting( importScriptSetting, classLoaderService, charsetName );
final URL defaultImportFileUrl = classLoaderService.locateResource( DEFAULT_IMPORT_FILE );
if ( defaultImportFileUrl != null && importScriptInput.containsScript( defaultImportFileUrl ) ) {
hasDefaultImportFileScriptBeenExecuted = true;
}
final List<String> commands = importScriptInput.extract(
reader -> commandExtractor.extractCommands( reader, dialect )
);
for ( int i = 0; i < commands.size(); i++ ) {
applySqlString( commands.get( i ), formatter, options, targets );
}
}
final String importFiles = ConfigurationHelper.getString(
AvailableSettings.HBM2DDL_IMPORT_FILES,
options.getConfigurationValues(),
hasDefaultImportFileScriptBeenExecuted ? "" : DEFAULT_IMPORT_FILE
); );
applyImportFiles(
options,
commandExtractor,
dialect,
formatter,
hasDefaultImportFileScriptBeenExecuted ? "" : DEFAULT_IMPORT_FILE,
targets
);
}
for ( String currentFile : importFiles.split( "," ) ) { /**
final String resourceName = currentFile.trim(); * In principle, we should format the commands in the import script if the
if ( resourceName.isEmpty() ) { * {@code format} parameter is {@code true}, and since it's supposed to be
//skip empty resource names * a list of DML statements, we should use the {@linkplain FormatStyle#BASIC
continue; * basic DML formatter} to do that. However, in practice we don't really know
} * much about what this file contains, and we have never formatted it in the
final ScriptSourceInput importScriptInput = interpretLegacyImportScriptSetting( resourceName, classLoaderService, charsetName ); * past, so there's no compelling reason to start now. In fact, if we have
final List<String> commands = importScriptInput.extract( * lists of many {@code insert} statements on the same table, which is what
reader -> commandExtractor.extractCommands( reader, dialect ) * we typically expect, it's probably better to not format.
*/
private static Formatter getImportScriptFormatter(boolean format) {
// return format ? FormatStyle.BASIC.getFormatter() : FormatStyle.NONE.getFormatter();
return FormatStyle.NONE.getFormatter();
}
/**
* Handles import scripts specified using
* {@link org.hibernate.cfg.AvailableSettings#HBM2DDL_IMPORT_FILES}.
*
* @return {@code true} if the legacy {@linkplain #DEFAULT_IMPORT_FILE default import file}
* was one of the listed imported files that were executed
*/
private boolean applyImportScript(
ExecutionOptions options,
SqlScriptCommandExtractor commandExtractor,
Dialect dialect,
Formatter formatter,
GenerationTarget[] targets) {
final Object importScriptSetting = getImportScriptSetting( options );
if ( importScriptSetting != null ) {
final ScriptSourceInput importScriptInput =
interpretScriptSourceSetting( importScriptSetting, getClassLoaderService(), getCharsetName( options ) );
applyImportScript(
options,
commandExtractor,
dialect,
importScriptInput,
formatter,
targets
); );
for ( int i = 0; i < commands.size(); i++ ) { return containsDefaultImportFile( importScriptInput );
applySqlString( commands.get( i ), formatter, options, targets ); }
else {
return false;
}
}
private boolean containsDefaultImportFile(ScriptSourceInput importScriptInput) {
final URL defaultImportFileUrl = getClassLoaderService().locateResource( DEFAULT_IMPORT_FILE );
return defaultImportFileUrl != null && importScriptInput.containsScript(defaultImportFileUrl);
}
/**
* Handles import scripts specified using
* {@link org.hibernate.cfg.AvailableSettings#JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE}.
*/
private void applyImportFiles(
ExecutionOptions options,
SqlScriptCommandExtractor commandExtractor,
Dialect dialect,
Formatter formatter,
String defaultImportFile,
GenerationTarget[] targets) {
final String[] importFiles =
getString( HBM2DDL_IMPORT_FILES, options.getConfigurationValues(), defaultImportFile )
.split( "," );
final String charsetName = getCharsetName( options );
final ClassLoaderService classLoaderService = getClassLoaderService();
for ( String currentFile : importFiles ) {
final String resourceName = currentFile.trim();
if ( !resourceName.isEmpty() ) { //skip empty resource names
applyImportScript(
options,
commandExtractor,
dialect,
interpretLegacyImportScriptSetting( resourceName, classLoaderService, charsetName ),
formatter,
targets
);
} }
} }
} }
private static void applyImportScript(
ExecutionOptions options,
SqlScriptCommandExtractor commandExtractor,
Dialect dialect,
ScriptSourceInput importScriptInput,
Formatter formatter,
GenerationTarget[] targets) {
final List<String> commands = importScriptInput.extract(
reader -> commandExtractor.extractCommands( reader, dialect )
);
for ( String command : commands ) {
applySqlString( command, formatter, options, targets );
}
}
private ScriptSourceInput interpretLegacyImportScriptSetting( private ScriptSourceInput interpretLegacyImportScriptSetting(
String resourceName, String resourceName,
ClassLoaderService classLoaderService, ClassLoaderService classLoaderService,
String charsetName) { String charsetName) {
try { try {
final URL resourceUrl = classLoaderService.locateResource( resourceName ); final URL resourceUrl = classLoaderService.locateResource( resourceName );
if ( resourceUrl == null ) { return resourceUrl == null
return ScriptSourceInputNonExistentImpl.INSTANCE; ? ScriptSourceInputNonExistentImpl.INSTANCE
} : new ScriptSourceInputFromUrl( resourceUrl, charsetName );
else {
return new ScriptSourceInputFromUrl( resourceUrl, charsetName );
}
} }
catch (Exception e) { catch (Exception e) {
throw new SchemaManagementException( "Error resolving legacy import resource : " + resourceName, e ); throw new SchemaManagementException( "Error resolving legacy import resource : " + resourceName, e );
} }
} }
	/**
	 * The name of the charset to use when reading SQL import scripts, or
	 * {@code null} if none was explicitly configured.
	 *
	 * @see org.hibernate.cfg.AvailableSettings#HBM2DDL_CHARSET_NAME
	 */
	private static String getCharsetName(ExecutionOptions options) {
		return (String) options.getConfigurationValues().get( HBM2DDL_CHARSET_NAME );
	}
/**
* @see org.hibernate.cfg.AvailableSettings#JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE
*
* @return a {@link java.io.Reader} or a string URL
*/
private static Object getImportScriptSetting(ExecutionOptions options) {
final Map<String, Object> configuration = options.getConfigurationValues();
final Object importScriptSetting = configuration.get( HBM2DDL_LOAD_SCRIPT_SOURCE );
return importScriptSetting == null
? configuration.get( JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE )
: importScriptSetting;
}
/** /**
* For testing... * For testing...
* *
@ -650,7 +800,9 @@ public class SchemaCreatorImpl implements SchemaCreator {
Metadata metadata, Metadata metadata,
final boolean manageNamespaces, final boolean manageNamespaces,
GenerationTarget... targets) { GenerationTarget... targets) {
final ServiceRegistry serviceRegistry = ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry(); final ServiceRegistry serviceRegistry =
( (MetadataImplementor) metadata ).getMetadataBuildingOptions()
.getServiceRegistry();
doCreation( doCreation(
metadata, metadata,
serviceRegistry, serviceRegistry,

View File

@ -37,8 +37,6 @@ import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
import org.hibernate.engine.jdbc.spi.SqlStatementLogger; import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
import org.hibernate.internal.CoreLogging; import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger; import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.mapping.ForeignKey; import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.Table; import org.hibernate.mapping.Table;
import org.hibernate.mapping.UserDefinedType; import org.hibernate.mapping.UserDefinedType;
@ -65,9 +63,12 @@ import org.hibernate.tool.schema.spi.TargetDescriptor;
import org.jboss.logging.Logger; import org.jboss.logging.Logger;
import static org.hibernate.internal.util.StringHelper.isNotEmpty;
import static org.hibernate.internal.util.collections.CollectionHelper.setOfSize;
import static org.hibernate.tool.schema.internal.Helper.interpretFormattingEnabled;
/** /**
* This is functionally nothing more than the creation script from the older SchemaExport class (plus some * Basic implementation of {@link SchemaDropper}.
* additional stuff in the script).
* *
* @author Steve Ebersole * @author Steve Ebersole
*/ */
@ -105,18 +106,21 @@ public class SchemaDropperImpl implements SchemaDropper {
public void doDrop( public void doDrop(
Metadata metadata, Metadata metadata,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter, ContributableMatcher inclusionFilter,
SourceDescriptor sourceDescriptor, SourceDescriptor sourceDescriptor,
TargetDescriptor targetDescriptor) { TargetDescriptor targetDescriptor) {
if ( !targetDescriptor.getTargetTypes().isEmpty() ) {
if ( targetDescriptor.getTargetTypes().isEmpty() ) { final Map<String, Object> configuration = options.getConfigurationValues();
return; final JdbcContext jdbcContext = tool.resolveJdbcContext( configuration );
doDrop(
metadata,
options,
inclusionFilter,
jdbcContext.getDialect(),
sourceDescriptor,
tool.buildGenerationTargets( targetDescriptor, jdbcContext, configuration, true )
);
} }
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final GenerationTarget[] targets = tool.buildGenerationTargets( targetDescriptor, jdbcContext, options.getConfigurationValues(), true );
doDrop( metadata, options, contributableInclusionFilter, jdbcContext.getDialect(), sourceDescriptor, targets );
} }
/** /**
@ -129,7 +133,7 @@ public class SchemaDropperImpl implements SchemaDropper {
Dialect dialect, Dialect dialect,
SourceDescriptor sourceDescriptor, SourceDescriptor sourceDescriptor,
GenerationTarget... targets) { GenerationTarget... targets) {
doDrop( metadata, options, (contributed) -> true, dialect, sourceDescriptor, targets ); doDrop( metadata, options, contributed -> true, dialect, sourceDescriptor, targets );
} }
/** /**
@ -139,7 +143,7 @@ public class SchemaDropperImpl implements SchemaDropper {
public void doDrop( public void doDrop(
Metadata metadata, Metadata metadata,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter, ContributableMatcher inclusionFilter,
Dialect dialect, Dialect dialect,
SourceDescriptor sourceDescriptor, SourceDescriptor sourceDescriptor,
GenerationTarget... targets) { GenerationTarget... targets) {
@ -148,7 +152,7 @@ public class SchemaDropperImpl implements SchemaDropper {
} }
try { try {
performDrop( metadata, options, contributableInclusionFilter, dialect, sourceDescriptor, targets ); performDrop( metadata, options, inclusionFilter, dialect, sourceDescriptor, targets );
} }
finally { finally {
for ( GenerationTarget target : targets ) { for ( GenerationTarget target : targets ) {
@ -165,30 +169,36 @@ public class SchemaDropperImpl implements SchemaDropper {
private void performDrop( private void performDrop(
Metadata metadata, Metadata metadata,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter, ContributableMatcher inclusionFilter,
Dialect dialect, Dialect dialect,
SourceDescriptor sourceDescriptor, SourceDescriptor sourceDescriptor,
GenerationTarget... targets) { GenerationTarget... targets) {
final SqlScriptCommandExtractor commandExtractor = tool.getServiceRegistry().getService( SqlScriptCommandExtractor.class ); final SqlScriptCommandExtractor commandExtractor = getCommandExtractor();
final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() ); final boolean format = interpretFormattingEnabled( options.getConfigurationValues() );
final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter(); final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter();
if ( sourceDescriptor.getSourceType() == SourceType.SCRIPT ) { switch ( sourceDescriptor.getSourceType() ) {
dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); case SCRIPT:
} dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
else if ( sourceDescriptor.getSourceType() == SourceType.METADATA ) { break;
dropFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets ); case METADATA:
} dropFromMetadata( metadata, options, inclusionFilter, dialect, formatter, targets );
else if ( sourceDescriptor.getSourceType() == SourceType.METADATA_THEN_SCRIPT ) { break;
dropFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets ); case METADATA_THEN_SCRIPT:
dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); dropFromMetadata( metadata, options, inclusionFilter, dialect, formatter, targets );
} dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
else { break;
dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); case SCRIPT_THEN_METADATA:
dropFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets ); dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
dropFromMetadata( metadata, options, inclusionFilter, dialect, formatter, targets );
break;
} }
} }
private SqlScriptCommandExtractor getCommandExtractor() {
return tool.getServiceRegistry().getService(SqlScriptCommandExtractor.class);
}
private void dropFromScript( private void dropFromScript(
ScriptSourceInput scriptSourceInput, ScriptSourceInput scriptSourceInput,
SqlScriptCommandExtractor commandExtractor, SqlScriptCommandExtractor commandExtractor,
@ -199,175 +209,245 @@ public class SchemaDropperImpl implements SchemaDropper {
final List<String> commands = scriptSourceInput.extract( final List<String> commands = scriptSourceInput.extract(
reader -> commandExtractor.extractCommands( reader, dialect ) reader -> commandExtractor.extractCommands( reader, dialect )
); );
for ( int i = 0; i < commands.size(); i++ ) { for ( String command : commands ) {
applySqlString( commands.get( i ), formatter, options, targets ); applySqlString( command, formatter, options, targets );
} }
} }
private static SqlStringGenerationContext createSqlStringGenerationContext(ExecutionOptions options, Metadata metadata) {
final Database database = metadata.getDatabase();
return SqlStringGenerationContextImpl.fromConfigurationMap(
database.getJdbcEnvironment(),
database,
options.getConfigurationValues()
);
}
private void dropFromMetadata( private void dropFromMetadata(
Metadata metadata, Metadata metadata,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter, ContributableMatcher inclusionFilter,
Dialect dialect, Dialect dialect,
Formatter formatter, Formatter formatter,
GenerationTarget... targets) { GenerationTarget... targets) {
final Database database = metadata.getDatabase();
SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.fromConfigurationMap(
metadata.getDatabase().getJdbcEnvironment(), database, options.getConfigurationValues());
boolean tryToDropCatalogs = false;
boolean tryToDropSchemas = false;
if ( options.shouldManageNamespaces() ) {
if ( dialect.canCreateSchema() ) {
tryToDropSchemas = true;
}
if ( dialect.canCreateCatalog() ) {
tryToDropCatalogs = true;
}
}
final Set<String> exportIdentifiers = CollectionHelper.setOfSize( 50 );
// NOTE : init commands are irrelevant for dropping... // NOTE : init commands are irrelevant for dropping...
final SqlStringGenerationContext context = createSqlStringGenerationContext( options, metadata );
// Reverse the list on drop to retain possible dependencies // Reverse the list on drop to retain possible dependencies
final Collection<AuxiliaryDatabaseObject> reversedAuxiliaryDatabaseObjects = reverse( database.getAuxiliaryDatabaseObjects() ); dropAuxiliaryObjectsBeforeTables( metadata, options, dialect, formatter, context, targets );
dropConstraintsTablesSequences(
metadata,
options,
inclusionFilter,
dialect,
formatter,
context,
targets
);
dropAuxiliaryObjectsAfterTables( metadata, options, dialect, formatter, context, targets );
dropUserDefinedTypes( metadata, options, dialect, formatter, context, targets );
dropSchemasAndCatalogs( metadata, options, dialect, formatter, targets );
}
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : reversedAuxiliaryDatabaseObjects ) { private void dropConstraintsTablesSequences(
if ( auxiliaryDatabaseObject.beforeTablesOnCreation() ) { Metadata metadata,
continue; ExecutionOptions options,
} ContributableMatcher inclusionFilter,
if ( !auxiliaryDatabaseObject.appliesToDialect( dialect ) ) { Dialect dialect,
continue; Formatter formatter,
} SqlStringGenerationContext context,
GenerationTarget[] targets) {
final Set<String> exportIdentifiers = setOfSize( 50 );
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
applySqlStrings( // we need to drop all constraints/indexes prior to dropping the tables
dialect.getAuxiliaryDatabaseObjectExporter().getSqlDropStrings( auxiliaryDatabaseObject, metadata, applyConstraintDropping(
sqlStringGenerationContext namespace,
), metadata,
formatter, formatter,
options, options,
targets context,
); inclusionFilter,
} targets
);
for ( Namespace namespace : database.getNamespaces() ) { // now it's safe to drop the tables
dropTables(
metadata,
options,
inclusionFilter,
dialect,
formatter,
exportIdentifiers,
context,
namespace,
targets
);
if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) { dropSequences(
continue; metadata,
} options,
inclusionFilter,
// we need to drop all constraints/indexes prior to dropping the tables dialect,
applyConstraintDropping( namespace, metadata, formatter, options, sqlStringGenerationContext, formatter,
contributableInclusionFilter, targets ); exportIdentifiers,
context,
// now it's safe to drop the tables namespace,
for ( Table table : namespace.getTables() ) { targets
if ( ! table.isPhysicalTable() ) { );
continue;
}
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionFilter.matches( table ) ) {
continue;
}
checkExportIdentifier( table, exportIdentifiers );
applySqlStrings( dialect.getTableExporter().getSqlDropStrings( table, metadata,
sqlStringGenerationContext
), formatter, options,targets );
}
for ( Sequence sequence : namespace.getSequences() ) {
if ( ! options.getSchemaFilter().includeSequence( sequence ) ) {
continue;
}
if ( ! contributableInclusionFilter.matches( sequence ) ) {
continue;
}
checkExportIdentifier( sequence, exportIdentifiers );
applySqlStrings( dialect.getSequenceExporter().getSqlDropStrings( sequence, metadata,
sqlStringGenerationContext
), formatter, options, targets );
} }
} }
}
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : reversedAuxiliaryDatabaseObjects ) { private static void dropAuxiliaryObjectsBeforeTables(
if ( !auxiliaryDatabaseObject.beforeTablesOnCreation() ) { Metadata metadata,
continue; ExecutionOptions options,
} Dialect dialect,
if ( !auxiliaryDatabaseObject.appliesToDialect( dialect ) ) { Formatter formatter,
continue; SqlStringGenerationContext context,
} GenerationTarget[] targets) {
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject :
applySqlStrings( reverse( metadata.getDatabase().getAuxiliaryDatabaseObjects() ) ) {
auxiliaryDatabaseObject.sqlDropStrings( sqlStringGenerationContext ), if ( !auxiliaryDatabaseObject.beforeTablesOnCreation()
formatter, && auxiliaryDatabaseObject.appliesToDialect(dialect) ) {
options,
targets
);
}
for ( Namespace namespace : database.getNamespaces() ) {
if ( !options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
final List<UserDefinedType> dependencyOrderedUserDefinedTypes = namespace.getDependencyOrderedUserDefinedTypes();
Collections.reverse( dependencyOrderedUserDefinedTypes );
for ( UserDefinedType userDefinedType : dependencyOrderedUserDefinedTypes ) {
applySqlStrings( applySqlStrings(
dialect.getUserDefinedTypeExporter() dialect.getAuxiliaryDatabaseObjectExporter()
.getSqlDropStrings( userDefinedType, metadata, sqlStringGenerationContext ), .getSqlDropStrings( auxiliaryDatabaseObject, metadata, context ),
formatter, formatter,
options, options,
targets targets
); );
} }
} }
}
if ( tryToDropCatalogs || tryToDropSchemas ) { private static void dropAuxiliaryObjectsAfterTables(
Set<Identifier> exportedCatalogs = new HashSet<>(); Metadata metadata,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
GenerationTarget[] targets) {
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject :
reverse( metadata.getDatabase().getAuxiliaryDatabaseObjects() ) ) {
if ( auxiliaryDatabaseObject.beforeTablesOnCreation()
&& auxiliaryDatabaseObject.appliesToDialect(dialect) ) {
applySqlStrings(
auxiliaryDatabaseObject.sqlDropStrings( context ),
formatter,
options,
targets
);
}
}
}
for ( Namespace namespace : database.getNamespaces() ) { private static void dropSequences(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher inclusionFilter,
Dialect dialect,
Formatter formatter,
Set<String> exportIdentifiers,
SqlStringGenerationContext context,
Namespace namespace,
GenerationTarget[] targets) {
for ( Sequence sequence : namespace.getSequences() ) {
if ( options.getSchemaFilter().includeSequence( sequence )
&& inclusionFilter.matches( sequence ) ) {
checkExportIdentifier( sequence, exportIdentifiers);
applySqlStrings(
dialect.getSequenceExporter().getSqlDropStrings( sequence, metadata, context ),
formatter,
options,
targets
);
}
}
}
if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) { private static void dropTables(
continue; Metadata metadata,
} ExecutionOptions options,
ContributableMatcher inclusionFilter,
Dialect dialect,
Formatter formatter,
Set<String> exportIdentifiers,
SqlStringGenerationContext context,
Namespace namespace,
GenerationTarget[] targets) {
for ( Table table : namespace.getTables() ) {
if ( table.isPhysicalTable()
&& options.getSchemaFilter().includeTable( table )
&& inclusionFilter.matches( table ) ) {
checkExportIdentifier( table, exportIdentifiers);
applySqlStrings(
dialect.getTableExporter().getSqlDropStrings( table, metadata, context),
formatter,
options,
targets
);
}
}
}
if ( tryToDropSchemas && namespace.getPhysicalName().getSchema() != null ) { private static void dropUserDefinedTypes(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
GenerationTarget[] targets) {
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
final List<UserDefinedType> dependencyOrderedUserDefinedTypes = namespace.getDependencyOrderedUserDefinedTypes();
Collections.reverse( dependencyOrderedUserDefinedTypes );
for ( UserDefinedType userDefinedType : dependencyOrderedUserDefinedTypes ) {
applySqlStrings( applySqlStrings(
dialect.getDropSchemaCommand( dialect.getUserDefinedTypeExporter()
namespace.getPhysicalName().getSchema().render( dialect ) .getSqlDropStrings( userDefinedType, metadata, context ),
),
formatter, formatter,
options, options,
targets targets
); );
} }
if ( tryToDropCatalogs ) { }
final Identifier catalogLogicalName = namespace.getName().getCatalog(); }
final Identifier catalogPhysicalName = namespace.getPhysicalName().getCatalog(); }
if ( catalogPhysicalName != null && !exportedCatalogs.contains( catalogLogicalName ) ) { private static void dropSchemasAndCatalogs(
applySqlStrings( Metadata metadata,
dialect.getDropCatalogCommand( ExecutionOptions options,
catalogPhysicalName.render( dialect ) Dialect dialect,
), Formatter formatter,
formatter, GenerationTarget[] targets) {
options, boolean tryToDropCatalogs = options.shouldManageNamespaces() && dialect.canCreateCatalog();
targets boolean tryToDropSchemas = options.shouldManageNamespaces() && dialect.canCreateSchema();
); if ( tryToDropCatalogs || tryToDropSchemas) {
exportedCatalogs.add( catalogLogicalName ); final Set<Identifier> exportedCatalogs = new HashSet<>();
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
if ( tryToDropSchemas && namespace.getPhysicalName().getSchema() != null ) {
final String schemaName = namespace.getPhysicalName().getSchema().render( dialect );
applySqlStrings( dialect.getDropSchemaCommand( schemaName ), formatter, options, targets);
}
if (tryToDropCatalogs) {
final Identifier catalogLogicalName = namespace.getName().getCatalog();
final Identifier catalogPhysicalName = namespace.getPhysicalName().getCatalog();
if ( catalogPhysicalName != null && !exportedCatalogs.contains( catalogLogicalName ) ) {
final String catalogName = catalogPhysicalName.render( dialect );
applySqlStrings( dialect.getDropCatalogCommand( catalogName ), formatter, options, targets );
exportedCatalogs.add( catalogLogicalName );
}
} }
} }
} }
} }
} }
private Collection<AuxiliaryDatabaseObject> reverse(Collection<AuxiliaryDatabaseObject> auxiliaryDatabaseObjects) { private static Collection<AuxiliaryDatabaseObject> reverse(Collection<AuxiliaryDatabaseObject> auxiliaryDatabaseObjects) {
final List<AuxiliaryDatabaseObject> list = new ArrayList<>( auxiliaryDatabaseObjects ); final List<AuxiliaryDatabaseObject> list = new ArrayList<>( auxiliaryDatabaseObjects );
Collections.reverse( list ); Collections.reverse( list );
return list; return list;
@ -378,35 +458,24 @@ public class SchemaDropperImpl implements SchemaDropper {
Metadata metadata, Metadata metadata,
Formatter formatter, Formatter formatter,
ExecutionOptions options, ExecutionOptions options,
SqlStringGenerationContext sqlStringGenerationContext, SqlStringGenerationContext context,
ContributableMatcher contributableInclusionFilter, ContributableMatcher inclusionFilter,
GenerationTarget... targets) { GenerationTarget... targets) {
final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect(); final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
if ( dialect.dropConstraints() ) {
if ( !dialect.dropConstraints() ) { for ( Table table : namespace.getTables() ) {
return; if ( table.isPhysicalTable()
} && options.getSchemaFilter().includeTable( table )
&& inclusionFilter.matches( table ) ) {
for ( Table table : namespace.getTables() ) { for ( ForeignKey foreignKey : table.getForeignKeys().values() ) {
if ( !table.isPhysicalTable() ) { applySqlStrings(
continue; dialect.getForeignKeyExporter().getSqlDropStrings( foreignKey, metadata, context ),
} formatter,
if ( ! options.getSchemaFilter().includeTable( table ) ) { options,
continue; targets
} );
if ( ! contributableInclusionFilter.matches( table ) ) { }
continue; }
}
for ( ForeignKey foreignKey : table.getForeignKeys().values() ) {
applySqlStrings(
dialect.getForeignKeyExporter().getSqlDropStrings( foreignKey, metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
} }
} }
} }
@ -424,12 +493,10 @@ public class SchemaDropperImpl implements SchemaDropper {
Formatter formatter, Formatter formatter,
ExecutionOptions options, ExecutionOptions options,
GenerationTarget... targets) { GenerationTarget... targets) {
if ( sqlStrings == null ) { if ( sqlStrings != null ) {
return; for ( String sqlString : sqlStrings ) {
} applySqlString( sqlString, formatter, options, targets );
}
for ( String sqlString : sqlStrings ) {
applySqlString( sqlString, formatter, options, targets );
} }
} }
@ -438,73 +505,28 @@ public class SchemaDropperImpl implements SchemaDropper {
Formatter formatter, Formatter formatter,
ExecutionOptions options, ExecutionOptions options,
GenerationTarget... targets) { GenerationTarget... targets) {
if ( StringHelper.isEmpty( sqlString ) ) { if ( isNotEmpty( sqlString ) ) {
return; final String sqlStringFormatted = formatter.format( sqlString );
} for ( GenerationTarget target : targets ) {
try {
String sqlStringFormatted = formatter.format( sqlString ); target.accept( sqlStringFormatted );
for ( GenerationTarget target : targets ) { }
try { catch (CommandAcceptanceException e) {
target.accept( sqlStringFormatted ); options.getExceptionHandler().handleException( e );
} }
catch (CommandAcceptanceException e) {
options.getExceptionHandler().handleException( e );
} }
} }
} }
/**
* For testing...
*
* @param metadata The metadata for which to generate the creation commands.
*
* @return The generation commands
*/
public List<String> generateDropCommands(Metadata metadata, final boolean manageNamespaces) {
final JournalingGenerationTarget target = new JournalingGenerationTarget();
final ServiceRegistry serviceRegistry = ( (MetadataImplementor) metadata ).getMetadataBuildingOptions()
.getServiceRegistry();
final Dialect dialect = serviceRegistry.getService( JdbcEnvironment.class ).getDialect();
final ExecutionOptions options = new ExecutionOptions() {
@Override
public boolean shouldManageNamespaces() {
return manageNamespaces;
}
@Override
public Map<String,Object> getConfigurationValues() {
return Collections.emptyMap();
}
@Override
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerHaltImpl.INSTANCE;
}
@Override
public SchemaFilter getSchemaFilter() {
return schemaFilter;
}
};
dropFromMetadata( metadata, options, (contributed) -> true, dialect, FormatStyle.NONE.getFormatter(), target );
return target.commands;
}
@Override @Override
public DelayedDropAction buildDelayedAction( public DelayedDropAction buildDelayedAction(
Metadata metadata, Metadata metadata,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter, ContributableMatcher inclusionFilter,
SourceDescriptor sourceDescriptor) { SourceDescriptor sourceDescriptor) {
final JournalingGenerationTarget target = new JournalingGenerationTarget(); final JournalingGenerationTarget target = new JournalingGenerationTarget();
final Dialect dialect = tool.getServiceRegistry().getService( JdbcEnvironment.class ).getDialect(); final Dialect dialect = tool.getServiceRegistry().getService( JdbcEnvironment.class ).getDialect();
doDrop( metadata, options, contributableInclusionFilter, dialect, sourceDescriptor, target ); doDrop( metadata, options, inclusionFilter, dialect, sourceDescriptor, target );
return new DelayedDropActionImpl( target.commands, tool.getCustomDatabaseGenerationTarget() ); return new DelayedDropActionImpl( target.commands, tool.getCustomDatabaseGenerationTarget() );
} }
@ -512,7 +534,9 @@ public class SchemaDropperImpl implements SchemaDropper {
* For tests * For tests
*/ */
public void doDrop(Metadata metadata, boolean manageNamespaces, GenerationTarget... targets) { public void doDrop(Metadata metadata, boolean manageNamespaces, GenerationTarget... targets) {
final ServiceRegistry serviceRegistry = ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry(); final ServiceRegistry serviceRegistry =
( (MetadataImplementor) metadata ).getMetadataBuildingOptions()
.getServiceRegistry();
doDrop( doDrop(
metadata, metadata,
serviceRegistry, serviceRegistry,
@ -535,7 +559,8 @@ public class SchemaDropperImpl implements SchemaDropper {
final JdbcContext jdbcContext = tool.resolveJdbcContext( settings ); final JdbcContext jdbcContext = tool.resolveJdbcContext( settings );
targets = new GenerationTarget[] { targets = new GenerationTarget[] {
new GenerationTargetToDatabase( new GenerationTargetToDatabase(
serviceRegistry.getService( TransactionCoordinatorBuilder.class ).buildDdlTransactionIsolator( jdbcContext ), serviceRegistry.getService( TransactionCoordinatorBuilder.class )
.buildDdlTransactionIsolator( jdbcContext ),
true true
) )
}; };

View File

@ -49,6 +49,8 @@ import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_LOAD_SCRIPT_SO
import static org.hibernate.tool.schema.internal.Helper.interpretScriptSourceSetting; import static org.hibernate.tool.schema.internal.Helper.interpretScriptSourceSetting;
/** /**
* Basic implementation of {@link SchemaTruncator}.
*
* @author Gavin King * @author Gavin King
*/ */
public class SchemaTruncatorImpl implements SchemaTruncator { public class SchemaTruncatorImpl implements SchemaTruncator {
@ -119,7 +121,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
Formatter formatter, Formatter formatter,
GenerationTarget... targets) { GenerationTarget... targets) {
final Database database = metadata.getDatabase(); final Database database = metadata.getDatabase();
SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.fromConfigurationMap( SqlStringGenerationContext context = SqlStringGenerationContextImpl.fromConfigurationMap(
metadata.getDatabase().getJdbcEnvironment(), database, options.getConfigurationValues() ); metadata.getDatabase().getJdbcEnvironment(), database, options.getConfigurationValues() );
@ -131,7 +133,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
continue; continue;
} }
disableConstraints( namespace, metadata, formatter, options, sqlStringGenerationContext, disableConstraints( namespace, metadata, formatter, options, context,
contributableInclusionFilter, targets ); contributableInclusionFilter, targets );
applySqlString( dialect.getTableCleaner().getSqlBeforeString(), formatter, options,targets ); applySqlString( dialect.getTableCleaner().getSqlBeforeString(), formatter, options,targets );
@ -151,7 +153,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
list.add( table ); list.add( table );
} }
applySqlStrings( dialect.getTableCleaner().getSqlTruncateStrings( list, metadata, applySqlStrings( dialect.getTableCleaner().getSqlTruncateStrings( list, metadata,
sqlStringGenerationContext context
), formatter, options,targets ); ), formatter, options,targets );
//TODO: reset the sequences? //TODO: reset the sequences?
@ -165,12 +167,12 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
// checkExportIdentifier( sequence, exportIdentifiers ); // checkExportIdentifier( sequence, exportIdentifiers );
// //
// applySqlStrings( dialect.getSequenceExporter().getSqlDropStrings( sequence, metadata, // applySqlStrings( dialect.getSequenceExporter().getSqlDropStrings( sequence, metadata,
// sqlStringGenerationContext // context
// ), formatter, options, targets ); // ), formatter, options, targets );
// } // }
applySqlString( dialect.getTableCleaner().getSqlAfterString(), formatter, options,targets ); applySqlString( dialect.getTableCleaner().getSqlAfterString(), formatter, options,targets );
enableConstraints( namespace, metadata, formatter, options, sqlStringGenerationContext, enableConstraints( namespace, metadata, formatter, options, context,
contributableInclusionFilter, targets ); contributableInclusionFilter, targets );
} }
@ -184,7 +186,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
Metadata metadata, Metadata metadata,
Formatter formatter, Formatter formatter,
ExecutionOptions options, ExecutionOptions options,
SqlStringGenerationContext sqlStringGenerationContext, SqlStringGenerationContext context,
ContributableMatcher contributableInclusionFilter, ContributableMatcher contributableInclusionFilter,
GenerationTarget... targets) { GenerationTarget... targets) {
final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect(); final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
@ -204,7 +206,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
if ( dialect.canDisableConstraints() ) { if ( dialect.canDisableConstraints() ) {
applySqlString( applySqlString(
dialect.getTableCleaner().getSqlDisableConstraintString( foreignKey, metadata, dialect.getTableCleaner().getSqlDisableConstraintString( foreignKey, metadata,
sqlStringGenerationContext context
), ),
formatter, formatter,
options, options,
@ -214,7 +216,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
else if ( !dialect.canBatchTruncate() ) { else if ( !dialect.canBatchTruncate() ) {
applySqlStrings( applySqlStrings(
dialect.getForeignKeyExporter().getSqlDropStrings( foreignKey, metadata, dialect.getForeignKeyExporter().getSqlDropStrings( foreignKey, metadata,
sqlStringGenerationContext context
), ),
formatter, formatter,
options, options,
@ -230,7 +232,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
Metadata metadata, Metadata metadata,
Formatter formatter, Formatter formatter,
ExecutionOptions options, ExecutionOptions options,
SqlStringGenerationContext sqlStringGenerationContext, SqlStringGenerationContext context,
ContributableMatcher contributableInclusionFilter, ContributableMatcher contributableInclusionFilter,
GenerationTarget... targets) { GenerationTarget... targets) {
final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect(); final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
@ -250,7 +252,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
if ( dialect.canDisableConstraints() ) { if ( dialect.canDisableConstraints() ) {
applySqlString( applySqlString(
dialect.getTableCleaner().getSqlEnableConstraintString( foreignKey, metadata, dialect.getTableCleaner().getSqlEnableConstraintString( foreignKey, metadata,
sqlStringGenerationContext context
), ),
formatter, formatter,
options, options,
@ -260,7 +262,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
else if ( !dialect.canBatchTruncate() ) { else if ( !dialect.canBatchTruncate() ) {
applySqlStrings( applySqlStrings(
dialect.getForeignKeyExporter().getSqlCreateStrings( foreignKey, metadata, dialect.getForeignKeyExporter().getSqlCreateStrings( foreignKey, metadata,
sqlStringGenerationContext context
), ),
formatter, formatter,
options, options,

View File

@ -50,8 +50,8 @@ public class StandardTableMigrator implements TableMigrator {
Table table, Table table,
Metadata metadata, Metadata metadata,
TableInformation tableInfo, TableInformation tableInfo,
SqlStringGenerationContext sqlStringGenerationContext) { SqlStringGenerationContext context) {
return sqlAlterStrings( table, dialect, metadata, tableInfo, sqlStringGenerationContext ) return sqlAlterStrings( table, dialect, metadata, tableInfo, context )
.toArray( EMPTY_STRING_ARRAY ); .toArray( EMPTY_STRING_ARRAY );
} }
@ -61,9 +61,9 @@ public class StandardTableMigrator implements TableMigrator {
Dialect dialect, Dialect dialect,
Metadata metadata, Metadata metadata,
TableInformation tableInformation, TableInformation tableInformation,
SqlStringGenerationContext sqlStringGenerationContext) throws HibernateException { SqlStringGenerationContext context) throws HibernateException {
final String tableName = sqlStringGenerationContext.format( new QualifiedTableName( final String tableName = context.format( new QualifiedTableName(
Identifier.toIdentifier( table.getCatalog(), table.isCatalogQuoted() ), Identifier.toIdentifier( table.getCatalog(), table.isCatalogQuoted() ),
Identifier.toIdentifier( table.getSchema(), table.isSchemaQuoted() ), Identifier.toIdentifier( table.getSchema(), table.isSchemaQuoted() ),
table.getNameIdentifier() ) table.getNameIdentifier() )
@ -80,7 +80,7 @@ public class StandardTableMigrator implements TableMigrator {
if ( columnInformation == null ) { if ( columnInformation == null ) {
// the column doesn't exist at all. // the column doesn't exist at all.
final String addColumn = dialect.getAddColumnString() + ' ' final String addColumn = dialect.getAddColumnString() + ' '
+ getFullColumnDeclaration( column, table, metadata, dialect, sqlStringGenerationContext ) + getFullColumnDeclaration( column, table, metadata, dialect, context )
+ dialect.getAddColumnSuffixString(); + dialect.getAddColumnSuffixString();
results.add( alterTable + addColumn ); results.add( alterTable + addColumn );
} }

View File

@ -24,5 +24,5 @@ public interface TableMigrator {
Table table, Table table,
Metadata metadata, Metadata metadata,
TableInformation tableInfo, TableInformation tableInfo,
SqlStringGenerationContext sqlStringGenerationContext); SqlStringGenerationContext context);
} }

View File

@ -23,7 +23,7 @@ import org.hibernate.tool.schema.extract.spi.ExtractionContext;
public class ImprovedExtractionContextImpl implements ExtractionContext { public class ImprovedExtractionContextImpl implements ExtractionContext {
private final ServiceRegistry serviceRegistry; private final ServiceRegistry serviceRegistry;
private final JdbcEnvironment jdbcEnvironment; private final JdbcEnvironment jdbcEnvironment;
private final SqlStringGenerationContext sqlStringGenerationContext; private final SqlStringGenerationContext context;
private final DdlTransactionIsolator ddlTransactionIsolator; private final DdlTransactionIsolator ddlTransactionIsolator;
private final DatabaseObjectAccess databaseObjectAccess; private final DatabaseObjectAccess databaseObjectAccess;
@ -33,12 +33,12 @@ public class ImprovedExtractionContextImpl implements ExtractionContext {
public ImprovedExtractionContextImpl( public ImprovedExtractionContextImpl(
ServiceRegistry serviceRegistry, ServiceRegistry serviceRegistry,
JdbcEnvironment jdbcEnvironment, JdbcEnvironment jdbcEnvironment,
SqlStringGenerationContext sqlStringGenerationContext, SqlStringGenerationContext context,
DdlTransactionIsolator ddlTransactionIsolator, DdlTransactionIsolator ddlTransactionIsolator,
DatabaseObjectAccess databaseObjectAccess) { DatabaseObjectAccess databaseObjectAccess) {
this.serviceRegistry = serviceRegistry; this.serviceRegistry = serviceRegistry;
this.jdbcEnvironment = jdbcEnvironment; this.jdbcEnvironment = jdbcEnvironment;
this.sqlStringGenerationContext = sqlStringGenerationContext; this.context = context;
this.ddlTransactionIsolator = ddlTransactionIsolator; this.ddlTransactionIsolator = ddlTransactionIsolator;
this.databaseObjectAccess = databaseObjectAccess; this.databaseObjectAccess = databaseObjectAccess;
} }
@ -55,7 +55,7 @@ public class ImprovedExtractionContextImpl implements ExtractionContext {
@Override @Override
public SqlStringGenerationContext getSqlStringGenerationContext() { public SqlStringGenerationContext getSqlStringGenerationContext() {
return sqlStringGenerationContext; return context;
} }
@Override @Override
@ -81,12 +81,12 @@ public class ImprovedExtractionContextImpl implements ExtractionContext {
@Override @Override
public Identifier getDefaultCatalog() { public Identifier getDefaultCatalog() {
return sqlStringGenerationContext.getDefaultCatalog(); return context.getDefaultCatalog();
} }
@Override @Override
public Identifier getDefaultSchema() { public Identifier getDefaultSchema() {
return sqlStringGenerationContext.getDefaultSchema(); return context.getDefaultSchema();
} }
@Override @Override

View File

@ -7,7 +7,6 @@
package org.hibernate.tool.schema.spi; package org.hibernate.tool.schema.spi;
import org.hibernate.Incubating; import org.hibernate.Incubating;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.SqlStringGenerationContext; import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment; import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.resource.transaction.spi.DdlTransactionIsolator; import org.hibernate.resource.transaction.spi.DdlTransactionIsolator;
@ -27,7 +26,7 @@ public interface ExtractionTool {
ExtractionContext createExtractionContext( ExtractionContext createExtractionContext(
ServiceRegistry serviceRegistry, ServiceRegistry serviceRegistry,
JdbcEnvironment jdbcEnvironment, JdbcEnvironment jdbcEnvironment,
SqlStringGenerationContext sqlStringGenerationContext, SqlStringGenerationContext context,
DdlTransactionIsolator ddlTransactionIsolator, DdlTransactionIsolator ddlTransactionIsolator,
ExtractionContext.DatabaseObjectAccess databaseObjectAccess); ExtractionContext.DatabaseObjectAccess databaseObjectAccess);