refactor SchemaCreatorImpl, SchemaDropperImpl into bite-sized methods

This commit is contained in:
Gavin 2022-12-19 01:07:28 +01:00 committed by Gavin King
parent 32790456b7
commit 782d2c9707
15 changed files with 825 additions and 644 deletions

View File

@ -32,7 +32,7 @@ import org.hibernate.tool.schema.spi.SchemaManagementTool;
public class DatabaseInformationImpl
implements DatabaseInformation, ExtractionContext.DatabaseObjectAccess {
private final JdbcEnvironment jdbcEnvironment;
private final SqlStringGenerationContext sqlStringGenerationContext;
private final SqlStringGenerationContext context;
private final ExtractionContext extractionContext;
private final InformationExtractor extractor;
@ -41,15 +41,15 @@ public class DatabaseInformationImpl
public DatabaseInformationImpl(
ServiceRegistry serviceRegistry,
JdbcEnvironment jdbcEnvironment,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext context,
DdlTransactionIsolator ddlTransactionIsolator,
SchemaManagementTool tool) throws SQLException {
this.jdbcEnvironment = jdbcEnvironment;
this.sqlStringGenerationContext = sqlStringGenerationContext;
this.context = context;
this.extractionContext = tool.getExtractionTool().createExtractionContext(
serviceRegistry,
jdbcEnvironment,
sqlStringGenerationContext,
context,
ddlTransactionIsolator,
this
);
@ -80,13 +80,13 @@ public class DatabaseInformationImpl
@Override
public boolean catalogExists(Identifier catalog) {
return extractor.catalogExists( sqlStringGenerationContext.catalogWithDefault( catalog ) );
return extractor.catalogExists( context.catalogWithDefault( catalog ) );
}
@Override
public boolean schemaExists(Namespace.Name namespace) {
return extractor.schemaExists( sqlStringGenerationContext.catalogWithDefault( namespace.getCatalog() ),
sqlStringGenerationContext.schemaWithDefault( namespace.getSchema() ) );
return extractor.schemaExists( context.catalogWithDefault( namespace.getCatalog() ),
context.schemaWithDefault( namespace.getSchema() ) );
}
@Override
@ -111,16 +111,16 @@ public class DatabaseInformationImpl
}
return extractor.getTable(
sqlStringGenerationContext.catalogWithDefault( tableName.getCatalogName() ),
sqlStringGenerationContext.schemaWithDefault( tableName.getSchemaName() ),
context.catalogWithDefault( tableName.getCatalogName() ),
context.schemaWithDefault( tableName.getSchemaName() ),
tableName.getTableName()
);
}
@Override
public NameSpaceTablesInformation getTablesInformation(Namespace namespace) {
return extractor.getTables( sqlStringGenerationContext.catalogWithDefault( namespace.getPhysicalName().getCatalog() ),
sqlStringGenerationContext.schemaWithDefault( namespace.getPhysicalName().getSchema() ) );
return extractor.getTables( context.catalogWithDefault( namespace.getPhysicalName().getCatalog() ),
context.schemaWithDefault( namespace.getPhysicalName().getSchema() ) );
}
@Override

View File

@ -23,7 +23,7 @@ import org.hibernate.tool.schema.extract.spi.ExtractionContext;
public class ExtractionContextImpl implements ExtractionContext {
private final ServiceRegistry serviceRegistry;
private final JdbcEnvironment jdbcEnvironment;
private final SqlStringGenerationContext sqlStringGenerationContext;
private final SqlStringGenerationContext context;
private final JdbcConnectionAccess jdbcConnectionAccess;
private final DatabaseObjectAccess registeredTableAccess;
@ -33,12 +33,12 @@ public class ExtractionContextImpl implements ExtractionContext {
public ExtractionContextImpl(
ServiceRegistry serviceRegistry,
JdbcEnvironment jdbcEnvironment,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext context,
JdbcConnectionAccess jdbcConnectionAccess,
DatabaseObjectAccess registeredTableAccess) {
this.serviceRegistry = serviceRegistry;
this.jdbcEnvironment = jdbcEnvironment;
this.sqlStringGenerationContext = sqlStringGenerationContext;
this.context = context;
this.jdbcConnectionAccess = jdbcConnectionAccess;
this.registeredTableAccess = registeredTableAccess;
}
@ -55,7 +55,7 @@ public class ExtractionContextImpl implements ExtractionContext {
@Override
public SqlStringGenerationContext getSqlStringGenerationContext() {
return sqlStringGenerationContext;
return context;
}
@Override
@ -86,12 +86,12 @@ public class ExtractionContextImpl implements ExtractionContext {
@Override
public Identifier getDefaultCatalog() {
return sqlStringGenerationContext.getDefaultCatalog();
return context.getDefaultCatalog();
}
@Override
public Identifier getDefaultSchema() {
return sqlStringGenerationContext.getDefaultSchema();
return context.getDefaultSchema();
}
@Override

View File

@ -8,7 +8,6 @@ package org.hibernate.tool.schema.internal;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.stream.StreamSupport;
@ -62,6 +61,8 @@ import static org.hibernate.tool.schema.UniqueConstraintSchemaUpdateStrategy.DRO
import static org.hibernate.tool.schema.UniqueConstraintSchemaUpdateStrategy.SKIP;
/**
* Base implementation of {@link SchemaMigrator}.
*
* @author Steve Ebersole
*/
public abstract class AbstractSchemaMigrator implements SchemaMigrator {

View File

@ -34,6 +34,8 @@ import org.hibernate.type.descriptor.JdbcTypeNameMapper;
import org.jboss.logging.Logger;
/**
* Base implementation of {@link SchemaValidator}.
*
* @author Steve Ebersole
*/
public abstract class AbstractSchemaValidator implements SchemaValidator {
@ -54,7 +56,7 @@ public abstract class AbstractSchemaValidator implements SchemaValidator {
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter) {
SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.fromConfigurationMap(
SqlStringGenerationContext context = SqlStringGenerationContextImpl.fromConfigurationMap(
tool.getServiceRegistry().getService( JdbcEnvironment.class ),
metadata.getDatabase(),
options.getConfigurationValues()
@ -65,7 +67,7 @@ public abstract class AbstractSchemaValidator implements SchemaValidator {
final DatabaseInformation databaseInformation = Helper.buildDatabaseInformation(
tool.getServiceRegistry(),
isolator,
sqlStringGenerationContext,
context,
tool
);

View File

@ -24,10 +24,10 @@ import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
/**
* @author Andrea Boriero
*
* This implementation executes a single {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call
* to retrieve all the database table in order to determine if all the {@link jakarta.persistence.Entity} have a mapped database tables.
*
* @author Andrea Boriero
*/
public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
@ -50,7 +50,7 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
boolean tryToCreateSchemas,
Set<Identifier> exportedCatalogs,
Namespace namespace,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext context,
GenerationTarget[] targets) {
final NameSpaceTablesInformation tablesInformation =
new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() );
@ -76,12 +76,12 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
checkExportIdentifier( table, exportIdentifiers );
final TableInformation tableInformation = tables.getTableInformation( table );
if ( tableInformation == null ) {
createTable( table, dialect, metadata, formatter, options, sqlStringGenerationContext, targets );
createTable( table, dialect, metadata, formatter, options, context, targets );
}
else if ( tableInformation.isPhysicalTable() ) {
tablesInformation.addTableInformation( tableInformation );
migrateTable( table, tableInformation, dialect, metadata, formatter, options,
sqlStringGenerationContext, targets );
context, targets );
}
}
}
@ -93,9 +93,9 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
final TableInformation tableInformation = tablesInformation.getTableInformation( table );
if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
applyIndexes( table, tableInformation, dialect, metadata, formatter, options,
sqlStringGenerationContext, targets );
context, targets );
applyUniqueKeys( table, tableInformation, dialect, metadata, formatter, options,
sqlStringGenerationContext, targets );
context, targets );
}
}
}

View File

@ -48,7 +48,7 @@ public class Helper {
private static final Pattern COMMA_PATTERN = Pattern.compile( "\\s*,\\s*" );
public static ScriptSourceInput interpretScriptSourceSetting(
Object scriptSourceSetting,
Object scriptSourceSetting, //Reader or String URL
ClassLoaderService classLoaderService,
String charsetName ) {
if ( scriptSourceSetting instanceof Reader ) {
@ -169,14 +169,14 @@ public class Helper {
public static DatabaseInformation buildDatabaseInformation(
ServiceRegistry serviceRegistry,
DdlTransactionIsolator ddlTransactionIsolator,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext context,
SchemaManagementTool tool) {
final JdbcEnvironment jdbcEnvironment = serviceRegistry.getService( JdbcEnvironment.class );
try {
return new DatabaseInformationImpl(
serviceRegistry,
jdbcEnvironment,
sqlStringGenerationContext,
context,
ddlTransactionIsolator,
tool
);

View File

@ -449,13 +449,13 @@ public class HibernateSchemaManagementTool implements SchemaManagementTool, Serv
public ExtractionContext createExtractionContext(
ServiceRegistry serviceRegistry,
JdbcEnvironment jdbcEnvironment,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext context,
DdlTransactionIsolator ddlTransactionIsolator,
ExtractionContext.DatabaseObjectAccess databaseObjectAccess) {
return new ImprovedExtractionContextImpl(
serviceRegistry,
jdbcEnvironment,
sqlStringGenerationContext,
context,
ddlTransactionIsolator,
databaseObjectAccess
);

View File

@ -24,10 +24,10 @@ import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
/**
* @author Andrea Boriero
*
* This implementation executes one {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call
* for each {@link jakarta.persistence.Entity} in order to determine if a corresponding database table exists.
*
* @author Andrea Boriero
*/
public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
@ -50,7 +50,7 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
boolean tryToCreateSchemas,
Set<Identifier> exportedCatalogs,
Namespace namespace,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext context,
GenerationTarget[] targets) {
final NameSpaceTablesInformation tablesInformation =
new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() );
@ -74,12 +74,12 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
checkExportIdentifier( table, exportIdentifiers );
final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() );
if ( tableInformation == null ) {
createTable( table, dialect, metadata, formatter, options, sqlStringGenerationContext, targets );
createTable( table, dialect, metadata, formatter, options, context, targets );
}
else if ( tableInformation.isPhysicalTable() ) {
tablesInformation.addTableInformation( tableInformation );
migrateTable( table, tableInformation, dialect, metadata, formatter, options,
sqlStringGenerationContext, targets );
context, targets );
}
}
}
@ -91,9 +91,9 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
final TableInformation tableInformation = tablesInformation.getTableInformation( table );
if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
applyIndexes( table, tableInformation, dialect, metadata, formatter, options,
sqlStringGenerationContext, targets );
context, targets );
applyUniqueKeys( table, tableInformation, dialect, metadata, formatter, options,
sqlStringGenerationContext, targets );
context, targets );
}
}
}

View File

@ -27,7 +27,6 @@ import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.boot.model.relational.internal.SqlStringGenerationContextImpl;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
@ -35,9 +34,6 @@ import org.hibernate.engine.jdbc.internal.FormatStyle;
import org.hibernate.engine.jdbc.internal.Formatter;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.Index;
import org.hibernate.mapping.Table;
@ -64,13 +60,17 @@ import org.hibernate.tool.schema.spi.SqlScriptCommandExtractor;
import org.hibernate.tool.schema.spi.TargetDescriptor;
import static org.hibernate.cfg.AvailableSettings.HBM2DDL_CHARSET_NAME;
import static org.hibernate.cfg.AvailableSettings.HBM2DDL_IMPORT_FILES;
import static org.hibernate.cfg.AvailableSettings.HBM2DDL_LOAD_SCRIPT_SOURCE;
import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE;
import static org.hibernate.internal.util.StringHelper.isNotEmpty;
import static org.hibernate.internal.util.collections.CollectionHelper.setOfSize;
import static org.hibernate.internal.util.config.ConfigurationHelper.getString;
import static org.hibernate.tool.schema.internal.Helper.interpretFormattingEnabled;
import static org.hibernate.tool.schema.internal.Helper.interpretScriptSourceSetting;
/**
* This is functionally nothing more than the creation script from the older SchemaExport class (plus some
* additional stuff in the script).
* Basic implementation of {@link SchemaCreator}.
*
* @author Steve Ebersole
*/
@ -113,19 +113,18 @@ public class SchemaCreatorImpl implements SchemaCreator {
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor,
TargetDescriptor targetDescriptor) {
if ( targetDescriptor.getTargetTypes().isEmpty() ) {
return;
}
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final GenerationTarget[] targets = tool.buildGenerationTargets(
targetDescriptor,
jdbcContext,
options.getConfigurationValues(),
true
if ( !targetDescriptor.getTargetTypes().isEmpty() ) {
final Map<String, Object> configuration = options.getConfigurationValues();
final JdbcContext jdbcContext = tool.resolveJdbcContext( configuration );
doCreation(
metadata,
jdbcContext.getDialect(),
options,
contributableInclusionFilter,
sourceDescriptor,
tool.buildGenerationTargets( targetDescriptor, jdbcContext, configuration, true )
);
doCreation( metadata, jdbcContext.getDialect(), options, contributableInclusionFilter, sourceDescriptor, targets );
}
}
@Internal
@ -162,34 +161,38 @@ public class SchemaCreatorImpl implements SchemaCreator {
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor,
GenerationTarget... targets) {
final SqlScriptCommandExtractor commandExtractor = tool.getServiceRegistry().getService( SqlScriptCommandExtractor.class );
final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() );
final SqlScriptCommandExtractor commandExtractor = getCommandExtractor();
final boolean format = interpretFormattingEnabled( options.getConfigurationValues() );
final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter();
switch ( sourceDescriptor.getSourceType() ) {
case SCRIPT: {
case SCRIPT:
createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
break;
}
case METADATA: {
case METADATA:
createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
break;
}
case METADATA_THEN_SCRIPT: {
case METADATA_THEN_SCRIPT:
createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
break;
}
case SCRIPT_THEN_METADATA: {
case SCRIPT_THEN_METADATA:
createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
}
break;
}
applyImportSources( options, commandExtractor, format, dialect, targets );
}
private SqlScriptCommandExtractor getCommandExtractor() {
return tool.getServiceRegistry().getService( SqlScriptCommandExtractor.class );
}
private ClassLoaderService getClassLoaderService() {
return tool.getServiceRegistry().getService( ClassLoaderService.class );
}
public void createFromScript(
ScriptSourceInput scriptSourceInput,
SqlScriptCommandExtractor commandExtractor,
@ -200,9 +203,8 @@ public class SchemaCreatorImpl implements SchemaCreator {
final List<String> commands = scriptSourceInput.extract(
reader -> commandExtractor.extractCommands( reader, dialect )
);
for ( int i = 0; i < commands.size(); i++ ) {
applySqlString( commands.get( i ), formatter, options, targets );
for ( String command : commands ) {
applySqlString( command, formatter, options, targets );
}
}
@ -223,6 +225,15 @@ public class SchemaCreatorImpl implements SchemaCreator {
);
}
private static SqlStringGenerationContext createSqlStringGenerationContext(ExecutionOptions options, Metadata metadata) {
final Database database = metadata.getDatabase();
return SqlStringGenerationContextImpl.fromConfigurationMap(
database.getJdbcEnvironment(),
database,
options.getConfigurationValues()
);
}
@Internal
public void createFromMetadata(
Metadata metadata,
@ -231,38 +242,292 @@ public class SchemaCreatorImpl implements SchemaCreator {
Dialect dialect,
Formatter formatter,
GenerationTarget... targets) {
boolean tryToCreateCatalogs = false;
boolean tryToCreateSchemas = false;
if ( options.shouldManageNamespaces() ) {
if ( dialect.canCreateSchema() ) {
tryToCreateSchemas = true;
final SqlStringGenerationContext context = createSqlStringGenerationContext( options, metadata );
final Set<String> exportIdentifiers = setOfSize( 50 );
createSchemasAndCatalogs( metadata, options, dialect, formatter, context, targets );
// next, create all UDTs
createUserDefinedTypes( metadata, options, dialect, formatter, context, targets );
// next, create all "before table" auxiliary objects
createAuxiliaryObjectsBeforeTables( metadata, options, dialect, formatter, context, exportIdentifiers, targets );
// then, create all schema objects (tables, sequences, constraints, etc) in each schema
createSequencesTablesConstraints(
metadata,
options,
contributableInclusionMatcher,
dialect,
formatter,
context,
exportIdentifiers,
targets
);
// foreign keys must be created after all tables of all namespaces for cross-namespace constraints (see HHH-10420)
createForeignKeys( metadata, options, contributableInclusionMatcher, dialect, formatter, context, targets );
// next, create all "after table" auxiliary objects
createAuxiliaryObjectsAfterTables( metadata, options, dialect, formatter, context, exportIdentifiers, targets );
// and finally add all init commands
executeInitCommands(metadata, options, formatter, targets);
}
if ( dialect.canCreateCatalog() ) {
tryToCreateCatalogs = true;
private static void executeInitCommands(Metadata metadata, ExecutionOptions options, Formatter formatter, GenerationTarget[] targets) {
for ( InitCommand initCommand : metadata.getDatabase().getInitCommands() ) {
// todo: this should alo probably use the DML formatter...
applySqlStrings( initCommand.getInitCommands(), formatter, options, targets);
}
}
final Database database = metadata.getDatabase();
final JdbcEnvironment jdbcEnvironment = database.getJdbcEnvironment();
SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.fromConfigurationMap(
jdbcEnvironment, database, options.getConfigurationValues() );
private static void createAuxiliaryObjectsAfterTables(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
Set<String> exportIdentifiers,
GenerationTarget[] targets) {
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : metadata.getDatabase().getAuxiliaryDatabaseObjects() ) {
if ( auxiliaryDatabaseObject.appliesToDialect( dialect )
&& !auxiliaryDatabaseObject.beforeTablesOnCreation() ) {
checkExportIdentifier( auxiliaryDatabaseObject, exportIdentifiers );
applySqlStrings(
dialect.getAuxiliaryDatabaseObjectExporter()
.getSqlCreateStrings( auxiliaryDatabaseObject, metadata, context ),
formatter,
options,
targets
);
}
}
}
final Set<String> exportIdentifiers = CollectionHelper.setOfSize( 50 );
private static void createForeignKeys(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionMatcher,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
GenerationTarget[] targets) {
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
// foreign keys must be created after unique keys for numerous DBs (see HHH-8390)
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
for ( Table table : namespace.getTables() ) {
if ( options.getSchemaFilter().includeTable( table )
&& contributableInclusionMatcher.matches( table ) ) {
// foreign keys
for ( ForeignKey foreignKey : table.getForeignKeys().values() ) {
applySqlStrings(
dialect.getForeignKeyExporter().getSqlCreateStrings( foreignKey, metadata, context ),
formatter,
options,
targets
);
}
}
}
}
}
}
private static void createSequencesTablesConstraints(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionMatcher,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
Set<String> exportIdentifiers,
GenerationTarget[] targets) {
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
// sequences
createSequences(
metadata,
options,
contributableInclusionMatcher,
dialect,
formatter,
context,
exportIdentifiers,
targets,
namespace
);
// tables
createTables(
metadata,
options,
contributableInclusionMatcher,
dialect,
formatter,
context,
exportIdentifiers,
targets,
namespace
);
createTableConstraints(
metadata,
options,
contributableInclusionMatcher,
dialect,
formatter,
context,
exportIdentifiers,
targets,
namespace
);
}
}
}
private static void createTableConstraints(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionMatcher,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
Set<String> exportIdentifiers,
GenerationTarget[] targets,
Namespace namespace) {
for ( Table table : namespace.getTables() ) {
if ( table.isPhysicalTable()
&& options.getSchemaFilter().includeTable( table )
&& contributableInclusionMatcher.matches( table ) ) {
// indexes
for ( Index index : table.getIndexes().values() ) {
checkExportIdentifier( index, exportIdentifiers );
applySqlStrings(
dialect.getIndexExporter().getSqlCreateStrings( index, metadata, context ),
formatter,
options,
targets
);
}
// unique keys
for ( UniqueKey uniqueKey : table.getUniqueKeys().values() ) {
checkExportIdentifier( uniqueKey, exportIdentifiers );
applySqlStrings(
dialect.getUniqueKeyExporter().getSqlCreateStrings( uniqueKey, metadata, context ),
formatter,
options,
targets
);
}
}
}
}
private static void createTables(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionMatcher,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
Set<String> exportIdentifiers,
GenerationTarget[] targets,
Namespace namespace) {
for ( Table table : namespace.getTables() ) {
if ( table.isPhysicalTable()
&& options.getSchemaFilter().includeTable( table )
&& contributableInclusionMatcher.matches( table ) ) {
checkExportIdentifier( table, exportIdentifiers );
applySqlStrings(
dialect.getTableExporter().getSqlCreateStrings( table, metadata, context ),
formatter,
options,
targets
);
}
}
}
private static void createSequences(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionMatcher,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
Set<String> exportIdentifiers,
GenerationTarget[] targets,
Namespace namespace) {
for ( Sequence sequence : namespace.getSequences() ) {
if ( options.getSchemaFilter().includeSequence( sequence )
&& contributableInclusionMatcher.matches( sequence ) ) {
checkExportIdentifier( sequence, exportIdentifiers);
applySqlStrings(
dialect.getSequenceExporter().getSqlCreateStrings( sequence, metadata, context ),
formatter,
options,
targets
);
}
}
}
private static void createAuxiliaryObjectsBeforeTables(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
Set<String> exportIdentifiers,
GenerationTarget[] targets) {
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : metadata.getDatabase().getAuxiliaryDatabaseObjects() ) {
if ( auxiliaryDatabaseObject.beforeTablesOnCreation()
&& auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
checkExportIdentifier( auxiliaryDatabaseObject, exportIdentifiers );
applySqlStrings(
dialect.getAuxiliaryDatabaseObjectExporter()
.getSqlCreateStrings( auxiliaryDatabaseObject, metadata, context ),
formatter,
options,
targets
);
}
}
}
private static void createUserDefinedTypes(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
GenerationTarget[] targets) {
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
for ( UserDefinedType userDefinedType : namespace.getDependencyOrderedUserDefinedTypes() ) {
applySqlStrings(
dialect.getUserDefinedTypeExporter()
.getSqlCreateStrings( userDefinedType, metadata, context ),
formatter,
options,
targets
);
}
}
}
}
private static void createSchemasAndCatalogs(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
GenerationTarget[] targets) {
final boolean tryToCreateCatalogs = options.shouldManageNamespaces() && dialect.canCreateCatalog();
final boolean tryToCreateSchemas = options.shouldManageNamespaces() && dialect.canCreateSchema();
// first, create each catalog/schema
if ( tryToCreateCatalogs || tryToCreateSchemas ) {
Set<Identifier> exportedCatalogs = new HashSet<>();
for ( Namespace namespace : database.getNamespaces() ) {
if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
if ( tryToCreateCatalogs ) {
final Identifier catalogLogicalName = namespace.getName().getCatalog();
final Identifier catalogPhysicalName =
sqlStringGenerationContext.catalogWithDefault( namespace.getPhysicalName().getCatalog() );
context.catalogWithDefault( namespace.getPhysicalName().getCatalog() );
if ( catalogPhysicalName != null && !exportedCatalogs.contains( catalogLogicalName ) ) {
applySqlStrings(
dialect.getCreateCatalogCommand( catalogPhysicalName.render( dialect ) ),
@ -275,7 +540,7 @@ public class SchemaCreatorImpl implements SchemaCreator {
}
final Identifier schemaPhysicalName =
sqlStringGenerationContext.schemaWithDefault( namespace.getPhysicalName().getSchema() );
context.schemaWithDefault( namespace.getPhysicalName().getSchema() );
if ( tryToCreateSchemas && schemaPhysicalName != null ) {
applySqlStrings(
dialect.getCreateSchemaCommand( schemaPhysicalName.render( dialect ) ),
@ -286,201 +551,6 @@ public class SchemaCreatorImpl implements SchemaCreator {
}
}
}
// next, create all UDTs
for ( Namespace namespace : database.getNamespaces() ) {
if ( !options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
for ( UserDefinedType userDefinedType : namespace.getDependencyOrderedUserDefinedTypes() ) {
applySqlStrings(
dialect.getUserDefinedTypeExporter().getSqlCreateStrings(
userDefinedType,
metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
}
}
// next, create all "before table" auxiliary objects
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) {
if ( !auxiliaryDatabaseObject.beforeTablesOnCreation() ) {
continue;
}
if ( auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
checkExportIdentifier( auxiliaryDatabaseObject, exportIdentifiers );
applySqlStrings(
dialect.getAuxiliaryDatabaseObjectExporter().getSqlCreateStrings(
auxiliaryDatabaseObject,
metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
}
}
// then, create all schema objects (tables, sequences, constraints, etc) in each schema
for ( Namespace namespace : database.getNamespaces() ) {
if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
// sequences
for ( Sequence sequence : namespace.getSequences() ) {
if ( ! options.getSchemaFilter().includeSequence( sequence ) ) {
continue;
}
if ( ! contributableInclusionMatcher.matches( sequence ) ) {
continue;
}
checkExportIdentifier( sequence, exportIdentifiers );
applySqlStrings(
dialect.getSequenceExporter().getSqlCreateStrings(
sequence,
metadata,
sqlStringGenerationContext
),
// dialect.getCreateSequenceStrings(
// jdbcEnvironment.getQualifiedObjectNameFormatter().format( sequence.getName(), dialect ),
// sequence.getInitialValue(),
// sequence.getIncrementSize()
// ),
formatter,
options,
targets
);
}
// tables
for ( Table table : namespace.getTables() ) {
if ( !table.isPhysicalTable() ){
continue;
}
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionMatcher.matches( table ) ) {
continue;
}
checkExportIdentifier( table, exportIdentifiers );
applySqlStrings(
dialect.getTableExporter().getSqlCreateStrings( table, metadata, sqlStringGenerationContext ),
formatter,
options,
targets
);
}
for ( Table table : namespace.getTables() ) {
if ( !table.isPhysicalTable() ){
continue;
}
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionMatcher.matches( table ) ) {
continue;
}
// indexes
for ( Index index : table.getIndexes().values() ) {
checkExportIdentifier( index, exportIdentifiers );
applySqlStrings(
dialect.getIndexExporter().getSqlCreateStrings( index, metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
}
// unique keys
for ( UniqueKey uniqueKey : table.getUniqueKeys().values() ) {
checkExportIdentifier( uniqueKey, exportIdentifiers );
applySqlStrings(
dialect.getUniqueKeyExporter().getSqlCreateStrings( uniqueKey, metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
}
}
}
//NOTE : Foreign keys must be created *after* all tables of all namespaces for cross namespace fks. see HHH-10420
for ( Namespace namespace : database.getNamespaces() ) {
// NOTE : Foreign keys must be created *after* unique keys for numerous DBs. See HHH-8390
if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
for ( Table table : namespace.getTables() ) {
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionMatcher.matches( table ) ) {
continue;
}
// foreign keys
for ( ForeignKey foreignKey : table.getForeignKeys().values() ) {
applySqlStrings(
dialect.getForeignKeyExporter().getSqlCreateStrings( foreignKey, metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
}
}
}
// next, create all "after table" auxiliary objects
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) {
if ( auxiliaryDatabaseObject.appliesToDialect( dialect )
&& !auxiliaryDatabaseObject.beforeTablesOnCreation() ) {
checkExportIdentifier( auxiliaryDatabaseObject, exportIdentifiers );
applySqlStrings(
dialect.getAuxiliaryDatabaseObjectExporter().getSqlCreateStrings( auxiliaryDatabaseObject, metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
}
}
// and finally add all init commands
for ( InitCommand initCommand : database.getInitCommands() ) {
// todo: this should alo probably use the DML formatter...
applySqlStrings( initCommand.getInitCommands(), formatter, options, targets );
}
}
@ -497,26 +567,21 @@ public class SchemaCreatorImpl implements SchemaCreator {
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( sqlStrings == null ) {
return;
}
if ( sqlStrings != null ) {
for ( String sqlString : sqlStrings ) {
applySqlString( sqlString, formatter, options, targets );
}
}
}
private static void applySqlString(
String sqlString,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( StringHelper.isEmpty( sqlString ) ) {
return;
}
if ( isNotEmpty( sqlString ) ) {
try {
String sqlStringFormatted = formatter.format( sqlString );
final String sqlStringFormatted = formatter.format( sqlString );
for ( GenerationTarget target : targets ) {
target.accept( sqlStringFormatted );
}
@ -525,6 +590,7 @@ public class SchemaCreatorImpl implements SchemaCreator {
options.getExceptionHandler().handleException( e );
}
}
}
private void applyImportSources(
ExecutionOptions options,
@ -532,54 +598,121 @@ public class SchemaCreatorImpl implements SchemaCreator {
boolean format,
Dialect dialect,
GenerationTarget... targets) {
final ServiceRegistry serviceRegistry = tool.getServiceRegistry();
final ClassLoaderService classLoaderService = serviceRegistry.getService( ClassLoaderService.class );
// I have had problems applying the formatter to these imported statements.
// and legacy SchemaExport did not format them, so doing same here
//final Formatter formatter = format ? DDLFormatterImpl.INSTANCE : FormatStyle.NONE.getFormatter();
final Formatter formatter = FormatStyle.NONE.getFormatter();
final Formatter formatter = getImportScriptFormatter(format);
Object importScriptSetting = options.getConfigurationValues().get( HBM2DDL_LOAD_SCRIPT_SOURCE );
if ( importScriptSetting == null ) {
importScriptSetting = options.getConfigurationValues().get( JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE );
boolean hasDefaultImportFileScriptBeenExecuted = applyImportScript(
options,
commandExtractor,
dialect,
formatter,
targets
);
applyImportFiles(
options,
commandExtractor,
dialect,
formatter,
hasDefaultImportFileScriptBeenExecuted ? "" : DEFAULT_IMPORT_FILE,
targets
);
}
String charsetName = (String) options.getConfigurationValues().get( HBM2DDL_CHARSET_NAME );
boolean hasDefaultImportFileScriptBeenExecuted = false;
/**
 * Determine the {@link Formatter} used for commands read from an import script.
 * <p>
 * In principle, we should format the commands in the import script if the
 * {@code format} parameter is {@code true}, and since it's supposed to be
 * a list of DML statements, we should use the {@linkplain FormatStyle#BASIC
 * basic DML formatter} to do that. However, in practice we don't really know
 * much about what this file contains, and we have never formatted it in the
 * past, so there's no compelling reason to start now. In fact, if we have
 * lists of many {@code insert} statements on the same table, which is what
 * we typically expect, it's probably better to not format.
 *
 * @param format whether formatting was requested (currently ignored — see above)
 * @return the no-op formatter, {@code FormatStyle.NONE}, unconditionally
 */
private static Formatter getImportScriptFormatter(boolean format) {
	// intentionally disabled, kept for reference:
	// return format ? FormatStyle.BASIC.getFormatter() : FormatStyle.NONE.getFormatter();
	return FormatStyle.NONE.getFormatter();
}
/**
* Handles import scripts specified using
* {@link org.hibernate.cfg.AvailableSettings#HBM2DDL_IMPORT_FILES}.
*
* @return {@code true} if the legacy {@linkplain #DEFAULT_IMPORT_FILE default import file}
* was one of the listed imported files that were executed
*/
private boolean applyImportScript(
ExecutionOptions options,
SqlScriptCommandExtractor commandExtractor,
Dialect dialect,
Formatter formatter,
GenerationTarget[] targets) {
final Object importScriptSetting = getImportScriptSetting( options );
if ( importScriptSetting != null ) {
final ScriptSourceInput importScriptInput = interpretScriptSourceSetting( importScriptSetting, classLoaderService, charsetName );
final URL defaultImportFileUrl = classLoaderService.locateResource( DEFAULT_IMPORT_FILE );
if ( defaultImportFileUrl != null && importScriptInput.containsScript( defaultImportFileUrl ) ) {
hasDefaultImportFileScriptBeenExecuted = true;
}
final List<String> commands = importScriptInput.extract(
reader -> commandExtractor.extractCommands( reader, dialect )
final ScriptSourceInput importScriptInput =
interpretScriptSourceSetting( importScriptSetting, getClassLoaderService(), getCharsetName( options ) );
applyImportScript(
options,
commandExtractor,
dialect,
importScriptInput,
formatter,
targets
);
for ( int i = 0; i < commands.size(); i++ ) {
applySqlString( commands.get( i ), formatter, options, targets );
return containsDefaultImportFile( importScriptInput );
}
else {
return false;
}
}
final String importFiles = ConfigurationHelper.getString(
AvailableSettings.HBM2DDL_IMPORT_FILES,
options.getConfigurationValues(),
hasDefaultImportFileScriptBeenExecuted ? "" : DEFAULT_IMPORT_FILE
);
/**
 * Does the given script input include the {@linkplain #DEFAULT_IMPORT_FILE
 * default import file} resource?
 *
 * @return {@code true} if the default import file resource exists on the
 *         classpath and is one of the scripts backing the given input
 */
private boolean containsDefaultImportFile(ScriptSourceInput importScriptInput) {
	final URL url = getClassLoaderService().locateResource( DEFAULT_IMPORT_FILE );
	if ( url == null ) {
		// no such resource on the classpath, so the input cannot contain it
		return false;
	}
	return importScriptInput.containsScript( url );
}
for ( String currentFile : importFiles.split( "," ) ) {
/**
* Handles import scripts specified using
* {@link org.hibernate.cfg.AvailableSettings#JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE}.
*/
private void applyImportFiles(
ExecutionOptions options,
SqlScriptCommandExtractor commandExtractor,
Dialect dialect,
Formatter formatter,
String defaultImportFile,
GenerationTarget[] targets) {
final String[] importFiles =
getString( HBM2DDL_IMPORT_FILES, options.getConfigurationValues(), defaultImportFile )
.split( "," );
final String charsetName = getCharsetName( options );
final ClassLoaderService classLoaderService = getClassLoaderService();
for ( String currentFile : importFiles ) {
final String resourceName = currentFile.trim();
if ( resourceName.isEmpty() ) {
//skip empty resource names
continue;
if ( !resourceName.isEmpty() ) { //skip empty resource names
applyImportScript(
options,
commandExtractor,
dialect,
interpretLegacyImportScriptSetting( resourceName, classLoaderService, charsetName ),
formatter,
targets
);
}
final ScriptSourceInput importScriptInput = interpretLegacyImportScriptSetting( resourceName, classLoaderService, charsetName );
}
}
private static void applyImportScript(
ExecutionOptions options,
SqlScriptCommandExtractor commandExtractor,
Dialect dialect,
ScriptSourceInput importScriptInput,
Formatter formatter,
GenerationTarget[] targets) {
final List<String> commands = importScriptInput.extract(
reader -> commandExtractor.extractCommands( reader, dialect )
);
for ( int i = 0; i < commands.size(); i++ ) {
applySqlString( commands.get( i ), formatter, options, targets );
}
for ( String command : commands ) {
applySqlString( command, formatter, options, targets );
}
}
@ -589,18 +722,35 @@ public class SchemaCreatorImpl implements SchemaCreator {
String charsetName) {
try {
final URL resourceUrl = classLoaderService.locateResource( resourceName );
if ( resourceUrl == null ) {
return ScriptSourceInputNonExistentImpl.INSTANCE;
}
else {
return new ScriptSourceInputFromUrl( resourceUrl, charsetName );
}
return resourceUrl == null
? ScriptSourceInputNonExistentImpl.INSTANCE
: new ScriptSourceInputFromUrl( resourceUrl, charsetName );
}
catch (Exception e) {
throw new SchemaManagementException( "Error resolving legacy import resource : " + resourceName, e );
}
}
/**
 * The configured charset name for reading import scripts, or {@code null}
 * if the setting was not specified.
 *
 * @see org.hibernate.cfg.AvailableSettings#HBM2DDL_CHARSET_NAME
 */
private static String getCharsetName(ExecutionOptions options) {
	final Object charsetName = options.getConfigurationValues().get( HBM2DDL_CHARSET_NAME );
	return (String) charsetName;
}
/**
 * Look up the load-script setting, preferring the legacy Hibernate key and
 * falling back to the Jakarta key only when the legacy one is absent.
 *
 * @see org.hibernate.cfg.AvailableSettings#HBM2DDL_LOAD_SCRIPT_SOURCE
 * @see org.hibernate.cfg.AvailableSettings#JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE
 *
 * @return a {@link java.io.Reader} or a string URL, or {@code null} if neither setting is present
 */
private static Object getImportScriptSetting(ExecutionOptions options) {
	final Map<String, Object> configuration = options.getConfigurationValues();
	final Object legacySetting = configuration.get( HBM2DDL_LOAD_SCRIPT_SOURCE );
	if ( legacySetting != null ) {
		return legacySetting;
	}
	return configuration.get( JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE );
}
/**
* For testing...
*
@ -650,7 +800,9 @@ public class SchemaCreatorImpl implements SchemaCreator {
Metadata metadata,
final boolean manageNamespaces,
GenerationTarget... targets) {
final ServiceRegistry serviceRegistry = ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry();
final ServiceRegistry serviceRegistry =
( (MetadataImplementor) metadata ).getMetadataBuildingOptions()
.getServiceRegistry();
doCreation(
metadata,
serviceRegistry,

View File

@ -37,8 +37,6 @@ import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.UserDefinedType;
@ -65,9 +63,12 @@ import org.hibernate.tool.schema.spi.TargetDescriptor;
import org.jboss.logging.Logger;
import static org.hibernate.internal.util.StringHelper.isNotEmpty;
import static org.hibernate.internal.util.collections.CollectionHelper.setOfSize;
import static org.hibernate.tool.schema.internal.Helper.interpretFormattingEnabled;
/**
* This is functionally nothing more than the creation script from the older SchemaExport class (plus some
* additional stuff in the script).
* Basic implementation of {@link SchemaDropper}.
*
* @author Steve Ebersole
*/
@ -105,18 +106,21 @@ public class SchemaDropperImpl implements SchemaDropper {
public void doDrop(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
ContributableMatcher inclusionFilter,
SourceDescriptor sourceDescriptor,
TargetDescriptor targetDescriptor) {
if ( targetDescriptor.getTargetTypes().isEmpty() ) {
return;
if ( !targetDescriptor.getTargetTypes().isEmpty() ) {
final Map<String, Object> configuration = options.getConfigurationValues();
final JdbcContext jdbcContext = tool.resolveJdbcContext( configuration );
doDrop(
metadata,
options,
inclusionFilter,
jdbcContext.getDialect(),
sourceDescriptor,
tool.buildGenerationTargets( targetDescriptor, jdbcContext, configuration, true )
);
}
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final GenerationTarget[] targets = tool.buildGenerationTargets( targetDescriptor, jdbcContext, options.getConfigurationValues(), true );
doDrop( metadata, options, contributableInclusionFilter, jdbcContext.getDialect(), sourceDescriptor, targets );
}
/**
@ -129,7 +133,7 @@ public class SchemaDropperImpl implements SchemaDropper {
Dialect dialect,
SourceDescriptor sourceDescriptor,
GenerationTarget... targets) {
doDrop( metadata, options, (contributed) -> true, dialect, sourceDescriptor, targets );
doDrop( metadata, options, contributed -> true, dialect, sourceDescriptor, targets );
}
/**
@ -139,7 +143,7 @@ public class SchemaDropperImpl implements SchemaDropper {
public void doDrop(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
ContributableMatcher inclusionFilter,
Dialect dialect,
SourceDescriptor sourceDescriptor,
GenerationTarget... targets) {
@ -148,7 +152,7 @@ public class SchemaDropperImpl implements SchemaDropper {
}
try {
performDrop( metadata, options, contributableInclusionFilter, dialect, sourceDescriptor, targets );
performDrop( metadata, options, inclusionFilter, dialect, sourceDescriptor, targets );
}
finally {
for ( GenerationTarget target : targets ) {
@ -165,30 +169,36 @@ public class SchemaDropperImpl implements SchemaDropper {
private void performDrop(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
ContributableMatcher inclusionFilter,
Dialect dialect,
SourceDescriptor sourceDescriptor,
GenerationTarget... targets) {
final SqlScriptCommandExtractor commandExtractor = tool.getServiceRegistry().getService( SqlScriptCommandExtractor.class );
final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() );
final SqlScriptCommandExtractor commandExtractor = getCommandExtractor();
final boolean format = interpretFormattingEnabled( options.getConfigurationValues() );
final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter();
if ( sourceDescriptor.getSourceType() == SourceType.SCRIPT ) {
switch ( sourceDescriptor.getSourceType() ) {
case SCRIPT:
dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
}
else if ( sourceDescriptor.getSourceType() == SourceType.METADATA ) {
dropFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
}
else if ( sourceDescriptor.getSourceType() == SourceType.METADATA_THEN_SCRIPT ) {
dropFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
break;
case METADATA:
dropFromMetadata( metadata, options, inclusionFilter, dialect, formatter, targets );
break;
case METADATA_THEN_SCRIPT:
dropFromMetadata( metadata, options, inclusionFilter, dialect, formatter, targets );
dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
}
else {
break;
case SCRIPT_THEN_METADATA:
dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
dropFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
dropFromMetadata( metadata, options, inclusionFilter, dialect, formatter, targets );
break;
}
}
/**
 * Obtain the {@link SqlScriptCommandExtractor} service used to split script
 * sources into individual SQL commands.
 */
private SqlScriptCommandExtractor getCommandExtractor() {
	return tool.getServiceRegistry().getService(SqlScriptCommandExtractor.class);
}
private void dropFromScript(
ScriptSourceInput scriptSourceInput,
SqlScriptCommandExtractor commandExtractor,
@ -199,175 +209,245 @@ public class SchemaDropperImpl implements SchemaDropper {
final List<String> commands = scriptSourceInput.extract(
reader -> commandExtractor.extractCommands( reader, dialect )
);
for ( int i = 0; i < commands.size(); i++ ) {
applySqlString( commands.get( i ), formatter, options, targets );
for ( String command : commands ) {
applySqlString( command, formatter, options, targets );
}
}
private static SqlStringGenerationContext createSqlStringGenerationContext(ExecutionOptions options, Metadata metadata) {
final Database database = metadata.getDatabase();
return SqlStringGenerationContextImpl.fromConfigurationMap(
database.getJdbcEnvironment(),
database,
options.getConfigurationValues()
);
}
private void dropFromMetadata(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
ContributableMatcher inclusionFilter,
Dialect dialect,
Formatter formatter,
GenerationTarget... targets) {
final Database database = metadata.getDatabase();
SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.fromConfigurationMap(
metadata.getDatabase().getJdbcEnvironment(), database, options.getConfigurationValues());
boolean tryToDropCatalogs = false;
boolean tryToDropSchemas = false;
if ( options.shouldManageNamespaces() ) {
if ( dialect.canCreateSchema() ) {
tryToDropSchemas = true;
}
if ( dialect.canCreateCatalog() ) {
tryToDropCatalogs = true;
}
}
final Set<String> exportIdentifiers = CollectionHelper.setOfSize( 50 );
// NOTE : init commands are irrelevant for dropping...
final SqlStringGenerationContext context = createSqlStringGenerationContext( options, metadata );
// Reverse the list on drop to retain possible dependencies
final Collection<AuxiliaryDatabaseObject> reversedAuxiliaryDatabaseObjects = reverse( database.getAuxiliaryDatabaseObjects() );
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : reversedAuxiliaryDatabaseObjects ) {
if ( auxiliaryDatabaseObject.beforeTablesOnCreation() ) {
continue;
}
if ( !auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
continue;
}
applySqlStrings(
dialect.getAuxiliaryDatabaseObjectExporter().getSqlDropStrings( auxiliaryDatabaseObject, metadata,
sqlStringGenerationContext
),
formatter,
dropAuxiliaryObjectsBeforeTables( metadata, options, dialect, formatter, context, targets );
dropConstraintsTablesSequences(
metadata,
options,
inclusionFilter,
dialect,
formatter,
context,
targets
);
dropAuxiliaryObjectsAfterTables( metadata, options, dialect, formatter, context, targets );
dropUserDefinedTypes( metadata, options, dialect, formatter, context, targets );
dropSchemasAndCatalogs( metadata, options, dialect, formatter, targets );
}
for ( Namespace namespace : database.getNamespaces() ) {
if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
private void dropConstraintsTablesSequences(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher inclusionFilter,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
GenerationTarget[] targets) {
final Set<String> exportIdentifiers = setOfSize( 50 );
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
// we need to drop all constraints/indexes prior to dropping the tables
applyConstraintDropping( namespace, metadata, formatter, options, sqlStringGenerationContext,
contributableInclusionFilter, targets );
applyConstraintDropping(
namespace,
metadata,
formatter,
options,
context,
inclusionFilter,
targets
);
// now it's safe to drop the tables
for ( Table table : namespace.getTables() ) {
if ( ! table.isPhysicalTable() ) {
continue;
}
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionFilter.matches( table ) ) {
continue;
}
checkExportIdentifier( table, exportIdentifiers );
dropTables(
metadata,
options,
inclusionFilter,
dialect,
formatter,
exportIdentifiers,
context,
namespace,
targets
);
applySqlStrings( dialect.getTableExporter().getSqlDropStrings( table, metadata,
sqlStringGenerationContext
), formatter, options,targets );
dropSequences(
metadata,
options,
inclusionFilter,
dialect,
formatter,
exportIdentifiers,
context,
namespace,
targets
);
}
for ( Sequence sequence : namespace.getSequences() ) {
if ( ! options.getSchemaFilter().includeSequence( sequence ) ) {
continue;
}
if ( ! contributableInclusionFilter.matches( sequence ) ) {
continue;
}
checkExportIdentifier( sequence, exportIdentifiers );
applySqlStrings( dialect.getSequenceExporter().getSqlDropStrings( sequence, metadata,
sqlStringGenerationContext
), formatter, options, targets );
}
}
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : reversedAuxiliaryDatabaseObjects ) {
if ( !auxiliaryDatabaseObject.beforeTablesOnCreation() ) {
continue;
}
if ( !auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
continue;
}
private static void dropAuxiliaryObjectsBeforeTables(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
GenerationTarget[] targets) {
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject :
reverse( metadata.getDatabase().getAuxiliaryDatabaseObjects() ) ) {
if ( !auxiliaryDatabaseObject.beforeTablesOnCreation()
&& auxiliaryDatabaseObject.appliesToDialect(dialect) ) {
applySqlStrings(
auxiliaryDatabaseObject.sqlDropStrings( sqlStringGenerationContext ),
dialect.getAuxiliaryDatabaseObjectExporter()
.getSqlDropStrings( auxiliaryDatabaseObject, metadata, context ),
formatter,
options,
targets
);
}
for ( Namespace namespace : database.getNamespaces() ) {
if ( !options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
}
/**
 * Emit drop statements, after the tables are dropped, for auxiliary database
 * objects which were created <em>before</em> the tables (note the inverted
 * ordering on drop). Objects are visited in reverse registration order, and
 * only those applicable to the given dialect are dropped.
 */
private static void dropAuxiliaryObjectsAfterTables(
		Metadata metadata,
		ExecutionOptions options,
		Dialect dialect,
		Formatter formatter,
		SqlStringGenerationContext context,
		GenerationTarget[] targets) {
	final Collection<AuxiliaryDatabaseObject> reversedObjects =
			reverse( metadata.getDatabase().getAuxiliaryDatabaseObjects() );
	for ( AuxiliaryDatabaseObject auxiliaryObject : reversedObjects ) {
		if ( !auxiliaryObject.beforeTablesOnCreation() ) {
			continue;
		}
		if ( !auxiliaryObject.appliesToDialect( dialect ) ) {
			continue;
		}
		applySqlStrings( auxiliaryObject.sqlDropStrings( context ), formatter, options, targets );
	}
}
/**
 * Emit drop statements for all sequences of the given namespace which pass
 * both the schema filter and the contributable inclusion filter.
 */
private static void dropSequences(
		Metadata metadata,
		ExecutionOptions options,
		ContributableMatcher inclusionFilter,
		Dialect dialect,
		Formatter formatter,
		Set<String> exportIdentifiers,
		SqlStringGenerationContext context,
		Namespace namespace,
		GenerationTarget[] targets) {
	for ( Sequence sequence : namespace.getSequences() ) {
		if ( !options.getSchemaFilter().includeSequence( sequence ) ) {
			continue;
		}
		if ( !inclusionFilter.matches( sequence ) ) {
			continue;
		}
		// fail fast on duplicate export identifiers
		checkExportIdentifier( sequence, exportIdentifiers );
		applySqlStrings(
				dialect.getSequenceExporter().getSqlDropStrings( sequence, metadata, context ),
				formatter,
				options,
				targets
		);
	}
}
/**
 * Emit drop statements for every physical table of the given namespace which
 * passes both the schema filter and the contributable inclusion filter.
 */
private static void dropTables(
		Metadata metadata,
		ExecutionOptions options,
		ContributableMatcher inclusionFilter,
		Dialect dialect,
		Formatter formatter,
		Set<String> exportIdentifiers,
		SqlStringGenerationContext context,
		Namespace namespace,
		GenerationTarget[] targets) {
	for ( Table table : namespace.getTables() ) {
		if ( !table.isPhysicalTable() ) {
			// skip non-physical (e.g. mapped-only) tables
			continue;
		}
		if ( !options.getSchemaFilter().includeTable( table ) ) {
			continue;
		}
		if ( !inclusionFilter.matches( table ) ) {
			continue;
		}
		// fail fast on duplicate export identifiers
		checkExportIdentifier( table, exportIdentifiers );
		applySqlStrings(
				dialect.getTableExporter().getSqlDropStrings( table, metadata, context ),
				formatter,
				options,
				targets
		);
	}
}
private static void dropUserDefinedTypes(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
SqlStringGenerationContext context,
GenerationTarget[] targets) {
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
final List<UserDefinedType> dependencyOrderedUserDefinedTypes = namespace.getDependencyOrderedUserDefinedTypes();
Collections.reverse( dependencyOrderedUserDefinedTypes );
for ( UserDefinedType userDefinedType : dependencyOrderedUserDefinedTypes ) {
applySqlStrings(
dialect.getUserDefinedTypeExporter()
.getSqlDropStrings( userDefinedType, metadata, sqlStringGenerationContext ),
.getSqlDropStrings( userDefinedType, metadata, context ),
formatter,
options,
targets
);
}
}
if ( tryToDropCatalogs || tryToDropSchemas ) {
Set<Identifier> exportedCatalogs = new HashSet<>();
for ( Namespace namespace : database.getNamespaces() ) {
if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
}
private static void dropSchemasAndCatalogs(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
GenerationTarget[] targets) {
boolean tryToDropCatalogs = options.shouldManageNamespaces() && dialect.canCreateCatalog();
boolean tryToDropSchemas = options.shouldManageNamespaces() && dialect.canCreateSchema();
if ( tryToDropCatalogs || tryToDropSchemas) {
final Set<Identifier> exportedCatalogs = new HashSet<>();
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
if ( tryToDropSchemas && namespace.getPhysicalName().getSchema() != null ) {
applySqlStrings(
dialect.getDropSchemaCommand(
namespace.getPhysicalName().getSchema().render( dialect )
),
formatter,
options,
targets
);
final String schemaName = namespace.getPhysicalName().getSchema().render( dialect );
applySqlStrings( dialect.getDropSchemaCommand( schemaName ), formatter, options, targets);
}
if ( tryToDropCatalogs ) {
if (tryToDropCatalogs) {
final Identifier catalogLogicalName = namespace.getName().getCatalog();
final Identifier catalogPhysicalName = namespace.getPhysicalName().getCatalog();
if ( catalogPhysicalName != null && !exportedCatalogs.contains( catalogLogicalName ) ) {
applySqlStrings(
dialect.getDropCatalogCommand(
catalogPhysicalName.render( dialect )
),
formatter,
options,
targets
);
final String catalogName = catalogPhysicalName.render( dialect );
applySqlStrings( dialect.getDropCatalogCommand( catalogName ), formatter, options, targets );
exportedCatalogs.add( catalogLogicalName );
}
}
}
}
}
}
private Collection<AuxiliaryDatabaseObject> reverse(Collection<AuxiliaryDatabaseObject> auxiliaryDatabaseObjects) {
private static Collection<AuxiliaryDatabaseObject> reverse(Collection<AuxiliaryDatabaseObject> auxiliaryDatabaseObjects) {
final List<AuxiliaryDatabaseObject> list = new ArrayList<>( auxiliaryDatabaseObjects );
Collections.reverse( list );
return list;
@ -378,31 +458,18 @@ public class SchemaDropperImpl implements SchemaDropper {
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
SqlStringGenerationContext sqlStringGenerationContext,
ContributableMatcher contributableInclusionFilter,
SqlStringGenerationContext context,
ContributableMatcher inclusionFilter,
GenerationTarget... targets) {
final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
if ( !dialect.dropConstraints() ) {
return;
}
if ( dialect.dropConstraints() ) {
for ( Table table : namespace.getTables() ) {
if ( !table.isPhysicalTable() ) {
continue;
}
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionFilter.matches( table ) ) {
continue;
}
if ( table.isPhysicalTable()
&& options.getSchemaFilter().includeTable( table )
&& inclusionFilter.matches( table ) ) {
for ( ForeignKey foreignKey : table.getForeignKeys().values() ) {
applySqlStrings(
dialect.getForeignKeyExporter().getSqlDropStrings( foreignKey, metadata,
sqlStringGenerationContext
),
dialect.getForeignKeyExporter().getSqlDropStrings( foreignKey, metadata, context ),
formatter,
options,
targets
@ -410,6 +477,8 @@ public class SchemaDropperImpl implements SchemaDropper {
}
}
}
}
}
private static void checkExportIdentifier(Exportable exportable, Set<String> exportIdentifiers) {
final String exportIdentifier = exportable.getExportIdentifier();
@ -424,25 +493,20 @@ public class SchemaDropperImpl implements SchemaDropper {
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( sqlStrings == null ) {
return;
}
if ( sqlStrings != null ) {
for ( String sqlString : sqlStrings ) {
applySqlString( sqlString, formatter, options, targets );
}
}
}
private static void applySqlString(
String sqlString,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( StringHelper.isEmpty( sqlString ) ) {
return;
}
String sqlStringFormatted = formatter.format( sqlString );
if ( isNotEmpty( sqlString ) ) {
final String sqlStringFormatted = formatter.format( sqlString );
for ( GenerationTarget target : targets ) {
try {
target.accept( sqlStringFormatted );
@ -452,59 +516,17 @@ public class SchemaDropperImpl implements SchemaDropper {
}
}
}
/**
 * For testing: generate the drop script as a list of commands without
 * executing anything against a database.
 *
 * @param metadata The metadata for which to generate the drop commands.
 * @param manageNamespaces Whether schemas/catalogs should also be dropped
 *        (exposed through {@code ExecutionOptions#shouldManageNamespaces()}).
 *
 * @return The generated drop commands, in execution order
 */
public List<String> generateDropCommands(Metadata metadata, final boolean manageNamespaces) {
	// journaling target records the commands in memory instead of executing them
	final JournalingGenerationTarget target = new JournalingGenerationTarget();
	final ServiceRegistry serviceRegistry = ( (MetadataImplementor) metadata ).getMetadataBuildingOptions()
			.getServiceRegistry();
	final Dialect dialect = serviceRegistry.getService( JdbcEnvironment.class ).getDialect();
	// minimal options: empty configuration, halt on the first exception
	final ExecutionOptions options = new ExecutionOptions() {
		@Override
		public boolean shouldManageNamespaces() {
			return manageNamespaces;
		}
		@Override
		public Map<String,Object> getConfigurationValues() {
			return Collections.emptyMap();
		}
		@Override
		public ExceptionHandler getExceptionHandler() {
			return ExceptionHandlerHaltImpl.INSTANCE;
		}
		@Override
		public SchemaFilter getSchemaFilter() {
			return schemaFilter;
		}
	};
	// every contributable is included; no formatting for the journaled script
	dropFromMetadata( metadata, options, (contributed) -> true, dialect, FormatStyle.NONE.getFormatter(), target );
	return target.commands;
}
@Override
public DelayedDropAction buildDelayedAction(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
ContributableMatcher inclusionFilter,
SourceDescriptor sourceDescriptor) {
final JournalingGenerationTarget target = new JournalingGenerationTarget();
final Dialect dialect = tool.getServiceRegistry().getService( JdbcEnvironment.class ).getDialect();
doDrop( metadata, options, contributableInclusionFilter, dialect, sourceDescriptor, target );
doDrop( metadata, options, inclusionFilter, dialect, sourceDescriptor, target );
return new DelayedDropActionImpl( target.commands, tool.getCustomDatabaseGenerationTarget() );
}
@ -512,7 +534,9 @@ public class SchemaDropperImpl implements SchemaDropper {
* For tests
*/
public void doDrop(Metadata metadata, boolean manageNamespaces, GenerationTarget... targets) {
final ServiceRegistry serviceRegistry = ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry();
final ServiceRegistry serviceRegistry =
( (MetadataImplementor) metadata ).getMetadataBuildingOptions()
.getServiceRegistry();
doDrop(
metadata,
serviceRegistry,
@ -535,7 +559,8 @@ public class SchemaDropperImpl implements SchemaDropper {
final JdbcContext jdbcContext = tool.resolveJdbcContext( settings );
targets = new GenerationTarget[] {
new GenerationTargetToDatabase(
serviceRegistry.getService( TransactionCoordinatorBuilder.class ).buildDdlTransactionIsolator( jdbcContext ),
serviceRegistry.getService( TransactionCoordinatorBuilder.class )
.buildDdlTransactionIsolator( jdbcContext ),
true
)
};

View File

@ -49,6 +49,8 @@ import static org.hibernate.cfg.AvailableSettings.JAKARTA_HBM2DDL_LOAD_SCRIPT_SO
import static org.hibernate.tool.schema.internal.Helper.interpretScriptSourceSetting;
/**
* Basic implementation of {@link SchemaTruncator}.
*
* @author Gavin King
*/
public class SchemaTruncatorImpl implements SchemaTruncator {
@ -119,7 +121,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
Formatter formatter,
GenerationTarget... targets) {
final Database database = metadata.getDatabase();
SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.fromConfigurationMap(
SqlStringGenerationContext context = SqlStringGenerationContextImpl.fromConfigurationMap(
metadata.getDatabase().getJdbcEnvironment(), database, options.getConfigurationValues() );
@ -131,7 +133,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
continue;
}
disableConstraints( namespace, metadata, formatter, options, sqlStringGenerationContext,
disableConstraints( namespace, metadata, formatter, options, context,
contributableInclusionFilter, targets );
applySqlString( dialect.getTableCleaner().getSqlBeforeString(), formatter, options,targets );
@ -151,7 +153,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
list.add( table );
}
applySqlStrings( dialect.getTableCleaner().getSqlTruncateStrings( list, metadata,
sqlStringGenerationContext
context
), formatter, options,targets );
//TODO: reset the sequences?
@ -165,12 +167,12 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
// checkExportIdentifier( sequence, exportIdentifiers );
//
// applySqlStrings( dialect.getSequenceExporter().getSqlDropStrings( sequence, metadata,
// sqlStringGenerationContext
// context
// ), formatter, options, targets );
// }
applySqlString( dialect.getTableCleaner().getSqlAfterString(), formatter, options,targets );
enableConstraints( namespace, metadata, formatter, options, sqlStringGenerationContext,
enableConstraints( namespace, metadata, formatter, options, context,
contributableInclusionFilter, targets );
}
@ -184,7 +186,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext context,
ContributableMatcher contributableInclusionFilter,
GenerationTarget... targets) {
final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
@ -204,7 +206,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
if ( dialect.canDisableConstraints() ) {
applySqlString(
dialect.getTableCleaner().getSqlDisableConstraintString( foreignKey, metadata,
sqlStringGenerationContext
context
),
formatter,
options,
@ -214,7 +216,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
else if ( !dialect.canBatchTruncate() ) {
applySqlStrings(
dialect.getForeignKeyExporter().getSqlDropStrings( foreignKey, metadata,
sqlStringGenerationContext
context
),
formatter,
options,
@ -230,7 +232,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext context,
ContributableMatcher contributableInclusionFilter,
GenerationTarget... targets) {
final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
@ -250,7 +252,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
if ( dialect.canDisableConstraints() ) {
applySqlString(
dialect.getTableCleaner().getSqlEnableConstraintString( foreignKey, metadata,
sqlStringGenerationContext
context
),
formatter,
options,
@ -260,7 +262,7 @@ public class SchemaTruncatorImpl implements SchemaTruncator {
else if ( !dialect.canBatchTruncate() ) {
applySqlStrings(
dialect.getForeignKeyExporter().getSqlCreateStrings( foreignKey, metadata,
sqlStringGenerationContext
context
),
formatter,
options,

View File

@ -50,8 +50,8 @@ public class StandardTableMigrator implements TableMigrator {
Table table,
Metadata metadata,
TableInformation tableInfo,
SqlStringGenerationContext sqlStringGenerationContext) {
return sqlAlterStrings( table, dialect, metadata, tableInfo, sqlStringGenerationContext )
SqlStringGenerationContext context) {
return sqlAlterStrings( table, dialect, metadata, tableInfo, context )
.toArray( EMPTY_STRING_ARRAY );
}
@ -61,9 +61,9 @@ public class StandardTableMigrator implements TableMigrator {
Dialect dialect,
Metadata metadata,
TableInformation tableInformation,
SqlStringGenerationContext sqlStringGenerationContext) throws HibernateException {
SqlStringGenerationContext context) throws HibernateException {
final String tableName = sqlStringGenerationContext.format( new QualifiedTableName(
final String tableName = context.format( new QualifiedTableName(
Identifier.toIdentifier( table.getCatalog(), table.isCatalogQuoted() ),
Identifier.toIdentifier( table.getSchema(), table.isSchemaQuoted() ),
table.getNameIdentifier() )
@ -80,7 +80,7 @@ public class StandardTableMigrator implements TableMigrator {
if ( columnInformation == null ) {
// the column doesn't exist at all.
final String addColumn = dialect.getAddColumnString() + ' '
+ getFullColumnDeclaration( column, table, metadata, dialect, sqlStringGenerationContext )
+ getFullColumnDeclaration( column, table, metadata, dialect, context )
+ dialect.getAddColumnSuffixString();
results.add( alterTable + addColumn );
}

View File

@ -24,5 +24,5 @@ public interface TableMigrator {
Table table,
Metadata metadata,
TableInformation tableInfo,
SqlStringGenerationContext sqlStringGenerationContext);
SqlStringGenerationContext context);
}

View File

@ -23,7 +23,7 @@ import org.hibernate.tool.schema.extract.spi.ExtractionContext;
public class ImprovedExtractionContextImpl implements ExtractionContext {
private final ServiceRegistry serviceRegistry;
private final JdbcEnvironment jdbcEnvironment;
private final SqlStringGenerationContext sqlStringGenerationContext;
private final SqlStringGenerationContext context;
private final DdlTransactionIsolator ddlTransactionIsolator;
private final DatabaseObjectAccess databaseObjectAccess;
@ -33,12 +33,12 @@ public class ImprovedExtractionContextImpl implements ExtractionContext {
public ImprovedExtractionContextImpl(
ServiceRegistry serviceRegistry,
JdbcEnvironment jdbcEnvironment,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext context,
DdlTransactionIsolator ddlTransactionIsolator,
DatabaseObjectAccess databaseObjectAccess) {
this.serviceRegistry = serviceRegistry;
this.jdbcEnvironment = jdbcEnvironment;
this.sqlStringGenerationContext = sqlStringGenerationContext;
this.context = context;
this.ddlTransactionIsolator = ddlTransactionIsolator;
this.databaseObjectAccess = databaseObjectAccess;
}
@ -55,7 +55,7 @@ public class ImprovedExtractionContextImpl implements ExtractionContext {
@Override
public SqlStringGenerationContext getSqlStringGenerationContext() {
return sqlStringGenerationContext;
return context;
}
@Override
@ -81,12 +81,12 @@ public class ImprovedExtractionContextImpl implements ExtractionContext {
@Override
public Identifier getDefaultCatalog() {
return sqlStringGenerationContext.getDefaultCatalog();
return context.getDefaultCatalog();
}
@Override
public Identifier getDefaultSchema() {
return sqlStringGenerationContext.getDefaultSchema();
return context.getDefaultSchema();
}
@Override

View File

@ -7,7 +7,6 @@
package org.hibernate.tool.schema.spi;
import org.hibernate.Incubating;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.resource.transaction.spi.DdlTransactionIsolator;
@ -27,7 +26,7 @@ public interface ExtractionTool {
ExtractionContext createExtractionContext(
ServiceRegistry serviceRegistry,
JdbcEnvironment jdbcEnvironment,
SqlStringGenerationContext sqlStringGenerationContext,
SqlStringGenerationContext context,
DdlTransactionIsolator ddlTransactionIsolator,
ExtractionContext.DatabaseObjectAccess databaseObjectAccess);