introduce TableMigrator and kill some code duplication
This commit is contained in: parent f2576d6b87, commit 6348d9927e
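At a glance: the schema migrator no longer asks Table itself for alter statements; it asks the dialect for a TableMigrator and lets that object build them. A minimal sketch of the new call shape, with illustrative local variable names only (the accessor and method signature are taken from the hunks below, the variables are assumed to be in scope):

    // roughly the call the schema migrator now makes when altering an existing table
    String[] alterSql = dialect.getTableMigrator()
            .getSqlAlterStrings( table, metadata, tableInformation, sqlGenerationContext );
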
@@ -45,7 +45,6 @@ import org.hibernate.NotYetImplementedFor6Exception;
import org.hibernate.ScrollMode;
import org.hibernate.boot.TempTableDdlTransactionHandling;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.boot.model.convert.spi.ConverterRegistry;
import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject;
import org.hibernate.boot.model.relational.Sequence;
import org.hibernate.boot.spi.SessionFactoryOptions;
@@ -153,7 +152,9 @@ import org.hibernate.tool.schema.internal.StandardIndexExporter;
import org.hibernate.tool.schema.internal.StandardSequenceExporter;
import org.hibernate.tool.schema.internal.StandardTableCleaner;
import org.hibernate.tool.schema.internal.StandardTableExporter;
import org.hibernate.tool.schema.internal.StandardTableMigrator;
import org.hibernate.tool.schema.internal.StandardUniqueKeyExporter;
import org.hibernate.tool.schema.internal.TableMigrator;
import org.hibernate.tool.schema.spi.Cleaner;
import org.hibernate.tool.schema.spi.Exporter;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
@@ -2488,12 +2489,17 @@ public abstract class Dialect implements ConversionContext {
    private final StandardUniqueKeyExporter uniqueKeyExporter = new StandardUniqueKeyExporter( this );
    private final StandardAuxiliaryDatabaseObjectExporter auxiliaryObjectExporter = new StandardAuxiliaryDatabaseObjectExporter( this );
    private final StandardTemporaryTableExporter temporaryTableExporter = new StandardTemporaryTableExporter( this );
    private final StandardTableMigrator tableMigrator = new StandardTableMigrator( this );
    private final StandardTableCleaner tableCleaner = new StandardTableCleaner( this );

    public Exporter<Table> getTableExporter() {
        return tableExporter;
    }

    public TableMigrator getTableMigrator() {
        return tableMigrator;
    }

    public Cleaner getTableCleaner() {
        return tableCleaner;
    }

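The new accessors above are the extension point: a dialect subclass can, in principle, hand back its own migrator or cleaner. A hedged sketch only; the subclass, its name, and the idea of overriding the accessor are illustrative assumptions, not part of this commit (imports for the named types are assumed, as in the Dialect hunks above):

    // hypothetical dialect that supplies its own TableMigrator
    public class MyDialect extends H2Dialect {
        // decorate or replace StandardTableMigrator here as needed
        private final TableMigrator customMigrator = new StandardTableMigrator( this );

        @Override
        public TableMigrator getTableMigrator() {
            return customMigrator;
        }
    }
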
@@ -869,10 +869,11 @@ public final class StringHelper {
     * Return the interned form of a String, or null if the parameter is null.
     * <p>
     * Use with caution: excessive interning is known to cause issues.
     * Best to use only with strings which are known to be long lived constants,
     * Best to use only with strings which are known to be long-lived constants,
     * and for which the chances of being actual duplicates is proven.
     * (Even better: avoid needing interning by design changes such as reusing
     * the known reference)
     *
     * @param string The string to intern.
     * @return The interned string.
     */

@@ -31,9 +31,9 @@ import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.boot.spi.InFlightMetadataCollector;
import org.hibernate.dialect.Dialect;
import org.hibernate.tool.schema.extract.spi.ColumnInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;

import org.hibernate.tool.schema.internal.StandardTableMigrator;
import org.jboss.logging.Logger;

import static java.util.Collections.unmodifiableList;
@@ -160,6 +160,14 @@ public class Table implements Serializable, ContributableDatabaseObject {
        return name;
    }

    public Identifier getSchemaIdentifier() {
        return schema;
    }

    public Identifier getCatalogIdentifier() {
        return catalog;
    }

    public String getQuotedName() {
        return name == null ? null : name.toString();
    }
@@ -432,88 +440,14 @@ public class Table implements Serializable, ContributableDatabaseObject {
        }
    }

    @Deprecated(since = "6.2") @Remove
    public Iterator<String> sqlAlterStrings(
            Dialect dialect,
            Metadata metadata,
            TableInformation tableInfo,
            SqlStringGenerationContext sqlStringGenerationContext) throws HibernateException {
        final String tableName = sqlStringGenerationContext.format( new QualifiedTableName( catalog, schema, name ) );

        final StringBuilder root = new StringBuilder( dialect.getAlterTableString( tableName ) )
                .append( ' ' )
                .append( dialect.getAddColumnString() );

        final List<String> results = new ArrayList<>();

        for ( Column column : getColumns() ) {
            final ColumnInformation columnInfo = tableInfo.getColumn(
                    Identifier.toIdentifier( column.getName(), column.isQuoted() )
            );

            if ( columnInfo == null ) {
                // the column doesn't exist at all.
                final StringBuilder alter = new StringBuilder( root.toString() )
                        .append( ' ' )
                        .append( column.getQuotedName( dialect ) );

                final String columnType = column.getSqlType(
                        metadata.getDatabase().getTypeConfiguration(),
                        dialect,
                        metadata
                );
                if ( column.hasSpecializedTypeDeclaration() ) {
                    alter.append( ' ' ).append( column.getSpecializedTypeDeclaration() );
                }
                else if ( column.getGeneratedAs() == null || dialect.hasDataTypeBeforeGeneratedAs() ) {
                    alter.append(' ').append(columnType);
                }

                final String defaultValue = column.getDefaultValue();
                if ( defaultValue != null ) {
                    alter.append( " default " ).append( defaultValue );
                }

                final String generatedAs = column.getGeneratedAs();
                if ( generatedAs != null) {
                    alter.append( dialect.generatedAs( generatedAs ) );
                }

                if ( column.isNullable() ) {
                    alter.append( dialect.getNullColumnString( columnType ) );
                }
                else {
                    alter.append( " not null" );
                }

                if ( column.isUnique() && !isPrimaryKey( column ) ) {
                    String keyName = Constraint.generateName( "UK_", this, column );
                    UniqueKey uk = getOrCreateUniqueKey( keyName );
                    uk.addColumn( column );
                    alter.append( dialect.getUniqueDelegate()
                            .getColumnDefinitionUniquenessFragment( column, sqlStringGenerationContext ) );
                }

                if ( column.hasCheckConstraint() && dialect.supportsColumnCheck() ) {
                    alter.append( column.checkConstraint() );
                }

                final String columnComment = column.getComment();
                if ( columnComment != null ) {
                    alter.append( dialect.getColumnComment( columnComment ) );
                }

                alter.append( dialect.getAddColumnSuffixString() );

                results.add( alter.toString() );
            }

        }

        if ( results.isEmpty() ) {
            log.debugf( "No alter strings for table : %s", getQuotedName() );
        }

        return results.iterator();
        return StandardTableMigrator.sqlAlterStrings(this, dialect, metadata, tableInfo, sqlStringGenerationContext )
                .iterator();
    }

    public boolean isPrimaryKey(Column column) {

@@ -186,14 +186,14 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
        final Set<String> exportIdentifiers = CollectionHelper.setOfSize( 50 );

        final Database database = metadata.getDatabase();
        Exporter<AuxiliaryDatabaseObject> auxiliaryExporter = dialect.getAuxiliaryDatabaseObjectExporter();

        // Drop all AuxiliaryDatabaseObjects
        for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) {
            if ( auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
                applySqlStrings(
                        true,
                        dialect.getAuxiliaryDatabaseObjectExporter()
                                .getSqlDropStrings( auxiliaryDatabaseObject, metadata, sqlGenerationContext ),
                        auxiliaryExporter.getSqlDropStrings( auxiliaryDatabaseObject, metadata, sqlGenerationContext ),
                        formatter,
                        options,
                        targets
@@ -207,7 +207,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
                    && auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
                applySqlStrings(
                        true,
                        auxiliaryDatabaseObject.sqlCreateStrings( sqlGenerationContext ),
                        auxiliaryExporter.getSqlCreateStrings( auxiliaryDatabaseObject, metadata, sqlGenerationContext ),
                        formatter,
                        options,
                        targets
@@ -246,18 +246,10 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
            if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
                for ( Sequence sequence : namespace.getSequences() ) {
                    if ( contributableInclusionFilter.matches( sequence ) ) {
                        checkExportIdentifier( sequence, exportIdentifiers );
                        final SequenceInformation sequenceInformation =
                                existingDatabase.getSequenceInformation( sequence.getName() );
                        checkExportIdentifier( sequence, exportIdentifiers);
                        final SequenceInformation sequenceInformation = existingDatabase.getSequenceInformation( sequence.getName() );
                        if ( sequenceInformation == null ) {
                            applySqlStrings(
                                    false,
                                    dialect.getSequenceExporter()
                                            .getSqlCreateStrings( sequence, metadata, sqlGenerationContext ),
                                    formatter,
                                    options,
                                    targets
                            );
                            applySequence( sequence, dialect, metadata, formatter, options, sqlGenerationContext, targets );
                        }
                    }
                }
@@ -269,17 +261,12 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
            if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
                final NameSpaceTablesInformation nameSpaceTablesInformation = tablesInformation.get( namespace );
                for ( Table table : namespace.getTables() ) {
                    if ( ! options.getSchemaFilter().includeTable( table ) ) {
                        continue;
                    }
                    if ( ! contributableInclusionFilter.matches( table ) ) {
                        continue;
                    }

                    final TableInformation tableInformation = nameSpaceTablesInformation.getTableInformation( table );
                    if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
                        applyForeignKeys( table, tableInformation, dialect, metadata, formatter, options,
                                sqlGenerationContext, targets );
                    if ( options.getSchemaFilter().includeTable( table ) && contributableInclusionFilter.matches( table ) ) {
                        final TableInformation tableInformation = nameSpaceTablesInformation.getTableInformation( table );
                        if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
                            applyForeignKeys( table, tableInformation, dialect, metadata, formatter, options,
                                    sqlGenerationContext, targets );
                        }
                    }
                }
            }
@@ -287,10 +274,10 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {

        // Create after-table AuxiliaryDatabaseObjects
        for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) {
            if ( auxiliaryDatabaseObject.beforeTablesOnCreation() && auxiliaryDatabaseObject.appliesToDialect( dialect )) {
            if ( auxiliaryDatabaseObject.beforeTablesOnCreation() && auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
                applySqlStrings(
                        true,
                        auxiliaryDatabaseObject.sqlCreateStrings( sqlGenerationContext ),
                        auxiliaryExporter.getSqlCreateStrings( auxiliaryDatabaseObject, metadata, sqlGenerationContext ),
                        formatter,
                        options,
                        targets
@@ -299,6 +286,23 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
        }
    }

    private static void applySequence(
            Sequence sequence,
            Dialect dialect,
            Metadata metadata,
            Formatter formatter,
            ExecutionOptions options,
            SqlStringGenerationContext sqlGenerationContext,
            GenerationTarget... targets) {
        applySqlStrings(
                false,
                dialect.getSequenceExporter().getSqlCreateStrings( sequence, metadata, sqlGenerationContext ),
                formatter,
                options,
                targets
        );
    }

    protected void createTable(
            Table table,
            Dialect dialect,
@@ -327,12 +331,8 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
            GenerationTarget... targets) {
        applySqlStrings(
                false,
                table.sqlAlterStrings(
                        dialect,
                        metadata,
                        tableInformation,
                        sqlGenerationContext
                ),
                dialect.getTableMigrator()
                        .getSqlAlterStrings( table, metadata, tableInformation, sqlGenerationContext ),
                formatter,
                options,
                targets
@@ -576,17 +576,4 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
            }
        }
    }

    private static void applySqlStrings(
            boolean quiet,
            Iterator<String> sqlStrings,
            Formatter formatter,
            ExecutionOptions options,
            GenerationTarget... targets) {
        if ( sqlStrings != null ) {
            while ( sqlStrings.hasNext() ) {
                applySqlString( quiet, sqlStrings.next(), formatter, options, targets );
            }
        }
    }
}

@@ -6,6 +6,7 @@
 */
package org.hibernate.tool.schema.internal;

import org.hibernate.Incubating;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.QualifiedNameParser;
@@ -24,6 +25,7 @@ import java.util.stream.Collectors;
 *
 * @author Gavin King
 */
@Incubating
public class StandardTableCleaner implements Cleaner {
    protected final Dialect dialect;

@@ -7,7 +7,6 @@
package org.hibernate.tool.schema.internal;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.hibernate.MappingException;
@@ -20,19 +19,22 @@ import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.dialect.Dialect;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Constraint;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.UniqueKey;
import org.hibernate.tool.schema.spi.Exporter;

import static java.util.Collections.addAll;
import static org.hibernate.internal.util.StringHelper.EMPTY_STRINGS;

/**
 * An {@link Exporter} for {@linkplain Table tables}.
 *
 * @author Steve Ebersole
 */
public class StandardTableExporter implements Exporter<Table> {

    protected final Dialect dialect;

    public StandardTableExporter(Dialect dialect) {
@@ -44,39 +46,17 @@ public class StandardTableExporter implements Exporter<Table> {
            Table table,
            Metadata metadata,
            SqlStringGenerationContext context) {
        final QualifiedName tableName = new QualifiedNameParser.NameParts(
                Identifier.toIdentifier( table.getCatalog(), table.isCatalogQuoted() ),
                Identifier.toIdentifier( table.getSchema(), table.isSchemaQuoted() ),
                table.getNameIdentifier()
        );
        final QualifiedName tableName = getTableName(table);

        try {
            String formattedTableName = context.format( tableName );
            StringBuilder createTable =
            final String formattedTableName = context.format( tableName );

            final StringBuilder createTable =
                    new StringBuilder( tableCreateString( table.hasPrimaryKey() ) )
                            .append( ' ' )
                            .append( formattedTableName )
                            .append( " (" );

            boolean isPrimaryKeyIdentity = table.hasPrimaryKey()
                    && table.getIdentifierValue() != null
                    && table.getIdentifierValue().isIdentityColumn(
                            ( (MetadataImplementor) metadata ).getMetadataBuildingOptions()
                                    .getIdentifierGeneratorFactory(),
                            dialect
                    );
            // TODO: this is the much better form moving forward as we move to metamodel
            //boolean isPrimaryKeyIdentity = hasPrimaryKey
            //        && table.getPrimaryKey().getColumnSpan() == 1
            //        && table.getPrimaryKey().getColumn( 0 ).isIdentity();

            // Try to find out the name of the primary key in case the dialect needs it to create an identity
            String pkColName = null;
            if ( table.hasPrimaryKey() ) {
                pkColName = table.getPrimaryKey().getColumns().get(0).getQuotedName( dialect );
            }

            boolean isFirst = true;
            for ( Column column : table.getColumns() ) {
                if ( isFirst ) {
@@ -85,75 +65,10 @@ public class StandardTableExporter implements Exporter<Table> {
                else {
                    createTable.append( ", " );
                }

                String colName = column.getQuotedName( dialect );
                createTable.append( colName );

                if ( isPrimaryKeyIdentity && colName.equals( pkColName ) ) {
                    // to support dialects that have their own identity data type
                    if ( dialect.getIdentityColumnSupport().hasDataTypeInIdentityColumn() ) {
                        createTable.append( ' ' ).append(
                                column.getSqlType( metadata.getDatabase().getTypeConfiguration(), dialect, metadata )
                        );
                    }
                    String identityColumnString = dialect.getIdentityColumnSupport()
                            .getIdentityColumnString( column.getSqlTypeCode(metadata) );
                    createTable.append( ' ' ).append( identityColumnString );
                }
                else {
                    final String columnType = column.getSqlType(
                            metadata.getDatabase().getTypeConfiguration(),
                            dialect,
                            metadata
                    );
                    if ( column.hasSpecializedTypeDeclaration() ) {
                        createTable.append( ' ' ).append( column.getSpecializedTypeDeclaration() );
                    }
                    else if ( column.getGeneratedAs() == null || dialect.hasDataTypeBeforeGeneratedAs() ) {
                        createTable.append( ' ' ).append( columnType );
                    }

                    String defaultValue = column.getDefaultValue();
                    if ( defaultValue != null ) {
                        createTable.append( " default " ).append( defaultValue );
                    }

                    String generatedAs = column.getGeneratedAs();
                    if ( generatedAs != null) {
                        createTable.append( dialect.generatedAs( generatedAs ) );
                    }

                    if ( column.isNullable() ) {
                        createTable.append( dialect.getNullColumnString( columnType ) );
                    }
                    else {
                        createTable.append( " not null" );
                    }

                }

                if ( column.isUnique() && !table.isPrimaryKey( column ) ) {
                    String keyName = Constraint.generateName( "UK_", table, column );
                    UniqueKey uk = table.getOrCreateUniqueKey( keyName );
                    uk.addColumn( column );
                    createTable.append(
                            dialect.getUniqueDelegate()
                                    .getColumnDefinitionUniquenessFragment( column, context )
                    );
                }

                if ( dialect.supportsColumnCheck() && column.hasCheckConstraint() ) {
                    createTable.append( column.checkConstraint() );
                }

                String columnComment = column.getComment();
                if ( columnComment != null ) {
                    createTable.append( dialect.getColumnComment( columnComment ) );
                }
                appendColumn( createTable, column, table, metadata, dialect, context );
            }
            if ( table.hasPrimaryKey() ) {
                createTable.append( ", " )
                        .append( table.getPrimaryKey().sqlConstraintString( dialect ) );
                createTable.append( ", " ).append( table.getPrimaryKey().sqlConstraintString( dialect ) );
            }

            createTable.append( dialect.getUniqueDelegate().getTableCreationUniqueConstraintsFragment( table, context ) );
@@ -168,20 +83,108 @@ public class StandardTableExporter implements Exporter<Table> {

            applyTableTypeString( createTable );

            List<String> sqlStrings = new ArrayList<>();
            final List<String> sqlStrings = new ArrayList<>();
            sqlStrings.add( createTable.toString() );

            applyComments( table, formattedTableName, sqlStrings );

            applyInitCommands( table, sqlStrings, context );

            return sqlStrings.toArray(StringHelper.EMPTY_STRINGS);
            return sqlStrings.toArray( EMPTY_STRINGS );
        }
        catch (Exception e) {
            throw new MappingException( "Error creating SQL create commands for table : " + tableName, e );
        }
    }

    static void appendColumn(
            StringBuilder statement,
            Column column,
            Table table,
            Metadata metadata,
            Dialect dialect,
            SqlStringGenerationContext context) {

        statement.append( column.getQuotedName( dialect ) );

        final String columnType = column.getSqlType( metadata.getDatabase().getTypeConfiguration(), dialect, metadata );
        if ( isIdentityColumn( column, table, metadata, dialect ) ) {
            // to support dialects that have their own identity data type
            if ( dialect.getIdentityColumnSupport().hasDataTypeInIdentityColumn() ) {
                statement.append( ' ' ).append( columnType );
            }
            final String identityColumnString = dialect.getIdentityColumnSupport()
                    .getIdentityColumnString( column.getSqlTypeCode( metadata ) );
            statement.append( ' ' ).append( identityColumnString );
        }
        else {
            if ( column.hasSpecializedTypeDeclaration() ) {
                statement.append( ' ' ).append( column.getSpecializedTypeDeclaration() );
            }
            else if ( column.getGeneratedAs() == null || dialect.hasDataTypeBeforeGeneratedAs() ) {
                statement.append( ' ' ).append( columnType );
            }

            final String defaultValue = column.getDefaultValue();
            if ( defaultValue != null ) {
                statement.append( " default " ).append( defaultValue );
            }

            final String generatedAs = column.getGeneratedAs();
            if ( generatedAs != null) {
                statement.append( dialect.generatedAs( generatedAs ) );
            }

            if ( column.isNullable() ) {
                statement.append( dialect.getNullColumnString(columnType) );
            }
            else {
                statement.append( " not null" );
            }
        }

        if ( column.isUnique() && !table.isPrimaryKey(column) ) {
            final String keyName = Constraint.generateName( "UK_", table, column);
            final UniqueKey uk = table.getOrCreateUniqueKey( keyName );
            uk.addColumn(column);
            statement.append(
                    dialect.getUniqueDelegate().getColumnDefinitionUniquenessFragment( column, context )
            );
        }

        if ( dialect.supportsColumnCheck() && column.hasCheckConstraint() ) {
            statement.append( column.checkConstraint() );
        }

        final String columnComment = column.getComment();
        if ( columnComment != null ) {
            statement.append( dialect.getColumnComment( columnComment ) );
        }
    }

    private static boolean isIdentityColumn(Column column, Table table, Metadata metadata, Dialect dialect) {
        // Try to find out the name of the primary key in case the dialect needs it to create an identity
        return isPrimaryKeyIdentity( table, metadata, dialect )
                && column.getQuotedName( dialect ).equals( getPrimaryKeyColumnName( table, dialect ) );
    }

    private static String getPrimaryKeyColumnName(Table table, Dialect dialect) {
        return table.hasPrimaryKey()
                ? table.getPrimaryKey().getColumns().get(0).getQuotedName( dialect )
                : null;
    }

    private static boolean isPrimaryKeyIdentity(Table table, Metadata metadata, Dialect dialect) {
        // TODO: this is the much better form moving forward as we move to metamodel
        //return hasPrimaryKey
        //        && table.getPrimaryKey().getColumnSpan() == 1
        //        && table.getPrimaryKey().getColumn( 0 ).isIdentity();
        MetadataImplementor metadataImplementor = (MetadataImplementor) metadata;
        return table.hasPrimaryKey()
                && table.getIdentifierValue() != null
                && table.getIdentifierValue().isIdentityColumn(
                        metadataImplementor.getMetadataBuildingOptions().getIdentifierGeneratorFactory(),
                        dialect
                );
    }

    /**
     * @param table The table.
     * @param tableName The qualified table name.
@@ -202,12 +205,16 @@ public class StandardTableExporter implements Exporter<Table> {
    protected void applyComments(Table table, String formattedTableName, List<String> sqlStrings) {
        if ( dialect.supportsCommentOn() ) {
            if ( table.getComment() != null ) {
                sqlStrings.add( "comment on table " + formattedTableName + " is '" + table.getComment() + "'" );
                sqlStrings.add( "comment on table "
                        + formattedTableName
                        + " is '" + table.getComment() + "'" );
            }
            for ( Column column : table.getColumns() ) {
                String columnComment = column.getComment();
                if ( columnComment != null ) {
                    sqlStrings.add( "comment on column " + formattedTableName + '.' + column.getQuotedName( dialect ) + " is '" + columnComment + "'" );
                    sqlStrings.add( "comment on column "
                            + formattedTableName + '.' + column.getQuotedName( dialect )
                            + " is '" + columnComment + "'" );
                }
            }
        }
@@ -215,7 +222,7 @@ public class StandardTableExporter implements Exporter<Table> {

    protected void applyInitCommands(Table table, List<String> sqlStrings, SqlStringGenerationContext context) {
        for ( InitCommand initCommand : table.getInitCommands( context ) ) {
            Collections.addAll( sqlStrings, initCommand.getInitCommands() );
            addAll( sqlStrings, initCommand.getInitCommands() );
        }
    }

@@ -226,9 +233,7 @@ public class StandardTableExporter implements Exporter<Table> {
    protected void applyTableCheck(Table table, StringBuilder buf) {
        if ( dialect.supportsTableCheck() ) {
            for (String constraint : table.getCheckConstraints() ) {
                buf.append( ", check (" )
                        .append( constraint )
                        .append( ')' );
                buf.append( ", check (" ).append( constraint ).append( ')' );
            }
        }
    }
@@ -244,19 +249,19 @@ public class StandardTableExporter implements Exporter<Table> {
        if ( dialect.supportsIfExistsBeforeTableName() ) {
            buf.append( "if exists " );
        }
        buf.append( context.format( getTableName( table ) ) )
                .append( dialect.getCascadeConstraintsString() );
        if ( dialect.supportsIfExistsAfterTableName() ) {
            buf.append( " if exists" );
        }
        return new String[] { buf.toString() };
    }

        final QualifiedName tableName = new QualifiedNameParser.NameParts(
    private static QualifiedName getTableName(Table table) {
        return new QualifiedNameParser.NameParts(
                Identifier.toIdentifier( table.getCatalog(), table.isCatalogQuoted() ),
                Identifier.toIdentifier( table.getSchema(), table.isSchemaQuoted() ),
                table.getNameIdentifier()
        );
        buf.append( context.format( tableName ) )
                .append( dialect.getCascadeConstraintsString() );

        if ( dialect.supportsIfExistsAfterTableName() ) {
            buf.append( " if exists" );
        }

        return new String[] { buf.toString() };
    }
}

@@ -0,0 +1,94 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.tool.schema.internal;

import org.hibernate.HibernateException;
import org.hibernate.Incubating;
import org.hibernate.Internal;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.extract.spi.ColumnInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.jboss.logging.Logger;

import java.util.ArrayList;
import java.util.List;

import static org.hibernate.internal.util.collections.ArrayHelper.EMPTY_STRING_ARRAY;
import static org.hibernate.tool.schema.internal.StandardTableExporter.appendColumn;

/**
 * A {@link TableMigrator} that only knows how to add new columns.
 *
 * @author Gavin King
 */
@Incubating
public class StandardTableMigrator implements TableMigrator {

    private static final Logger log = Logger.getLogger( Table.class );

    protected final Dialect dialect;

    public StandardTableMigrator(Dialect dialect) {
        this.dialect = dialect;
    }

    @Override
    public String[] getSqlAlterStrings(
            Table table,
            Metadata metadata,
            TableInformation tableInfo,
            SqlStringGenerationContext sqlStringGenerationContext) {
        return sqlAlterStrings( table, dialect, metadata, tableInfo, sqlStringGenerationContext )
                .toArray( EMPTY_STRING_ARRAY );
    }

    @Internal
    public static List<String> sqlAlterStrings(
            Table table,
            Dialect dialect,
            Metadata metadata,
            TableInformation tableInfo,
            SqlStringGenerationContext sqlStringGenerationContext) throws HibernateException {

        final String tableName = sqlStringGenerationContext.format( new QualifiedTableName(
                Identifier.toIdentifier( table.getCatalog(), table.isCatalogQuoted() ),
                Identifier.toIdentifier( table.getSchema(), table.isSchemaQuoted() ),
                table.getNameIdentifier() )
        );

        final StringBuilder root = new StringBuilder( dialect.getAlterTableString( tableName ) )
                .append( ' ' )
                .append( dialect.getAddColumnString() );

        final List<String> results = new ArrayList<>();

        for ( Column column : table.getColumns() ) {
            final ColumnInformation columnInfo = tableInfo.getColumn(
                    Identifier.toIdentifier( column.getName(), column.isQuoted() )
            );
            if ( columnInfo == null ) {
                // the column doesn't exist at all.
                final StringBuilder alterTable = new StringBuilder( root.toString() ).append( ' ' );
                appendColumn( alterTable, column, table, metadata, dialect, sqlStringGenerationContext );
                alterTable.append( dialect.getAddColumnSuffixString() );
                results.add( alterTable.toString() );
            }
        }

        if ( results.isEmpty() ) {
            log.debugf( "No alter strings for table : %s", table.getQuotedName() );
        }

        return results;
    }
}

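In effect, every mapped column with no matching ColumnInformation in the existing database yields one statement built from the dialect's alter-table and add-column fragments plus the shared appendColumn() logic. A hedged usage sketch (the surrounding variables are assumed to be in scope; the table and column names in the comment are invented and the exact SQL varies by dialect):

    // illustrative only: obtaining and applying the migrator's output
    String[] alterSql = new StandardTableMigrator( dialect )
            .getSqlAlterStrings( table, metadata, tableInfo, sqlGenerationContext );
    for ( String sql : alterSql ) {
        // on many dialects each element looks roughly like:
        // "alter table customer add column nickname varchar(255)"
        System.out.println( sql );
    }
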
@@ -0,0 +1,28 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.tool.schema.internal;

import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.extract.spi.TableInformation;

/**
 * An object that produces the {@code alter table} statements
 * needed to update the definition of a table.
 *
 * @author Gavin King
 *
 * @since 6.2
 */
public interface TableMigrator {
    String[] getSqlAlterStrings(
            Table table,
            Metadata metadata,
            TableInformation tableInfo,
            SqlStringGenerationContext sqlStringGenerationContext);
}

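Since the interface is a single method, alternative strategies are easy to sketch. For example, a hypothetical implementation that suppresses schema alteration entirely (not part of this commit; imports as in the interface above) would simply return an empty array:

    // hypothetical no-op implementation, shown only to illustrate the contract
    public class NoOpTableMigrator implements TableMigrator {
        @Override
        public String[] getSqlAlterStrings(
                Table table,
                Metadata metadata,
                TableInformation tableInfo,
                SqlStringGenerationContext sqlStringGenerationContext) {
            return new String[0];
        }
    }
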
@@ -18,6 +18,8 @@ import java.util.Collection;
 * An object that produces the SQL required to truncate the tables in a schema.
 *
 * @author Gavin King
 *
 * @since 6.2
 */
@Incubating
public interface Cleaner {