HHH-11023 - Startup performance regression : schema update/validate

parent a99638372d
commit de3153a8e1
@@ -601,6 +601,14 @@ If this property is not supplied (or is explicitly `false`), the provider should
Used to specify the `org.hibernate.tool.schema.spi.SchemaFilterProvider` to be used by `create`, `drop`, `migrate`, and `validate` operations on the database schema.

`SchemaFilterProvider` provides filters that can be used to limit the scope of these operations to specific namespaces, tables and sequences. All objects are included by default.

+|`hibernate.hbm2ddl.jdbc_metadata_extraction_strategy` |`grouped` (default value) or `individually` a|
+
+Setting to choose the strategy used to access the JDBC Metadata.
+
+Valid options are defined by the `strategy` value of the `org.hibernate.tool.schema.JdbcMetadaAccessStrategy` enum:
+
+`grouped`:: `org.hibernate.tool.schema.spi.SchemaMigrator` and `org.hibernate.tool.schema.spi.SchemaValidator` execute a single `java.sql.DatabaseMetaData#getTables(String, String, String, String[])` call to retrieve all the database tables, in order to determine whether each `javax.persistence.Entity` has a corresponding mapped database table.
+`individually`:: `org.hibernate.tool.schema.spi.SchemaMigrator` and `org.hibernate.tool.schema.spi.SchemaValidator` execute one `java.sql.DatabaseMetaData#getTables(String, String, String, String[])` call for each `javax.persistence.Entity`, in order to determine whether a corresponding database table exists.
+
|`hibernate.hbm2ddl.delimiter` | `;` |Identifies the delimiter to use to separate schema management statements in script outputs.

|`hibernate.schema_management_tool` |A schema name |Used to specify the `org.hibernate.tool.schema.spi.SchemaManagementTool` to use for performing schema management. The default is to use `org.hibernate.tool.schema.internal.HibernateSchemaManagementTool`
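For readers unfamiliar with the new setting, it is configured like any other Hibernate property. A minimal sketch follows; the example class and the commented entity are illustrative and not part of this commit:

import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

public class JdbcMetadataStrategyExample {
	public static void main(String[] args) {
		Configuration configuration = new Configuration()
				// run schema update at startup
				.setProperty( "hibernate.hbm2ddl.auto", "update" )
				// single DatabaseMetaData#getTables() call per namespace ("grouped" is the default);
				// use "individually" to restore the previous one-call-per-entity behaviour
				.setProperty( "hibernate.hbm2ddl.jdbc_metadata_extraction_strategy", "grouped" );
		// configuration.addAnnotatedClass( MyEntity.class ); // hypothetical entity

		SessionFactory sessionFactory = configuration.buildSessionFactory();
		sessionFactory.close();
	}
}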
@@ -10,6 +10,7 @@ import org.hibernate.boot.MetadataBuilder;
import org.hibernate.query.internal.ParameterMetadataImpl;
import org.hibernate.resource.transaction.spi.TransactionCoordinator;
import org.hibernate.resource.transaction.spi.TransactionCoordinatorBuilder;
+import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
import org.hibernate.tool.schema.SourceType;

/**
@@ -1310,6 +1311,15 @@ public interface AvailableSettings {
	 */
	String HBM2DDL_FILTER_PROVIDER = "hibernate.hbm2ddl.schema_filter_provider";

+	/**
+	 * Setting to choose the strategy used to access the JDBC Metadata.
+	 *
+	 * Valid options are defined by the {@link JdbcMetadaAccessStrategy} enum.
+	 *
+	 * @see JdbcMetadaAccessStrategy
+	 */
+	String HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY = "hibernate.hbm2ddl.jdbc_metadata_extraction_strategy";
+
	/**
	 * Identifies the delimiter to use to separate schema management statements in script outputs
	 */
@@ -14,6 +14,7 @@ import java.util.Iterator;

import org.hibernate.HibernateException;
import org.hibernate.boot.Metadata;
+import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Exportable;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
@@ -29,13 +30,13 @@ public class Index implements RelationalModel, Exportable, Serializable {
	private Table table;
	private java.util.List<Column> columns = new ArrayList<Column>();
	private java.util.Map<Column, String> columnOrderMap = new HashMap<Column, String>( );
-	private String name;
+	private Identifier name;

	public String sqlCreateString(Dialect dialect, Mapping mapping, String defaultCatalog, String defaultSchema)
			throws HibernateException {
		return buildSqlCreateIndexString(
				dialect,
-				getName(),
+				getQuotedName( dialect ),
				getTable(),
				getColumnIterator(),
				columnOrderMap,
@@ -171,7 +172,7 @@ public class Index implements RelationalModel, Exportable, Serializable {
		return "drop index " +
				StringHelper.qualify(
						table.getQualifiedName( dialect, defaultCatalog, defaultSchema ),
-						name
+						getQuotedName( dialect )
				);
	}

@@ -219,11 +220,15 @@ public class Index implements RelationalModel, Exportable, Serializable {
	}

	public String getName() {
-		return name;
+		return name == null ? null : name.getText();
	}

	public void setName(String name) {
-		this.name = name;
+		this.name = Identifier.toIdentifier( name );
+	}
+
+	public String getQuotedName(Dialect dialect) {
+		return name == null ? null : name.render( dialect );
	}

	@Override
@@ -0,0 +1,57 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
+ */
+package org.hibernate.tool.schema;
+
+import org.hibernate.cfg.AvailableSettings;
+import org.hibernate.internal.util.StringHelper;
+
+/**
+ * @author Andrea Boriero
+ */
+public enum JdbcMetadaAccessStrategy {
+	/**
+	 * The {@link org.hibernate.tool.schema.spi.SchemaMigrator} and {@link org.hibernate.tool.schema.spi.SchemaValidator}
+	 * execute one {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call for each
+	 * {@link javax.persistence.Entity} in order to determine if a corresponding database table exists.
+	 */
+	INDIVIDUALLY( "individually" ),
+
+	/**
+	 * The {@link org.hibernate.tool.schema.spi.SchemaMigrator} and {@link org.hibernate.tool.schema.spi.SchemaValidator}
+	 * execute a single {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call
+	 * to retrieve all the database tables, in order to determine whether each {@link javax.persistence.Entity} has a mapped database table.
+	 */
+	GROUPED( "grouped" );
+
+	private final String strategy;
+
+	JdbcMetadaAccessStrategy(String strategy) {
+		this.strategy = strategy;
+	}
+
+	@Override
+	public String toString() {
+		return strategy;
+	}
+
+	public static JdbcMetadaAccessStrategy interpretHbm2ddlSetting(Object value) {
+		if ( value == null ) {
+			return GROUPED;
+		}
+		String name = value.toString();
+		if ( StringHelper.isEmpty( name ) || GROUPED.strategy.equals( name ) ) {
+			return GROUPED;
+		}
+		else if ( INDIVIDUALLY.strategy.equals( name ) ) {
+			return INDIVIDUALLY;
+		}
+		else {
+			throw new IllegalArgumentException( "Unrecognized `" + AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY + "` value : " + name );
+		}
+	}
+}
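A small usage sketch of the enum's parsing helper introduced above; the settings map is illustrative and not part of the commit:

import java.util.HashMap;
import java.util.Map;

import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;

public class MetadataStrategyParsingExample {
	public static void main(String[] args) {
		Map<String, Object> settings = new HashMap<>();
		settings.put( "hibernate.hbm2ddl.jdbc_metadata_extraction_strategy", "individually" );

		// A null or empty value falls back to GROUPED, the default.
		JdbcMetadaAccessStrategy strategy = JdbcMetadaAccessStrategy.interpretHbm2ddlSetting(
				settings.get( "hibernate.hbm2ddl.jdbc_metadata_extraction_strategy" )
		);

		System.out.println( strategy ); // prints "individually"
	}
}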
@@ -14,65 +14,55 @@ import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.QualifiedSequenceName;
import org.hibernate.boot.model.relational.QualifiedTableName;
-import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
+import org.hibernate.resource.transaction.spi.DdlTransactionIsolator;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.ExtractionContext;
import org.hibernate.tool.schema.extract.spi.InformationExtractor;
+import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.SequenceInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
+import org.hibernate.tool.schema.internal.exec.ImprovedExtractionContextImpl;

/**
- * Access to information from the existing database.
- *
 * @author Steve Ebersole
 */
-public class DatabaseInformationImpl implements DatabaseInformation, ExtractionContext.DatabaseObjectAccess {
+public class DatabaseInformationImpl
+		implements DatabaseInformation, ExtractionContext.DatabaseObjectAccess {
	private final JdbcEnvironment jdbcEnvironment;
-	private final ExtractionContext extractionContext;
+	private final ImprovedExtractionContextImpl extractionContext;
	private final InformationExtractor extractor;

-	private final Map<QualifiedSequenceName,SequenceInformation> sequenceInformationMap = new HashMap<QualifiedSequenceName, SequenceInformation>();
+	private final Map<QualifiedSequenceName, SequenceInformation> sequenceInformationMap = new HashMap<QualifiedSequenceName, SequenceInformation>();

-	public DatabaseInformationImpl(
-			JdbcEnvironment jdbcEnvironment,
-			InformationExtractor extractor,
-			ExtractionContext extractionContext) throws SQLException {
-		this.jdbcEnvironment = jdbcEnvironment;
-		this.extractionContext = extractionContext;
-		this.extractor = extractor;
-
-		// legacy code did initialize sequences...
-		initializeSequences();
-	}
-
	public DatabaseInformationImpl(
			ServiceRegistry serviceRegistry,
			JdbcEnvironment jdbcEnvironment,
-			JdbcConnectionAccess jdbcConnectionAccess,
-			Identifier defaultCatalogName,
-			Identifier defaultSchemaName) throws SQLException {
+			DdlTransactionIsolator ddlTransactionIsolator,
+			Namespace.Name defaultNamespace) throws SQLException {
		this.jdbcEnvironment = jdbcEnvironment;

-		this.extractionContext = new ExtractionContextImpl(
+		this.extractionContext = new ImprovedExtractionContextImpl(
				serviceRegistry,
				jdbcEnvironment,
-				jdbcConnectionAccess,
-				this,
-				defaultCatalogName,
-				defaultSchemaName
+				ddlTransactionIsolator,
+				defaultNamespace.getCatalog(),
+				defaultNamespace.getSchema(),
+				this
		);

		// todo : make this pluggable
		this.extractor = new InformationExtractorJdbcDatabaseMetaDataImpl( extractionContext );

-		// legacy code did initialize sequences...
+		// because we do not have defined a way to locate sequence info by name
		initializeSequences();
	}

	private void initializeSequences() throws SQLException {
-		Iterable<SequenceInformation> itr = jdbcEnvironment.getDialect().getSequenceInformationExtractor().extractMetadata( extractionContext );
+		Iterable<SequenceInformation> itr = jdbcEnvironment.getDialect()
+				.getSequenceInformationExtractor()
+				.extractMetadata( extractionContext );
		for ( SequenceInformation sequenceInformation : itr ) {
			sequenceInformationMap.put(
					// for now, follow the legacy behavior of storing just the
@@ -88,8 +78,13 @@ public class DatabaseInformationImpl implements DatabaseInformation, ExtractionC
	}

	@Override
-	public boolean schemaExists(Namespace.Name schema) {
-		return extractor.schemaExists( schema.getCatalog(), schema.getSchema() );
+	public boolean catalogExists(Identifier catalog) {
+		return extractor.catalogExists( catalog );
+	}
+
+	@Override
+	public boolean schemaExists(Namespace.Name namespace) {
+		return extractor.schemaExists( namespace.getCatalog(), namespace.getSchema() );
	}

	@Override
@@ -102,24 +97,29 @@ public class DatabaseInformationImpl implements DatabaseInformation, ExtractionC

	@Override
	public TableInformation getTableInformation(
-			Namespace.Name schemaName,
+			Namespace.Name namespace,
			Identifier tableName) {
-		return getTableInformation( new QualifiedTableName( schemaName, tableName ) );
+		return getTableInformation( new QualifiedTableName( namespace, tableName ) );
	}

	@Override
-	public TableInformation getTableInformation(QualifiedTableName qualifiedTableName) {
-		if ( qualifiedTableName.getObjectName() == null ) {
+	public TableInformation getTableInformation(QualifiedTableName tableName) {
+		if ( tableName.getObjectName() == null ) {
			throw new IllegalArgumentException( "Passed table name cannot be null" );
		}

		return extractor.getTable(
-				qualifiedTableName.getCatalogName(),
-				qualifiedTableName.getSchemaName(),
-				qualifiedTableName.getTableName()
+				tableName.getCatalogName(),
+				tableName.getSchemaName(),
+				tableName.getTableName()
		);
	}

+	@Override
+	public NameSpaceTablesInformation getTablesInformation(Namespace namespace) {
+		return extractor.getTables( namespace.getPhysicalName().getCatalog(), namespace.getPhysicalName().getSchema() );
+	}
+
	@Override
	public SequenceInformation getSequenceInformation(
			Identifier catalogName,
@@ -129,20 +129,18 @@ public class DatabaseInformationImpl implements DatabaseInformation, ExtractionC
	}

	@Override
-	public SequenceInformation getSequenceInformation(
-			Namespace.Name schemaName,
-			Identifier sequenceName) {
+	public SequenceInformation getSequenceInformation(Namespace.Name schemaName, Identifier sequenceName) {
		return getSequenceInformation( new QualifiedSequenceName( schemaName, sequenceName ) );
	}

	@Override
-	public SequenceInformation getSequenceInformation(QualifiedSequenceName qualifiedSequenceName) {
-		return locateSequenceInformation( qualifiedSequenceName );
+	public SequenceInformation getSequenceInformation(QualifiedSequenceName sequenceName) {
+		return locateSequenceInformation( sequenceName );
	}

	@Override
-	public boolean catalogExists(Identifier catalog) {
-		return extractor.catalogExists( catalog );
+	public void cleanup() {
+		extractionContext.cleanup();
	}

	@Override
@@ -159,9 +157,4 @@ public class DatabaseInformationImpl implements DatabaseInformation, ExtractionC

		return sequenceInformationMap.get( sequenceName );
	}
-
-	@Override
-	public void cleanup() {
-		extractionContext.cleanup();
-	}
}
@@ -36,6 +36,7 @@ import org.hibernate.tool.schema.extract.spi.ExtractionContext;
import org.hibernate.tool.schema.extract.spi.ForeignKeyInformation;
import org.hibernate.tool.schema.extract.spi.IndexInformation;
import org.hibernate.tool.schema.extract.spi.InformationExtractor;
+import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.PrimaryKeyInformation;
import org.hibernate.tool.schema.extract.spi.SchemaExtractionException;
import org.hibernate.tool.schema.extract.spi.TableInformation;
@@ -192,29 +193,18 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
		return extractionContext.getJdbcEnvironment().getIdentifierHelper().toMetaDataSchemaName( identifierToUse );
	}

-	public TableInformation extractTableInformation(
-			Identifier catalog,
-			Identifier schema,
-			Identifier name,
-			ResultSet resultSet) throws SQLException {
-		if ( catalog == null ) {
-			catalog = identifierHelper().toIdentifier( resultSet.getString( "TABLE_CAT" ) );
-		}
-		if ( schema == null ) {
-			schema = identifierHelper().toIdentifier( resultSet.getString( "TABLE_SCHEM" ) );
-		}
-		if ( name == null ) {
-			name = identifierHelper().toIdentifier( resultSet.getString( "TABLE_NAME" ) );
-		}
-
-		final QualifiedTableName tableName = new QualifiedTableName( catalog, schema, name );
-
-		return new TableInformationImpl(
+	private TableInformation extractTableInformation(ResultSet resultSet) throws SQLException {
+		final QualifiedTableName tableName = extractTableName( resultSet );
+
+		final TableInformationImpl tableInformation = new TableInformationImpl(
				this,
+				identifierHelper(),
				tableName,
				isPhysicalTableType( resultSet.getString( "TABLE_TYPE" ) ),
				resultSet.getString( "REMARKS" )
		);
+		addColumns( tableInformation );
+		return tableInformation;
	}

	@Override
@@ -247,7 +237,6 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
			}
		}
-

		// 2) look in default namespace
		if ( extractionContext.getDefaultCatalog() != null || extractionContext.getDefaultSchema() != null ) {
			tableInfo = locateTableInNamespace(
@@ -261,7 +250,6 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
			}
		}
-

		// 3) look in all namespaces
		try {
			final String tableNameFilter = toMetaDataObjectName( tableName );
@@ -274,7 +262,7 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
			);

			try {
-				return processGetTableResults(
+				return processTableResults(
						null,
						null,
						tableName,
@@ -295,6 +283,130 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
		}
	}

+	public NameSpaceTablesInformation getTables(Identifier catalog, Identifier schema) {
+
+		String catalogFilter = null;
+		String schemaFilter = null;
+
+		if ( extractionContext.getJdbcEnvironment().getNameQualifierSupport().supportsCatalogs() ) {
+			if ( catalog == null ) {
+				if ( extractionContext.getJdbcEnvironment().getCurrentCatalog() != null ) {
+					// 1) look in current namespace
+					catalogFilter = toMetaDataObjectName( extractionContext.getJdbcEnvironment().getCurrentCatalog() );
+				}
+				else if ( extractionContext.getDefaultCatalog() != null ) {
+					// 2) look in default namespace
+					catalogFilter = toMetaDataObjectName( extractionContext.getDefaultCatalog() );
+				}
+				else {
+					catalogFilter = "";
+				}
+			}
+			else {
+				catalogFilter = toMetaDataObjectName( catalog );
+			}
+		}
+
+		if ( extractionContext.getJdbcEnvironment().getNameQualifierSupport().supportsSchemas() ) {
+			if ( schema == null ) {
+				if ( extractionContext.getJdbcEnvironment().getCurrentSchema() != null ) {
+					// 1) look in current namespace
+					schemaFilter = toMetaDataObjectName( extractionContext.getJdbcEnvironment().getCurrentSchema() );
+				}
+				else if ( extractionContext.getDefaultSchema() != null ) {
+					// 2) look in default namespace
+					schemaFilter = toMetaDataObjectName( extractionContext.getDefaultSchema() );
+				}
+				else {
+					schemaFilter = "";
+				}
+			}
+			else {
+				schemaFilter = toMetaDataObjectName( schema );
+			}
+		}
+
+		try {
+			ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getTables(
+					catalogFilter,
+					schemaFilter,
+					"%",
+					tableTypes
+			);
+
+			final NameSpaceTablesInformation tablesInformation = processTableResults( resultSet );
+			populateTablesWithColumns( catalogFilter, schemaFilter, tablesInformation );
+			return tablesInformation;
+		}
+		catch (SQLException sqlException) {
+			throw convertSQLException( sqlException, "Error accessing table metadata" );
+		}
+	}
+
+	private void populateTablesWithColumns(
+			String catalogFilter,
+			String schemaFilter,
+			NameSpaceTablesInformation tables) {
+		try {
+			ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getColumns(
+					catalogFilter,
+					schemaFilter,
+					null,
+					"%"
+			);
+			try {
+				String currentTableName = "";
+				TableInformation currentTable = null;
+				while ( resultSet.next() ) {
+					if ( !currentTableName.equals( resultSet.getString( "TABLE_NAME" ) ) ) {
+						currentTableName = resultSet.getString( "TABLE_NAME" );
+						currentTable = tables.getTableInformation( currentTableName );
+					}
+					if ( currentTable != null ) {
+						final ColumnInformationImpl columnInformation = new ColumnInformationImpl(
+								currentTable,
+								DatabaseIdentifier.toIdentifier( resultSet.getString( "COLUMN_NAME" ) ),
+								resultSet.getInt( "DATA_TYPE" ),
+								new StringTokenizer( resultSet.getString( "TYPE_NAME" ), "() " ).nextToken(),
+								resultSet.getInt( "COLUMN_SIZE" ),
+								resultSet.getInt( "DECIMAL_DIGITS" ),
+								interpretTruthValue( resultSet.getString( "IS_NULLABLE" ) )
+						);
+						currentTable.addColumn( columnInformation );
+					}
+				}
+			}
+			finally {
+				resultSet.close();
+			}
+		}
+		catch (SQLException e) {
+			throw convertSQLException(
+					e,
+					"Error accessing tables metadata"
+			);
+		}
+	}
+
+	private NameSpaceTablesInformation processTableResults(ResultSet resultSet) throws SQLException {
+		try {
+			NameSpaceTablesInformation tables = new NameSpaceTablesInformation( identifierHelper() );
+			while ( resultSet.next() ) {
+				final TableInformation tableInformation = extractTableInformation( resultSet );
+				tables.addTableInformation( tableInformation );
+			}
+
+			return tables;
+		}
+		finally {
+			try {
+				resultSet.close();
+			}
+			catch (SQLException ignore) {
+			}
+		}
+	}
+
	private TableInformation locateTableInNamespace(
			Identifier catalog,
			Identifier schema,
@@ -341,7 +453,7 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
					tableTypes
			);

-			return processGetTableResults(
+			return processTableResults(
					catalogToUse,
					schemaToUse,
					tableName,
@@ -353,7 +465,7 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
		}
	}

-	private TableInformation processGetTableResults(
+	private TableInformation processTableResults(
			Identifier catalog,
			Identifier schema,
			Identifier tableName,
@@ -362,7 +474,8 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
			boolean found = false;
			TableInformation tableInformation = null;
			while ( resultSet.next() ) {
-				if ( tableName.equals( Identifier.toIdentifier( resultSet.getString( "TABLE_NAME" ), tableName.isQuoted() ) ) ) {
+				if ( tableName.equals( Identifier.toIdentifier( resultSet.getString( "TABLE_NAME" ),
+																tableName.isQuoted() ) ) ) {
					if ( found ) {
						log.multipleTablesFound( tableName.render() );
						final String catalogName = catalog == null ? "" : catalog.render();
@@ -379,13 +492,7 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
					}
					else {
						found = true;
-						tableInformation = extractTableInformation(
-								catalog,
-								schema,
-								tableName,
-								resultSet
-						);
-
+						tableInformation = extractTableInformation( resultSet );
					}
				}
			}
@@ -420,69 +527,60 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
		}
	}

-	@Override
-	public ColumnInformation getColumn(TableInformation tableInformation, Identifier columnIdentifier) {
-		final Identifier catalog = tableInformation.getName().getCatalogName();
-		final Identifier schema = tableInformation.getName().getSchemaName();
+	private void addColumns(TableInformation tableInformation) {
+		final QualifiedTableName tableName = tableInformation.getName();
+		final Identifier catalog = tableName.getCatalogName();
+		final Identifier schema = tableName.getSchemaName();

		final String catalogFilter;
		final String schemaFilter;

-		if ( extractionContext.getJdbcEnvironment().getNameQualifierSupport().supportsCatalogs() ) {
		if ( catalog == null ) {
			catalogFilter = "";
		}
		else {
-			catalogFilter = toMetaDataObjectName( catalog );
+			catalogFilter = catalog.getText();
		}
-		}
-		else {
-			catalogFilter = null;
-		}

-		if ( extractionContext.getJdbcEnvironment().getNameQualifierSupport().supportsSchemas() ) {
		if ( schema == null ) {
			schemaFilter = "";
		}
		else {
-			schemaFilter = toMetaDataObjectName( schema );
+			schemaFilter = schema.getText();
		}
-		}
-		else {
-			schemaFilter = null;
-		}

-		final String tableFilter = toMetaDataObjectName( tableInformation.getName().getTableName() );
-		final String columnFilter = toMetaDataObjectName( columnIdentifier );
-
		try {
			ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getColumns(
					catalogFilter,
					schemaFilter,
-					tableFilter,
-					columnFilter
+					tableName.getTableName().getText(),
+					"%"
			);

			try {
-				if ( !resultSet.next() ) {
-					return null;
-				}
-				return new ColumnInformationImpl(
+				while ( resultSet.next() ) {
+					final String columnName = resultSet.getString( "COLUMN_NAME" );
+					final ColumnInformationImpl columnInformation = new ColumnInformationImpl(
						tableInformation,
-						identifierHelper().toIdentifier( resultSet.getString( "COLUMN_NAME" ) ),
+						DatabaseIdentifier.toIdentifier( columnName ),
						resultSet.getInt( "DATA_TYPE" ),
						new StringTokenizer( resultSet.getString( "TYPE_NAME" ), "() " ).nextToken(),
						resultSet.getInt( "COLUMN_SIZE" ),
						resultSet.getInt( "DECIMAL_DIGITS" ),
						interpretTruthValue( resultSet.getString( "IS_NULLABLE" ) )
					);
+					tableInformation.addColumn( columnInformation );
+				}
			}
			finally {
				resultSet.close();
			}
		}
		catch (SQLException e) {
-			throw convertSQLException( e, "Error accessing column metadata: " + tableInformation.getName().toString() );
+			throw convertSQLException(
+					e,
+					"Error accessing column metadata: " + tableName.toString()
+			);
		}
	}
@@ -498,11 +596,32 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information

	@Override
	public PrimaryKeyInformation getPrimaryKey(TableInformationImpl tableInformation) {
+		final QualifiedTableName tableName = tableInformation.getName();
+		final Identifier catalog = tableName.getCatalogName();
+		final Identifier schema = tableName.getSchemaName();
+
+		final String catalogFilter;
+		final String schemaFilter;
+
+		if ( catalog == null ) {
+			catalogFilter = "";
+		}
+		else {
+			catalogFilter = catalog.getText();
+		}
+
+		if ( schema == null ) {
+			schemaFilter = "";
+		}
+		else {
+			schemaFilter = schema.getText();
+		}
+
		try {
			ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getPrimaryKeys(
-					identifierHelper().toMetaDataCatalogName( tableInformation.getName().getCatalogName() ),
-					identifierHelper().toMetaDataSchemaName( tableInformation.getName().getSchemaName() ),
-					identifierHelper().toMetaDataObjectName( tableInformation.getName().getTableName() )
+					catalogFilter,
+					schemaFilter,
+					tableInformation.getName().getTableName().getText()
			);

			final List<ColumnInformation> pkColumns = new ArrayList<ColumnInformation>();
@@ -514,7 +633,7 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
				final String currentPkName = resultSet.getString( "PK_NAME" );
				final Identifier currentPkIdentifier = currentPkName == null
						? null
-						: identifierHelper().toIdentifier( currentPkName );
+						: DatabaseIdentifier.toIdentifier( currentPkName );
				if ( firstPass ) {
					pkIdentifier = currentPkIdentifier;
					firstPass = false;
@@ -531,9 +650,10 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
				}

				final int columnPosition = resultSet.getInt( "KEY_SEQ" );
-				final String columnName = resultSet.getString( "COLUMN_NAME" );

-				final Identifier columnIdentifier = identifierHelper().toIdentifier( columnName );
+				final Identifier columnIdentifier = DatabaseIdentifier.toIdentifier(
+						resultSet.getString( "COLUMN_NAME" )
+				);
				final ColumnInformation column = tableInformation.getColumn( columnIdentifier );
				pkColumns.add( columnPosition-1, column );
			}
@@ -565,13 +685,33 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information

	@Override
	public Iterable<IndexInformation> getIndexes(TableInformation tableInformation) {
-		final Map<Identifier, IndexInformationImpl.Builder> builders = new HashMap<Identifier, IndexInformationImpl.Builder>();
+		final Map<Identifier, IndexInformationImpl.Builder> builders = new HashMap<>();
+		final QualifiedTableName tableName = tableInformation.getName();
+		final Identifier catalog = tableName.getCatalogName();
+		final Identifier schema = tableName.getSchemaName();
+
+		final String catalogFilter;
+		final String schemaFilter;
+
+		if ( catalog == null ) {
+			catalogFilter = "";
+		}
+		else {
+			catalogFilter = catalog.getText();
+		}
+
+		if ( schema == null ) {
+			schemaFilter = "";
+		}
+		else {
+			schemaFilter = schema.getText();
+		}
+
		try {
			ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getIndexInfo(
-					identifierHelper().toMetaDataCatalogName( tableInformation.getName().getCatalogName() ),
-					identifierHelper().toMetaDataSchemaName( tableInformation.getName().getSchemaName() ),
-					identifierHelper().toMetaDataObjectName( tableInformation.getName().getTableName() ),
+					catalogFilter,
+					schemaFilter,
+					tableName.getTableName().getText(),
					false,		// DO NOT limit to just unique
					true		// DO require up-to-date results
			);
@@ -582,10 +722,8 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
					continue;
				}

-				final Identifier indexIdentifier = identifierHelper().toIdentifier(
-						resultSet.getString(
-								"INDEX_NAME"
-						)
+				final Identifier indexIdentifier = DatabaseIdentifier.toIdentifier(
+						resultSet.getString( "INDEX_NAME" )
				);
				IndexInformationImpl.Builder builder = builders.get( indexIdentifier );
				if ( builder == null ) {
@@ -593,7 +731,7 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
					builders.put( indexIdentifier, builder );
				}

-				final Identifier columnIdentifier = identifierHelper().toIdentifier( resultSet.getString( "COLUMN_NAME" ) );
+				final Identifier columnIdentifier = DatabaseIdentifier.toIdentifier( resultSet.getString( "COLUMN_NAME" ) );
				final ColumnInformation columnInformation = tableInformation.getColumn( columnIdentifier );
				if ( columnInformation == null ) {
					// See HHH-10191: this may happen when dealing with Oracle/PostgreSQL function indexes
@@ -626,13 +764,33 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information

	@Override
	public Iterable<ForeignKeyInformation> getForeignKeys(TableInformation tableInformation) {
-		final Map<Identifier, ForeignKeyBuilder> fkBuilders = new HashMap<Identifier, ForeignKeyBuilder>();
+		final Map<Identifier, ForeignKeyBuilder> fkBuilders = new HashMap<>();
+		final QualifiedTableName tableName = tableInformation.getName();
+		final Identifier catalog = tableName.getCatalogName();
+		final Identifier schema = tableName.getSchemaName();
+
+		final String catalogFilter;
+		final String schemaFilter;
+
+		if ( catalog == null ) {
+			catalogFilter = "";
+		}
+		else {
+			catalogFilter = catalog.getText();
+		}
+
+		if ( schema == null ) {
+			schemaFilter = "";
+		}
+		else {
+			schemaFilter = schema.getText();
+		}
+
		try {
			ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getImportedKeys(
-					identifierHelper().toMetaDataCatalogName( tableInformation.getName().getCatalogName() ),
-					identifierHelper().toMetaDataSchemaName( tableInformation.getName().getSchemaName() ),
-					identifierHelper().toMetaDataObjectName( tableInformation.getName().getTableName() )
+					catalogFilter,
+					schemaFilter,
+					tableInformation.getName().getTableName().getText()
			);

			// todo : need to account for getCrossReference() as well...
@@ -640,7 +798,7 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
			try {
				while ( resultSet.next() ) {
					// IMPL NOTE : The builder is mainly used to collect the column reference mappings
-					final Identifier fkIdentifier = identifierHelper().toIdentifier(
+					final Identifier fkIdentifier = DatabaseIdentifier.toIdentifier(
							resultSet.getString( "FK_NAME" )
					);
					ForeignKeyBuilder fkBuilder = fkBuilders.get( fkIdentifier );
@@ -662,10 +820,10 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
						continue;
					}

-					final Identifier fkColumnIdentifier = identifierHelper().toIdentifier(
+					final Identifier fkColumnIdentifier = DatabaseIdentifier.toIdentifier(
							resultSet.getString( "FKCOLUMN_NAME" )
					);
-					final Identifier pkColumnIdentifier = identifierHelper().toIdentifier(
+					final Identifier pkColumnIdentifier = DatabaseIdentifier.toIdentifier(
							resultSet.getString( "PKCOLUMN_NAME" )
					);

@@ -742,4 +900,16 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information

		return new QualifiedTableName( catalog, schema, table );
	}
+
+	private QualifiedTableName extractTableName(ResultSet resultSet) throws SQLException {
+		final String incomingCatalogName = resultSet.getString( "TABLE_CAT" );
+		final String incomingSchemaName = resultSet.getString( "TABLE_SCHEM" );
+		final String incomingTableName = resultSet.getString( "TABLE_NAME" );
+
+		final DatabaseIdentifier catalog = DatabaseIdentifier.toIdentifier( incomingCatalogName );
+		final DatabaseIdentifier schema = DatabaseIdentifier.toIdentifier( incomingSchemaName );
+		final DatabaseIdentifier table = DatabaseIdentifier.toIdentifier( incomingTableName );
+
+		return new QualifiedTableName( catalog, schema, table );
+	}
}
@@ -11,6 +11,7 @@ import java.util.Map;

import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.QualifiedTableName;
+import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
import org.hibernate.tool.schema.extract.spi.ColumnInformation;
import org.hibernate.tool.schema.extract.spi.ForeignKeyInformation;
import org.hibernate.tool.schema.extract.spi.IndexInformation;
@@ -27,6 +28,8 @@ import org.hibernate.tool.schema.extract.spi.TableInformation;
 */
public class TableInformationImpl implements TableInformation {
	private final InformationExtractor extractor;
+	private final IdentifierHelper identifierHelper;
+
	private final QualifiedTableName tableName;
	private final boolean physicalTable;
	private final String comment;
@@ -34,15 +37,18 @@ public class TableInformationImpl implements TableInformation {
	private PrimaryKeyInformation primaryKey;
	private Map<Identifier, ForeignKeyInformation> foreignKeys;
	private Map<Identifier, IndexInformation> indexes;
+	private Map<Identifier, ColumnInformation> columns = new HashMap<>( );

	private boolean wasPrimaryKeyLoaded = false; // to avoid multiple db reads since primary key can be null.

	public TableInformationImpl(
			InformationExtractor extractor,
+			IdentifierHelper identifierHelper,
			QualifiedTableName tableName,
			boolean physicalTable,
-			String comment) {
+			String comment ) {
		this.extractor = extractor;
+		this.identifierHelper = identifierHelper;
		this.tableName = tableName;
		this.physicalTable = physicalTable;
		this.comment = comment;
@@ -65,7 +71,10 @@ public class TableInformationImpl implements TableInformation {

	@Override
	public ColumnInformation getColumn(Identifier columnIdentifier) {
-		return extractor.getColumn( this, columnIdentifier );
+		return columns.get( new Identifier(
+				identifierHelper.toMetaDataObjectName( columnIdentifier ),
+				false
+		) );
	}

	@Override
@@ -84,7 +93,7 @@ public class TableInformationImpl implements TableInformation {

	protected Map<Identifier, ForeignKeyInformation> foreignKeys() {
		if ( foreignKeys == null ) {
-			final Map<Identifier, ForeignKeyInformation> fkMap = new HashMap<Identifier, ForeignKeyInformation>();
+			final Map<Identifier, ForeignKeyInformation> fkMap = new HashMap<>();
			final Iterable<ForeignKeyInformation> fks = extractor.getForeignKeys( this );
			for ( ForeignKeyInformation fk : fks ) {
				fkMap.put( fk.getForeignKeyIdentifier(), fk );
@@ -96,7 +105,10 @@ public class TableInformationImpl implements TableInformation {

	@Override
	public ForeignKeyInformation getForeignKey(Identifier fkIdentifier) {
-		return foreignKeys().get( fkIdentifier );
+		return foreignKeys().get( new Identifier(
+				identifierHelper.toMetaDataObjectName( fkIdentifier ),
+				false
+		) );
	}

	@Override
@@ -106,7 +118,7 @@ public class TableInformationImpl implements TableInformation {

	protected Map<Identifier, IndexInformation> indexes() {
		if ( indexes == null ) {
-			final Map<Identifier, IndexInformation> indexMap = new HashMap<Identifier, IndexInformation>();
+			final Map<Identifier, IndexInformation> indexMap = new HashMap<>();
			final Iterable<IndexInformation> indexes = extractor.getIndexes( this );
			for ( IndexInformation index : indexes ) {
				indexMap.put( index.getIndexIdentifier(), index );
@@ -116,9 +128,17 @@ public class TableInformationImpl implements TableInformation {
		return indexes;
	}

+	@Override
+	public void addColumn(ColumnInformation columnIdentifier) {
+		columns.put( columnIdentifier.getColumnIdentifier(), columnIdentifier );
+	}
+
	@Override
	public IndexInformation getIndex(Identifier indexName) {
-		return indexes().get( indexName );
+		return indexes().get( new Identifier(
+				identifierHelper.toMetaDataObjectName( indexName ),
+				false
+		) );
	}

	@Override
@@ -60,6 +60,15 @@ public interface DatabaseInformation {
	 */
	TableInformation getTableInformation(QualifiedTableName tableName);

+	/**
+	 * Obtain a reference to all the {@link TableInformation} for a given {@link Namespace}
+	 *
+	 * @param namespace The {@link Namespace} which contains the {@link TableInformation}
+	 *
+	 * @return a {@link NameSpaceTablesInformation}
+	 */
+	NameSpaceTablesInformation getTablesInformation(Namespace namespace);
+
	/**
	 * Obtain reference to the named SequenceInformation
	 *
@@ -56,15 +56,16 @@ public interface InformationExtractor {
	TableInformation getTable(Identifier catalog, Identifier schema, Identifier tableName);

	/**
-	 * Return information about column for the given table. Typically called from the TableInformation itself
-	 * as part of on-demand initialization of its state.
+	 * Extract information about all the tables.
	 *
-	 * @param tableInformation table info for the matching table
-	 * @param columnIdentifier The column identifier for which to locate column
+	 * @param catalog Can be {@code null}, indicating that any catalog may be considered a match. A
+	 * non-{@code null} value indicates that the search should be limited to the passed catalog.
+	 * @param schema Can be {@code null}, indicating that any schema may be considered a match. A
+	 * non-{@code null} value indicates that the search should be limited to the passed schema.
	 *
-	 * @return The extracted column information
+	 * @return a {@link NameSpaceTablesInformation}
	 */
-	ColumnInformation getColumn(TableInformation tableInformation, Identifier columnIdentifier);
+	NameSpaceTablesInformation getTables(Identifier catalog, Identifier schema);

	/**
	 * Extract information about the given table's primary key.
@@ -0,0 +1,37 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
+ */
+package org.hibernate.tool.schema.extract.spi;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
+import org.hibernate.mapping.Table;
+
+/**
+ * @author Andrea Boriero
+ */
+public class NameSpaceTablesInformation {
+	private final IdentifierHelper identifierHelper;
+	private Map<String, TableInformation> tables = new HashMap<>();
+
+	public NameSpaceTablesInformation(IdentifierHelper identifierHelper) {
+		this.identifierHelper = identifierHelper;
+	}
+
+	public void addTableInformation(TableInformation tableInformation) {
+		tables.put( tableInformation.getName().getTableName().getText(), tableInformation );
+	}
+
+	public TableInformation getTableInformation(Table table) {
+		return tables.get( identifierHelper.toMetaDataObjectName( table.getQualifiedTableName().getTableName() ) );
+	}
+
+	public TableInformation getTableInformation(String tableName) {
+		return tables.get( tableName );
+	}
+}
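For context, a rough sketch of how the grouped path can be consumed: one `getTablesInformation()` call per namespace, then in-memory lookups per mapped table instead of one JDBC metadata round-trip per entity. This sketch is not part of the commit; the real migrator/validator classes do considerably more, and the `Metadata`/`Namespace` navigation shown here is the standard Hibernate 5 bootstrap API rather than anything introduced by this change:

import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;

public class GroupedValidationSketch {
	public static void validate(Metadata metadata, DatabaseInformation databaseInformation) {
		for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
			// single DatabaseMetaData#getTables() call for the whole namespace
			NameSpaceTablesInformation tablesInformation = databaseInformation.getTablesInformation( namespace );
			for ( Table table : namespace.getTables() ) {
				TableInformation tableInformation = tablesInformation.getTableInformation( table );
				if ( tableInformation == null ) {
					throw new IllegalStateException( "Missing table: " + table.getName() );
				}
			}
		}
	}
}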
@@ -85,4 +85,6 @@ public interface TableInformation {
	 * @return The matching index information. May return {@code null}
	 */
	public IndexInformation getIndex(Identifier indexName);
+
+	public void addColumn(ColumnInformation columnIdentifier);
}
@@ -6,8 +6,10 @@
  */
 package org.hibernate.tool.schema.internal;
 
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.Map;
 import java.util.Set;
 
 import org.hibernate.boot.Metadata;
@@ -34,6 +36,7 @@ import org.hibernate.tool.hbm2ddl.UniqueConstraintSchemaUpdateStrategy;
 import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
 import org.hibernate.tool.schema.extract.spi.ForeignKeyInformation;
 import org.hibernate.tool.schema.extract.spi.IndexInformation;
+import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
 import org.hibernate.tool.schema.extract.spi.SequenceInformation;
 import org.hibernate.tool.schema.extract.spi.TableInformation;
 import org.hibernate.tool.schema.internal.exec.GenerationTarget;
@@ -50,26 +53,28 @@ import org.jboss.logging.Logger;
 
 import static org.hibernate.cfg.AvailableSettings.UNIQUE_CONSTRAINT_SCHEMA_UPDATE_STRATEGY;
 
 /**
  * @author Steve Ebersole
  */
-public class SchemaMigratorImpl implements SchemaMigrator {
-	private static final Logger log = Logger.getLogger( SchemaMigratorImpl.class );
+public abstract class AbstractSchemaMigrator implements SchemaMigrator {
+	private static final Logger log = Logger.getLogger( IndividuallySchemaMigratorImpl.class );
 
-	private final HibernateSchemaManagementTool tool;
-	private final SchemaFilter schemaFilter;
+	protected HibernateSchemaManagementTool tool;
+	protected SchemaFilter schemaFilter;
 
-	private UniqueConstraintSchemaUpdateStrategy uniqueConstraintStrategy;
-
-	public SchemaMigratorImpl(HibernateSchemaManagementTool tool) {
-		this( tool, DefaultSchemaFilter.INSTANCE );
-	}
-
-	public SchemaMigratorImpl(HibernateSchemaManagementTool tool, SchemaFilter schemaFilter) {
+	public AbstractSchemaMigrator(
+			HibernateSchemaManagementTool tool,
+			SchemaFilter schemaFilter) {
 		this.tool = tool;
-		this.schemaFilter = schemaFilter;
+		if ( schemaFilter == null ) {
+			this.schemaFilter = DefaultSchemaFilter.INSTANCE;
+		}
+		else {
+			this.schemaFilter = schemaFilter;
+		}
 	}
 
+	private UniqueConstraintSchemaUpdateStrategy uniqueConstraintStrategy;
+
 	/**
 	 * For testing...
@@ -80,14 +85,9 @@ public class SchemaMigratorImpl implements SchemaMigrator {
 
 	@Override
 	public void doMigration(Metadata metadata, ExecutionOptions options, TargetDescriptor targetDescriptor) {
-		if ( targetDescriptor.getTargetTypes().isEmpty() ) {
-			return;
-		}
-
+		if ( !targetDescriptor.getTargetTypes().isEmpty() ) {
 			final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
 
 			final DdlTransactionIsolator ddlTransactionIsolator = tool.getDdlTransactionIsolator( jdbcContext );
 
 			try {
 				final DatabaseInformation databaseInformation = Helper.buildDatabaseInformation(
 						tool.getServiceRegistry(),
@@ -102,7 +102,23 @@ public class SchemaMigratorImpl implements SchemaMigrator {
 				);
 
 				try {
-					doMigration( metadata, databaseInformation, options, jdbcContext.getDialect(), targets );
+					for ( GenerationTarget target : targets ) {
+						target.prepare();
+					}
+
+					try {
+						performMigration( metadata, databaseInformation, options, jdbcContext.getDialect(), targets );
+					}
+					finally {
+						for ( GenerationTarget target : targets ) {
+							try {
+								target.release();
+							}
+							catch (Exception e) {
+								log.debugf( "Problem releasing GenerationTarget [%s] : %s", target, e.getMessage() );
+							}
+						}
+					}
 				}
 				finally {
 					try {
@@ -117,31 +133,19 @@ public class SchemaMigratorImpl implements SchemaMigrator {
 					ddlTransactionIsolator.release();
 				}
 			}
+		}
 	}
 
-	public void doMigration(
+	protected abstract NameSpaceTablesInformation performTablesMigration(
 			Metadata metadata,
 			DatabaseInformation existingDatabase,
 			ExecutionOptions options,
 			Dialect dialect,
-			GenerationTarget... targets) {
-		for ( GenerationTarget target : targets ) {
-			target.prepare();
-		}
-
-		try {
-			performMigration( metadata, existingDatabase, options, dialect, targets );
-		}
-		finally {
-			for ( GenerationTarget target : targets ) {
-				try {
-					target.release();
-				}
-				catch (Exception e) {
-					log.debugf( "Problem releasing GenerationTarget [%s] : %s", target, e.getMessage() );
-				}
-			}
-		}
-	}
+			Formatter formatter,
+			Set<String> exportIdentifiers,
+			boolean tryToCreateCatalogs,
+			boolean tryToCreateSchemas,
+			Set<Identifier> exportedCatalogs,
+			Namespace namespace, GenerationTarget[] targets);
 
 	private void performMigration(
 			Metadata metadata,
@@ -158,28 +162,21 @@ public class SchemaMigratorImpl implements SchemaMigrator {
 
 		// Drop all AuxiliaryDatabaseObjects
 		for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) {
-			if ( !auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
-				continue;
-			}
-
-			applySqlStrings(
-					true,
-					dialect.getAuxiliaryDatabaseObjectExporter().getSqlDropStrings( auxiliaryDatabaseObject, metadata ),
-					formatter,
-					options,
-					targets
-			);
+			if ( auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
+				applySqlStrings(
+						true,
+						dialect.getAuxiliaryDatabaseObjectExporter()
+								.getSqlDropStrings( auxiliaryDatabaseObject, metadata ),
+						formatter,
+						options,
+						targets
+				);
+			}
 		}
 
 		// Create beforeQuery-table AuxiliaryDatabaseObjects
 		for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) {
-			if ( auxiliaryDatabaseObject.beforeTablesOnCreation() ) {
-				continue;
-			}
-			if ( !auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
-				continue;
-			}
-
-			applySqlStrings(
-					true,
-					auxiliaryDatabaseObject.sqlCreateStrings( dialect ),
+			if ( !auxiliaryDatabaseObject.beforeTablesOnCreation() && auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
+				applySqlStrings(
+						true,
+						auxiliaryDatabaseObject.sqlCreateStrings( dialect ),
@@ -188,6 +185,7 @@ public class SchemaMigratorImpl implements SchemaMigrator {
 					targets
 			);
 		}
+		}
 
 		boolean tryToCreateCatalogs = false;
 		boolean tryToCreateSchemas = false;
@@ -199,12 +197,289 @@ public class SchemaMigratorImpl implements SchemaMigrator {
 					tryToCreateCatalogs = true;
 				}
 			}
-		Set<Identifier> exportedCatalogs = new HashSet<Identifier>();
+		final Map<Namespace, NameSpaceTablesInformation> tablesInformation = new HashMap<>();
+		Set<Identifier> exportedCatalogs = new HashSet<>();
 		for ( Namespace namespace : database.getNamespaces() ) {
-			if ( !schemaFilter.includeNamespace( namespace ) ) {
-				continue;
-			}
+			final NameSpaceTablesInformation nameSpaceTablesInformation = performTablesMigration(
+					metadata,
+					existingDatabase,
+					options,
+					dialect,
+					formatter,
+					exportIdentifiers,
+					tryToCreateCatalogs,
+					tryToCreateSchemas,
+					exportedCatalogs,
+					namespace,
+					targets
+			);
+			tablesInformation.put( namespace, nameSpaceTablesInformation );
+			if ( schemaFilter.includeNamespace( namespace ) ) {
+				for ( Sequence sequence : namespace.getSequences() ) {
+					checkExportIdentifier( sequence, exportIdentifiers );
+					final SequenceInformation sequenceInformation = existingDatabase.getSequenceInformation( sequence.getName() );
+					if ( sequenceInformation == null ) {
+						applySqlStrings(
+								false,
+								dialect.getSequenceExporter().getSqlCreateStrings(
+										sequence,
+										metadata
+								),
+								formatter,
+								options,
+								targets
+						);
+					}
+				}
+			}
 		}
+
+		//NOTE : Foreign keys must be created *afterQuery* all tables of all namespaces for cross namespace fks. see HHH-10420
+		for ( Namespace namespace : database.getNamespaces() ) {
+			if ( schemaFilter.includeNamespace( namespace ) ) {
+				final NameSpaceTablesInformation nameSpaceTablesInformation = tablesInformation.get( namespace );
+				for ( Table table : namespace.getTables() ) {
+					if ( schemaFilter.includeTable( table ) ) {
+						final TableInformation tableInformation = nameSpaceTablesInformation.getTableInformation( table );
+						if ( tableInformation == null || ( tableInformation != null && tableInformation.isPhysicalTable() ) ) {
+							applyForeignKeys( table, tableInformation, dialect, metadata, formatter, options, targets );
+						}
+					}
+				}
+			}
+		}
+
+		// Create afterQuery-table AuxiliaryDatabaseObjects
+		for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) {
+			if ( auxiliaryDatabaseObject.beforeTablesOnCreation() && auxiliaryDatabaseObject.appliesToDialect( dialect )) {
+				applySqlStrings(
+						true,
+						auxiliaryDatabaseObject.sqlCreateStrings( dialect ),
+						formatter,
+						options,
+						targets
+				);
+			}
+		}
+	}
+
+	protected void createTable(
+			Table table,
+			Dialect dialect,
+			Metadata metadata,
+			Formatter formatter,
+			ExecutionOptions options,
+			GenerationTarget... targets) {
+		applySqlStrings(
+				false,
+				dialect.getTableExporter().getSqlCreateStrings( table, metadata ),
+				formatter,
+				options,
+				targets
+		);
+	}
+
+	protected void migrateTable(
+			Table table,
+			TableInformation tableInformation,
+			Dialect dialect,
+			Metadata metadata,
+			Formatter formatter,
+			ExecutionOptions options,
+			GenerationTarget... targets) {
+		final Database database = metadata.getDatabase();
+
+		//noinspection unchecked
+		applySqlStrings(
+				false,
+				table.sqlAlterStrings(
+						dialect,
+						metadata,
+						tableInformation,
+						getDefaultCatalogName( database, dialect ),
+						getDefaultSchemaName( database, dialect )
+				),
+				formatter,
+				options,
+				targets
+		);
+	}
+
+	protected void applyIndexes(
+			Table table,
+			TableInformation tableInformation,
+			Dialect dialect,
+			Metadata metadata,
+			Formatter formatter,
+			ExecutionOptions options,
+			GenerationTarget... targets) {
+		final Exporter<Index> exporter = dialect.getIndexExporter();
+
+		final Iterator<Index> indexItr = table.getIndexIterator();
+		while ( indexItr.hasNext() ) {
+			final Index index = indexItr.next();
+			if ( !StringHelper.isEmpty( index.getName() ) ) {
+				IndexInformation existingIndex = null;
+				if ( tableInformation != null ) {
+					existingIndex = findMatchingIndex( index, tableInformation );
+				}
+				if ( existingIndex == null ) {
+					applySqlStrings(
+							false,
+							exporter.getSqlCreateStrings( index, metadata ),
+							formatter,
+							options,
+							targets
+					);
+				}
+			}
+		}
+	}
+
+	private IndexInformation findMatchingIndex(Index index, TableInformation tableInformation) {
+		return tableInformation.getIndex( Identifier.toIdentifier( index.getName() ) );
+	}
+
+	protected void applyUniqueKeys(
+			Table table,
+			TableInformation tableInfo,
+			Dialect dialect,
+			Metadata metadata,
+			Formatter formatter,
+			ExecutionOptions options,
+			GenerationTarget... targets) {
+		if ( uniqueConstraintStrategy == null ) {
+			uniqueConstraintStrategy = determineUniqueConstraintSchemaUpdateStrategy( metadata );
+		}
+
+		if ( uniqueConstraintStrategy != UniqueConstraintSchemaUpdateStrategy.SKIP ) {
+			final Exporter<Constraint> exporter = dialect.getUniqueKeyExporter();
+
+			final Iterator ukItr = table.getUniqueKeyIterator();
+			while ( ukItr.hasNext() ) {
+				final UniqueKey uniqueKey = (UniqueKey) ukItr.next();
+				// Skip if index already exists. Most of the time, this
+				// won't work since most Dialects use Constraints. However,
+				// keep it for the few that do use Indexes.
+				IndexInformation indexInfo = null;
+				if ( tableInfo != null && StringHelper.isNotEmpty( uniqueKey.getName() ) ) {
+					indexInfo = tableInfo.getIndex( Identifier.toIdentifier( uniqueKey.getName() ) );
+				}
+				if ( indexInfo == null ) {
+					if ( uniqueConstraintStrategy == UniqueConstraintSchemaUpdateStrategy.DROP_RECREATE_QUIETLY ) {
+						applySqlStrings(
+								true,
+								exporter.getSqlDropStrings( uniqueKey, metadata ),
+								formatter,
+								options,
+								targets
+						);
+					}
+
+					applySqlStrings(
+							true,
+							exporter.getSqlCreateStrings( uniqueKey, metadata ),
+							formatter,
+							options,
+							targets
+					);
+				}
+			}
+		}
+	}
+
+	private UniqueConstraintSchemaUpdateStrategy determineUniqueConstraintSchemaUpdateStrategy(Metadata metadata) {
+		final ConfigurationService cfgService = ((MetadataImplementor) metadata).getMetadataBuildingOptions()
+				.getServiceRegistry()
+				.getService( ConfigurationService.class );
+
+		return UniqueConstraintSchemaUpdateStrategy.interpret(
+				cfgService.getSetting( UNIQUE_CONSTRAINT_SCHEMA_UPDATE_STRATEGY, StandardConverters.STRING )
+		);
+	}
+
+	protected void applyForeignKeys(
+			Table table,
+			TableInformation tableInformation,
+			Dialect dialect,
+			Metadata metadata,
+			Formatter formatter,
+			ExecutionOptions options,
+			GenerationTarget... targets) {
+		if ( dialect.hasAlterTable() ) {
+			final Exporter<ForeignKey> exporter = dialect.getForeignKeyExporter();
+
+			@SuppressWarnings("unchecked")
+			final Iterator<ForeignKey> fkItr = table.getForeignKeyIterator();
+			while ( fkItr.hasNext() ) {
+				final ForeignKey foreignKey = fkItr.next();
+				if ( foreignKey.isPhysicalConstraint() && foreignKey.isCreationEnabled() ) {
+					ForeignKeyInformation existingForeignKey = null;
+					if ( tableInformation != null ) {
+						existingForeignKey = findMatchingForeignKey(
+								foreignKey,
+								tableInformation
+						);
+					}
+					if ( existingForeignKey == null ) {
+						// todo : shouldn't we just drop+recreate if FK exists?
+						//		this follows the existing code from legacy SchemaUpdate which just skipped
+
+						// in old SchemaUpdate code, this was the trigger to "create"
+						applySqlStrings(
+								false,
+								exporter.getSqlCreateStrings( foreignKey, metadata ),
+								formatter,
+								options,
+								targets
+						);
+					}
+				}
+			}
+		}
+	}
+
+	private ForeignKeyInformation findMatchingForeignKey(ForeignKey foreignKey, TableInformation tableInformation) {
+		if ( foreignKey.getName() == null ) {
+			return null;
+		}
+		return tableInformation.getForeignKey( Identifier.toIdentifier( foreignKey.getName() ) );
+	}
+
+	protected void checkExportIdentifier(Exportable exportable, Set<String> exportIdentifiers) {
+		final String exportIdentifier = exportable.getExportIdentifier();
+		if ( exportIdentifiers.contains( exportIdentifier ) ) {
+			throw new SchemaManagementException(
+					String.format(
+							"Export identifier [%s] encountered more than once",
+							exportIdentifier
+					)
+			);
+		}
+		exportIdentifiers.add( exportIdentifier );
+	}
+
+	protected static void applySqlStrings(
+			boolean quiet,
+			String[] sqlStrings,
+			Formatter formatter,
+			ExecutionOptions options,
+			GenerationTarget... targets) {
+		if ( sqlStrings != null ) {
+			for ( String sqlString : sqlStrings ) {
+				applySqlString( quiet, sqlString, formatter, options, targets );
+			}
+		}
+	}
+
+	protected void createSchemaAndCatalog(
+			DatabaseInformation existingDatabase,
+			ExecutionOptions options,
+			Dialect dialect,
+			Formatter formatter,
+			boolean tryToCreateCatalogs,
+			boolean tryToCreateSchemas,
+			Set<Identifier> exportedCatalogs, Namespace namespace, GenerationTarget[] targets) {
 		if ( tryToCreateCatalogs || tryToCreateSchemas ) {
 			if ( tryToCreateCatalogs ) {
 				final Identifier catalogLogicalName = namespace.getName().getCatalog();
@@ -235,322 +510,6 @@ public class SchemaMigratorImpl implements SchemaMigrator {
 			);
 		}
 	}
-
-			for ( Table table : namespace.getTables() ) {
-				if ( !table.isPhysicalTable() ) {
-					continue;
-				}
-				if ( !schemaFilter.includeTable( table ) ) {
-					continue;
-				}
-				checkExportIdentifier( table, exportIdentifiers );
-				final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() );
-				if ( tableInformation != null && !tableInformation.isPhysicalTable() ) {
-					continue;
-				}
-				if ( tableInformation == null ) {
-					createTable( table, dialect, metadata, formatter, options, targets );
-				}
-				else {
-					migrateTable( table, tableInformation, dialect, metadata, formatter, options, targets );
-				}
-			}
-
-			for ( Table table : namespace.getTables() ) {
-				if ( !table.isPhysicalTable() ) {
-					continue;
-				}
-				if ( !schemaFilter.includeTable( table ) ) {
-					continue;
-				}
-
-				final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() );
-				if ( tableInformation != null && !tableInformation.isPhysicalTable() ) {
-					continue;
-				}
-
-				applyIndexes( table, tableInformation, dialect, metadata, formatter, options, targets );
-				applyUniqueKeys( table, tableInformation, dialect, metadata, formatter, options, targets );
-			}
-
-			for ( Sequence sequence : namespace.getSequences() ) {
-				checkExportIdentifier( sequence, exportIdentifiers );
-				final SequenceInformation sequenceInformation = existingDatabase.getSequenceInformation( sequence.getName() );
-				if ( sequenceInformation != null ) {
-					// nothing we really can do...
-					continue;
-				}
-
-				applySqlStrings(
-						false,
-						dialect.getSequenceExporter().getSqlCreateStrings(
-								sequence,
-								metadata
-						),
-						formatter,
-						options,
-						targets
-				);
-			}
-		}
-
-		//NOTE : Foreign keys must be created *afterQuery* all tables of all namespaces for cross namespace fks. see HHH-10420
-		for ( Namespace namespace : database.getNamespaces() ) {
-			if ( !schemaFilter.includeNamespace( namespace ) ) {
-				continue;
-			}
-			for ( Table table : namespace.getTables() ) {
-				if ( !schemaFilter.includeTable( table ) ) {
-					continue;
-				}
-				final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() );
-				if ( tableInformation != null && !tableInformation.isPhysicalTable() ) {
-					continue;
-				}
-				applyForeignKeys( table, tableInformation, dialect, metadata, formatter, options, targets );
-			}
-		}
-
-		// Create afterQuery-table AuxiliaryDatabaseObjects
-		for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) {
-			if ( !auxiliaryDatabaseObject.beforeTablesOnCreation() ) {
-				continue;
-			}
-			if ( !auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
-				continue;
-			}
-
-			applySqlStrings(
-					true,
-					auxiliaryDatabaseObject.sqlCreateStrings( dialect ),
-					formatter,
-					options,
-					targets
-			);
-		}
-	}
-
-	private void createTable(
-			Table table,
-			Dialect dialect,
-			Metadata metadata,
-			Formatter formatter,
-			ExecutionOptions options,
-			GenerationTarget... targets) {
-		applySqlStrings(
-				false,
-				dialect.getTableExporter().getSqlCreateStrings( table, metadata ),
-				formatter,
-				options,
-				targets
-		);
-	}
-
-	private void migrateTable(
-			Table table,
-			TableInformation tableInformation,
-			Dialect dialect,
-			Metadata metadata,
-			Formatter formatter,
-			ExecutionOptions options,
-			GenerationTarget... targets) {
-		final Database database = metadata.getDatabase();
-
-		//noinspection unchecked
-		applySqlStrings(
-				false,
-				table.sqlAlterStrings(
-						dialect,
-						metadata,
-						tableInformation,
-						getDefaultCatalogName( database, dialect ),
-						getDefaultSchemaName( database, dialect )
-				),
-				formatter,
-				options,
-				targets
-		);
-	}
-
-	private void applyIndexes(
-			Table table,
-			TableInformation tableInformation,
-			Dialect dialect,
-			Metadata metadata,
-			Formatter formatter,
-			ExecutionOptions options,
-			GenerationTarget... targets) {
-		final Exporter<Index> exporter = dialect.getIndexExporter();
-
-		final Iterator<Index> indexItr = table.getIndexIterator();
-		while ( indexItr.hasNext() ) {
-			final Index index = indexItr.next();
-			if ( StringHelper.isEmpty( index.getName() ) ) {
-				continue;
-			}
-
-			if ( tableInformation != null ) {
-				final IndexInformation existingIndex = findMatchingIndex( index, tableInformation );
-				if ( existingIndex != null ) {
-					continue;
-				}
-			}
-
-			applySqlStrings(
-					false,
-					exporter.getSqlCreateStrings( index, metadata ),
-					formatter,
-					options,
-					targets
-			);
-		}
-	}
-
-	private IndexInformation findMatchingIndex(Index index, TableInformation tableInformation) {
-		return tableInformation.getIndex( Identifier.toIdentifier( index.getName() ) );
-	}
-
-	private void applyUniqueKeys(
-			Table table,
-			TableInformation tableInfo,
-			Dialect dialect,
-			Metadata metadata,
-			Formatter formatter,
-			ExecutionOptions options,
-			GenerationTarget... targets) {
-		if ( uniqueConstraintStrategy == null ) {
-			uniqueConstraintStrategy = determineUniqueConstraintSchemaUpdateStrategy( metadata );
-		}
-
-		if ( uniqueConstraintStrategy == UniqueConstraintSchemaUpdateStrategy.SKIP ) {
-			return;
-		}
-
-		final Exporter<Constraint> exporter = dialect.getUniqueKeyExporter();
-
-		final Iterator ukItr = table.getUniqueKeyIterator();
-		while ( ukItr.hasNext() ) {
-			final UniqueKey uniqueKey = (UniqueKey) ukItr.next();
-			// Skip if index already exists. Most of the time, this
-			// won't work since most Dialects use Constraints. However,
-			// keep it for the few that do use Indexes.
-			if ( tableInfo != null && StringHelper.isNotEmpty( uniqueKey.getName() ) ) {
-				final IndexInformation indexInfo = tableInfo.getIndex( Identifier.toIdentifier( uniqueKey.getName() ) );
-				if ( indexInfo != null ) {
-					continue;
-				}
-			}
-
-			if ( uniqueConstraintStrategy == UniqueConstraintSchemaUpdateStrategy.DROP_RECREATE_QUIETLY ) {
-				applySqlStrings(
-						true,
-						exporter.getSqlDropStrings( uniqueKey, metadata ),
-						formatter,
-						options,
-						targets
-				);
-			}
-
-			applySqlStrings(
-					true,
-					exporter.getSqlCreateStrings( uniqueKey, metadata ),
-					formatter,
-					options,
-					targets
-			);
-		}
-	}
-
-	private UniqueConstraintSchemaUpdateStrategy determineUniqueConstraintSchemaUpdateStrategy(Metadata metadata) {
-		final ConfigurationService cfgService = ((MetadataImplementor) metadata).getMetadataBuildingOptions()
-				.getServiceRegistry()
-				.getService( ConfigurationService.class );
-
-		return UniqueConstraintSchemaUpdateStrategy.interpret(
-				cfgService.getSetting( UNIQUE_CONSTRAINT_SCHEMA_UPDATE_STRATEGY, StandardConverters.STRING )
-		);
-	}
-
-	private void applyForeignKeys(
-			Table table,
-			TableInformation tableInformation,
-			Dialect dialect,
-			Metadata metadata,
-			Formatter formatter,
-			ExecutionOptions options,
-			GenerationTarget... targets) {
-		if ( !dialect.hasAlterTable() ) {
-			return;
-		}
-
-		final Exporter<ForeignKey> exporter = dialect.getForeignKeyExporter();
-
-		@SuppressWarnings("unchecked")
-		final Iterator<ForeignKey> fkItr = table.getForeignKeyIterator();
-		while ( fkItr.hasNext() ) {
-			final ForeignKey foreignKey = fkItr.next();
-			if ( !foreignKey.isPhysicalConstraint() ) {
-				continue;
-			}
-
-			if ( !foreignKey.isCreationEnabled() ) {
-				continue;
-			}
-
-			if ( tableInformation != null ) {
-				final ForeignKeyInformation existingForeignKey = findMatchingForeignKey( foreignKey, tableInformation );
-				if ( existingForeignKey != null ) {
-					continue;
-				}
-			}
-
-			// todo : shouldn't we just drop+recreate if FK exists?
-			//		this follows the existing code from legacy SchemaUpdate which just skipped
-
-			// in old SchemaUpdate code, this was the trigger to "create"
-			applySqlStrings(
-					false,
-					exporter.getSqlCreateStrings( foreignKey, metadata ),
-					formatter,
-					options,
-					targets
-			);
-		}
-	}
-
-	private ForeignKeyInformation findMatchingForeignKey(ForeignKey foreignKey, TableInformation tableInformation) {
-		if ( foreignKey.getName() == null ) {
-			return null;
-		}
-		return tableInformation.getForeignKey( Identifier.toIdentifier( foreignKey.getName() ) );
-	}
-
-	private void checkExportIdentifier(Exportable exportable, Set<String> exportIdentifiers) {
-		final String exportIdentifier = exportable.getExportIdentifier();
-		if ( exportIdentifiers.contains( exportIdentifier ) ) {
-			throw new SchemaManagementException(
-					String.format(
-							"Export identifier [%s] encountered more than once",
-							exportIdentifier
-					)
-			);
-		}
-		exportIdentifiers.add( exportIdentifier );
-	}
-
-	private static void applySqlStrings(
-			boolean quiet,
-			String[] sqlStrings,
-			Formatter formatter,
-			ExecutionOptions options,
-			GenerationTarget... targets) {
-		if ( sqlStrings == null ) {
-			return;
-		}
-
-		for ( String sqlString : sqlStrings ) {
-			applySqlString( quiet, sqlString, formatter, options, targets );
-		}
 	}
 
 	private static void applySqlString(
@@ -559,10 +518,7 @@ public class SchemaMigratorImpl implements SchemaMigrator {
 			Formatter formatter,
 			ExecutionOptions options,
 			GenerationTarget... targets) {
-		if ( StringHelper.isEmpty( sqlString ) ) {
-			return;
-		}
-
+		if ( !StringHelper.isEmpty( sqlString ) ) {
 			for ( GenerationTarget target : targets ) {
 				try {
 					target.accept( formatter.format( sqlString ) );
@@ -575,6 +531,7 @@ public class SchemaMigratorImpl implements SchemaMigrator {
 				}
 			}
 		}
+	}
 
 	private static void applySqlStrings(
 			boolean quiet,
@@ -582,15 +539,13 @@ public class SchemaMigratorImpl implements SchemaMigrator {
 			Formatter formatter,
 			ExecutionOptions options,
 			GenerationTarget... targets) {
-		if ( sqlStrings == null ) {
-			return;
-		}
-
+		if ( sqlStrings != null ) {
 			while ( sqlStrings.hasNext() ) {
 				final String sqlString = sqlStrings.next();
 				applySqlString( quiet, sqlString, formatter, options, targets );
 			}
 		}
+	}
 
 	private String getDefaultCatalogName(Database database, Dialect dialect) {
 		final Identifier identifier = database.getDefaultNamespace().getPhysicalName().getCatalog();
@@ -34,19 +34,22 @@ import org.jboss.logging.Logger;
 /**
  * @author Steve Ebersole
  */
-public class SchemaValidatorImpl implements SchemaValidator {
-	private static final Logger log = Logger.getLogger( SchemaValidatorImpl.class );
+public abstract class AbstractSchemaValidator implements SchemaValidator {
+	private static final Logger log = Logger.getLogger( AbstractSchemaValidator.class );
 
-	private final HibernateSchemaManagementTool tool;
-	private final SchemaFilter schemaFilter;
+	protected HibernateSchemaManagementTool tool;
+	protected SchemaFilter schemaFilter;
 
-	public SchemaValidatorImpl(HibernateSchemaManagementTool tool) {
-		this( tool, DefaultSchemaFilter.INSTANCE );
-	}
-
-	public SchemaValidatorImpl(HibernateSchemaManagementTool tool, SchemaFilter schemaFilter) {
+	public AbstractSchemaValidator(
+			HibernateSchemaManagementTool tool,
+			SchemaFilter validateFilter) {
 		this.tool = tool;
-		this.schemaFilter = schemaFilter;
+		if ( validateFilter == null ) {
+			this.schemaFilter = DefaultSchemaFilter.INSTANCE;
+		}
+		else {
+			this.schemaFilter = validateFilter;
+		}
 	}
 
 	@Override
@@ -82,35 +85,15 @@ public class SchemaValidatorImpl implements SchemaValidator {
 			ExecutionOptions options,
 			Dialect dialect) {
 		for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
-			if ( !schemaFilter.includeNamespace( namespace )) {
-				continue;
-			}
-
-			for ( Table table : namespace.getTables() ) {
-				if ( !schemaFilter.includeTable( table )) {
-					continue;
-				}
-				if ( !table.isPhysicalTable() ) {
-					continue;
-				}
-
-				final TableInformation tableInformation = databaseInformation.getTableInformation(
-						table.getQualifiedTableName()
-				);
-				validateTable( table, tableInformation, metadata, options, dialect );
+			if ( schemaFilter.includeNamespace( namespace ) ) {
+				validateTables( metadata, databaseInformation, options, dialect, namespace );
 			}
 		}
 
 		for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
-			if ( !schemaFilter.includeNamespace( namespace )) {
-				continue;
-			}
-
+			if ( schemaFilter.includeNamespace( namespace ) ) {
 				for ( Sequence sequence : namespace.getSequences() ) {
-					if ( !schemaFilter.includeSequence( sequence )) {
-						continue;
-					}
-
+					if ( schemaFilter.includeSequence( sequence ) ) {
 						final SequenceInformation sequenceInformation = databaseInformation.getSequenceInformation(
 								sequence.getName()
 						);
@@ -118,6 +101,14 @@ public class SchemaValidatorImpl implements SchemaValidator {
 				}
 			}
 		}
+		}
+	}
+
+	protected abstract void validateTables(
+			Metadata metadata,
+			DatabaseInformation databaseInformation,
+			ExecutionOptions options,
+			Dialect dialect, Namespace namespace);
 
 	protected void validateTable(
 			Table table,
@@ -137,10 +128,7 @@ public class SchemaValidatorImpl implements SchemaValidator {
 		final Iterator selectableItr = table.getColumnIterator();
 		while ( selectableItr.hasNext() ) {
 			final Selectable selectable = (Selectable) selectableItr.next();
-			if ( !Column.class.isInstance( selectable ) ) {
-				continue;
-			}
-
+			if ( Column.class.isInstance( selectable ) ) {
 				final Column column = (Column) selectable;
 				final ColumnInformation existingColumn = tableInformation.getColumn( Identifier.toIdentifier( column.getQuotedName() ) );
 				if ( existingColumn == null ) {
@@ -155,6 +143,7 @@ public class SchemaValidatorImpl implements SchemaValidator {
 				validateColumnType( table, column, existingColumn, metadata, options, dialect );
 			}
 		}
+	}
 
 	protected void validateColumnType(
 			Table table,
@@ -0,0 +1,92 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
+ */
+package org.hibernate.tool.schema.internal;
+
+import java.util.Set;
+
+import org.hibernate.boot.Metadata;
+import org.hibernate.boot.model.naming.Identifier;
+import org.hibernate.boot.model.relational.Namespace;
+import org.hibernate.dialect.Dialect;
+import org.hibernate.engine.jdbc.internal.Formatter;
+import org.hibernate.mapping.Table;
+import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
+import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
+import org.hibernate.tool.schema.extract.spi.TableInformation;
+import org.hibernate.tool.schema.internal.exec.GenerationTarget;
+import org.hibernate.tool.schema.spi.ExecutionOptions;
+import org.hibernate.tool.schema.spi.SchemaFilter;
+
+/**
+ * @author Andrea Boriero
+ *
+ * This implementation executes a single {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call
+ * to retrieve all the database tables in order to determine whether each {@link javax.persistence.Entity} has a corresponding mapped database table.
+ */
+public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
+
+	public GroupedSchemaMigratorImpl(
+			HibernateSchemaManagementTool tool,
+			SchemaFilter schemaFilter) {
+		super( tool, schemaFilter );
+	}
+
+	@Override
+	protected NameSpaceTablesInformation performTablesMigration(
+			Metadata metadata,
+			DatabaseInformation existingDatabase,
+			ExecutionOptions options,
+			Dialect dialect,
+			Formatter formatter,
+			Set<String> exportIdentifiers,
+			boolean tryToCreateCatalogs,
+			boolean tryToCreateSchemas,
+			Set<Identifier> exportedCatalogs,
+			Namespace namespace, GenerationTarget[] targets) {
+		final NameSpaceTablesInformation tablesInformation =
+				new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() );
+
+		if ( schemaFilter.includeNamespace( namespace ) ) {
+			createSchemaAndCatalog(
+					existingDatabase,
+					options,
+					dialect,
+					formatter,
+					tryToCreateCatalogs,
+					tryToCreateSchemas,
+					exportedCatalogs,
+					namespace,
+					targets
+			);
+			final NameSpaceTablesInformation tables = existingDatabase.getTablesInformation( namespace );
+			for ( Table table : namespace.getTables() ) {
+				if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
+					checkExportIdentifier( table, exportIdentifiers );
+					final TableInformation tableInformation = tables.getTableInformation( table );
+					if ( tableInformation == null ) {
+						createTable( table, dialect, metadata, formatter, options, targets );
+					}
+					else if ( tableInformation != null && tableInformation.isPhysicalTable() ) {
+						tablesInformation.addTableInformation( tableInformation );
+						migrateTable( table, tableInformation, dialect, metadata, formatter, options, targets );
+					}
+				}
+			}
+
+			for ( Table table : namespace.getTables() ) {
+				if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
+					final TableInformation tableInformation = tablesInformation.getTableInformation( table );
+					if ( tableInformation == null || ( tableInformation != null && tableInformation.isPhysicalTable() ) ) {
+						applyIndexes( table, tableInformation, dialect, metadata, formatter, options, targets );
+						applyUniqueKeys( table, tableInformation, dialect, metadata, formatter, options, targets );
+					}
+				}
+			}
+		}
+		return tablesInformation;
+	}
+}
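For context on why the grouped variant helps startup time, here is a rough, illustrative sketch of the two JDBC metadata access patterns the javadoc above describes. The class and method names are made up for illustration; only the `DatabaseMetaData#getTables` calls mirror what the two strategies do at the driver level:

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;

class MetadataAccessSketch {
	// Grouped strategy: one round-trip per namespace, all tables at once, matched in memory afterwards.
	static void grouped(Connection connection, String catalog, String schema) throws SQLException {
		final DatabaseMetaData meta = connection.getMetaData();
		try ( ResultSet rs = meta.getTables( catalog, schema, null, new String[] { "TABLE" } ) ) {
			while ( rs.next() ) {
				final String tableName = rs.getString( "TABLE_NAME" );
				// cache tableName -> table metadata for later lookups
			}
		}
	}

	// Individual strategy: one round-trip per mapped entity table; with many entities this means many calls.
	static boolean individually(Connection connection, String catalog, String schema, String mappedTable) throws SQLException {
		final DatabaseMetaData meta = connection.getMetaData();
		try ( ResultSet rs = meta.getTables( catalog, schema, mappedTable, new String[] { "TABLE" } ) ) {
			return rs.next();
		}
	}
}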
@@ -0,0 +1,52 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
+ */
+package org.hibernate.tool.schema.internal;
+
+import org.hibernate.boot.Metadata;
+import org.hibernate.boot.model.relational.Namespace;
+import org.hibernate.dialect.Dialect;
+import org.hibernate.mapping.Table;
+import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
+import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
+import org.hibernate.tool.schema.spi.ExecutionOptions;
+import org.hibernate.tool.schema.spi.SchemaFilter;
+
+/**
+ * @author Andrea Boriero
+ *
+ * This implementation executes a single {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call
+ * to retrieve all the database tables in order to determine whether each {@link javax.persistence.Entity} has a corresponding mapped database table.
+ */
+public class GroupedSchemaValidatorImpl extends AbstractSchemaValidator {
+
+	public GroupedSchemaValidatorImpl(
+			HibernateSchemaManagementTool tool,
+			SchemaFilter validateFilter) {
+		super( tool, validateFilter );
+	}
+
+	@Override
+	protected void validateTables(
+			Metadata metadata,
+			DatabaseInformation databaseInformation,
+			ExecutionOptions options,
+			Dialect dialect, Namespace namespace) {
+
+		final NameSpaceTablesInformation tables = databaseInformation.getTablesInformation( namespace );
+		for ( Table table : namespace.getTables() ) {
+			if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
+				validateTable(
+						table,
+						tables.getTableInformation( table ),
+						metadata,
+						options,
+						dialect
+				);
+			}
+		}
+	}
+}
@@ -21,7 +21,7 @@ import org.hibernate.internal.util.config.ConfigurationHelper;
 import org.hibernate.resource.transaction.spi.DdlTransactionIsolator;
 import org.hibernate.service.ServiceRegistry;
 import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
-import org.hibernate.tool.schema.internal.exec.ImprovedDatabaseInformationImpl;
+import org.hibernate.tool.schema.extract.internal.DatabaseInformationImpl;
 import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromFile;
 import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromReader;
 import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromUrl;
@@ -129,7 +129,7 @@ public class Helper {
 			Namespace.Name defaultNamespace) {
 		final JdbcEnvironment jdbcEnvironment = serviceRegistry.getService( JdbcEnvironment.class );
 		try {
-			return new ImprovedDatabaseInformationImpl(
+			return new DatabaseInformationImpl(
 					serviceRegistry,
 					jdbcEnvironment,
 					ddlTransactionIsolator,
@@ -25,6 +25,7 @@ import org.hibernate.resource.transaction.spi.TransactionCoordinatorBuilder;
 import org.hibernate.service.ServiceRegistry;
 import org.hibernate.service.spi.ServiceRegistryAwareService;
 import org.hibernate.service.spi.ServiceRegistryImplementor;
+import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
 import org.hibernate.tool.schema.TargetType;
 import org.hibernate.tool.schema.internal.exec.GenerationTarget;
 import org.hibernate.tool.schema.internal.exec.GenerationTargetToDatabase;
@@ -73,12 +74,22 @@ public class HibernateSchemaManagementTool implements SchemaManagementTool, Serv
 
 	@Override
 	public SchemaMigrator getSchemaMigrator(Map options) {
-		return new SchemaMigratorImpl( this, getSchemaFilterProvider( options ).getMigrateFilter() );
+		if ( determineJdbcMetadaAccessStrategy( options ) == JdbcMetadaAccessStrategy.GROUPED ) {
+			return new GroupedSchemaMigratorImpl( this, getSchemaFilterProvider( options ).getMigrateFilter() );
+		}
+		else {
+			return new IndividuallySchemaMigratorImpl( this, getSchemaFilterProvider( options ).getMigrateFilter() );
+		}
 	}
 
 	@Override
 	public SchemaValidator getSchemaValidator(Map options) {
-		return new SchemaValidatorImpl( this, getSchemaFilterProvider( options ).getValidateFilter() );
+		if ( determineJdbcMetadaAccessStrategy( options ) == JdbcMetadaAccessStrategy.GROUPED ) {
+			return new GroupedSchemaValidatorImpl( this, getSchemaFilterProvider( options ).getValidateFilter() );
+		}
+		else {
+			return new IndividuallySchemaValidatorImpl( this, getSchemaFilterProvider( options ).getValidateFilter() );
+		}
 	}
 
 	private SchemaFilterProvider getSchemaFilterProvider(Map options) {
@@ -92,6 +103,13 @@ public class HibernateSchemaManagementTool implements SchemaManagementTool, Serv
 		);
 	}
 
+	private JdbcMetadaAccessStrategy determineJdbcMetadaAccessStrategy(Map options) {
+		if ( options == null ) {
+			return JdbcMetadaAccessStrategy.interpretHbm2ddlSetting( null );
+		}
+		return JdbcMetadaAccessStrategy.interpretHbm2ddlSetting( options.get( AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY ) );
+	}
+
 	GenerationTarget[] buildGenerationTargets(
 			TargetDescriptor targetDescriptor,
 			JdbcContext jdbcContext,
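A hedged sketch of how the strategy selection above is expected to be driven from configuration. It assumes the `HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY` constant lives on `org.hibernate.cfg.AvailableSettings` (as the usage in this class suggests) and that the string form "grouped" resolves to `JdbcMetadaAccessStrategy.GROUPED`; a value that resolves to `GROUPED` selects the grouped implementations, otherwise the individual ones are used. The class name is made up for illustration:

import java.util.HashMap;
import java.util.Map;

import org.hibernate.cfg.AvailableSettings;

class MetadataStrategySettingSketch {
	// Illustrative only: the kind of configuration map handed to the SchemaManagementTool.
	static Map<String, Object> groupedStrategySettings() {
		final Map<String, Object> settings = new HashMap<>();
		// "grouped" is assumed to select GroupedSchemaMigratorImpl / GroupedSchemaValidatorImpl above;
		// other values resolve to the Individually* implementations.
		settings.put( AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY, "grouped" );
		return settings;
	}
}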
@@ -0,0 +1,92 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
+ */
+package org.hibernate.tool.schema.internal;
+
+import java.util.Set;
+
+import org.hibernate.boot.Metadata;
+import org.hibernate.boot.model.naming.Identifier;
+import org.hibernate.boot.model.relational.Namespace;
+import org.hibernate.dialect.Dialect;
+import org.hibernate.engine.jdbc.internal.Formatter;
+import org.hibernate.mapping.Table;
+import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
+import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
+import org.hibernate.tool.schema.extract.spi.TableInformation;
+import org.hibernate.tool.schema.internal.exec.GenerationTarget;
+import org.hibernate.tool.schema.spi.ExecutionOptions;
+import org.hibernate.tool.schema.spi.SchemaFilter;
+
+/**
+ * @author Andrea Boriero
+ *
+ * This implementation executes one {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call
+ * for each {@link javax.persistence.Entity} in order to determine if a corresponding database table exists.
+ */
+public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
+
+	public IndividuallySchemaMigratorImpl(
+			HibernateSchemaManagementTool tool,
+			SchemaFilter schemaFilter) {
+		super( tool, schemaFilter );
+	}
+
+	@Override
+	protected NameSpaceTablesInformation performTablesMigration(
+			Metadata metadata,
+			DatabaseInformation existingDatabase,
+			ExecutionOptions options,
+			Dialect dialect,
+			Formatter formatter,
+			Set<String> exportIdentifiers,
+			boolean tryToCreateCatalogs,
+			boolean tryToCreateSchemas,
+			Set<Identifier> exportedCatalogs,
+			Namespace namespace,
+			GenerationTarget[] targets) {
+		final NameSpaceTablesInformation tablesInformation =
+				new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() );
+
+		if ( schemaFilter.includeNamespace( namespace ) ) {
+			createSchemaAndCatalog(
+					existingDatabase,
+					options,
+					dialect,
+					formatter,
+					tryToCreateCatalogs,
+					tryToCreateSchemas,
+					exportedCatalogs,
+					namespace,
+					targets
+			);
+			for ( Table table : namespace.getTables() ) {
+				if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
+					checkExportIdentifier( table, exportIdentifiers );
+					final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() );
+					if ( tableInformation == null ) {
+						createTable( table, dialect, metadata, formatter, options, targets );
+					}
+					else if ( tableInformation != null && tableInformation.isPhysicalTable() ) {
+						tablesInformation.addTableInformation( tableInformation );
+						migrateTable( table, tableInformation, dialect, metadata, formatter, options, targets );
+					}
+				}
+			}
+
+			for ( Table table : namespace.getTables() ) {
+				if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
+					final TableInformation tableInformation = tablesInformation.getTableInformation( table );
+					if ( tableInformation == null || ( tableInformation != null && tableInformation.isPhysicalTable() ) ) {
+						applyIndexes( table, tableInformation, dialect, metadata, formatter, options, targets );
+						applyUniqueKeys( table, tableInformation, dialect, metadata, formatter, options, targets );
+					}
+				}
+			}
+		}
+		return tablesInformation;
+	}
+}
@@ -0,0 +1,48 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.tool.schema.internal;

import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;

/**
 * @author Andrea Boriero
 *
 * This implementation executes one {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call
 * for each {@link javax.persistence.Entity} in order to determine if a corresponding database table exists.
 */
public class IndividuallySchemaValidatorImpl extends AbstractSchemaValidator {

	public IndividuallySchemaValidatorImpl(
			HibernateSchemaManagementTool tool,
			SchemaFilter validateFilter) {
		super( tool, validateFilter );
	}

	@Override
	protected void validateTables(
			Metadata metadata,
			DatabaseInformation databaseInformation,
			ExecutionOptions options,
			Dialect dialect,
			Namespace namespace) {
		for ( Table table : namespace.getTables() ) {
			if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
				final TableInformation tableInformation = databaseInformation.getTableInformation(
						table.getQualifiedTableName()
				);
				validateTable( table, tableInformation, metadata, options, dialect );
			}
		}
	}
}
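The same setting selects the validator counterpart. A short sketch of validating a mapping with the `individually` strategy, assuming a hypothetical annotated TestEntity and a schema that already contains its table (for instance created beforehand by SchemaExport); nothing below is taken from the commit itself:

import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.boot.Metadata;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.tool.hbm2ddl.SchemaValidator;
import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;

public class IndividualValidationSketch {

	// Illustrative entity; its table is assumed to exist already.
	@Entity
	public static class TestEntity {
		@Id
		private long id;
	}

	public static void main(String[] args) {
		StandardServiceRegistry ssr = new StandardServiceRegistryBuilder()
				// "individually" issues one getTables() call per mapped entity, as described above.
				.applySetting(
						AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY,
						JdbcMetadaAccessStrategy.INDIVIDUALLY.toString()
				)
				.build();
		try {
			Metadata metadata = new MetadataSources( ssr )
					.addAnnotatedClass( TestEntity.class )
					.buildMetadata();
			// Throws SchemaManagementException if a mapped table or column is missing.
			new SchemaValidator().validate( metadata );
		}
		finally {
			StandardServiceRegistryBuilder.destroy( ssr );
		}
	}
}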
@@ -41,7 +41,7 @@ public class StandardIndexExporter implements Exporter<Index> {
				new QualifiedNameImpl(
						index.getTable().getQualifiedTableName().getCatalogName(),
						index.getTable().getQualifiedTableName().getSchemaName(),
-						jdbcEnvironment.getIdentifierHelper().toIdentifier( index.getName() )
+						jdbcEnvironment.getIdentifierHelper().toIdentifier( index.getQuotedName( dialect ) )
				),
				jdbcEnvironment.getDialect()
		);
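The exporter now resolves the index name through getQuotedName( dialect ) rather than the raw getName(), so an index name that was quoted in the mapping should keep its quoting when the qualified name is rebuilt for the CREATE INDEX statement. A hedged illustration of the kind of mapping this touches; the entity and all names below are invented for this note, not taken from the commit:

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.Table;

// Hypothetical mapping: the backtick-quoted index name is the part that the
// getQuotedName( dialect ) lookup is meant to carry through unchanged.
@Entity
@Table(name = "orders", indexes = @Index(name = "`Order_Idx`", columnList = "`CusTomer`"))
public class Order {
	@Id
	private long id;

	@Column(name = "`CusTomer`")
	private String customer;
}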
@@ -1,154 +0,0 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.tool.schema.internal.exec;

import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;

import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.QualifiedSequenceName;
import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.resource.transaction.spi.DdlTransactionIsolator;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.extract.internal.InformationExtractorJdbcDatabaseMetaDataImpl;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.ExtractionContext;
import org.hibernate.tool.schema.extract.spi.InformationExtractor;
import org.hibernate.tool.schema.extract.spi.SequenceInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;

/**
 * @author Steve Ebersole
 */
public class ImprovedDatabaseInformationImpl
		implements DatabaseInformation, ExtractionContext.DatabaseObjectAccess {
	private final JdbcEnvironment jdbcEnvironment;
	private final ImprovedExtractionContextImpl extractionContext;
	private final InformationExtractor extractor;

	private final Map<QualifiedSequenceName, SequenceInformation> sequenceInformationMap = new HashMap<QualifiedSequenceName, SequenceInformation>();

	public ImprovedDatabaseInformationImpl(
			ServiceRegistry serviceRegistry,
			JdbcEnvironment jdbcEnvironment,
			DdlTransactionIsolator ddlTransactionIsolator,
			Namespace.Name defaultNamespace) throws SQLException {
		this.jdbcEnvironment = jdbcEnvironment;

		this.extractionContext = new ImprovedExtractionContextImpl(
				serviceRegistry,
				jdbcEnvironment,
				ddlTransactionIsolator,
				defaultNamespace.getCatalog(),
				defaultNamespace.getSchema(),
				this
		);

		// todo : make this pluggable
		this.extractor = new InformationExtractorJdbcDatabaseMetaDataImpl( extractionContext );

		// because we do not have defined a way to locate sequence info by name
		initializeSequences();
	}

	private void initializeSequences() throws SQLException {
		Iterable<SequenceInformation> itr = jdbcEnvironment.getDialect()
				.getSequenceInformationExtractor()
				.extractMetadata( extractionContext );
		for ( SequenceInformation sequenceInformation : itr ) {
			sequenceInformationMap.put(
					// for now, follow the legacy behavior of storing just the
					// unqualified sequence name.
					new QualifiedSequenceName(
							null,
							null,
							sequenceInformation.getSequenceName().getSequenceName()
					),
					sequenceInformation
			);
		}
	}

	@Override
	public boolean catalogExists(Identifier catalog) {
		return extractor.catalogExists( catalog );
	}

	@Override
	public boolean schemaExists(Namespace.Name namespace) {
		return extractor.schemaExists( namespace.getCatalog(), namespace.getSchema() );
	}

	@Override
	public TableInformation getTableInformation(
			Identifier catalogName,
			Identifier schemaName,
			Identifier tableName) {
		return getTableInformation( new QualifiedTableName( catalogName, schemaName, tableName ) );
	}

	@Override
	public TableInformation getTableInformation(
			Namespace.Name namespace,
			Identifier tableName) {
		return getTableInformation( new QualifiedTableName( namespace, tableName ) );
	}

	@Override
	public TableInformation getTableInformation(QualifiedTableName tableName) {
		if ( tableName.getObjectName() == null ) {
			throw new IllegalArgumentException( "Passed table name cannot be null" );
		}

		return extractor.getTable(
				tableName.getCatalogName(),
				tableName.getSchemaName(),
				tableName.getTableName()
		);
	}

	@Override
	public SequenceInformation getSequenceInformation(
			Identifier catalogName,
			Identifier schemaName,
			Identifier sequenceName) {
		return getSequenceInformation( new QualifiedSequenceName( catalogName, schemaName, sequenceName ) );
	}

	@Override
	public SequenceInformation getSequenceInformation(Namespace.Name schemaName, Identifier sequenceName) {
		return getSequenceInformation( new QualifiedSequenceName( schemaName, sequenceName ) );
	}

	@Override
	public SequenceInformation getSequenceInformation(QualifiedSequenceName sequenceName) {
		return locateSequenceInformation( sequenceName );
	}

	@Override
	public void cleanup() {
		extractionContext.cleanup();
	}

	@Override
	public TableInformation locateTableInformation(QualifiedTableName tableName) {
		return getTableInformation( tableName );
	}

	@Override
	public SequenceInformation locateSequenceInformation(QualifiedSequenceName sequenceName) {
		// again, follow legacy behavior
		if ( sequenceName.getCatalogName() != null || sequenceName.getSchemaName() != null ) {
			sequenceName = new QualifiedSequenceName( null, null, sequenceName.getSequenceName() );
		}

		return sequenceInformationMap.get( sequenceName );
	}
}
@@ -6,6 +6,7 @@
 */
package org.hibernate.test.schematools;

+import java.sql.Connection;
import java.sql.SQLException;

import org.hibernate.boot.MetadataSources;
@@ -14,17 +15,22 @@ import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.Environment;
+import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
+import org.hibernate.engine.jdbc.env.internal.JdbcEnvironmentInitiator;
import org.hibernate.engine.jdbc.spi.JdbcServices;
+import org.hibernate.resource.transaction.spi.DdlTransactionIsolator;
import org.hibernate.tool.schema.extract.internal.DatabaseInformationImpl;
import org.hibernate.tool.schema.extract.internal.ExtractionContextImpl;
import org.hibernate.tool.schema.extract.internal.InformationExtractorJdbcDatabaseMetaDataImpl;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.ExtractionContext;
+import org.hibernate.tool.schema.internal.exec.JdbcContext;

import org.junit.After;
import org.junit.Test;

import org.hibernate.testing.TestForIssue;
+import org.hibernate.test.util.DdlTransactionIsolatorTestingImpl;

import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
@@ -37,7 +43,6 @@ public class TestExtraPhysicalTableTypes {

	private StandardServiceRegistry ssr;
	private MetadataImplementor metadata;

	@After
	public void tearDown() {
		StandardServiceRegistryBuilder.destroy( ssr );
@@ -84,12 +89,16 @@ public class TestExtraPhysicalTableTypes {
	private InformationExtractorJdbcDatabaseMetaDataImplTest buildInformationExtractorJdbcDatabaseMetaDataImplTest()
			throws SQLException {
		Database database = metadata.getDatabase();

+		final ConnectionProvider connectionProvider = ssr.getService( ConnectionProvider.class );
		DatabaseInformation dbInfo = new DatabaseInformationImpl(
				ssr,
				database.getJdbcEnvironment(),
-				ssr.getService( JdbcServices.class ).getBootstrapJdbcConnectionAccess(),
-				database.getDefaultNamespace().getPhysicalName().getCatalog(),
-				database.getDefaultNamespace().getPhysicalName().getSchema()
+				new DdlTransactionIsolatorTestingImpl( ssr,
+						new JdbcEnvironmentInitiator.ConnectionProviderJdbcConnectionAccess(
+								connectionProvider )
+				),
+				database.getDefaultNamespace().getName()
		);
		ExtractionContextImpl extractionContext = new ExtractionContextImpl(
				ssr,
@@ -119,7 +128,6 @@ public class TestExtraPhysicalTableTypes {
	}

	public class InformationExtractorJdbcDatabaseMetaDataImplTest extends InformationExtractorJdbcDatabaseMetaDataImpl {

		public InformationExtractorJdbcDatabaseMetaDataImplTest(ExtractionContext extractionContext) {
			super( extractionContext );
		}
@@ -128,4 +136,27 @@ public class TestExtraPhysicalTableTypes {
			return super.isPhysicalTableType( tableType );
		}
	}
+
+	class DdlTransactionIsolatorImpl implements DdlTransactionIsolator{
+
+		@Override
+		public JdbcContext getJdbcContext() {
+			return null;
+		}
+
+		@Override
+		public void prepare() {
+
+		}
+
+		@Override
+		public Connection getIsolatedConnection() {
+			return null;
+		}
+
+		@Override
+		public void release() {
+
+		}
+	}
}
@@ -0,0 +1,114 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.schemaupdate;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;

import org.hibernate.boot.Metadata;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.hbm2ddl.SchemaUpdate;
import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
import org.hibernate.tool.schema.TargetType;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;

/**
 * @author Andrea Boriero
 */
@RunWith(Parameterized.class)
public class ColumnNamesTest {
	@Parameterized.Parameters
	public static Collection<String> parameters() {
		return Arrays.asList(
				new String[] {JdbcMetadaAccessStrategy.GROUPED.toString(), JdbcMetadaAccessStrategy.INDIVIDUALLY.toString()}
		);
	}

	@Parameterized.Parameter
	public String jdbcMetadataExtractorStrategy;

	private StandardServiceRegistry ssr;
	private Metadata metadata;
	private File output;

	@Before
	public void setUp() throws IOException {
		ssr = new StandardServiceRegistryBuilder()
				.applySetting( AvailableSettings.KEYWORD_AUTO_QUOTING_ENABLED, "true" )
				.applySetting( AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY, jdbcMetadataExtractorStrategy )
				.build();
		output = File.createTempFile( "update_script", ".sql" );
		output.deleteOnExit();

		metadata = new MetadataSources( ssr )
				.addAnnotatedClass( Employee.class )
				.buildMetadata();
		new SchemaExport().create( EnumSet.of( TargetType.DATABASE ), metadata );
	}

	@After
	public void tearDown() {
		try {
			new SchemaExport().drop( EnumSet.of( TargetType.DATABASE ), metadata );
		}
		finally {
			StandardServiceRegistryBuilder.destroy( ssr );
		}
	}

	@Test
	public void testSchemaUpdateWithQuotedColumnNames() throws Exception {
		new SchemaUpdate()
				.setOutputFile( output.getAbsolutePath() )
				.execute(
						EnumSet.of( TargetType.SCRIPT ),
						metadata
				);

		final String fileContent = new String( Files.readAllBytes( output.toPath() ) );
		assertThat( "The update output file should be empty", fileContent, is( "" ) );
	}

	@Entity
	@Table(name = "Employee")
	public class Employee {
		@Id
		private long id;

		@Column(name = "`Age`")
		public String age;

		@Column(name = "Name")
		private String name;

		private String match;

		private String birthday;

		private String homeAddress;
	}
}
@@ -0,0 +1,214 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.schemaupdate;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.ForeignKey;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.PrimaryKeyJoinColumn;
import javax.persistence.Table;

import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.hbm2ddl.SchemaUpdate;
import org.hibernate.tool.hbm2ddl.SchemaValidator;
import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
import org.hibernate.tool.schema.TargetType;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;

/**
 * @author Andrea Boriero
 */
@RunWith(Parameterized.class)
public class SchemaUpdateTest {
	@Parameterized.Parameters
	public static Collection<String> parameters() {
		return Arrays.asList(
				new String[] {JdbcMetadaAccessStrategy.GROUPED.toString(), JdbcMetadaAccessStrategy.INDIVIDUALLY.toString()}
		);
	}

	@Parameterized.Parameter
	public String jdbcMetadataExtractorStrategy;

	private File output;
	private StandardServiceRegistry ssr;
	private MetadataImplementor metadata;

	@Before
	public void setUp() throws IOException {
		output = File.createTempFile( "update_script", ".sql" );
		output.deleteOnExit();
		ssr = new StandardServiceRegistryBuilder()
				.applySetting( AvailableSettings.KEYWORD_AUTO_QUOTING_ENABLED, "true" )
				.applySetting( AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY, jdbcMetadataExtractorStrategy )
				.build();
		final MetadataSources metadataSources = new MetadataSources( ssr );
		metadataSources.addAnnotatedClass( LowercaseTableNameEntity.class );
		metadataSources.addAnnotatedClass( TestEntity.class );
		metadataSources.addAnnotatedClass( UppercaseTableNameEntity.class );
		metadataSources.addAnnotatedClass( MixedCaseTableNameEntity.class );
		metadataSources.addAnnotatedClass( Match.class );
		metadataSources.addAnnotatedClass( InheritanceRootEntity.class );
		metadataSources.addAnnotatedClass( InheritanceChildEntity.class );
		metadataSources.addAnnotatedClass( InheritanceSecondChildEntity.class );

		metadata = (MetadataImplementor) metadataSources.buildMetadata();
		metadata.validate();
	}

	@After
	public void tearsDown() {
		new SchemaExport().setHaltOnError( true )
				.setOutputFile( output.getAbsolutePath() )
				.setFormat( false )
				.drop( EnumSet.of( TargetType.DATABASE ), metadata );
		StandardServiceRegistryBuilder.destroy( ssr );
	}

	@Test
	public void testSchemaUpdateAndValidation() throws Exception {

		new SchemaUpdate().setHaltOnError( true )
				.execute( EnumSet.of( TargetType.DATABASE ), metadata );

		new SchemaValidator().validate( metadata );

		new SchemaUpdate().setHaltOnError( true )
				.setOutputFile( output.getAbsolutePath() )
				.setFormat( false )
				.execute( EnumSet.of( TargetType.DATABASE, TargetType.SCRIPT ), metadata );

		final String fileContent = new String( Files.readAllBytes( output.toPath() ) );
		assertThat( "The update output file should be empty", fileContent, is( "" ) );
	}

	@Entity(name = "TestEntity")
	@Table(name = "`testentity`")
	public static class LowercaseTableNameEntity {
		@Id
		long id;
		String field1;

		@ManyToMany(mappedBy = "entities")
		Set<TestEntity> entity1s;
	}

	@Entity(name = "TestEntity1")
	public static class TestEntity {
		@Id
		@Column(name = "`Id`")
		long id;
		String field1;

		@ManyToMany
		Set<LowercaseTableNameEntity> entities;

		@OneToMany
		@JoinColumn
		private Set<UppercaseTableNameEntity> entitie2s;

		@ManyToOne
		private LowercaseTableNameEntity entity;
	}

	@Entity(name = "TestEntity2")
	@Table(name = "`TESTENTITY`")
	public static class UppercaseTableNameEntity {
		@Id
		long id;
		String field1;

		@ManyToOne
		TestEntity testEntity;

		@ManyToOne
		@JoinColumn(foreignKey = @ForeignKey(name = "FK_mixedCase"))
		MixedCaseTableNameEntity mixedCaseTableNameEntity;
	}

	@Entity(name = "TestEntity3")
	@Table(name = "`TESTentity`", indexes = {@Index(name = "index1", columnList = "`FieLd1`"), @Index(name = "Index2", columnList = "`FIELD_2`")})
	public static class MixedCaseTableNameEntity {
		@Id
		long id;
		@Column(name = "`FieLd1`")
		String field1;
		@Column(name = "`FIELD_2`")
		String field2;
		@Column(name = "`field_3`")
		String field3;
		String field4;

		@OneToMany
		@JoinColumn
		private Set<Match> matches = new HashSet<>();
	}

	@Entity(name = "Match")
	public static class Match {
		@Id
		long id;
		String match;

		@ElementCollection
		@CollectionTable
		private Map<Integer, Integer> timeline = new TreeMap<>();
	}

	@Entity(name = "InheritanceRootEntity")
	@Inheritance(strategy = InheritanceType.JOINED)
	public static class InheritanceRootEntity {
		@Id
		protected Long id;
	}

	@Entity(name = "InheritanceChildEntity")
	@PrimaryKeyJoinColumn(name = "ID", foreignKey = @ForeignKey(name = "FK_ROOT"))
	public static class InheritanceChildEntity extends InheritanceRootEntity {
	}

	@Entity(name = "InheritanceSecondChildEntity")
	@PrimaryKeyJoinColumn(name = "ID")
	public static class InheritanceSecondChildEntity extends InheritanceRootEntity {
		@ManyToOne
		@JoinColumn
		public Match match;
	}
}
@@ -27,25 +27,28 @@ import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.hbm2ddl.SchemaUpdate;
import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.TargetType;
-import org.hibernate.tool.schema.extract.internal.DatabaseInformationImpl;
-import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
+import org.hibernate.tool.schema.internal.DefaultSchemaFilter;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.internal.HibernateSchemaManagementTool;
+import org.hibernate.tool.schema.internal.GroupedSchemaMigratorImpl;
import org.hibernate.tool.schema.internal.SchemaDropperImpl;
-import org.hibernate.tool.schema.internal.SchemaMigratorImpl;
+import org.hibernate.tool.schema.internal.IndividuallySchemaMigratorImpl;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToStdout;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
+import org.hibernate.tool.schema.spi.ScriptTargetOutput;
import org.hibernate.tool.schema.spi.SourceDescriptor;
+import org.hibernate.tool.schema.spi.TargetDescriptor;

import org.hibernate.testing.DialectChecks;
import org.hibernate.testing.RequiresDialectFeature;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.hibernate.test.tool.schema.TargetDatabaseImpl;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -136,20 +139,8 @@ public class CrossSchemaForeignKeyGenerationTest extends BaseUnitTestCase {
		final Database database = metadata.getDatabase();
		final HibernateSchemaManagementTool tool = (HibernateSchemaManagementTool) ssr.getService( SchemaManagementTool.class );

-		DatabaseInformation dbInfo = new DatabaseInformationImpl(
-				ssr,
-				database.getJdbcEnvironment(),
-				ssr.getService( JdbcServices.class ).getBootstrapJdbcConnectionAccess(),
-				database.getDefaultNamespace().getPhysicalName().getCatalog(),
-				database.getDefaultNamespace().getPhysicalName().getSchema()
-		);
-
		final Map configurationValues = ssr.getService( ConfigurationService.class ).getSettings();
-		new SchemaMigratorImpl( tool ).doMigration(
-				metadata,
-				dbInfo,
-				new ExecutionOptions() {
+		final ExecutionOptions options = new ExecutionOptions() {
			@Override
			public boolean shouldManageNamespaces() {
				return true;
@@ -164,14 +155,47 @@ public class CrossSchemaForeignKeyGenerationTest extends BaseUnitTestCase {
			public ExceptionHandler getExceptionHandler() {
				return ExceptionHandlerLoggedImpl.INSTANCE;
			}
-		},
-		ssr.getService( JdbcEnvironment.class ).getDialect(),
-		buildTargets()
+		};
+
+		new IndividuallySchemaMigratorImpl( tool, DefaultSchemaFilter.INSTANCE ).doMigration(
+				metadata,
+				options,
+				TargetDescriptorImpl.INSTANCE
		);

		new SchemaDropperImpl( tool ).doDrop(
				metadata,
-				new ExecutionOptions() {
+				options,
+				ssr.getService( JdbcEnvironment.class ).getDialect(),
+				new SourceDescriptor() {
+					@Override
+					public SourceType getSourceType() {
+						return SourceType.METADATA;
+					}
+
+					@Override
+					public ScriptSourceInput getScriptSourceInput() {
+						return null;
+					}
+				},
+				buildTargets()
+		);
+	}
+
+	@Test
+	@TestForIssue(jiraKey = "HHH-10420")
+	public void testImprovedSchemaMigrationForeignKeysAreGeneratedAfterAllTheTablesAreCreated() throws Exception {
+		final MetadataSources metadataSources = new MetadataSources( ssr );
+		metadataSources.addAnnotatedClass( SchemaOneEntity.class );
+		metadataSources.addAnnotatedClass( SchemaTwoEntity.class );
+
+		MetadataImplementor metadata = (MetadataImplementor) metadataSources.buildMetadata();
+		metadata.validate();
+
+		final HibernateSchemaManagementTool tool = (HibernateSchemaManagementTool) ssr.getService( SchemaManagementTool.class );
+
+		final Map configurationValues = ssr.getService( ConfigurationService.class ).getSettings();
+		final ExecutionOptions options = new ExecutionOptions() {
			@Override
			public boolean shouldManageNamespaces() {
				return true;
@@ -186,7 +210,17 @@ public class CrossSchemaForeignKeyGenerationTest extends BaseUnitTestCase {
			public ExceptionHandler getExceptionHandler() {
				return ExceptionHandlerLoggedImpl.INSTANCE;
			}
-		},
+		};
+
+		new GroupedSchemaMigratorImpl( tool, DefaultSchemaFilter.INSTANCE ).doMigration(
+				metadata,
+				options,
+				TargetDescriptorImpl.INSTANCE
+		);
+
+		new SchemaDropperImpl( tool ).doDrop(
+				metadata,
+				options,
				ssr.getService( JdbcEnvironment.class ).getDialect(),
				new SourceDescriptor() {
					@Override
@@ -209,4 +243,19 @@ public class CrossSchemaForeignKeyGenerationTest extends BaseUnitTestCase {
				new TargetDatabaseImpl( ssr.getService( JdbcServices.class ).getBootstrapJdbcConnectionAccess() )
		};
	}
+
+	private static class TargetDescriptorImpl implements TargetDescriptor {
+
+		public static final TargetDescriptorImpl INSTANCE = new TargetDescriptorImpl();
+
+		@Override
+		public EnumSet<TargetType> getTargetTypes() {
+			return EnumSet.of( TargetType.DATABASE );
+		}
+
+		@Override
+		public ScriptTargetOutput getScriptTargetOutput() {
+			return null;
+		}
+	}
}
@@ -6,6 +6,8 @@
 */
package org.hibernate.test.schemavalidation;

+import java.util.Arrays;
+import java.util.Collection;
import java.util.EnumSet;
import java.util.Map;
import javax.persistence.Entity;
@@ -16,7 +18,9 @@ import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.spi.MetadataImplementor;
+import org.hibernate.cfg.AvailableSettings;
import org.hibernate.engine.config.spi.ConfigurationService;
+import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
@@ -29,21 +33,35 @@ import org.hibernate.tool.schema.spi.SourceDescriptor;
import org.hibernate.tool.schema.spi.TargetDescriptor;

import org.hibernate.testing.TestForIssue;
-import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;

/**
 * @author Steve Ebersole
 */
-@TestForIssue( jiraKey = "HHH-9693" )
-public class LongVarcharValidationTest extends BaseUnitTestCase implements ExecutionOptions {
+@TestForIssue(jiraKey = "HHH-9693")
+@RunWith(Parameterized.class)
+public class LongVarcharValidationTest implements ExecutionOptions {
+	@Parameterized.Parameters
+	public static Collection<String> parameters() {
+		return Arrays.asList(
+				new String[] {JdbcMetadaAccessStrategy.GROUPED.toString(), JdbcMetadaAccessStrategy.INDIVIDUALLY.toString()}
+		);
+	}
+
+	@Parameterized.Parameter
+	public String jdbcMetadataExtractorStrategy;
+
	private StandardServiceRegistry ssr;

	@Before
	public void beforeTest() {
-		ssr = new StandardServiceRegistryBuilder().build();
+		ssr = new StandardServiceRegistryBuilder()
+				.applySetting( AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY, jdbcMetadataExtractorStrategy )
+				.build();
	}

	@After
@@ -137,13 +155,13 @@ public class LongVarcharValidationTest extends BaseUnitTestCase implements Execu
		);
	}

	@Entity(name = "Translation")
	public static class Translation {
		@Id
		public Integer id;
-		@Type( type = "text" )
+		@Type(type = "text")
		String text;
	}

	@Override
	public Map getConfigurationValues() {
@@ -13,13 +13,18 @@ import javax.persistence.Id;
import javax.persistence.Table;

import org.hibernate.Session;
+import org.hibernate.boot.MetadataSources;
+import org.hibernate.boot.registry.StandardServiceRegistry;
+import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.AvailableSettings;
-import org.hibernate.cfg.Configuration;
import org.hibernate.dialect.Oracle9iDialect;
import org.hibernate.tool.hbm2ddl.SchemaValidator;
+import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;

import org.hibernate.testing.RequiresDialect;
import org.hibernate.testing.junit4.BaseNonConfigCoreFunctionalTestCase;
+import org.junit.After;
+import org.junit.Before;
import org.junit.Test;

/**
@@ -28,34 +33,86 @@ import org.junit.Test;
 *
 * @author Brett Meyer
 */
-@RequiresDialect( Oracle9iDialect.class )
+@RequiresDialect(Oracle9iDialect.class)
public class SynonymValidationTest extends BaseNonConfigCoreFunctionalTestCase {
+	private StandardServiceRegistry ssr;
+
	@Override
	protected Class<?>[] getAnnotatedClasses() {
-		return new Class<?>[] { TestEntity.class };
+		return new Class<?>[] {TestEntity.class};
	}

-	@Test
-	public void testSynonymValidation() {
+	@Before
+	public void setUp() {
		Session s = openSession();
+		try {
			s.getTransaction().begin();
			s.createSQLQuery( "CREATE SYNONYM test_synonym FOR test_entity" ).executeUpdate();
			s.getTransaction().commit();
+		}
+		catch (Exception e) {
+			if ( s.getTransaction().isActive() ) {
+				s.getTransaction().rollback();
+			}
+		}
+		finally {
			s.close();
+		}
+	}

-		Configuration cfg = new Configuration();
-		cfg.addAnnotatedClass( TestEntityWithSynonym.class );
-		cfg.setProperty( AvailableSettings.ENABLE_SYNONYMS, "true" );
-
-		new SchemaValidator().validate( metadata() );
-
-		s = openSession();
+	@After
+	public void tearDown() {
+		Session s = openSession();
+		try {
			s.getTransaction().begin();
			s.createSQLQuery( "DROP SYNONYM test_synonym FORCE" ).executeUpdate();
			s.getTransaction().commit();
+		}
+		catch (Exception e) {
+			if ( s.getTransaction().isActive() ) {
+				s.getTransaction().rollback();
+			}
+		}
+		finally {
			s.close();
		}
	}
+
+	@Test
+	public void testSynonymUsingImprovedSchemaValidar() {
+		ssr = new StandardServiceRegistryBuilder()
+				.applySetting( AvailableSettings.ENABLE_SYNONYMS, "true" )
+				.applySetting( AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY, JdbcMetadaAccessStrategy.GROUPED )
+				.build();
+		try {
+			final MetadataSources metadataSources = new MetadataSources( ssr );
+			metadataSources.addAnnotatedClass( TestEntityWithSynonym.class );
+			metadataSources.addAnnotatedClass( TestEntity.class );
+
+			new SchemaValidator().validate( metadataSources.buildMetadata() );
+		}
+		finally {
+			StandardServiceRegistryBuilder.destroy( ssr );
+		}
+	}
+
+	@Test
+	public void testSynonymUsingSchemaValidator() {
+		ssr = new StandardServiceRegistryBuilder()
+				.applySetting( AvailableSettings.ENABLE_SYNONYMS, "true" )
+				.applySetting( AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY, JdbcMetadaAccessStrategy.GROUPED )
+				.build();
+		try {
+			final MetadataSources metadataSources = new MetadataSources( ssr );
+			metadataSources.addAnnotatedClass( TestEntityWithSynonym.class );
+			metadataSources.addAnnotatedClass( TestEntity.class );
+
+			new SchemaValidator().validate( metadataSources.buildMetadata() );
+		}
+		finally {
+			StandardServiceRegistryBuilder.destroy( ssr );
+		}
+	}
+
	@Entity
	@Table(name = "test_entity")
@@ -0,0 +1,28 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.tool.schema;

import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.tool.schema.internal.DefaultSchemaFilter;
import org.hibernate.tool.schema.internal.GroupedSchemaValidatorImpl;

import org.hibernate.testing.RequiresDialect;
import org.hibernate.testing.TestForIssue;

/**
 * @author Andrea Boriero
 */
@TestForIssue(jiraKey = "HHH-10332")
@RequiresDialect(H2Dialect.class)
public class GroupedSchemaValidatorImplTest extends IndividuallySchemaValidatorImplTest {
	@Override
	protected void getSchemaValidator(MetadataImplementor metadata) {
		new GroupedSchemaValidatorImpl( tool, DefaultSchemaFilter.INSTANCE )
				.doValidation( metadata, executionOptions );
	}
}
@@ -26,11 +26,12 @@ import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.jdbc.connections.internal.DriverManagerConnectionProviderImpl;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.service.spi.ServiceRegistryImplementor;
+import org.hibernate.tool.schema.internal.DefaultSchemaFilter;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.internal.HibernateSchemaManagementTool;
import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.tool.schema.internal.SchemaDropperImpl;
-import org.hibernate.tool.schema.internal.SchemaValidatorImpl;
+import org.hibernate.tool.schema.internal.IndividuallySchemaValidatorImpl;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToDatabase;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
@@ -58,19 +59,19 @@ import static org.junit.Assert.assertEquals;
 */
@TestForIssue(jiraKey = "HHH-10332")
@RequiresDialect(H2Dialect.class)
-public class SchemaValidatorImplTest extends BaseUnitTestCase {
+public class IndividuallySchemaValidatorImplTest extends BaseUnitTestCase {

	@Rule
	public LoggerInspectionRule logInspection = new LoggerInspectionRule(
-			Logger.getMessageLogger( CoreMessageLogger.class, SchemaValidatorImplTest.class.getName() ) );
+			Logger.getMessageLogger( CoreMessageLogger.class, IndividuallySchemaValidatorImplTest.class.getName() ) );

	private StandardServiceRegistry ssr;

-	private HibernateSchemaManagementTool tool;
+	protected HibernateSchemaManagementTool tool;

	private Map configurationValues;

-	private ExecutionOptions executionOptions;
+	protected ExecutionOptions executionOptions;

	@Before
	public void setUp() throws IOException {
@@ -112,7 +113,7 @@ public class SchemaValidatorImplTest extends BaseUnitTestCase {
		metadata.validate();

		try {
-			new SchemaValidatorImpl( tool ).doValidation( metadata, executionOptions );
+			getSchemaValidator( metadata );
			Assert.fail( "SchemaManagementException expected" );
		}
		catch (SchemaManagementException e) {
@@ -129,7 +130,7 @@ public class SchemaValidatorImplTest extends BaseUnitTestCase {
		metadata.validate();

		try {
-			new SchemaValidatorImpl( tool ).doValidation( metadata, executionOptions );
+			getSchemaValidator( metadata );
			Assert.fail( "SchemaManagementException expected" );
		}
		catch (SchemaManagementException e) {
@@ -178,7 +179,7 @@ public class SchemaValidatorImplTest extends BaseUnitTestCase {
		metadata.validate();

		try {
-			new SchemaValidatorImpl( tool ).doValidation( metadata, executionOptions );
+			getSchemaValidator( metadata );
			Assert.fail( "SchemaManagementException expected" );
		}
		catch (SchemaManagementException e) {
@@ -233,7 +234,7 @@ public class SchemaValidatorImplTest extends BaseUnitTestCase {
		metadata.validate();

		try {
-			new SchemaValidatorImpl( tool ).doValidation( metadata, executionOptions );
+			getSchemaValidator( metadata );
			Assert.fail( "SchemaManagementException expected" );
		}
		catch (SchemaManagementException e) {
@@ -247,6 +248,11 @@ public class SchemaValidatorImplTest extends BaseUnitTestCase {
		}
	}

+	protected void getSchemaValidator(MetadataImplementor metadata) {
+		new IndividuallySchemaValidatorImpl( tool, DefaultSchemaFilter.INSTANCE )
+				.doValidation( metadata, executionOptions );
+	}
+
	protected Properties properties() {
		Properties properties = new Properties( );
		URL propertiesURL = Thread.currentThread().getContextClassLoader().getResource( "hibernate.properties" );