HHH-11023 - Startup performance regression : schema update/validate

Andrea Boriero 2016-08-29 15:47:03 +01:00
parent a99638372d
commit de3153a8e1
29 changed files with 1741 additions and 820 deletions

View File

@ -601,6 +601,14 @@ If this property is not supplied (or is explicitly `false`), the provider should
Used to specify the `org.hibernate.tool.schema.spi.SchemaFilterProvider` to be used by `create`, `drop`, `migrate`, and `validate` operations on the database schema.
`SchemaFilterProvider` provides filters that can be used to limit the scope of these operations to specific namespaces, tables and sequences. All objects are included by default.
|`hibernate.hbm2ddl.jdbc_metadata_extraction_strategy` |`grouped` (default value) or `individually` a|
Setting to choose the strategy used to access the JDBC metadata (see the configuration sketch following this table).
Valid options are defined by the `strategy` value of the `org.hibernate.tool.schema.JdbcMetadaAccessStrategy` enum:
`grouped`:: `org.hibernate.tool.schema.spi.SchemaMigrator` and `org.hibernate.tool.schema.spi.SchemaValidator` execute a single `java.sql.DatabaseMetaData#getTables(String, String, String, String[])` call to retrieve all the database tables, then determine whether each `javax.persistence.Entity` has a corresponding mapped database table.
`individually`:: `org.hibernate.tool.schema.spi.SchemaMigrator` and `org.hibernate.tool.schema.spi.SchemaValidator` execute one `java.sql.DatabaseMetaData#getTables(String, String, String, String[])` call for each `javax.persistence.Entity` to determine whether a corresponding database table exists.
|`hibernate.hbm2ddl.delimiter` | `;` |Identifies the delimiter to use to separate schema management statements in script outputs.
|`hibernate.schema_management_tool` |A `SchemaManagementTool` implementation class name |Used to specify the `org.hibernate.tool.schema.spi.SchemaManagementTool` to use for performing schema management. The default is `org.hibernate.tool.schema.internal.HibernateSchemaManagementTool`.
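A minimal bootstrap sketch, assuming a plain `StandardServiceRegistryBuilder` setup, of how the new strategy setting might be applied; the `hibernate.hbm2ddl.auto` value is only an illustrative companion setting:

import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;

public class MetadataStrategyConfigExample {
    public static void main(String[] args) {
        // Request the grouped strategy (the default) explicitly;
        // pass "individually" to restore the per-entity lookups instead.
        StandardServiceRegistry registry = new StandardServiceRegistryBuilder()
                .applySetting( "hibernate.hbm2ddl.jdbc_metadata_extraction_strategy", "grouped" )
                .applySetting( "hibernate.hbm2ddl.auto", "update" )
                .build();
        try {
            // ... build Metadata / SessionFactory against this registry ...
        }
        finally {
            StandardServiceRegistryBuilder.destroy( registry );
        }
    }
}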

View File

@ -10,6 +10,7 @@ import org.hibernate.boot.MetadataBuilder;
import org.hibernate.query.internal.ParameterMetadataImpl;
import org.hibernate.resource.transaction.spi.TransactionCoordinator;
import org.hibernate.resource.transaction.spi.TransactionCoordinatorBuilder;
import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
import org.hibernate.tool.schema.SourceType;
/**
@ -1310,6 +1311,15 @@ public interface AvailableSettings {
*/
String HBM2DDL_FILTER_PROVIDER = "hibernate.hbm2ddl.schema_filter_provider";
/**
* Setting to choose the strategy used to access the JDBC Metadata.
*
* Valid options are defined by the {@link JdbcMetadaAccessStrategy} enum.
*
* @see JdbcMetadaAccessStrategy
*/
String HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY = "hibernate.hbm2ddl.jdbc_metadata_extraction_strategy";
/**
* Identifies the delimiter to use to separate schema management statements in script outputs
*/

View File

@ -14,6 +14,7 @@ import java.util.Iterator;
import org.hibernate.HibernateException;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Exportable;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
@ -29,13 +30,13 @@ public class Index implements RelationalModel, Exportable, Serializable {
private Table table;
private java.util.List<Column> columns = new ArrayList<Column>();
private java.util.Map<Column, String> columnOrderMap = new HashMap<Column, String>( );
private String name;
private Identifier name;
public String sqlCreateString(Dialect dialect, Mapping mapping, String defaultCatalog, String defaultSchema)
throws HibernateException {
return buildSqlCreateIndexString(
dialect,
getName(),
getQuotedName( dialect ),
getTable(),
getColumnIterator(),
columnOrderMap,
@ -171,7 +172,7 @@ public class Index implements RelationalModel, Exportable, Serializable {
return "drop index " +
StringHelper.qualify(
table.getQualifiedName( dialect, defaultCatalog, defaultSchema ),
name
getQuotedName( dialect )
);
}
@ -219,11 +220,15 @@ public class Index implements RelationalModel, Exportable, Serializable {
}
public String getName() {
return name;
return name == null ? null : name.getText();
}
public void setName(String name) {
this.name = name;
this.name = Identifier.toIdentifier( name );
}
public String getQuotedName(Dialect dialect) {
return name == null ? null : name.render( dialect );
}
@Override
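A small sketch of what the switch from `String` to `Identifier` buys, assuming H2-style double-quote quoting; `Identifier.toIdentifier` treats a backtick-wrapped name as quoted:

import org.hibernate.dialect.H2Dialect;
import org.hibernate.mapping.Index;

public class IndexQuotingSketch {
    public static void main(String[] args) {
        Index index = new Index();
        // The backticks mark the name as quoted; the Identifier records that flag.
        index.setName( "`idx_person_name`" );
        System.out.println( index.getName() );                        // idx_person_name
        System.out.println( index.getQuotedName( new H2Dialect() ) ); // "idx_person_name"
    }
}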

View File

@ -0,0 +1,57 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.internal.util.StringHelper;
/**
* @author Andrea Boriero
*/
public enum JdbcMetadaAccessStrategy {
/**
* The {@link org.hibernate.tool.schema.spi.SchemaMigrator} and {@link org.hibernate.tool.schema.spi.SchemaValidator}
* execute one {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call for each
* {@link javax.persistence.Entity} in order to determine if a corresponding database table exists.
*/
INDIVIDUALLY( "individually" ),
/**
* The {@link org.hibernate.tool.schema.spi.SchemaMigrator} and {@link org.hibernate.tool.schema.spi.SchemaValidator}
* execute a single {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call
to retrieve all the database tables, then determine whether each {@link javax.persistence.Entity} has a corresponding mapped database table.
*/
GROUPED( "grouped" );
private final String strategy;
JdbcMetadaAccessStrategy(String strategy) {
this.strategy = strategy;
}
@Override
public String toString() {
return strategy;
}
public static JdbcMetadaAccessStrategy interpretHbm2ddlSetting(Object value) {
if ( value == null ) {
return GROUPED;
}
String name = value.toString();
if ( StringHelper.isEmpty( name ) || GROUPED.strategy.equals( name ) ) {
return GROUPED;
}
else if ( INDIVIDUALLY.strategy.equals( name ) ) {
return INDIVIDUALLY;
}
else {
throw new IllegalArgumentException( "Unrecognized `" + AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY + "` value : " + name );
}
}
}
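A usage sketch for `interpretHbm2ddlSetting`; the `configurationValues` map stands in for the resolved settings of a schema management coordinator:

import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;

public class StrategyResolutionSketch {
    static JdbcMetadaAccessStrategy resolve(Map<String, Object> configurationValues) {
        // null and "" fall back to GROUPED; an unrecognized value raises IllegalArgumentException.
        return JdbcMetadaAccessStrategy.interpretHbm2ddlSetting(
                configurationValues.get( AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY )
        );
    }
}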

View File

@ -14,65 +14,55 @@ import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.QualifiedSequenceName;
import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.resource.transaction.spi.DdlTransactionIsolator;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.ExtractionContext;
import org.hibernate.tool.schema.extract.spi.InformationExtractor;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.SequenceInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.exec.ImprovedExtractionContextImpl;
/**
* Access to information from the existing database.
*
* @author Steve Ebersole
*/
public class DatabaseInformationImpl implements DatabaseInformation, ExtractionContext.DatabaseObjectAccess {
public class DatabaseInformationImpl
implements DatabaseInformation, ExtractionContext.DatabaseObjectAccess {
private final JdbcEnvironment jdbcEnvironment;
private final ExtractionContext extractionContext;
private final ImprovedExtractionContextImpl extractionContext;
private final InformationExtractor extractor;
private final Map<QualifiedSequenceName, SequenceInformation> sequenceInformationMap = new HashMap<QualifiedSequenceName, SequenceInformation>();
public DatabaseInformationImpl(
JdbcEnvironment jdbcEnvironment,
InformationExtractor extractor,
ExtractionContext extractionContext) throws SQLException {
this.jdbcEnvironment = jdbcEnvironment;
this.extractionContext = extractionContext;
this.extractor = extractor;
// legacy code did initialize sequences...
initializeSequences();
}
public DatabaseInformationImpl(
ServiceRegistry serviceRegistry,
JdbcEnvironment jdbcEnvironment,
JdbcConnectionAccess jdbcConnectionAccess,
Identifier defaultCatalogName,
Identifier defaultSchemaName) throws SQLException {
DdlTransactionIsolator ddlTransactionIsolator,
Namespace.Name defaultNamespace) throws SQLException {
this.jdbcEnvironment = jdbcEnvironment;
this.extractionContext = new ExtractionContextImpl(
this.extractionContext = new ImprovedExtractionContextImpl(
serviceRegistry,
jdbcEnvironment,
jdbcConnectionAccess,
this,
defaultCatalogName,
defaultSchemaName
ddlTransactionIsolator,
defaultNamespace.getCatalog(),
defaultNamespace.getSchema(),
this
);
// todo : make this pluggable
this.extractor = new InformationExtractorJdbcDatabaseMetaDataImpl( extractionContext );
// legacy code did initialize sequences...
// because we have not defined a way to locate sequence info by name
initializeSequences();
}
private void initializeSequences() throws SQLException {
Iterable<SequenceInformation> itr = jdbcEnvironment.getDialect().getSequenceInformationExtractor().extractMetadata( extractionContext );
Iterable<SequenceInformation> itr = jdbcEnvironment.getDialect()
.getSequenceInformationExtractor()
.extractMetadata( extractionContext );
for ( SequenceInformation sequenceInformation : itr ) {
sequenceInformationMap.put(
// for now, follow the legacy behavior of storing just the
@ -88,8 +78,13 @@ public class DatabaseInformationImpl implements DatabaseInformation, ExtractionC
}
@Override
public boolean schemaExists(Namespace.Name schema) {
return extractor.schemaExists( schema.getCatalog(), schema.getSchema() );
public boolean catalogExists(Identifier catalog) {
return extractor.catalogExists( catalog );
}
@Override
public boolean schemaExists(Namespace.Name namespace) {
return extractor.schemaExists( namespace.getCatalog(), namespace.getSchema() );
}
@Override
@ -102,24 +97,29 @@ public class DatabaseInformationImpl implements DatabaseInformation, ExtractionC
@Override
public TableInformation getTableInformation(
Namespace.Name schemaName,
Namespace.Name namespace,
Identifier tableName) {
return getTableInformation( new QualifiedTableName( schemaName, tableName ) );
return getTableInformation( new QualifiedTableName( namespace, tableName ) );
}
@Override
public TableInformation getTableInformation(QualifiedTableName qualifiedTableName) {
if ( qualifiedTableName.getObjectName() == null ) {
public TableInformation getTableInformation(QualifiedTableName tableName) {
if ( tableName.getObjectName() == null ) {
throw new IllegalArgumentException( "Passed table name cannot be null" );
}
return extractor.getTable(
qualifiedTableName.getCatalogName(),
qualifiedTableName.getSchemaName(),
qualifiedTableName.getTableName()
tableName.getCatalogName(),
tableName.getSchemaName(),
tableName.getTableName()
);
}
@Override
public NameSpaceTablesInformation getTablesInformation(Namespace namespace) {
return extractor.getTables( namespace.getPhysicalName().getCatalog(), namespace.getPhysicalName().getSchema() );
}
@Override
public SequenceInformation getSequenceInformation(
Identifier catalogName,
@ -129,20 +129,18 @@ public class DatabaseInformationImpl implements DatabaseInformation, ExtractionC
}
@Override
public SequenceInformation getSequenceInformation(
Namespace.Name schemaName,
Identifier sequenceName) {
public SequenceInformation getSequenceInformation(Namespace.Name schemaName, Identifier sequenceName) {
return getSequenceInformation( new QualifiedSequenceName( schemaName, sequenceName ) );
}
@Override
public SequenceInformation getSequenceInformation(QualifiedSequenceName qualifiedSequenceName) {
return locateSequenceInformation( qualifiedSequenceName );
public SequenceInformation getSequenceInformation(QualifiedSequenceName sequenceName) {
return locateSequenceInformation( sequenceName );
}
@Override
public boolean catalogExists(Identifier catalog) {
return extractor.catalogExists( catalog );
public void cleanup() {
extractionContext.cleanup();
}
@Override
@ -159,9 +157,4 @@ public class DatabaseInformationImpl implements DatabaseInformation, ExtractionC
return sequenceInformationMap.get( sequenceName );
}
@Override
public void cleanup() {
extractionContext.cleanup();
}
}
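A sketch contrasting the two access paths this class now exposes; the `Namespace` and `QualifiedTableName` arguments are assumed to come from the mapped `Database` model:

import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;

public class AccessPathsSketch {
    static void compare(DatabaseInformation databaseInformation, Namespace namespace, QualifiedTableName tableName) {
        // "individually": one metadata round trip for this specific table
        TableInformation single = databaseInformation.getTableInformation( tableName );
        // "grouped": one metadata round trip for every table in the namespace
        NameSpaceTablesInformation all = databaseInformation.getTablesInformation( namespace );
    }
}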

View File

@ -36,6 +36,7 @@ import org.hibernate.tool.schema.extract.spi.ExtractionContext;
import org.hibernate.tool.schema.extract.spi.ForeignKeyInformation;
import org.hibernate.tool.schema.extract.spi.IndexInformation;
import org.hibernate.tool.schema.extract.spi.InformationExtractor;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.PrimaryKeyInformation;
import org.hibernate.tool.schema.extract.spi.SchemaExtractionException;
import org.hibernate.tool.schema.extract.spi.TableInformation;
@ -192,29 +193,18 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
return extractionContext.getJdbcEnvironment().getIdentifierHelper().toMetaDataSchemaName( identifierToUse );
}
public TableInformation extractTableInformation(
Identifier catalog,
Identifier schema,
Identifier name,
ResultSet resultSet) throws SQLException {
if ( catalog == null ) {
catalog = identifierHelper().toIdentifier( resultSet.getString( "TABLE_CAT" ) );
}
if ( schema == null ) {
schema = identifierHelper().toIdentifier( resultSet.getString( "TABLE_SCHEM" ) );
}
if ( name == null ) {
name = identifierHelper().toIdentifier( resultSet.getString( "TABLE_NAME" ) );
}
private TableInformation extractTableInformation(ResultSet resultSet) throws SQLException {
final QualifiedTableName tableName = extractTableName( resultSet );
final QualifiedTableName tableName = new QualifiedTableName( catalog, schema, name );
return new TableInformationImpl(
final TableInformationImpl tableInformation = new TableInformationImpl(
this,
identifierHelper(),
tableName,
isPhysicalTableType( resultSet.getString( "TABLE_TYPE" ) ),
resultSet.getString( "REMARKS" )
);
addColumns( tableInformation );
return tableInformation;
}
@Override
@ -247,7 +237,6 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
}
}
// 2) look in default namespace
if ( extractionContext.getDefaultCatalog() != null || extractionContext.getDefaultSchema() != null ) {
tableInfo = locateTableInNamespace(
@ -261,7 +250,6 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
}
}
// 3) look in all namespaces
try {
final String tableNameFilter = toMetaDataObjectName( tableName );
@ -274,7 +262,7 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
);
try {
return processGetTableResults(
return processTableResults(
null,
null,
tableName,
@ -295,6 +283,130 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
}
}
public NameSpaceTablesInformation getTables(Identifier catalog, Identifier schema) {
String catalogFilter = null;
String schemaFilter = null;
if ( extractionContext.getJdbcEnvironment().getNameQualifierSupport().supportsCatalogs() ) {
if ( catalog == null ) {
if ( extractionContext.getJdbcEnvironment().getCurrentCatalog() != null ) {
// 1) look in current namespace
catalogFilter = toMetaDataObjectName( extractionContext.getJdbcEnvironment().getCurrentCatalog() );
}
else if ( extractionContext.getDefaultCatalog() != null ) {
// 2) look in default namespace
catalogFilter = toMetaDataObjectName( extractionContext.getDefaultCatalog() );
}
else {
catalogFilter = "";
}
}
else {
catalogFilter = toMetaDataObjectName( catalog );
}
}
if ( extractionContext.getJdbcEnvironment().getNameQualifierSupport().supportsSchemas() ) {
if ( schema == null ) {
if ( extractionContext.getJdbcEnvironment().getCurrentSchema() != null ) {
// 1) look in current namespace
schemaFilter = toMetaDataObjectName( extractionContext.getJdbcEnvironment().getCurrentSchema() );
}
else if ( extractionContext.getDefaultSchema() != null ) {
// 2) look in default namespace
schemaFilter = toMetaDataObjectName( extractionContext.getDefaultSchema() );
}
else {
schemaFilter = "";
}
}
else {
schemaFilter = toMetaDataObjectName( schema );
}
}
try {
ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getTables(
catalogFilter,
schemaFilter,
"%",
tableTypes
);
final NameSpaceTablesInformation tablesInformation = processTableResults( resultSet );
populateTablesWithColumns( catalogFilter, schemaFilter, tablesInformation );
return tablesInformation;
}
catch (SQLException sqlException) {
throw convertSQLException( sqlException, "Error accessing table metadata" );
}
}
private void populateTablesWithColumns(
String catalogFilter,
String schemaFilter,
NameSpaceTablesInformation tables) {
try {
ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getColumns(
catalogFilter,
schemaFilter,
null,
"%"
);
try {
String currentTableName = "";
TableInformation currentTable = null;
while ( resultSet.next() ) {
if ( !currentTableName.equals( resultSet.getString( "TABLE_NAME" ) ) ) {
currentTableName = resultSet.getString( "TABLE_NAME" );
currentTable = tables.getTableInformation( currentTableName );
}
if ( currentTable != null ) {
final ColumnInformationImpl columnInformation = new ColumnInformationImpl(
currentTable,
DatabaseIdentifier.toIdentifier( resultSet.getString( "COLUMN_NAME" ) ),
resultSet.getInt( "DATA_TYPE" ),
new StringTokenizer( resultSet.getString( "TYPE_NAME" ), "() " ).nextToken(),
resultSet.getInt( "COLUMN_SIZE" ),
resultSet.getInt( "DECIMAL_DIGITS" ),
interpretTruthValue( resultSet.getString( "IS_NULLABLE" ) )
);
currentTable.addColumn( columnInformation );
}
}
}
finally {
resultSet.close();
}
}
catch (SQLException e) {
throw convertSQLException(
e,
"Error accessing tables metadata"
);
}
}
private NameSpaceTablesInformation processTableResults(ResultSet resultSet) throws SQLException {
try {
NameSpaceTablesInformation tables = new NameSpaceTablesInformation(identifierHelper());
while ( resultSet.next() ) {
final TableInformation tableInformation = extractTableInformation( resultSet );
tables.addTableInformation( tableInformation );
}
return tables;
}
finally {
try {
resultSet.close();
}
catch (SQLException ignore) {
}
}
}
private TableInformation locateTableInNamespace(
Identifier catalog,
Identifier schema,
@ -341,7 +453,7 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
tableTypes
);
return processGetTableResults(
return processTableResults(
catalogToUse,
schemaToUse,
tableName,
@ -353,7 +465,7 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
}
}
private TableInformation processGetTableResults(
private TableInformation processTableResults(
Identifier catalog,
Identifier schema,
Identifier tableName,
@ -362,7 +474,8 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
boolean found = false;
TableInformation tableInformation = null;
while ( resultSet.next() ) {
if ( tableName.equals( Identifier.toIdentifier( resultSet.getString( "TABLE_NAME" ), tableName.isQuoted() ) ) ) {
if ( tableName.equals( Identifier.toIdentifier( resultSet.getString( "TABLE_NAME" ),
tableName.isQuoted() ) ) ) {
if ( found ) {
log.multipleTablesFound( tableName.render() );
final String catalogName = catalog == null ? "" : catalog.render();
@ -379,13 +492,7 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
}
else {
found = true;
tableInformation = extractTableInformation(
catalog,
schema,
tableName,
resultSet
);
tableInformation = extractTableInformation( resultSet );
}
}
}
@ -420,69 +527,60 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
}
}
@Override
public ColumnInformation getColumn(TableInformation tableInformation, Identifier columnIdentifier) {
final Identifier catalog = tableInformation.getName().getCatalogName();
final Identifier schema = tableInformation.getName().getSchemaName();
private void addColumns(TableInformation tableInformation) {
final QualifiedTableName tableName = tableInformation.getName();
final Identifier catalog = tableName.getCatalogName();
final Identifier schema = tableName.getSchemaName();
final String catalogFilter;
final String schemaFilter;
if ( extractionContext.getJdbcEnvironment().getNameQualifierSupport().supportsCatalogs() ) {
if ( catalog == null ) {
catalogFilter = "";
}
else {
catalogFilter = toMetaDataObjectName( catalog );
}
}
else {
catalogFilter = null;
catalogFilter = catalog.getText();
}
if ( extractionContext.getJdbcEnvironment().getNameQualifierSupport().supportsSchemas() ) {
if ( schema == null ) {
schemaFilter = "";
}
else {
schemaFilter = toMetaDataObjectName( schema );
}
}
else {
schemaFilter = null;
schemaFilter = schema.getText();
}
final String tableFilter = toMetaDataObjectName( tableInformation.getName().getTableName() );
final String columnFilter = toMetaDataObjectName( columnIdentifier );
try {
ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getColumns(
catalogFilter,
schemaFilter,
tableFilter,
columnFilter
tableName.getTableName().getText(),
"%"
);
try {
if ( !resultSet.next() ) {
return null;
}
return new ColumnInformationImpl(
while ( resultSet.next() ) {
final String columnName = resultSet.getString( "COLUMN_NAME" );
final ColumnInformationImpl columnInformation = new ColumnInformationImpl(
tableInformation,
identifierHelper().toIdentifier( resultSet.getString( "COLUMN_NAME" ) ),
DatabaseIdentifier.toIdentifier( columnName ),
resultSet.getInt( "DATA_TYPE" ),
new StringTokenizer( resultSet.getString( "TYPE_NAME" ), "() " ).nextToken(),
resultSet.getInt( "COLUMN_SIZE" ),
resultSet.getInt( "DECIMAL_DIGITS" ),
interpretTruthValue( resultSet.getString( "IS_NULLABLE" ) )
);
tableInformation.addColumn( columnInformation );
}
}
finally {
resultSet.close();
}
}
catch (SQLException e) {
throw convertSQLException( e, "Error accessing column metadata: " + tableInformation.getName().toString() );
throw convertSQLException(
e,
"Error accessing column metadata: " + tableName.toString()
);
}
}
@ -498,11 +596,32 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
@Override
public PrimaryKeyInformation getPrimaryKey(TableInformationImpl tableInformation) {
final QualifiedTableName tableName = tableInformation.getName();
final Identifier catalog = tableName.getCatalogName();
final Identifier schema = tableName.getSchemaName();
final String catalogFilter;
final String schemaFilter;
if ( catalog == null ) {
catalogFilter = "";
}
else {
catalogFilter = catalog.getText();
}
if ( schema == null ) {
schemaFilter = "";
}
else {
schemaFilter = schema.getText();
}
try {
ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getPrimaryKeys(
identifierHelper().toMetaDataCatalogName( tableInformation.getName().getCatalogName() ),
identifierHelper().toMetaDataSchemaName( tableInformation.getName().getSchemaName() ),
identifierHelper().toMetaDataObjectName( tableInformation.getName().getTableName() )
catalogFilter,
schemaFilter,
tableInformation.getName().getTableName().getText()
);
final List<ColumnInformation> pkColumns = new ArrayList<ColumnInformation>();
@ -514,7 +633,7 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
final String currentPkName = resultSet.getString( "PK_NAME" );
final Identifier currentPkIdentifier = currentPkName == null
? null
: identifierHelper().toIdentifier( currentPkName );
: DatabaseIdentifier.toIdentifier( currentPkName );
if ( firstPass ) {
pkIdentifier = currentPkIdentifier;
firstPass = false;
@ -531,9 +650,10 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
}
final int columnPosition = resultSet.getInt( "KEY_SEQ" );
final String columnName = resultSet.getString( "COLUMN_NAME" );
final Identifier columnIdentifier = identifierHelper().toIdentifier( columnName );
final Identifier columnIdentifier = DatabaseIdentifier.toIdentifier(
resultSet.getString( "COLUMN_NAME" )
);
final ColumnInformation column = tableInformation.getColumn( columnIdentifier );
pkColumns.add( columnPosition-1, column );
}
@ -565,13 +685,33 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
@Override
public Iterable<IndexInformation> getIndexes(TableInformation tableInformation) {
final Map<Identifier, IndexInformationImpl.Builder> builders = new HashMap<Identifier, IndexInformationImpl.Builder>();
final Map<Identifier, IndexInformationImpl.Builder> builders = new HashMap<>();
final QualifiedTableName tableName = tableInformation.getName();
final Identifier catalog = tableName.getCatalogName();
final Identifier schema = tableName.getSchemaName();
final String catalogFilter;
final String schemaFilter;
if ( catalog == null ) {
catalogFilter = "";
}
else {
catalogFilter = catalog.getText();
}
if ( schema == null ) {
schemaFilter = "";
}
else {
schemaFilter = schema.getText();
}
try {
ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getIndexInfo(
identifierHelper().toMetaDataCatalogName( tableInformation.getName().getCatalogName() ),
identifierHelper().toMetaDataSchemaName( tableInformation.getName().getSchemaName() ),
identifierHelper().toMetaDataObjectName( tableInformation.getName().getTableName() ),
catalogFilter,
schemaFilter,
tableName.getTableName().getText(),
false, // DO NOT limit to just unique
true // DO require up-to-date results
);
@ -582,10 +722,8 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
continue;
}
final Identifier indexIdentifier = identifierHelper().toIdentifier(
resultSet.getString(
"INDEX_NAME"
)
final Identifier indexIdentifier = DatabaseIdentifier.toIdentifier(
resultSet.getString( "INDEX_NAME" )
);
IndexInformationImpl.Builder builder = builders.get( indexIdentifier );
if ( builder == null ) {
@ -593,7 +731,7 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
builders.put( indexIdentifier, builder );
}
final Identifier columnIdentifier = identifierHelper().toIdentifier( resultSet.getString( "COLUMN_NAME" ) );
final Identifier columnIdentifier = DatabaseIdentifier.toIdentifier( resultSet.getString( "COLUMN_NAME" ) );
final ColumnInformation columnInformation = tableInformation.getColumn( columnIdentifier );
if ( columnInformation == null ) {
// See HHH-10191: this may happen when dealing with Oracle/PostgreSQL function indexes
@ -626,13 +764,33 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
@Override
public Iterable<ForeignKeyInformation> getForeignKeys(TableInformation tableInformation) {
final Map<Identifier, ForeignKeyBuilder> fkBuilders = new HashMap<Identifier, ForeignKeyBuilder>();
final Map<Identifier, ForeignKeyBuilder> fkBuilders = new HashMap<>();
final QualifiedTableName tableName = tableInformation.getName();
final Identifier catalog = tableName.getCatalogName();
final Identifier schema = tableName.getSchemaName();
final String catalogFilter;
final String schemaFilter;
if ( catalog == null ) {
catalogFilter = "";
}
else {
catalogFilter = catalog.getText();
}
if ( schema == null ) {
schemaFilter = "";
}
else {
schemaFilter = schema.getText();
}
try {
ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getImportedKeys(
identifierHelper().toMetaDataCatalogName( tableInformation.getName().getCatalogName() ),
identifierHelper().toMetaDataSchemaName( tableInformation.getName().getSchemaName() ),
identifierHelper().toMetaDataObjectName( tableInformation.getName().getTableName() )
catalogFilter,
schemaFilter,
tableInformation.getName().getTableName().getText()
);
// todo : need to account for getCrossReference() as well...
@ -640,7 +798,7 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
try {
while ( resultSet.next() ) {
// IMPL NOTE : The builder is mainly used to collect the column reference mappings
final Identifier fkIdentifier = identifierHelper().toIdentifier(
final Identifier fkIdentifier = DatabaseIdentifier.toIdentifier(
resultSet.getString( "FK_NAME" )
);
ForeignKeyBuilder fkBuilder = fkBuilders.get( fkIdentifier );
@ -662,10 +820,10 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
continue;
}
final Identifier fkColumnIdentifier = identifierHelper().toIdentifier(
final Identifier fkColumnIdentifier = DatabaseIdentifier.toIdentifier(
resultSet.getString( "FKCOLUMN_NAME" )
);
final Identifier pkColumnIdentifier = identifierHelper().toIdentifier(
final Identifier pkColumnIdentifier = DatabaseIdentifier.toIdentifier(
resultSet.getString( "PKCOLUMN_NAME" )
);
@ -742,4 +900,16 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
return new QualifiedTableName( catalog, schema, table );
}
private QualifiedTableName extractTableName(ResultSet resultSet) throws SQLException {
final String incomingCatalogName = resultSet.getString( "TABLE_CAT" );
final String incomingSchemaName = resultSet.getString( "TABLE_SCHEM" );
final String incomingTableName = resultSet.getString( "TABLE_NAME" );
final DatabaseIdentifier catalog = DatabaseIdentifier.toIdentifier( incomingCatalogName );
final DatabaseIdentifier schema = DatabaseIdentifier.toIdentifier( incomingSchemaName );
final DatabaseIdentifier table = DatabaseIdentifier.toIdentifier( incomingTableName );
return new QualifiedTableName( catalog, schema, table );
}
}
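For reference, a bare-JDBC sketch of the single grouped call that `getTables` builds on, assuming an open `java.sql.Connection`; the catalog/schema filters are shown as plain arguments:

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;

public class GroupedExtractionSketch {
    static void listTables(Connection connection, String catalogFilter, String schemaFilter) throws SQLException {
        DatabaseMetaData meta = connection.getMetaData();
        // One call retrieves every table in the namespace ("%" matches all table names),
        // instead of one getTables() round trip per mapped entity.
        try ( ResultSet rs = meta.getTables( catalogFilter, schemaFilter, "%", new String[] { "TABLE" } ) ) {
            while ( rs.next() ) {
                System.out.println( rs.getString( "TABLE_NAME" ) );
            }
        }
    }
}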

View File

@ -11,6 +11,7 @@ import java.util.Map;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
import org.hibernate.tool.schema.extract.spi.ColumnInformation;
import org.hibernate.tool.schema.extract.spi.ForeignKeyInformation;
import org.hibernate.tool.schema.extract.spi.IndexInformation;
@ -27,6 +28,8 @@ import org.hibernate.tool.schema.extract.spi.TableInformation;
*/
public class TableInformationImpl implements TableInformation {
private final InformationExtractor extractor;
private final IdentifierHelper identifierHelper;
private final QualifiedTableName tableName;
private final boolean physicalTable;
private final String comment;
@ -34,15 +37,18 @@ public class TableInformationImpl implements TableInformation {
private PrimaryKeyInformation primaryKey;
private Map<Identifier, ForeignKeyInformation> foreignKeys;
private Map<Identifier, IndexInformation> indexes;
private Map<Identifier, ColumnInformation> columns = new HashMap<>( );
private boolean wasPrimaryKeyLoaded = false; // to avoid multiple db reads since primary key can be null.
public TableInformationImpl(
InformationExtractor extractor,
IdentifierHelper identifierHelper,
QualifiedTableName tableName,
boolean physicalTable,
String comment ) {
this.extractor = extractor;
this.identifierHelper = identifierHelper;
this.tableName = tableName;
this.physicalTable = physicalTable;
this.comment = comment;
@ -65,7 +71,10 @@ public class TableInformationImpl implements TableInformation {
@Override
public ColumnInformation getColumn(Identifier columnIdentifier) {
return extractor.getColumn( this, columnIdentifier );
return columns.get( new Identifier(
identifierHelper.toMetaDataObjectName( columnIdentifier ),
false
) );
}
@Override
@ -84,7 +93,7 @@ public class TableInformationImpl implements TableInformation {
protected Map<Identifier, ForeignKeyInformation> foreignKeys() {
if ( foreignKeys == null ) {
final Map<Identifier, ForeignKeyInformation> fkMap = new HashMap<Identifier, ForeignKeyInformation>();
final Map<Identifier, ForeignKeyInformation> fkMap = new HashMap<>();
final Iterable<ForeignKeyInformation> fks = extractor.getForeignKeys( this );
for ( ForeignKeyInformation fk : fks ) {
fkMap.put( fk.getForeignKeyIdentifier(), fk );
@ -96,7 +105,10 @@ public class TableInformationImpl implements TableInformation {
@Override
public ForeignKeyInformation getForeignKey(Identifier fkIdentifier) {
return foreignKeys().get( fkIdentifier );
return foreignKeys().get( new Identifier(
identifierHelper.toMetaDataObjectName( fkIdentifier ),
false
) );
}
@Override
@ -106,7 +118,7 @@ public class TableInformationImpl implements TableInformation {
protected Map<Identifier, IndexInformation> indexes() {
if ( indexes == null ) {
final Map<Identifier, IndexInformation> indexMap = new HashMap<Identifier, IndexInformation>();
final Map<Identifier, IndexInformation> indexMap = new HashMap<>();
final Iterable<IndexInformation> indexes = extractor.getIndexes( this );
for ( IndexInformation index : indexes ) {
indexMap.put( index.getIndexIdentifier(), index );
@ -116,9 +128,17 @@ public class TableInformationImpl implements TableInformation {
return indexes;
}
@Override
public void addColumn(ColumnInformation columnInformation) {
columns.put( columnInformation.getColumnIdentifier(), columnInformation );
}
@Override
public IndexInformation getIndex(Identifier indexName) {
return indexes().get( indexName );
return indexes().get( new Identifier(
identifierHelper.toMetaDataObjectName( indexName ),
false
) );
}
@Override

View File

@ -60,6 +60,15 @@ public interface DatabaseInformation {
*/
TableInformation getTableInformation(QualifiedTableName tableName);
/**
* Obtain references to all the {@link TableInformation} for a given {@link Namespace}
*
* @param namespace The {@link Namespace} which contains the {@link TableInformation}
*
* @return a {@link NameSpaceTablesInformation}
*/
NameSpaceTablesInformation getTablesInformation(Namespace namespace);
/**
* Obtain reference to the named SequenceInformation
*

View File

@ -56,15 +56,16 @@ public interface InformationExtractor {
TableInformation getTable(Identifier catalog, Identifier schema, Identifier tableName);
/**
* Return information about column for the given table. Typically called from the TableInformation itself
* as part of on-demand initialization of its state.
* Extract information about all the tables in the given catalog and schema.
*
* @param tableInformation table info for the matching table
* @param columnIdentifier The column identifier for which to locate column
* @param catalog Can be {@code null}, indicating that any catalog may be considered a match. A
* non-{@code null} value indicates that search should be limited to the passed catalog.
* @param schema Can be {@code null}, indicating that any schema may be considered a match. A
* non-{@code null} value indicates that search should be limited to the passed schema.
*
* @return The extracted column information
* @return a {@link NameSpaceTablesInformation}
*/
ColumnInformation getColumn(TableInformation tableInformation, Identifier columnIdentifier);
NameSpaceTablesInformation getTables(Identifier catalog, Identifier schema);
/**
* Extract information about the given table's primary key.

View File

@ -0,0 +1,37 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.extract.spi;
import java.util.HashMap;
import java.util.Map;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
import org.hibernate.mapping.Table;
/**
* @author Andrea Boriero
*/
public class NameSpaceTablesInformation {
private final IdentifierHelper identifierHelper;
private Map<String, TableInformation> tables = new HashMap<>();
public NameSpaceTablesInformation(IdentifierHelper identifierHelper) {
this.identifierHelper = identifierHelper;
}
public void addTableInformation(TableInformation tableInformation) {
tables.put( tableInformation.getName().getTableName().getText(), tableInformation );
}
public TableInformation getTableInformation(Table table) {
return tables.get( identifierHelper.toMetaDataObjectName( table.getQualifiedTableName().getTableName() ) );
}
public TableInformation getTableInformation(String tableName) {
return tables.get( tableName );
}
}
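A hedged sketch of how this container fits the grouped flow; the extractor, names, and lookup key are illustrative:

import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.tool.schema.extract.spi.InformationExtractor;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;

public class GroupedLookupSketch {
    static boolean tableExists(InformationExtractor extractor, Identifier catalog, Identifier schema, String tableName) {
        // One JDBC round trip populates the whole namespace...
        NameSpaceTablesInformation tables = extractor.getTables( catalog, schema );
        // ...then each mapped table is resolved from memory.
        return tables.getTableInformation( tableName ) != null;
    }
}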

View File

@ -85,4 +85,6 @@ public interface TableInformation {
* @return The matching index information. May return {@code null}
*/
public IndexInformation getIndex(Identifier indexName);
public void addColumn(ColumnInformation columnInformation);
}

View File

@ -6,8 +6,10 @@
*/
package org.hibernate.tool.schema.internal;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.hibernate.boot.Metadata;
@ -34,6 +36,7 @@ import org.hibernate.tool.hbm2ddl.UniqueConstraintSchemaUpdateStrategy;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.ForeignKeyInformation;
import org.hibernate.tool.schema.extract.spi.IndexInformation;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.SequenceInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
@ -50,26 +53,28 @@ import org.jboss.logging.Logger;
import static org.hibernate.cfg.AvailableSettings.UNIQUE_CONSTRAINT_SCHEMA_UPDATE_STRATEGY;
/**
* @author Steve Ebersole
*/
public class SchemaMigratorImpl implements SchemaMigrator {
private static final Logger log = Logger.getLogger( SchemaMigratorImpl.class );
public abstract class AbstractSchemaMigrator implements SchemaMigrator {
private static final Logger log = Logger.getLogger( AbstractSchemaMigrator.class );
private final HibernateSchemaManagementTool tool;
private final SchemaFilter schemaFilter;
protected HibernateSchemaManagementTool tool;
protected SchemaFilter schemaFilter;
private UniqueConstraintSchemaUpdateStrategy uniqueConstraintStrategy;
public SchemaMigratorImpl(HibernateSchemaManagementTool tool) {
this( tool, DefaultSchemaFilter.INSTANCE );
}
public SchemaMigratorImpl(HibernateSchemaManagementTool tool, SchemaFilter schemaFilter) {
public AbstractSchemaMigrator(
HibernateSchemaManagementTool tool,
SchemaFilter schemaFilter) {
this.tool = tool;
if ( schemaFilter == null ) {
this.schemaFilter = DefaultSchemaFilter.INSTANCE;
}
else {
this.schemaFilter = schemaFilter;
}
}
private UniqueConstraintSchemaUpdateStrategy uniqueConstraintStrategy;
/**
* For testing...
@ -80,14 +85,9 @@ public class SchemaMigratorImpl implements SchemaMigrator {
@Override
public void doMigration(Metadata metadata, ExecutionOptions options, TargetDescriptor targetDescriptor) {
if ( targetDescriptor.getTargetTypes().isEmpty() ) {
return;
}
if ( !targetDescriptor.getTargetTypes().isEmpty() ) {
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final DdlTransactionIsolator ddlTransactionIsolator = tool.getDdlTransactionIsolator( jdbcContext );
try {
final DatabaseInformation databaseInformation = Helper.buildDatabaseInformation(
tool.getServiceRegistry(),
@ -102,7 +102,23 @@ public class SchemaMigratorImpl implements SchemaMigrator {
);
try {
doMigration( metadata, databaseInformation, options, jdbcContext.getDialect(), targets );
for ( GenerationTarget target : targets ) {
target.prepare();
}
try {
performMigration( metadata, databaseInformation, options, jdbcContext.getDialect(), targets );
}
finally {
for ( GenerationTarget target : targets ) {
try {
target.release();
}
catch (Exception e) {
log.debugf( "Problem releasing GenerationTarget [%s] : %s", target, e.getMessage() );
}
}
}
}
finally {
try {
@ -117,31 +133,19 @@ public class SchemaMigratorImpl implements SchemaMigrator {
ddlTransactionIsolator.release();
}
}
}
public void doMigration(
protected abstract NameSpaceTablesInformation performTablesMigration(
Metadata metadata,
DatabaseInformation existingDatabase,
ExecutionOptions options,
Dialect dialect,
GenerationTarget... targets) {
for ( GenerationTarget target : targets ) {
target.prepare();
}
try {
performMigration( metadata, existingDatabase, options, dialect, targets );
}
finally {
for ( GenerationTarget target : targets ) {
try {
target.release();
}
catch (Exception e) {
log.debugf( "Problem releasing GenerationTarget [%s] : %s", target, e.getMessage() );
}
}
}
}
Formatter formatter,
Set<String> exportIdentifiers,
boolean tryToCreateCatalogs,
boolean tryToCreateSchemas,
Set<Identifier> exportedCatalogs,
Namespace namespace, GenerationTarget[] targets);
private void performMigration(
Metadata metadata,
@ -158,28 +162,21 @@ public class SchemaMigratorImpl implements SchemaMigrator {
// Drop all AuxiliaryDatabaseObjects
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) {
if ( !auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
continue;
}
if ( auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
applySqlStrings(
true,
dialect.getAuxiliaryDatabaseObjectExporter().getSqlDropStrings( auxiliaryDatabaseObject, metadata ),
dialect.getAuxiliaryDatabaseObjectExporter()
.getSqlDropStrings( auxiliaryDatabaseObject, metadata ),
formatter,
options,
targets
);
}
}
// Create before-table AuxiliaryDatabaseObjects
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) {
if ( auxiliaryDatabaseObject.beforeTablesOnCreation() ) {
continue;
}
if ( !auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
continue;
}
if ( !auxiliaryDatabaseObject.beforeTablesOnCreation() && auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
applySqlStrings(
true,
auxiliaryDatabaseObject.sqlCreateStrings( dialect ),
@ -188,6 +185,7 @@ public class SchemaMigratorImpl implements SchemaMigrator {
targets
);
}
}
boolean tryToCreateCatalogs = false;
boolean tryToCreateSchemas = false;
@ -199,12 +197,289 @@ public class SchemaMigratorImpl implements SchemaMigrator {
tryToCreateCatalogs = true;
}
}
Set<Identifier> exportedCatalogs = new HashSet<Identifier>();
final Map<Namespace, NameSpaceTablesInformation> tablesInformation = new HashMap<>();
Set<Identifier> exportedCatalogs = new HashSet<>();
for ( Namespace namespace : database.getNamespaces() ) {
if ( !schemaFilter.includeNamespace( namespace ) ) {
continue;
final NameSpaceTablesInformation nameSpaceTablesInformation = performTablesMigration(
metadata,
existingDatabase,
options,
dialect,
formatter,
exportIdentifiers,
tryToCreateCatalogs,
tryToCreateSchemas,
exportedCatalogs,
namespace,
targets
);
tablesInformation.put( namespace, nameSpaceTablesInformation );
if ( schemaFilter.includeNamespace( namespace ) ) {
for ( Sequence sequence : namespace.getSequences() ) {
checkExportIdentifier( sequence, exportIdentifiers );
final SequenceInformation sequenceInformation = existingDatabase.getSequenceInformation( sequence.getName() );
if ( sequenceInformation == null ) {
applySqlStrings(
false,
dialect.getSequenceExporter().getSqlCreateStrings(
sequence,
metadata
),
formatter,
options,
targets
);
}
}
}
}
//NOTE : Foreign keys must be created *after* all tables of all namespaces for cross-namespace FKs. See HHH-10420.
for ( Namespace namespace : database.getNamespaces() ) {
if ( schemaFilter.includeNamespace( namespace ) ) {
final NameSpaceTablesInformation nameSpaceTablesInformation = tablesInformation.get( namespace );
for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) ) {
final TableInformation tableInformation = nameSpaceTablesInformation.getTableInformation( table );
if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
applyForeignKeys( table, tableInformation, dialect, metadata, formatter, options, targets );
}
}
}
}
}
// Create after-table AuxiliaryDatabaseObjects
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) {
if ( auxiliaryDatabaseObject.beforeTablesOnCreation() && auxiliaryDatabaseObject.appliesToDialect( dialect )) {
applySqlStrings(
true,
auxiliaryDatabaseObject.sqlCreateStrings( dialect ),
formatter,
options,
targets
);
}
}
}
protected void createTable(
Table table,
Dialect dialect,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
applySqlStrings(
false,
dialect.getTableExporter().getSqlCreateStrings( table, metadata ),
formatter,
options,
targets
);
}
protected void migrateTable(
Table table,
TableInformation tableInformation,
Dialect dialect,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
final Database database = metadata.getDatabase();
//noinspection unchecked
applySqlStrings(
false,
table.sqlAlterStrings(
dialect,
metadata,
tableInformation,
getDefaultCatalogName( database, dialect ),
getDefaultSchemaName( database, dialect )
),
formatter,
options,
targets
);
}
protected void applyIndexes(
Table table,
TableInformation tableInformation,
Dialect dialect,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
final Exporter<Index> exporter = dialect.getIndexExporter();
final Iterator<Index> indexItr = table.getIndexIterator();
while ( indexItr.hasNext() ) {
final Index index = indexItr.next();
if ( !StringHelper.isEmpty( index.getName() ) ) {
IndexInformation existingIndex = null;
if ( tableInformation != null ) {
existingIndex = findMatchingIndex( index, tableInformation );
}
if ( existingIndex == null ) {
applySqlStrings(
false,
exporter.getSqlCreateStrings( index, metadata ),
formatter,
options,
targets
);
}
}
}
}
private IndexInformation findMatchingIndex(Index index, TableInformation tableInformation) {
return tableInformation.getIndex( Identifier.toIdentifier( index.getName() ) );
}
protected void applyUniqueKeys(
Table table,
TableInformation tableInfo,
Dialect dialect,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( uniqueConstraintStrategy == null ) {
uniqueConstraintStrategy = determineUniqueConstraintSchemaUpdateStrategy( metadata );
}
if ( uniqueConstraintStrategy != UniqueConstraintSchemaUpdateStrategy.SKIP ) {
final Exporter<Constraint> exporter = dialect.getUniqueKeyExporter();
final Iterator ukItr = table.getUniqueKeyIterator();
while ( ukItr.hasNext() ) {
final UniqueKey uniqueKey = (UniqueKey) ukItr.next();
// Skip if index already exists. Most of the time, this
// won't work since most Dialects use Constraints. However,
// keep it for the few that do use Indexes.
IndexInformation indexInfo = null;
if ( tableInfo != null && StringHelper.isNotEmpty( uniqueKey.getName() ) ) {
indexInfo = tableInfo.getIndex( Identifier.toIdentifier( uniqueKey.getName() ) );
}
if ( indexInfo == null ) {
if ( uniqueConstraintStrategy == UniqueConstraintSchemaUpdateStrategy.DROP_RECREATE_QUIETLY ) {
applySqlStrings(
true,
exporter.getSqlDropStrings( uniqueKey, metadata ),
formatter,
options,
targets
);
}
applySqlStrings(
true,
exporter.getSqlCreateStrings( uniqueKey, metadata ),
formatter,
options,
targets
);
}
}
}
}
private UniqueConstraintSchemaUpdateStrategy determineUniqueConstraintSchemaUpdateStrategy(Metadata metadata) {
final ConfigurationService cfgService = ((MetadataImplementor) metadata).getMetadataBuildingOptions()
.getServiceRegistry()
.getService( ConfigurationService.class );
return UniqueConstraintSchemaUpdateStrategy.interpret(
cfgService.getSetting( UNIQUE_CONSTRAINT_SCHEMA_UPDATE_STRATEGY, StandardConverters.STRING )
);
}
protected void applyForeignKeys(
Table table,
TableInformation tableInformation,
Dialect dialect,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( dialect.hasAlterTable() ) {
final Exporter<ForeignKey> exporter = dialect.getForeignKeyExporter();
@SuppressWarnings("unchecked")
final Iterator<ForeignKey> fkItr = table.getForeignKeyIterator();
while ( fkItr.hasNext() ) {
final ForeignKey foreignKey = fkItr.next();
if ( foreignKey.isPhysicalConstraint() && foreignKey.isCreationEnabled() ) {
ForeignKeyInformation existingForeignKey = null;
if ( tableInformation != null ) {
existingForeignKey = findMatchingForeignKey(
foreignKey,
tableInformation
);
}
if ( existingForeignKey == null ) {
// todo : shouldn't we just drop+recreate if FK exists?
// this follows the existing code from legacy SchemaUpdate which just skipped
// in old SchemaUpdate code, this was the trigger to "create"
applySqlStrings(
false,
exporter.getSqlCreateStrings( foreignKey, metadata ),
formatter,
options,
targets
);
}
}
}
}
}
private ForeignKeyInformation findMatchingForeignKey(ForeignKey foreignKey, TableInformation tableInformation) {
if ( foreignKey.getName() == null ) {
return null;
}
return tableInformation.getForeignKey( Identifier.toIdentifier( foreignKey.getName() ) );
}
protected void checkExportIdentifier(Exportable exportable, Set<String> exportIdentifiers) {
final String exportIdentifier = exportable.getExportIdentifier();
if ( exportIdentifiers.contains( exportIdentifier ) ) {
throw new SchemaManagementException(
String.format(
"Export identifier [%s] encountered more than once",
exportIdentifier
)
);
}
exportIdentifiers.add( exportIdentifier );
}
protected static void applySqlStrings(
boolean quiet,
String[] sqlStrings,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( sqlStrings != null ) {
for ( String sqlString : sqlStrings ) {
applySqlString( quiet, sqlString, formatter, options, targets );
}
}
}
protected void createSchemaAndCatalog(
DatabaseInformation existingDatabase,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
boolean tryToCreateCatalogs,
boolean tryToCreateSchemas,
Set<Identifier> exportedCatalogs, Namespace namespace, GenerationTarget[] targets) {
if ( tryToCreateCatalogs || tryToCreateSchemas ) {
if ( tryToCreateCatalogs ) {
final Identifier catalogLogicalName = namespace.getName().getCatalog();
@ -235,322 +510,6 @@ public class SchemaMigratorImpl implements SchemaMigrator {
);
}
}
for ( Table table : namespace.getTables() ) {
if ( !table.isPhysicalTable() ) {
continue;
}
if ( !schemaFilter.includeTable( table ) ) {
continue;
}
checkExportIdentifier( table, exportIdentifiers );
final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() );
if ( tableInformation != null && !tableInformation.isPhysicalTable() ) {
continue;
}
if ( tableInformation == null ) {
createTable( table, dialect, metadata, formatter, options, targets );
}
else {
migrateTable( table, tableInformation, dialect, metadata, formatter, options, targets );
}
}
for ( Table table : namespace.getTables() ) {
if ( !table.isPhysicalTable() ) {
continue;
}
if ( !schemaFilter.includeTable( table ) ) {
continue;
}
final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() );
if ( tableInformation != null && !tableInformation.isPhysicalTable() ) {
continue;
}
applyIndexes( table, tableInformation, dialect, metadata, formatter, options, targets );
applyUniqueKeys( table, tableInformation, dialect, metadata, formatter, options, targets );
}
for ( Sequence sequence : namespace.getSequences() ) {
checkExportIdentifier( sequence, exportIdentifiers );
final SequenceInformation sequenceInformation = existingDatabase.getSequenceInformation( sequence.getName() );
if ( sequenceInformation != null ) {
// nothing we really can do...
continue;
}
applySqlStrings(
false,
dialect.getSequenceExporter().getSqlCreateStrings(
sequence,
metadata
),
formatter,
options,
targets
);
}
}
//NOTE : Foreign keys must be created *after* all tables of all namespaces for cross-namespace FKs. See HHH-10420.
for ( Namespace namespace : database.getNamespaces() ) {
if ( !schemaFilter.includeNamespace( namespace ) ) {
continue;
}
for ( Table table : namespace.getTables() ) {
if ( !schemaFilter.includeTable( table ) ) {
continue;
}
final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() );
if ( tableInformation != null && !tableInformation.isPhysicalTable() ) {
continue;
}
applyForeignKeys( table, tableInformation, dialect, metadata, formatter, options, targets );
}
}
// Create after-table AuxiliaryDatabaseObjects
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) {
if ( !auxiliaryDatabaseObject.beforeTablesOnCreation() ) {
continue;
}
if ( !auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
continue;
}
applySqlStrings(
true,
auxiliaryDatabaseObject.sqlCreateStrings( dialect ),
formatter,
options,
targets
);
}
}
private void createTable(
Table table,
Dialect dialect,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
applySqlStrings(
false,
dialect.getTableExporter().getSqlCreateStrings( table, metadata ),
formatter,
options,
targets
);
}
private void migrateTable(
Table table,
TableInformation tableInformation,
Dialect dialect,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
final Database database = metadata.getDatabase();
//noinspection unchecked
applySqlStrings(
false,
table.sqlAlterStrings(
dialect,
metadata,
tableInformation,
getDefaultCatalogName( database, dialect ),
getDefaultSchemaName( database, dialect )
),
formatter,
options,
targets
);
}
private void applyIndexes(
Table table,
TableInformation tableInformation,
Dialect dialect,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
final Exporter<Index> exporter = dialect.getIndexExporter();
final Iterator<Index> indexItr = table.getIndexIterator();
while ( indexItr.hasNext() ) {
final Index index = indexItr.next();
if ( StringHelper.isEmpty( index.getName() ) ) {
continue;
}
if ( tableInformation != null ) {
final IndexInformation existingIndex = findMatchingIndex( index, tableInformation );
if ( existingIndex != null ) {
continue;
}
}
applySqlStrings(
false,
exporter.getSqlCreateStrings( index, metadata ),
formatter,
options,
targets
);
}
}
private IndexInformation findMatchingIndex(Index index, TableInformation tableInformation) {
return tableInformation.getIndex( Identifier.toIdentifier( index.getName() ) );
}
private void applyUniqueKeys(
Table table,
TableInformation tableInfo,
Dialect dialect,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( uniqueConstraintStrategy == null ) {
uniqueConstraintStrategy = determineUniqueConstraintSchemaUpdateStrategy( metadata );
}
if ( uniqueConstraintStrategy == UniqueConstraintSchemaUpdateStrategy.SKIP ) {
return;
}
final Exporter<Constraint> exporter = dialect.getUniqueKeyExporter();
final Iterator ukItr = table.getUniqueKeyIterator();
while ( ukItr.hasNext() ) {
final UniqueKey uniqueKey = (UniqueKey) ukItr.next();
// Skip if index already exists. Most of the time, this
// won't work since most Dialects use Constraints. However,
// keep it for the few that do use Indexes.
if ( tableInfo != null && StringHelper.isNotEmpty( uniqueKey.getName() ) ) {
final IndexInformation indexInfo = tableInfo.getIndex( Identifier.toIdentifier( uniqueKey.getName() ) );
if ( indexInfo != null ) {
continue;
}
}
if ( uniqueConstraintStrategy == UniqueConstraintSchemaUpdateStrategy.DROP_RECREATE_QUIETLY ) {
applySqlStrings(
true,
exporter.getSqlDropStrings( uniqueKey, metadata ),
formatter,
options,
targets
);
}
applySqlStrings(
true,
exporter.getSqlCreateStrings( uniqueKey, metadata ),
formatter,
options,
targets
);
}
}
private UniqueConstraintSchemaUpdateStrategy determineUniqueConstraintSchemaUpdateStrategy(Metadata metadata) {
final ConfigurationService cfgService = ((MetadataImplementor) metadata).getMetadataBuildingOptions()
.getServiceRegistry()
.getService( ConfigurationService.class );
return UniqueConstraintSchemaUpdateStrategy.interpret(
cfgService.getSetting( UNIQUE_CONSTRAINT_SCHEMA_UPDATE_STRATEGY, StandardConverters.STRING )
);
}
private void applyForeignKeys(
Table table,
TableInformation tableInformation,
Dialect dialect,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( !dialect.hasAlterTable() ) {
return;
}
final Exporter<ForeignKey> exporter = dialect.getForeignKeyExporter();
@SuppressWarnings("unchecked")
final Iterator<ForeignKey> fkItr = table.getForeignKeyIterator();
while ( fkItr.hasNext() ) {
final ForeignKey foreignKey = fkItr.next();
if ( !foreignKey.isPhysicalConstraint() ) {
continue;
}
if ( !foreignKey.isCreationEnabled() ) {
continue;
}
if ( tableInformation != null ) {
final ForeignKeyInformation existingForeignKey = findMatchingForeignKey( foreignKey, tableInformation );
if ( existingForeignKey != null ) {
continue;
}
}
// todo : shouldn't we just drop + recreate if the foreign key already exists?
// This follows the legacy SchemaUpdate code, which simply skipped in this case;
// in the old SchemaUpdate code, a missing FK was the trigger to "create".
applySqlStrings(
false,
exporter.getSqlCreateStrings( foreignKey, metadata ),
formatter,
options,
targets
);
}
}
private ForeignKeyInformation findMatchingForeignKey(ForeignKey foreignKey, TableInformation tableInformation) {
if ( foreignKey.getName() == null ) {
return null;
}
return tableInformation.getForeignKey( Identifier.toIdentifier( foreignKey.getName() ) );
}
private void checkExportIdentifier(Exportable exportable, Set<String> exportIdentifiers) {
final String exportIdentifier = exportable.getExportIdentifier();
if ( exportIdentifiers.contains( exportIdentifier ) ) {
throw new SchemaManagementException(
String.format(
"Export identifier [%s] encountered more than once",
exportIdentifier
)
);
}
exportIdentifiers.add( exportIdentifier );
}
private static void applySqlStrings(
boolean quiet,
String[] sqlStrings,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( sqlStrings == null ) {
return;
}
for ( String sqlString : sqlStrings ) {
applySqlString( quiet, sqlString, formatter, options, targets );
}
}
private static void applySqlString(
@ -559,10 +518,7 @@ public class SchemaMigratorImpl implements SchemaMigrator {
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( !StringHelper.isEmpty( sqlString ) ) {
for ( GenerationTarget target : targets ) {
try {
target.accept( formatter.format( sqlString ) );
@ -575,6 +531,7 @@ public class SchemaMigratorImpl implements SchemaMigrator {
}
}
}
}
private static void applySqlStrings(
boolean quiet,
@ -582,15 +539,13 @@ public class SchemaMigratorImpl implements SchemaMigrator {
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( sqlStrings != null ) {
while ( sqlStrings.hasNext() ) {
final String sqlString = sqlStrings.next();
applySqlString( quiet, sqlString, formatter, options, targets );
}
}
}
private String getDefaultCatalogName(Database database, Dialect dialect) {
final Identifier identifier = database.getDefaultNamespace().getPhysicalName().getCatalog();

View File

@ -34,19 +34,22 @@ import org.jboss.logging.Logger;
/**
* @author Steve Ebersole
*/
public abstract class AbstractSchemaValidator implements SchemaValidator {
private static final Logger log = Logger.getLogger( AbstractSchemaValidator.class );
protected HibernateSchemaManagementTool tool;
protected SchemaFilter schemaFilter;
public AbstractSchemaValidator(
HibernateSchemaManagementTool tool,
SchemaFilter validateFilter) {
this.tool = tool;
if ( validateFilter == null ) {
this.schemaFilter = DefaultSchemaFilter.INSTANCE;
}
else {
this.schemaFilter = validateFilter;
}
}
@Override
@ -82,35 +85,15 @@ public class SchemaValidatorImpl implements SchemaValidator {
ExecutionOptions options,
Dialect dialect) {
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( schemaFilter.includeNamespace( namespace ) ) {
validateTables( metadata, databaseInformation, options, dialect, namespace );
}
}
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( schemaFilter.includeNamespace( namespace ) ) {
for ( Sequence sequence : namespace.getSequences() ) {
if ( schemaFilter.includeSequence( sequence ) ) {
final SequenceInformation sequenceInformation = databaseInformation.getSequenceInformation(
sequence.getName()
);
@ -118,6 +101,14 @@ public class SchemaValidatorImpl implements SchemaValidator {
}
}
}
}
}
protected abstract void validateTables(
Metadata metadata,
DatabaseInformation databaseInformation,
ExecutionOptions options,
Dialect dialect,
Namespace namespace);
protected void validateTable(
Table table,
@ -137,10 +128,7 @@ public class SchemaValidatorImpl implements SchemaValidator {
final Iterator selectableItr = table.getColumnIterator();
while ( selectableItr.hasNext() ) {
final Selectable selectable = (Selectable) selectableItr.next();
if ( Column.class.isInstance( selectable ) ) {
final Column column = (Column) selectable;
final ColumnInformation existingColumn = tableInformation.getColumn( Identifier.toIdentifier( column.getQuotedName() ) );
if ( existingColumn == null ) {
@ -155,6 +143,7 @@ public class SchemaValidatorImpl implements SchemaValidator {
validateColumnType( table, column, existingColumn, metadata, options, dialect );
}
}
}
protected void validateColumnType(
Table table,

View File

@ -0,0 +1,92 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal;
import java.util.Set;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.internal.Formatter;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
/**
* @author Andrea Boriero
*
* This implementation executes a single {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call
* to retrieve all the database tables, in order to determine whether each {@link javax.persistence.Entity} has a corresponding mapped database table.
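*
* A minimal selection sketch (illustrative; {@code settings} stands for whatever configuration map is
* handed to the registry builder, and the key is the value of {@code AvailableSettings#HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY}):
* <pre>
* settings.put( "hibernate.hbm2ddl.jdbc_metadata_extraction_strategy", "grouped" );
* </pre>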
*/
public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
public GroupedSchemaMigratorImpl(
HibernateSchemaManagementTool tool,
SchemaFilter schemaFilter) {
super( tool, schemaFilter );
}
@Override
protected NameSpaceTablesInformation performTablesMigration(
Metadata metadata,
DatabaseInformation existingDatabase,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
Set<String> exportIdentifiers,
boolean tryToCreateCatalogs,
boolean tryToCreateSchemas,
Set<Identifier> exportedCatalogs,
Namespace namespace,
GenerationTarget[] targets) {
final NameSpaceTablesInformation tablesInformation =
new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() );
if ( schemaFilter.includeNamespace( namespace ) ) {
createSchemaAndCatalog(
existingDatabase,
options,
dialect,
formatter,
tryToCreateCatalogs,
tryToCreateSchemas,
exportedCatalogs,
namespace,
targets
);
final NameSpaceTablesInformation tables = existingDatabase.getTablesInformation( namespace );
for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
checkExportIdentifier( table, exportIdentifiers );
final TableInformation tableInformation = tables.getTableInformation( table );
if ( tableInformation == null ) {
createTable( table, dialect, metadata, formatter, options, targets );
}
else if ( tableInformation.isPhysicalTable() ) {
tablesInformation.addTableInformation( tableInformation );
migrateTable( table, tableInformation, dialect, metadata, formatter, options, targets );
}
}
}
for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
final TableInformation tableInformation = tablesInformation.getTableInformation( table );
if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
applyIndexes( table, tableInformation, dialect, metadata, formatter, options, targets );
applyUniqueKeys( table, tableInformation, dialect, metadata, formatter, options, targets );
}
}
}
}
return tablesInformation;
}
}

View File

@ -0,0 +1,52 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
/**
* @author Andrea Boriero
*
* This implementation executes a single {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call
* to retrieve all the database tables, in order to determine whether each {@link javax.persistence.Entity} has a corresponding mapped database table.
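*
* Conceptually (an illustrative sketch, not the actual extraction call site) the grouped lookup
* amounts to one wildcard metadata call per namespace instead of one call per table:
* <pre>
* databaseMetaData.getTables( catalog, schema, "%", tableTypes );
* </pre>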
*/
public class GroupedSchemaValidatorImpl extends AbstractSchemaValidator {
public GroupedSchemaValidatorImpl(
HibernateSchemaManagementTool tool,
SchemaFilter validateFilter) {
super( tool, validateFilter );
}
@Override
protected void validateTables(
Metadata metadata,
DatabaseInformation databaseInformation,
ExecutionOptions options,
Dialect dialect,
Namespace namespace) {
final NameSpaceTablesInformation tables = databaseInformation.getTablesInformation( namespace );
for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
validateTable(
table,
tables.getTableInformation( table ),
metadata,
options,
dialect
);
}
}
}
}

View File

@ -21,7 +21,7 @@ import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.resource.transaction.spi.DdlTransactionIsolator;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.internal.exec.ImprovedDatabaseInformationImpl;
import org.hibernate.tool.schema.extract.internal.DatabaseInformationImpl;
import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromFile;
import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromReader;
import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromUrl;
@ -129,7 +129,7 @@ public class Helper {
Namespace.Name defaultNamespace) {
final JdbcEnvironment jdbcEnvironment = serviceRegistry.getService( JdbcEnvironment.class );
try {
return new DatabaseInformationImpl(
serviceRegistry,
jdbcEnvironment,
ddlTransactionIsolator,

View File

@ -25,6 +25,7 @@ import org.hibernate.resource.transaction.spi.TransactionCoordinatorBuilder;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.service.spi.ServiceRegistryAwareService;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToDatabase;
@ -73,12 +74,22 @@ public class HibernateSchemaManagementTool implements SchemaManagementTool, Serv
@Override
public SchemaMigrator getSchemaMigrator(Map options) {
if ( determineJdbcMetadaAccessStrategy( options ) == JdbcMetadaAccessStrategy.GROUPED ) {
return new GroupedSchemaMigratorImpl( this, getSchemaFilterProvider( options ).getMigrateFilter() );
}
else {
return new IndividuallySchemaMigratorImpl( this, getSchemaFilterProvider( options ).getMigrateFilter() );
}
}
@Override
public SchemaValidator getSchemaValidator(Map options) {
if ( determineJdbcMetadaAccessStrategy( options ) == JdbcMetadaAccessStrategy.GROUPED ) {
return new GroupedSchemaValidatorImpl( this, getSchemaFilterProvider( options ).getValidateFilter() );
}
else {
return new IndividuallySchemaValidatorImpl( this, getSchemaFilterProvider( options ).getValidateFilter() );
}
}
private SchemaFilterProvider getSchemaFilterProvider(Map options) {
@ -92,6 +103,13 @@ public class HibernateSchemaManagementTool implements SchemaManagementTool, Serv
);
}
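// Resolves the extraction strategy from the tool options; a missing or null setting falls back to
// JdbcMetadaAccessStrategy.interpretHbm2ddlSetting( null ), i.e. the documented default ("grouped").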
private JdbcMetadaAccessStrategy determineJdbcMetadaAccessStrategy(Map options) {
if ( options == null ) {
return JdbcMetadaAccessStrategy.interpretHbm2ddlSetting( null );
}
return JdbcMetadaAccessStrategy.interpretHbm2ddlSetting( options.get( AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY ) );
}
GenerationTarget[] buildGenerationTargets(
TargetDescriptor targetDescriptor,
JdbcContext jdbcContext,

View File

@ -0,0 +1,92 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal;
import java.util.Set;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.internal.Formatter;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
/**
* @author Andrea Boriero
*
* This implementation executes one {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call
* for each {@link javax.persistence.Entity} in order to determine if a corresponding database table exists.
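*
* A rough cost sketch (illustrative): with {@code n} mapped tables this strategy issues {@code n}
* separate {@code getTables} round-trips, where the grouped strategy issues a single one:
* <pre>
* // individually: n tables -> n metadata calls
* // grouped:      n tables -> 1 metadata call
* </pre>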
*/
public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
public IndividuallySchemaMigratorImpl(
HibernateSchemaManagementTool tool,
SchemaFilter schemaFilter) {
super( tool, schemaFilter );
}
@Override
protected NameSpaceTablesInformation performTablesMigration(
Metadata metadata,
DatabaseInformation existingDatabase,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
Set<String> exportIdentifiers,
boolean tryToCreateCatalogs,
boolean tryToCreateSchemas,
Set<Identifier> exportedCatalogs,
Namespace namespace,
GenerationTarget[] targets) {
final NameSpaceTablesInformation tablesInformation =
new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() );
if ( schemaFilter.includeNamespace( namespace ) ) {
createSchemaAndCatalog(
existingDatabase,
options,
dialect,
formatter,
tryToCreateCatalogs,
tryToCreateSchemas,
exportedCatalogs,
namespace,
targets
);
for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
checkExportIdentifier( table, exportIdentifiers );
final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() );
if ( tableInformation == null ) {
createTable( table, dialect, metadata, formatter, options, targets );
}
else if ( tableInformation.isPhysicalTable() ) {
tablesInformation.addTableInformation( tableInformation );
migrateTable( table, tableInformation, dialect, metadata, formatter, options, targets );
}
}
}
for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
final TableInformation tableInformation = tablesInformation.getTableInformation( table );
if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
applyIndexes( table, tableInformation, dialect, metadata, formatter, options, targets );
applyUniqueKeys( table, tableInformation, dialect, metadata, formatter, options, targets );
}
}
}
}
return tablesInformation;
}
}

View File

@ -0,0 +1,48 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
/**
* @author Andrea Boriero
*
* This implementation executes one {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[])} call
* for each {@link javax.persistence.Entity} in order to determine if a corresponding database table exists.
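*
* A minimal selection sketch (illustrative; validation itself is triggered separately, e.g. via
* {@code new SchemaValidator().validate( metadata )} as in the tests of this commit):
* <pre>
* settings.put( "hibernate.hbm2ddl.jdbc_metadata_extraction_strategy", "individually" );
* </pre>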
*/
public class IndividuallySchemaValidatorImpl extends AbstractSchemaValidator {
public IndividuallySchemaValidatorImpl(
HibernateSchemaManagementTool tool,
SchemaFilter validateFilter) {
super( tool, validateFilter );
}
@Override
protected void validateTables(
Metadata metadata,
DatabaseInformation databaseInformation,
ExecutionOptions options,
Dialect dialect,
Namespace namespace) {
for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
final TableInformation tableInformation = databaseInformation.getTableInformation(
table.getQualifiedTableName()
);
validateTable( table, tableInformation, metadata, options, dialect );
}
}
}
}

View File

@ -41,7 +41,7 @@ public class StandardIndexExporter implements Exporter<Index> {
new QualifiedNameImpl(
index.getTable().getQualifiedTableName().getCatalogName(),
index.getTable().getQualifiedTableName().getSchemaName(),
jdbcEnvironment.getIdentifierHelper().toIdentifier( index.getQuotedName( dialect ) )
),
jdbcEnvironment.getDialect()
);

View File

@ -1,154 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.QualifiedSequenceName;
import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.resource.transaction.spi.DdlTransactionIsolator;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.extract.internal.InformationExtractorJdbcDatabaseMetaDataImpl;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.ExtractionContext;
import org.hibernate.tool.schema.extract.spi.InformationExtractor;
import org.hibernate.tool.schema.extract.spi.SequenceInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
/**
* @author Steve Ebersole
*/
public class ImprovedDatabaseInformationImpl
implements DatabaseInformation, ExtractionContext.DatabaseObjectAccess {
private final JdbcEnvironment jdbcEnvironment;
private final ImprovedExtractionContextImpl extractionContext;
private final InformationExtractor extractor;
private final Map<QualifiedSequenceName, SequenceInformation> sequenceInformationMap = new HashMap<QualifiedSequenceName, SequenceInformation>();
public ImprovedDatabaseInformationImpl(
ServiceRegistry serviceRegistry,
JdbcEnvironment jdbcEnvironment,
DdlTransactionIsolator ddlTransactionIsolator,
Namespace.Name defaultNamespace) throws SQLException {
this.jdbcEnvironment = jdbcEnvironment;
this.extractionContext = new ImprovedExtractionContextImpl(
serviceRegistry,
jdbcEnvironment,
ddlTransactionIsolator,
defaultNamespace.getCatalog(),
defaultNamespace.getSchema(),
this
);
// todo : make this pluggable
this.extractor = new InformationExtractorJdbcDatabaseMetaDataImpl( extractionContext );
// because we have not defined a way to locate sequence info by name
initializeSequences();
}
private void initializeSequences() throws SQLException {
Iterable<SequenceInformation> itr = jdbcEnvironment.getDialect()
.getSequenceInformationExtractor()
.extractMetadata( extractionContext );
for ( SequenceInformation sequenceInformation : itr ) {
sequenceInformationMap.put(
// for now, follow the legacy behavior of storing just the
// unqualified sequence name.
new QualifiedSequenceName(
null,
null,
sequenceInformation.getSequenceName().getSequenceName()
),
sequenceInformation
);
}
}
@Override
public boolean catalogExists(Identifier catalog) {
return extractor.catalogExists( catalog );
}
@Override
public boolean schemaExists(Namespace.Name namespace) {
return extractor.schemaExists( namespace.getCatalog(), namespace.getSchema() );
}
@Override
public TableInformation getTableInformation(
Identifier catalogName,
Identifier schemaName,
Identifier tableName) {
return getTableInformation( new QualifiedTableName( catalogName, schemaName, tableName ) );
}
@Override
public TableInformation getTableInformation(
Namespace.Name namespace,
Identifier tableName) {
return getTableInformation( new QualifiedTableName( namespace, tableName ) );
}
@Override
public TableInformation getTableInformation(QualifiedTableName tableName) {
if ( tableName.getObjectName() == null ) {
throw new IllegalArgumentException( "Passed table name cannot be null" );
}
return extractor.getTable(
tableName.getCatalogName(),
tableName.getSchemaName(),
tableName.getTableName()
);
}
@Override
public SequenceInformation getSequenceInformation(
Identifier catalogName,
Identifier schemaName,
Identifier sequenceName) {
return getSequenceInformation( new QualifiedSequenceName( catalogName, schemaName, sequenceName ) );
}
@Override
public SequenceInformation getSequenceInformation(Namespace.Name schemaName, Identifier sequenceName) {
return getSequenceInformation( new QualifiedSequenceName( schemaName, sequenceName ) );
}
@Override
public SequenceInformation getSequenceInformation(QualifiedSequenceName sequenceName) {
return locateSequenceInformation( sequenceName );
}
@Override
public void cleanup() {
extractionContext.cleanup();
}
@Override
public TableInformation locateTableInformation(QualifiedTableName tableName) {
return getTableInformation( tableName );
}
@Override
public SequenceInformation locateSequenceInformation(QualifiedSequenceName sequenceName) {
// again, follow legacy behavior
if ( sequenceName.getCatalogName() != null || sequenceName.getSchemaName() != null ) {
sequenceName = new QualifiedSequenceName( null, null, sequenceName.getSequenceName() );
}
return sequenceInformationMap.get( sequenceName );
}
}

View File

@ -6,6 +6,7 @@
*/
package org.hibernate.test.schematools;
import java.sql.Connection;
import java.sql.SQLException;
import org.hibernate.boot.MetadataSources;
@ -14,17 +15,22 @@ import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.Environment;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.engine.jdbc.env.internal.JdbcEnvironmentInitiator;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.resource.transaction.spi.DdlTransactionIsolator;
import org.hibernate.tool.schema.extract.internal.DatabaseInformationImpl;
import org.hibernate.tool.schema.extract.internal.ExtractionContextImpl;
import org.hibernate.tool.schema.extract.internal.InformationExtractorJdbcDatabaseMetaDataImpl;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.ExtractionContext;
import org.hibernate.tool.schema.internal.exec.JdbcContext;
import org.junit.After;
import org.junit.Test;
import org.hibernate.testing.TestForIssue;
import org.hibernate.test.util.DdlTransactionIsolatorTestingImpl;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
@ -37,7 +43,6 @@ public class TestExtraPhysicalTableTypes {
private StandardServiceRegistry ssr;
private MetadataImplementor metadata;
@After
public void tearDown() {
StandardServiceRegistryBuilder.destroy( ssr );
@ -84,12 +89,16 @@ public class TestExtraPhysicalTableTypes {
private InformationExtractorJdbcDatabaseMetaDataImplTest buildInformationExtractorJdbcDatabaseMetaDataImplTest()
throws SQLException {
Database database = metadata.getDatabase();
final ConnectionProvider connectionProvider = ssr.getService( ConnectionProvider.class );
DatabaseInformation dbInfo = new DatabaseInformationImpl(
ssr,
database.getJdbcEnvironment(),
new DdlTransactionIsolatorTestingImpl( ssr,
new JdbcEnvironmentInitiator.ConnectionProviderJdbcConnectionAccess(
connectionProvider )
),
database.getDefaultNamespace().getName()
);
ExtractionContextImpl extractionContext = new ExtractionContextImpl(
ssr,
@ -119,7 +128,6 @@ public class TestExtraPhysicalTableTypes {
}
public class InformationExtractorJdbcDatabaseMetaDataImplTest extends InformationExtractorJdbcDatabaseMetaDataImpl {
public InformationExtractorJdbcDatabaseMetaDataImplTest(ExtractionContext extractionContext) {
super( extractionContext );
}
@ -128,4 +136,27 @@ public class TestExtraPhysicalTableTypes {
return super.isPhysicalTableType( tableType );
}
}
class DdlTransactionIsolatorImpl implements DdlTransactionIsolator {
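// Minimal no-op stand-in: every method deliberately returns null or does nothing, as the enclosing
// test never drives a real DDL transaction through it.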
@Override
public JdbcContext getJdbcContext() {
return null;
}
@Override
public void prepare() {
}
@Override
public Connection getIsolatedConnection() {
return null;
}
@Override
public void release() {
}
}
}

View File

@ -0,0 +1,114 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.test.schemaupdate;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.hbm2ddl.SchemaUpdate;
import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
import org.hibernate.tool.schema.TargetType;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
/**
* @author Andrea Boriero
*/
@RunWith(Parameterized.class)
public class ColumnNamesTest {
@Parameterized.Parameters
public static Collection<String> parameters() {
return Arrays.asList(
new String[] {JdbcMetadaAccessStrategy.GROUPED.toString(), JdbcMetadaAccessStrategy.INDIVIDUALLY.toString()}
);
}
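// With two parameter values, each test method runs once per JDBC metadata extraction strategy.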
@Parameterized.Parameter
public String jdbcMetadataExtractorStrategy;
private StandardServiceRegistry ssr;
private Metadata metadata;
private File output;
@Before
public void setUp() throws IOException {
ssr = new StandardServiceRegistryBuilder()
.applySetting( AvailableSettings.KEYWORD_AUTO_QUOTING_ENABLED, "true" )
.applySetting( AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY, jdbcMetadataExtractorStrategy )
.build();
output = File.createTempFile( "update_script", ".sql" );
output.deleteOnExit();
metadata = new MetadataSources( ssr )
.addAnnotatedClass( Employee.class )
.buildMetadata();
new SchemaExport().create( EnumSet.of( TargetType.DATABASE ), metadata );
}
@After
public void tearDown() {
try {
new SchemaExport().drop( EnumSet.of( TargetType.DATABASE ), metadata );
}
finally {
StandardServiceRegistryBuilder.destroy( ssr );
}
}
@Test
public void testSchemaUpdateWithQuotedColumnNames() throws Exception {
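// setUp() already exported the schema, so this update run should match every quoted column against
// the JDBC metadata and emit an empty script.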
new SchemaUpdate()
.setOutputFile( output.getAbsolutePath() )
.execute(
EnumSet.of( TargetType.SCRIPT ),
metadata
);
final String fileContent = new String( Files.readAllBytes( output.toPath() ) );
assertThat( "The update output file should be empty", fileContent, is( "" ) );
}
@Entity
@Table(name = "Employee")
public static class Employee {
@Id
private long id;
@Column(name = "`Age`")
public String age;
@Column(name = "Name")
private String name;
private String match;
private String birthday;
private String homeAddress;
}
}

View File

@ -0,0 +1,214 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.test.schemaupdate;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.ForeignKey;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.PrimaryKeyJoinColumn;
import javax.persistence.Table;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.hbm2ddl.SchemaUpdate;
import org.hibernate.tool.hbm2ddl.SchemaValidator;
import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
import org.hibernate.tool.schema.TargetType;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
/**
* @author Andrea Boriero
*/
@RunWith(Parameterized.class)
public class SchemaUpdateTest {
@Parameterized.Parameters
public static Collection<String> parameters() {
return Arrays.asList(
new String[] {JdbcMetadaAccessStrategy.GROUPED.toString(), JdbcMetadaAccessStrategy.INDIVIDUALLY.toString()}
);
}
@Parameterized.Parameter
public String jdbcMetadataExtractorStrategy;
private File output;
private StandardServiceRegistry ssr;
private MetadataImplementor metadata;
@Before
public void setUp() throws IOException {
output = File.createTempFile( "update_script", ".sql" );
output.deleteOnExit();
ssr = new StandardServiceRegistryBuilder()
.applySetting( AvailableSettings.KEYWORD_AUTO_QUOTING_ENABLED, "true" )
.applySetting( AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY, jdbcMetadataExtractorStrategy )
.build();
final MetadataSources metadataSources = new MetadataSources( ssr );
metadataSources.addAnnotatedClass( LowercaseTableNameEntity.class );
metadataSources.addAnnotatedClass( TestEntity.class );
metadataSources.addAnnotatedClass( UppercaseTableNameEntity.class );
metadataSources.addAnnotatedClass( MixedCaseTableNameEntity.class );
metadataSources.addAnnotatedClass( Match.class );
metadataSources.addAnnotatedClass( InheritanceRootEntity.class );
metadataSources.addAnnotatedClass( InheritanceChildEntity.class );
metadataSources.addAnnotatedClass( InheritanceSecondChildEntity.class );
metadata = (MetadataImplementor) metadataSources.buildMetadata();
metadata.validate();
}
@After
public void tearsDown() {
new SchemaExport().setHaltOnError( true )
.setOutputFile( output.getAbsolutePath() )
.setFormat( false )
.drop( EnumSet.of( TargetType.DATABASE ), metadata );
StandardServiceRegistryBuilder.destroy( ssr );
}
@Test
public void testSchemaUpdateAndValidation() throws Exception {
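// The first update creates the schema; validation must then pass, and a second update run against
// the same database should have nothing left to emit.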
new SchemaUpdate().setHaltOnError( true )
.execute( EnumSet.of( TargetType.DATABASE ), metadata );
new SchemaValidator().validate( metadata );
new SchemaUpdate().setHaltOnError( true )
.setOutputFile( output.getAbsolutePath() )
.setFormat( false )
.execute( EnumSet.of( TargetType.DATABASE, TargetType.SCRIPT ), metadata );
final String fileContent = new String( Files.readAllBytes( output.toPath() ) );
assertThat( "The update output file should be empty", fileContent, is( "" ) );
}
@Entity(name = "TestEntity")
@Table(name = "`testentity`")
public static class LowercaseTableNameEntity {
@Id
long id;
String field1;
@ManyToMany(mappedBy = "entities")
Set<TestEntity> entity1s;
}
@Entity(name = "TestEntity1")
public static class TestEntity {
@Id
@Column(name = "`Id`")
long id;
String field1;
@ManyToMany
Set<LowercaseTableNameEntity> entities;
@OneToMany
@JoinColumn
private Set<UppercaseTableNameEntity> entitie2s;
@ManyToOne
private LowercaseTableNameEntity entity;
}
@Entity(name = "TestEntity2")
@Table(name = "`TESTENTITY`")
public static class UppercaseTableNameEntity {
@Id
long id;
String field1;
@ManyToOne
TestEntity testEntity;
@ManyToOne
@JoinColumn(foreignKey = @ForeignKey(name = "FK_mixedCase"))
MixedCaseTableNameEntity mixedCaseTableNameEntity;
}
@Entity(name = "TestEntity3")
@Table(name = "`TESTentity`", indexes = {@Index(name = "index1", columnList = "`FieLd1`"), @Index(name = "Index2", columnList = "`FIELD_2`")})
public static class MixedCaseTableNameEntity {
@Id
long id;
@Column(name = "`FieLd1`")
String field1;
@Column(name = "`FIELD_2`")
String field2;
@Column(name = "`field_3`")
String field3;
String field4;
@OneToMany
@JoinColumn
private Set<Match> matches = new HashSet<>();
}
@Entity(name = "Match")
public static class Match {
@Id
long id;
String match;
@ElementCollection
@CollectionTable
private Map<Integer, Integer> timeline = new TreeMap<>();
}
@Entity(name = "InheritanceRootEntity")
@Inheritance(strategy = InheritanceType.JOINED)
public static class InheritanceRootEntity {
@Id
protected Long id;
}
@Entity(name = "InheritanceChildEntity")
@PrimaryKeyJoinColumn(name = "ID", foreignKey = @ForeignKey(name = "FK_ROOT"))
public static class InheritanceChildEntity extends InheritanceRootEntity {
}
@Entity(name = "InheritanceSecondChildEntity")
@PrimaryKeyJoinColumn(name = "ID")
public static class InheritanceSecondChildEntity extends InheritanceRootEntity {
@ManyToOne
@JoinColumn
public Match match;
}
}

View File

@ -27,25 +27,28 @@ import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.hbm2ddl.SchemaUpdate;
import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.extract.internal.DatabaseInformationImpl;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.internal.DefaultSchemaFilter;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.internal.HibernateSchemaManagementTool;
import org.hibernate.tool.schema.internal.GroupedSchemaMigratorImpl;
import org.hibernate.tool.schema.internal.SchemaDropperImpl;
import org.hibernate.tool.schema.internal.SchemaMigratorImpl;
import org.hibernate.tool.schema.internal.IndividuallySchemaMigratorImpl;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToStdout;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
import org.hibernate.tool.schema.spi.SourceDescriptor;
import org.hibernate.tool.schema.spi.TargetDescriptor;
import org.hibernate.testing.DialectChecks;
import org.hibernate.testing.RequiresDialectFeature;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.hibernate.test.tool.schema.TargetDatabaseImpl;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@ -136,20 +139,8 @@ public class CrossSchemaForeignKeyGenerationTest extends BaseUnitTestCase {
final Database database = metadata.getDatabase();
final HibernateSchemaManagementTool tool = (HibernateSchemaManagementTool) ssr.getService( SchemaManagementTool.class );
final Map configurationValues = ssr.getService( ConfigurationService.class ).getSettings();
final ExecutionOptions options = new ExecutionOptions() {
@Override
public boolean shouldManageNamespaces() {
return true;
@ -164,14 +155,47 @@ public class CrossSchemaForeignKeyGenerationTest extends BaseUnitTestCase {
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
};
new IndividuallySchemaMigratorImpl( tool, DefaultSchemaFilter.INSTANCE ).doMigration(
metadata,
options,
TargetDescriptorImpl.INSTANCE
);
new SchemaDropperImpl( tool ).doDrop(
metadata,
options,
ssr.getService( JdbcEnvironment.class ).getDialect(),
new SourceDescriptor() {
@Override
public SourceType getSourceType() {
return SourceType.METADATA;
}
@Override
public ScriptSourceInput getScriptSourceInput() {
return null;
}
},
buildTargets()
);
}
@Test
@TestForIssue(jiraKey = "HHH-10420")
public void testImprovedSchemaMigrationForeignKeysAreGeneratedAfterAllTheTablesAreCreated() throws Exception {
final MetadataSources metadataSources = new MetadataSources( ssr );
metadataSources.addAnnotatedClass( SchemaOneEntity.class );
metadataSources.addAnnotatedClass( SchemaTwoEntity.class );
MetadataImplementor metadata = (MetadataImplementor) metadataSources.buildMetadata();
metadata.validate();
final HibernateSchemaManagementTool tool = (HibernateSchemaManagementTool) ssr.getService( SchemaManagementTool.class );
final Map configurationValues = ssr.getService( ConfigurationService.class ).getSettings();
final ExecutionOptions options = new ExecutionOptions() {
@Override
public boolean shouldManageNamespaces() {
return true;
@ -186,7 +210,17 @@ public class CrossSchemaForeignKeyGenerationTest extends BaseUnitTestCase {
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
};
new GroupedSchemaMigratorImpl( tool, DefaultSchemaFilter.INSTANCE ).doMigration(
metadata,
options,
TargetDescriptorImpl.INSTANCE
);
new SchemaDropperImpl( tool ).doDrop(
metadata,
options,
ssr.getService( JdbcEnvironment.class ).getDialect(),
new SourceDescriptor() {
@Override
@ -209,4 +243,19 @@ public class CrossSchemaForeignKeyGenerationTest extends BaseUnitTestCase {
new TargetDatabaseImpl( ssr.getService( JdbcServices.class ).getBootstrapJdbcConnectionAccess() )
};
}
private static class TargetDescriptorImpl implements TargetDescriptor {
public static final TargetDescriptorImpl INSTANCE = new TargetDescriptorImpl();
@Override
public EnumSet<TargetType> getTargetTypes() {
return EnumSet.of( TargetType.DATABASE );
}
@Override
public ScriptTargetOutput getScriptTargetOutput() {
return null;
}
}
}

View File

@ -6,6 +6,8 @@
*/
package org.hibernate.test.schemavalidation;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.Map;
import javax.persistence.Entity;
@ -16,7 +18,9 @@ import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
@ -29,21 +33,35 @@ import org.hibernate.tool.schema.spi.SourceDescriptor;
import org.hibernate.tool.schema.spi.TargetDescriptor;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
/**
* @author Steve Ebersole
*/
@TestForIssue(jiraKey = "HHH-9693")
@RunWith(Parameterized.class)
public class LongVarcharValidationTest implements ExecutionOptions {
@Parameterized.Parameters
public static Collection<String> parameters() {
return Arrays.asList(
new String[] {JdbcMetadaAccessStrategy.GROUPED.toString(), JdbcMetadaAccessStrategy.INDIVIDUALLY.toString()}
);
}
@Parameterized.Parameter
public String jdbcMetadataExtractorStrategy;
private StandardServiceRegistry ssr;
@Before
public void beforeTest() {
ssr = new StandardServiceRegistryBuilder()
.applySetting( AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY, jdbcMetadataExtractorStrategy )
.build();
}
@After

View File

@ -13,13 +13,18 @@ import javax.persistence.Id;
import javax.persistence.Table;
import org.hibernate.Session;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Configuration;
import org.hibernate.dialect.Oracle9iDialect;
import org.hibernate.tool.hbm2ddl.SchemaValidator;
import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
import org.hibernate.testing.RequiresDialect;
import org.hibernate.testing.junit4.BaseNonConfigCoreFunctionalTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
@ -30,32 +35,84 @@ import org.junit.Test;
*/
@RequiresDialect(Oracle9iDialect.class)
public class SynonymValidationTest extends BaseNonConfigCoreFunctionalTestCase {
private StandardServiceRegistry ssr;
@Override
protected Class<?>[] getAnnotatedClasses() {
return new Class<?>[] {TestEntity.class};
}
@Before
public void setUp() {
Session s = openSession();
try {
s.getTransaction().begin();
s.createSQLQuery( "CREATE SYNONYM test_synonym FOR test_entity" ).executeUpdate();
s.getTransaction().commit();
}
catch (Exception e) {
if ( s.getTransaction().isActive() ) {
s.getTransaction().rollback();
}
}
finally {
s.close();
}
}
@After
public void tearDown() {
Session s = openSession();
try {
s.getTransaction().begin();
s.createSQLQuery( "DROP SYNONYM test_synonym FORCE" ).executeUpdate();
s.getTransaction().commit();
}
catch (Exception e) {
if ( s.getTransaction().isActive() ) {
s.getTransaction().rollback();
}
}
finally {
s.close();
}
}
@Test
public void testSynonymUsingImprovedSchemaValidator() {
ssr = new StandardServiceRegistryBuilder()
.applySetting( AvailableSettings.ENABLE_SYNONYMS, "true" )
.applySetting( AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY, JdbcMetadaAccessStrategy.GROUPED )
.build();
try {
final MetadataSources metadataSources = new MetadataSources( ssr );
metadataSources.addAnnotatedClass( TestEntityWithSynonym.class );
metadataSources.addAnnotatedClass( TestEntity.class );
new SchemaValidator().validate( metadataSources.buildMetadata() );
}
finally {
StandardServiceRegistryBuilder.destroy( ssr );
}
}
@Test
public void testSynonymUsingSchemaValidator() {
ssr = new StandardServiceRegistryBuilder()
.applySetting( AvailableSettings.ENABLE_SYNONYMS, "true" )
.applySetting( AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY, JdbcMetadaAccessStrategy.INDIVIDUALLY )
.build();
try {
final MetadataSources metadataSources = new MetadataSources( ssr );
metadataSources.addAnnotatedClass( TestEntityWithSynonym.class );
metadataSources.addAnnotatedClass( TestEntity.class );
new SchemaValidator().validate( metadataSources.buildMetadata() );
}
finally {
StandardServiceRegistryBuilder.destroy( ssr );
}
}
@Entity
@Table(name = "test_entity")

View File

@ -0,0 +1,28 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.test.tool.schema;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.tool.schema.internal.DefaultSchemaFilter;
import org.hibernate.tool.schema.internal.GroupedSchemaValidatorImpl;
import org.hibernate.testing.RequiresDialect;
import org.hibernate.testing.TestForIssue;
/**
* @author Andrea Boriero
*/
@TestForIssue(jiraKey = "HHH-10332")
@RequiresDialect(H2Dialect.class)
public class GroupedSchemaValidatorImplTest extends IndividuallySchemaValidatorImplTest {
@Override
protected void getSchemaValidator(MetadataImplementor metadata) {
new GroupedSchemaValidatorImpl( tool, DefaultSchemaFilter.INSTANCE )
.doValidation( metadata, executionOptions );
}
}

View File

@ -26,11 +26,12 @@ import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.jdbc.connections.internal.DriverManagerConnectionProviderImpl;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.hibernate.tool.schema.internal.DefaultSchemaFilter;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.internal.HibernateSchemaManagementTool;
import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.tool.schema.internal.SchemaDropperImpl;
import org.hibernate.tool.schema.internal.SchemaValidatorImpl;
import org.hibernate.tool.schema.internal.IndividuallySchemaValidatorImpl;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToDatabase;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
@ -58,19 +59,19 @@ import static org.junit.Assert.assertEquals;
*/
@TestForIssue(jiraKey = "HHH-10332")
@RequiresDialect(H2Dialect.class)
public class IndividuallySchemaValidatorImplTest extends BaseUnitTestCase {
@Rule
public LoggerInspectionRule logInspection = new LoggerInspectionRule(
Logger.getMessageLogger( CoreMessageLogger.class, IndividuallySchemaValidatorImplTest.class.getName() ) );
private StandardServiceRegistry ssr;
protected HibernateSchemaManagementTool tool;
private Map configurationValues;
protected ExecutionOptions executionOptions;
@Before
public void setUp() throws IOException {
@ -112,7 +113,7 @@ public class SchemaValidatorImplTest extends BaseUnitTestCase {
metadata.validate();
try {
getSchemaValidator( metadata );
Assert.fail( "SchemaManagementException expected" );
}
catch (SchemaManagementException e) {
@ -129,7 +130,7 @@ public class SchemaValidatorImplTest extends BaseUnitTestCase {
metadata.validate();
try {
getSchemaValidator( metadata );
Assert.fail( "SchemaManagementException expected" );
}
catch (SchemaManagementException e) {
@ -178,7 +179,7 @@ public class SchemaValidatorImplTest extends BaseUnitTestCase {
metadata.validate();
try {
getSchemaValidator( metadata );
Assert.fail( "SchemaManagementException expected" );
}
catch (SchemaManagementException e) {
@ -233,7 +234,7 @@ public class SchemaValidatorImplTest extends BaseUnitTestCase {
metadata.validate();
try {
getSchemaValidator( metadata );
Assert.fail( "SchemaManagementException expected" );
}
catch (SchemaManagementException e) {
@ -247,6 +248,11 @@ public class SchemaValidatorImplTest extends BaseUnitTestCase {
}
}
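// Template hook: GroupedSchemaValidatorImplTest overrides this to run the same scenarios through
// the grouped validator implementation.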
protected void getSchemaValidator(MetadataImplementor metadata) {
new IndividuallySchemaValidatorImpl( tool, DefaultSchemaFilter.INSTANCE )
.doValidation( metadata, executionOptions );
}
protected Properties properties() {
Properties properties = new Properties( );
URL propertiesURL = Thread.currentThread().getContextClassLoader().getResource( "hibernate.properties" );