HHH-15729 introduce SchemaManager, a programmatic API for schema export

featuring a brand new SchemaTruncator!
Gavin 2022-11-14 17:49:43 +01:00 committed by Gavin King
parent 7d9ffc6158
commit ed956d514a
36 changed files with 1185 additions and 42 deletions

View File

@ -17,6 +17,7 @@ import javax.naming.Referenceable;
import org.hibernate.boot.spi.SessionFactoryOptions;
import org.hibernate.engine.spi.FilterDefinition;
import org.hibernate.graph.RootGraph;
import org.hibernate.relational.SchemaManager;
import org.hibernate.stat.Statistics;
import jakarta.persistence.EntityGraph;
@ -72,6 +73,16 @@ import jakarta.persistence.EntityManagerFactory;
* used in a sophisticated way by libraries or frameworks to implement generic
* concerns involving entity classes.
* <p>
* The factory also {@linkplain #getSchemaManager() provides} a
* {@link SchemaManager} which allows, as a convenience for writing tests:
* <ul>
* <li>programmatic {@linkplain SchemaManager#exportMappedObjects(boolean)
* schema export} and {@linkplain SchemaManager#dropMappedObjects(boolean)
* schema removal}, and
* <li>an operation for {@linkplain SchemaManager#truncateMappedObjects()
* cleaning up} data left behind by tests.
* </ul>
* <p>
* Every {@code SessionFactory} is a JPA {@link EntityManagerFactory}.
* Furthermore, when Hibernate is acting as the JPA persistence provider, the
* method {@link EntityManagerFactory#unwrap(Class)} may be used to obtain the
@ -262,6 +273,13 @@ public interface SessionFactory extends EntityManagerFactory, Referenceable, Ser
*/
Statistics getStatistics();
/**
* A {@link SchemaManager} with the same default catalog and schema as
* pooled connections belonging to this factory. Intended mostly as a
* convenience for writing tests.
*/
SchemaManager getSchemaManager();
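As a convenience note on the accessor above: a minimal sketch of reaching the SchemaManager from plain JPA, using the EntityManagerFactory#unwrap(Class) route mentioned earlier in this Javadoc. The class name and the "test-pu" persistence unit are assumptions for illustration, not part of this commit.

import jakarta.persistence.EntityManagerFactory;
import jakarta.persistence.Persistence;
import org.hibernate.SessionFactory;
import org.hibernate.relational.SchemaManager;

public class SchemaManagerUnwrapSketch {
	public static void main(String[] args) {
		// "test-pu" is a hypothetical persistence unit name
		EntityManagerFactory emf = Persistence.createEntityManagerFactory( "test-pu" );
		try {
			// every Hibernate EntityManagerFactory can be unwrapped to a SessionFactory
			SchemaManager schemaManager = emf.unwrap( SessionFactory.class ).getSchemaManager();
			schemaManager.exportMappedObjects( true );   // create mapped tables, and schemas if needed
			// ... test code that writes data would run here ...
			schemaManager.truncateMappedObjects();       // clean up rows left behind by the test
			schemaManager.dropMappedObjects( true );     // drop the mapped objects and schemas
		}
		finally {
			emf.close();
		}
	}
}

The same calls work directly on a SessionFactory built without JPA bootstrapping; the unwrap step is only needed when starting from an EntityManagerFactory.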
/**
* Destroy this {@code SessionFactory} and release all its resources,
* including caches and connection pools.

View File

@ -1456,8 +1456,8 @@ public interface AvailableSettings {
* actions automatically as part of the {@link org.hibernate.SessionFactory}
lifecycle. Valid options are enumerated by {@link org.hibernate.tool.schema.Action}.
* <p/>
* Interpreted in combination with {@link #HBM2DDL_DATABASE_ACTION} and
* {@link #HBM2DDL_SCRIPTS_ACTION}. If no value is specified, the default
* Interpreted in combination with {@link #JAKARTA_HBM2DDL_DATABASE_ACTION} and
* {@link #JAKARTA_HBM2DDL_SCRIPTS_ACTION}. If no value is specified, the default
* is {@link org.hibernate.tool.schema.Action#NONE "none"}.
*
* @see org.hibernate.tool.schema.Action
@ -1465,7 +1465,7 @@ public interface AvailableSettings {
String HBM2DDL_AUTO = "hibernate.hbm2ddl.auto";
/**
* @deprecated Use {@link #JAKARTA_HBM2DDL_SCRIPTS_ACTION} instead
* @deprecated Use {@link #JAKARTA_HBM2DDL_DATABASE_ACTION} instead
*/
@Deprecated
String HBM2DDL_DATABASE_ACTION = "javax.persistence.schema-generation.database.action";

View File

@ -809,4 +809,24 @@ public class DB2Dialect extends Dialect {
builder.setAutoQuoteInitialUnderscore(true);
return super.buildIdentifierHelper(builder, dbMetaData);
}
@Override
public boolean canDisableConstraints() {
return true;
}
@Override
public String getDisableConstraintStatement(String tableName, String name) {
return "alter table " + tableName + " alter foreign key " + name + " not enforced";
}
@Override
public String getEnableConstraintStatement(String tableName, String name) {
return "alter table " + tableName + " alter foreign key " + name + " enforced";
}
@Override
public String getTruncateTableStatement(String tableName) {
return super.getTruncateTableStatement(tableName) + " immediate";
}
}

View File

@ -141,8 +141,10 @@ import org.hibernate.tool.schema.internal.StandardAuxiliaryDatabaseObjectExporte
import org.hibernate.tool.schema.internal.StandardForeignKeyExporter;
import org.hibernate.tool.schema.internal.StandardIndexExporter;
import org.hibernate.tool.schema.internal.StandardSequenceExporter;
import org.hibernate.tool.schema.internal.StandardTableCleaner;
import org.hibernate.tool.schema.internal.StandardTableExporter;
import org.hibernate.tool.schema.internal.StandardUniqueKeyExporter;
import org.hibernate.tool.schema.spi.Cleaner;
import org.hibernate.tool.schema.spi.Exporter;
import org.hibernate.type.BasicType;
import org.hibernate.type.BasicTypeRegistry;
@ -2479,11 +2481,16 @@ public abstract class Dialect implements ConversionContext {
private final StandardUniqueKeyExporter uniqueKeyExporter = new StandardUniqueKeyExporter( this );
private final StandardAuxiliaryDatabaseObjectExporter auxiliaryObjectExporter = new StandardAuxiliaryDatabaseObjectExporter( this );
private final StandardTemporaryTableExporter temporaryTableExporter = new StandardTemporaryTableExporter( this );
private final StandardTableCleaner tableCleaner = new StandardTableCleaner( this );
public Exporter<Table> getTableExporter() {
return tableExporter;
}
public Cleaner getTableCleaner() {
return tableCleaner;
}
public Exporter<Sequence> getSequenceExporter() {
return sequenceExporter;
}
@ -2599,6 +2606,10 @@ public abstract class Dialect implements ConversionContext {
return true;
}
public boolean useCatalogAsSchema() {
return false;
}
/**
* Get the SQL command used to create the named schema
*
@ -3916,15 +3927,124 @@ public abstract class Dialect implements ConversionContext {
}
}
/**
* The {@code generated as} clause, or similar, for generated column
* declarations in DDL statements.
*
* @param generatedAs a SQL expression used to generate the column value
* @return The {@code generated as} clause containing the given expression
*/
public String generatedAs(String generatedAs) {
return " generated always as (" + generatedAs + ") stored";
}
/**
* Is an explicit column type required for {@code generated as} columns?
*
* @return {@code true} if an explicit type is required
*/
public boolean hasDataTypeBeforeGeneratedAs() {
return true;
}
/**
* Is there some way to disable foreign key constraint checking while
* truncating tables? (If there's no way to do it, and if we can't
* {@linkplain #canBatchTruncate() batch truncate}, we must drop and
* recreate the constraints instead.)
*
* @return {@code true} if there is some way to do it
*
* @see #getDisableConstraintsStatement()
* @see #getDisableConstraintStatement(String, String)
*/
public boolean canDisableConstraints() {
return false;
}
/**
* A SQL statement that temporarily disables foreign key constraint
* checking for all tables.
*/
public String getDisableConstraintsStatement() {
return null;
}
/**
* A SQL statement that re-enables foreign key constraint checking for
* all tables.
*/
public String getEnableConstraintsStatement() {
return null;
}
/**
* A SQL statement that temporarily disables checking of the given
* foreign key constraint.
*
* @param tableName the name of the table
* @param name the name of the constraint
*/
public String getDisableConstraintStatement(String tableName, String name) {
return null;
}
/**
* A SQL statement that re-enables checking of the given foreign key
* constraint.
*
* @param tableName the name of the table
* @param name the name of the constraint
*/
public String getEnableConstraintStatement(String tableName, String name) {
return null;
}
/**
* Does the {@link #getTruncateTableStatement(String) truncate table}
* statement accept multiple tables?
*
* @return {@code true} if it does
*/
public boolean canBatchTruncate() {
return false;
}
/**
* A SQL statement or statements that truncate the given tables.
*
* @param tableNames the names of the tables
*/
public String[] getTruncateTableStatements(String[] tableNames) {
if ( canBatchTruncate() ) {
StringBuilder builder = new StringBuilder();
for ( String tableName : tableNames ) {
if ( builder.length() > 0 ) {
builder.append(", ");
}
builder.append( tableName );
}
return new String[] { getTruncateTableStatement( builder.toString() ) };
}
else {
String[] statements = new String[tableNames.length];
for ( int i = 0; i < tableNames.length; i++ ) {
statements[i] = getTruncateTableStatement( tableNames[i] );
}
return statements;
}
}
/**
* A SQL statement that truncates the given table.
*
* @param tableName the name of the table
*/
public String getTruncateTableStatement(String tableName) {
return "truncate table " + tableName;
}
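To make the interplay of these hooks concrete, a hedged sketch of a hypothetical dialect that opts into batch truncation follows; the class name and the "cascade" suffix are illustrative only (PostgreSQL is the dialect this commit actually switches to batch truncation, and DB2 appends " immediate" in just this way).

import org.hibernate.dialect.H2Dialect;

// Hypothetical dialect, extending H2Dialect only to keep the sketch short;
// the overrides show how the truncation hooks declared above compose.
public class ExampleBatchTruncatingDialect extends H2Dialect {

	@Override
	public boolean canBatchTruncate() {
		// with this returning true, getTruncateTableStatements( new String[] {"a", "b"} )
		// yields the single statement "truncate table a, b cascade"
		return true;
	}

	@Override
	public String getTruncateTableStatement(String tableName) {
		// illustrative vendor-specific suffix, analogous to DB2's " immediate"
		return super.getTruncateTableStatement( tableName ) + " cascade";
	}
}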
/**
* Pluggable strategy for determining the {@link Size} to use for
* columns of a given SQL type.
* <p>

View File

@ -797,4 +797,19 @@ public class H2Dialect extends Dialect {
public String generatedAs(String generatedAs) {
return " generated always as (" + generatedAs + ")";
}
@Override
public boolean canDisableConstraints() {
return true;
}
@Override
public String getEnableConstraintsStatement() {
return "set referential_integrity true";
}
@Override
public String getDisableConstraintsStatement() {
return "set referential_integrity false";
}
}

View File

@ -822,6 +822,11 @@ public class MySQLDialect extends Dialect {
return false;
}
@Override
public boolean useCatalogAsSchema() {
return true;
}
@Override
public String[] getCreateSchemaCommand(String schemaName) {
throw new UnsupportedOperationException( "MySQL does not support dropping creating/dropping schemas in the JDBC sense" );
@ -1287,4 +1292,18 @@ public class MySQLDialect extends Dialect {
return getMySQLVersion().isSameOrAfter( 8 );
}
@Override
public boolean canDisableConstraints() {
return true;
}
@Override
public String getDisableConstraintsStatement() {
return "set foreign_key_checks = 0";
}
@Override
public String getEnableConstraintsStatement() {
return "set foreign_key_checks = 1";
}
}

View File

@ -1287,4 +1287,19 @@ public class OracleDialect extends Dialect {
builder.setAutoQuoteInitialUnderscore(true);
return super.buildIdentifierHelper(builder, dbMetaData);
}
@Override
public boolean canDisableConstraints() {
return true;
}
@Override
public String getDisableConstraintStatement(String tableName, String name) {
return "alter table " + tableName + " disable constraint " + name;
}
@Override
public String getEnableConstraintStatement(String tableName, String name) {
return "alter table " + tableName + " enable constraint " + name;
}
}

View File

@ -1276,4 +1276,35 @@ public class PostgreSQLDialect extends Dialect {
)
);
}
/**
* @return {@code true}, but only because we can "batch" truncate
*/
@Override
public boolean canBatchTruncate() {
return true;
}
// disabled foreign key constraints still prevent 'truncate table'
// (these would help if we used 'delete' instead of 'truncate')
// @Override
// public String getDisableConstraintsStatement() {
// return "set constraints all deferred";
// }
//
// @Override
// public String getEnableConstraintsStatement() {
// return "set constraints all immediate";
// }
//
// @Override
// public String getDisableConstraintStatement(String tableName, String name) {
// return "alter table " + tableName + " alter constraint " + name + " deferrable";
// }
//
// @Override
// public String getEnableConstraintStatement(String tableName, String name) {
// return "alter table " + tableName + " alter constraint " + name + " deferrable";
// }
}

View File

@ -68,7 +68,6 @@ import java.time.temporal.TemporalAccessor;
import java.util.Calendar;
import java.util.Date;
import java.util.TimeZone;
import java.util.UUID;
import jakarta.persistence.TemporalType;
@ -951,4 +950,17 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
public boolean hasDataTypeBeforeGeneratedAs() {
return false;
}
// disabled foreign key constraints still prevent 'truncate table'
// (these would help if we used 'delete' instead of 'truncate')
// @Override
// public String getDisableConstraintStatement(String tableName, String name) {
// return "alter table " + tableName + " nocheck constraint " + name;
// }
//
// @Override
// public String getEnableConstraintStatement(String tableName, String name) {
// return "alter table " + tableName + " with check check constraint " + name;
// }
}

View File

@ -43,6 +43,7 @@ import org.hibernate.proxy.EntityNotFoundDelegate;
import org.hibernate.query.BindableType;
import org.hibernate.query.criteria.HibernateCriteriaBuilder;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.relational.SchemaManager;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.type.Type;
@ -107,6 +108,11 @@ public class SessionFactoryDelegatingImpl implements SessionFactoryImplementor,
return delegate.getStatistics();
}
@Override
public SchemaManager getSchemaManager() {
return delegate().getSchemaManager();
}
@Override
public RuntimeMetamodelsImplementor getRuntimeMetamodels() {
return delegate.getRuntimeMetamodels();

View File

@ -112,6 +112,8 @@ import org.hibernate.query.spi.QueryImplementor;
import org.hibernate.query.sql.spi.NativeQueryImplementor;
import org.hibernate.query.sqm.NodeBuilder;
import org.hibernate.query.sqm.spi.NamedSqmQueryMemento;
import org.hibernate.relational.SchemaManager;
import org.hibernate.relational.internal.SchemaManagerImpl;
import org.hibernate.resource.jdbc.spi.PhysicalConnectionHandlingMode;
import org.hibernate.resource.jdbc.spi.StatementInspector;
import org.hibernate.resource.transaction.backend.jta.internal.synchronization.ExceptionMapper;
@ -192,6 +194,8 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
private final transient StatelessSessionBuilder defaultStatelessOptions;
private final transient EntityNameResolver entityNameResolver;
private final transient SchemaManager schemaManager;
public SessionFactoryImpl(
final MetadataImplementor bootMetamodel,
SessionFactoryOptions options) {
@ -411,6 +415,8 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
}
throw e;
}
this.schemaManager = new SchemaManagerImpl( this, bootMetamodel );
}
private SessionBuilder createDefaultSessionOpenOptionsIfPossible() {
@ -1588,6 +1594,11 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
return wrapperOptions;
}
@Override
public SchemaManager getSchemaManager() {
return schemaManager;
}
private enum Status {
OPEN,
CLOSING,

View File

@ -452,7 +452,7 @@ public class Table implements Serializable, ContributableDatabaseObject {
metadata
);
if ( column.getGeneratedAs()==null || dialect.hasDataTypeBeforeGeneratedAs() ) {
alter.append( ' ' ).append(columnType);
alter.append( ' ' ).append( columnType );
}
final String defaultValue = column.getDefaultValue();

View File

@ -0,0 +1,59 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.relational;
import org.hibernate.Incubating;
/**
* Allows programmatic {@link #exportMappedObjects schema export},
* {@link #validateMappedObjects schema validation},
* {@link #truncateMappedObjects data cleanup}, and
* {@link #dropMappedObjects schema cleanup} as a convenience for
* writing tests.
*
* @see org.hibernate.SessionFactory#getSchemaManager()
*
* @author Gavin King
*/
@Incubating
public interface SchemaManager {
/**
* Export database objects mapped by Hibernate entities.
* <p>
* Programmatic way to run {@link org.hibernate.tool.schema.spi.SchemaCreator}.
*
* @param createSchemas if {@code true}, attempt to create schemas,
* otherwise, assume the schemas already exist
*/
void exportMappedObjects(boolean createSchemas);
/**
* Drop database objects mapped by Hibernate entities, undoing the
* {@linkplain #exportMappedObjects(boolean) previous export}.
* <p>
* Programmatic way to run {@link org.hibernate.tool.schema.spi.SchemaDropper}.
*
* @param dropSchemas if {@code true}, drop schemas,
* otherwise, leave them be
*/
void dropMappedObjects(boolean dropSchemas);
/**
* Validate that the database objects mapped by Hibernate entities
* have the expected definitions.
* <p>
* Programmatic way to run {@link org.hibernate.tool.schema.spi.SchemaValidator}.
*/
void validateMappedObjects();
/**
* Truncate the database tables mapped by Hibernate entities.
* <p>
* Programmatic way to run {@link org.hibernate.tool.schema.spi.SchemaTruncator}.
*/
void truncateMappedObjects();
}
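A minimal sketch of how these four operations might be arranged around a JUnit 5 test class, assuming the entity mappings and connection settings are supplied to the Configuration elsewhere; the class itself is illustrative and not part of this commit.

import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;

class SchemaManagerLifecycleSketch {

	static SessionFactory sessionFactory;

	@BeforeAll
	static void exportSchema() {
		// assumes mappings and connection settings are picked up from configuration files
		sessionFactory = new Configuration().buildSessionFactory();
		sessionFactory.getSchemaManager().exportMappedObjects( true );
		sessionFactory.getSchemaManager().validateMappedObjects();
	}

	@AfterEach
	void cleanUpData() {
		// remove rows left behind by each test, keeping the schema in place
		sessionFactory.getSchemaManager().truncateMappedObjects();
	}

	@AfterAll
	static void dropSchema() {
		sessionFactory.getSchemaManager().dropMappedObjects( true );
		sessionFactory.close();
	}
}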

View File

@ -0,0 +1,91 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.relational.internal;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.relational.SchemaManager;
import org.hibernate.tool.schema.Action;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
import java.util.HashMap;
import java.util.Map;
/**
* Implementation of {@link SchemaManager}, backed by a {@link SessionFactoryImplementor}
* and {@link SchemaManagementToolCoordinator}.
*
* @author Gavin King
*/
public class SchemaManagerImpl implements SchemaManager {
private final SessionFactoryImplementor sessionFactory;
private final MetadataImplementor metadata;
public SchemaManagerImpl(
SessionFactoryImplementor sessionFactory,
MetadataImplementor metadata) {
this.sessionFactory = sessionFactory;
this.metadata = metadata;
}
@Override
public void exportMappedObjects(boolean createSchemas) {
Map<String, Object> properties = new HashMap<>( sessionFactory.getProperties() );
properties.put( AvailableSettings.JAKARTA_HBM2DDL_DATABASE_ACTION, Action.CREATE_ONLY );
properties.put( AvailableSettings.JAKARTA_HBM2DDL_SCRIPTS_ACTION, Action.NONE );
properties.put( AvailableSettings.JAKARTA_HBM2DDL_CREATE_SCHEMAS, createSchemas );
SchemaManagementToolCoordinator.process(
metadata,
sessionFactory.getServiceRegistry(),
properties,
action -> {}
);
}
@Override
public void dropMappedObjects(boolean dropSchemas) {
Map<String, Object> properties = new HashMap<>( sessionFactory.getProperties() );
properties.put( AvailableSettings.JAKARTA_HBM2DDL_DATABASE_ACTION, Action.DROP );
properties.put( AvailableSettings.JAKARTA_HBM2DDL_SCRIPTS_ACTION, Action.NONE );
properties.put( AvailableSettings.JAKARTA_HBM2DDL_CREATE_SCHEMAS, dropSchemas );
SchemaManagementToolCoordinator.process(
metadata,
sessionFactory.getServiceRegistry(),
properties,
action -> {}
);
}
@Override
public void validateMappedObjects() {
Map<String, Object> properties = new HashMap<>( sessionFactory.getProperties() );
properties.put( AvailableSettings.JAKARTA_HBM2DDL_DATABASE_ACTION, Action.VALIDATE );
properties.put( AvailableSettings.JAKARTA_HBM2DDL_SCRIPTS_ACTION, Action.NONE );
properties.put( AvailableSettings.JAKARTA_HBM2DDL_CREATE_SCHEMAS, false );
SchemaManagementToolCoordinator.process(
metadata,
sessionFactory.getServiceRegistry(),
properties,
action -> {}
);
}
@Override
public void truncateMappedObjects() {
Map<String, Object> properties = new HashMap<>( sessionFactory.getProperties() );
properties.put( AvailableSettings.JAKARTA_HBM2DDL_DATABASE_ACTION, Action.TRUNCATE );
properties.put( AvailableSettings.JAKARTA_HBM2DDL_SCRIPTS_ACTION, Action.NONE );
SchemaManagementToolCoordinator.process(
metadata,
sessionFactory.getServiceRegistry(),
properties,
action -> {}
);
}
}
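For comparison: exporting and dropping also have a declarative path through the standard jakarta.persistence schema-generation settings, which are the very settings the implementation above populates programmatically, whereas truncation is programmatic only, since the TRUNCATE action defines no external JPA or hbm2ddl name. A hedged sketch of the declarative path, with "test-pu" as a hypothetical persistence unit name:

import java.util.Map;
import jakarta.persistence.Persistence;

public class DeclarativeSchemaExportSketch {
	public static void main(String[] args) {
		// roughly the declarative counterpart of SchemaManager#exportMappedObjects(true),
		// applied once at startup through the standard JPA schema-generation settings
		Persistence.createEntityManagerFactory(
				"test-pu",
				Map.of(
						"jakarta.persistence.schema-generation.database.action", "create",
						"jakarta.persistence.schema-generation.create-database-schemas", "true"
				)
		).close();
	}
}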

View File

@ -0,0 +1,11 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
/**
* Programmatic access to the schema management tool.
*/
package org.hibernate.relational;

View File

@ -36,23 +36,29 @@ public class DdlTransactionIsolatorNonJtaImpl implements DdlTransactionIsolator
@Override
public Connection getIsolatedConnection() {
return getIsolatedConnection(true);
}
@Override
public Connection getIsolatedConnection(boolean autocommit) {
if ( jdbcConnection == null ) {
try {
this.jdbcConnection = jdbcContext.getJdbcConnectionAccess().obtainConnection();
try {
if ( !jdbcConnection.getAutoCommit() ) {
ConnectionAccessLogger.INSTANCE.informConnectionLocalTransactionForNonJtaDdl( jdbcContext.getJdbcConnectionAccess() );
if ( jdbcConnection.getAutoCommit() != autocommit ) {
try {
jdbcConnection.commit();
jdbcConnection.setAutoCommit( true );
if ( autocommit ) {
ConnectionAccessLogger.INSTANCE.informConnectionLocalTransactionForNonJtaDdl( jdbcContext.getJdbcConnectionAccess() );
jdbcConnection.commit();
}
jdbcConnection.setAutoCommit( autocommit );
unsetAutoCommit = true;
}
catch (SQLException e) {
throw jdbcContext.getSqlExceptionHelper().convert(
e,
"Unable to set JDBC Connection into auto-commit mode in preparation for DDL execution"
"Unable to set JDBC Connection auto-commit mode in preparation for DDL execution"
);
}
}
@ -82,12 +88,12 @@ public class DdlTransactionIsolatorNonJtaImpl implements DdlTransactionIsolator
try {
if ( unsetAutoCommit ) {
try {
jdbcConnection.setAutoCommit( false );
jdbcConnection.setAutoCommit( !jdbcConnection.getAutoCommit() );
}
catch (SQLException e) {
originalException = jdbcContext.getSqlExceptionHelper().convert(
e,
"Unable to set auto commit to false for JDBC Connection used for DDL execution" );
"Unable to unset auto-commit mode for JDBC Connection used for DDL execution" );
}
catch (Throwable t1) {
originalException = t1;

View File

@ -30,7 +30,7 @@ public class DdlTransactionIsolatorJtaImpl implements DdlTransactionIsolator {
private final JdbcContext jdbcContext;
private final Transaction suspendedTransaction;
private final Connection jdbcConnection;
private Connection jdbcConnection;
public DdlTransactionIsolatorJtaImpl(JdbcContext jdbcContext) {
this.jdbcContext = jdbcContext;
@ -55,19 +55,6 @@ public class DdlTransactionIsolatorJtaImpl implements DdlTransactionIsolator {
throw new HibernateException( "Unable to suspend current JTA transaction in preparation for DDL execution" );
}
try {
this.jdbcConnection = jdbcContext.getJdbcConnectionAccess().obtainConnection();
}
catch (SQLException e) {
throw jdbcContext.getSqlExceptionHelper().convert( e, "Unable to open JDBC Connection for DDL execution" );
}
try {
jdbcConnection.setAutoCommit( true );
}
catch (SQLException e) {
throw jdbcContext.getSqlExceptionHelper().convert( e, "Unable set JDBC Connection for DDL execution to autocommit" );
}
}
@Override
@ -77,6 +64,28 @@ public class DdlTransactionIsolatorJtaImpl implements DdlTransactionIsolator {
@Override
public Connection getIsolatedConnection() {
return getIsolatedConnection(true);
}
@Override
public Connection getIsolatedConnection(boolean autocommit) {
if ( jdbcConnection == null ) {
try {
jdbcConnection = jdbcContext.getJdbcConnectionAccess().obtainConnection();
}
catch (SQLException e) {
throw jdbcContext.getSqlExceptionHelper().convert( e, "Unable to open JDBC Connection for DDL execution" );
}
try {
if ( jdbcConnection.getAutoCommit() != autocommit ) {
jdbcConnection.setAutoCommit( autocommit );
}
}
catch (SQLException e) {
throw jdbcContext.getSqlExceptionHelper().convert( e, "Unable to set JDBC Connection for DDL execution to autocommit" );
}
}
return jdbcConnection;
}

View File

@ -21,15 +21,28 @@ public interface DdlTransactionIsolator {
JdbcContext getJdbcContext();
/**
* Returns a Connection that is usable within the bounds of the
* Returns a {@link Connection} that is usable within the bounds of the
* {@link TransactionCoordinatorBuilder#buildDdlTransactionIsolator}
* and {@link #release} calls. Further, this Connection will be
* isolated (transactionally) from any transaction in effect prior
* to the call to {@code buildDdlTransactionIsolator}.
* and {@link #release} calls, with autocommit mode enabled. Further,
* this {@code Connection} will be isolated (transactionally) from any
* transaction in effect prior to the call to
* {@code buildDdlTransactionIsolator}.
*
* @return The Connection.
*/
Connection getIsolatedConnection();
/**
* Returns a {@link Connection} that is usable within the bounds of the
* {@link TransactionCoordinatorBuilder#buildDdlTransactionIsolator}
* and {@link #release} calls, with the given autocommit mode. Further,
* this {@code Connection} will be isolated (transactionally) from any
* transaction in effect prior to the call to
* {@code buildDdlTransactionIsolator}.
*
* @return The Connection.
*/
Connection getIsolatedConnection(boolean autocommit);
void release();
}

View File

@ -60,7 +60,13 @@ public enum Action {
/**
* "update" (Hibernate only) - update (alter) the database schema
*/
UPDATE( null, "update" );
UPDATE( null, "update" ),
/**
* Truncate the tables in the schema.
*
* Corresponds to a call to {@link org.hibernate.tool.schema.spi.SchemaTruncator}.
*/
TRUNCATE( null, null);
private final String externalJpaName;
private final String externalHbm2ddlName;
@ -89,8 +95,8 @@ public enum Action {
/**
* Used when processing JPA configuration to interpret the user config values. Generally
* this will be a value specified by {@value org.hibernate.cfg.AvailableSettings#HBM2DDL_DATABASE_ACTION}
* or {@value org.hibernate.cfg.AvailableSettings#HBM2DDL_SCRIPTS_ACTION}
* this will be a value specified by {@value org.hibernate.cfg.AvailableSettings#JAKARTA_HBM2DDL_DATABASE_ACTION}
* or {@value org.hibernate.cfg.AvailableSettings#JAKARTA_HBM2DDL_SCRIPTS_ACTION}
*
* @param value The encountered config value
*

View File

@ -40,8 +40,17 @@ class DdlTransactionIsolatorProvidedConnectionImpl implements DdlTransactionIsol
@Override
public Connection getIsolatedConnection() {
return getIsolatedConnection(true);
}
@Override
public Connection getIsolatedConnection(boolean autocommit) {
try {
return jdbcContext.getJdbcConnectionAccess().obtainConnection();
Connection connection = jdbcContext.getJdbcConnectionAccess().obtainConnection();
if ( connection.getAutoCommit() != autocommit ) {
throw new SchemaManagementException( "User-provided Connection via JdbcConnectionAccessProvidedConnectionImpl has wrong auto-commit mode" );
}
return connection;
}
catch (SQLException e) {
// should never happen

View File

@ -35,4 +35,9 @@ public class DefaultSchemaFilterProvider implements SchemaFilterProvider {
public SchemaFilter getValidateFilter() {
return DefaultSchemaFilter.INSTANCE;
}
@Override
public SchemaFilter getTruncatorFilter() {
return DefaultSchemaFilter.INSTANCE;
}
}

View File

@ -46,6 +46,7 @@ import org.hibernate.tool.schema.spi.SchemaFilterProvider;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.SchemaMigrator;
import org.hibernate.tool.schema.spi.SchemaTruncator;
import org.hibernate.tool.schema.spi.SchemaValidator;
import org.hibernate.tool.schema.spi.TargetDescriptor;
@ -91,6 +92,11 @@ public class HibernateSchemaManagementTool implements SchemaManagementTool, Serv
return new SchemaDropperImpl( this, getSchemaFilterProvider( options ).getDropFilter() );
}
@Override
public SchemaTruncator getSchemaTruncator(Map<String,Object> options) {
return new SchemaTruncatorImpl( this, getSchemaFilterProvider( options ).getTruncatorFilter() );
}
@Override
public SchemaMigrator getSchemaMigrator(Map<String,Object> options) {
if ( determineJdbcMetadaAccessStrategy( options ) == JdbcMetadaAccessStrategy.GROUPED ) {
@ -166,7 +172,7 @@ public class HibernateSchemaManagementTool implements SchemaManagementTool, Serv
if ( targetDescriptor.getTargetTypes().contains( TargetType.DATABASE ) ) {
targets[index] = customTarget == null
? new GenerationTargetToDatabase( getDdlTransactionIsolator( jdbcContext ), true )
? new GenerationTargetToDatabase( getDdlTransactionIsolator( jdbcContext ), true, needsAutoCommit )
: customTarget;
index++;
}

View File

@ -0,0 +1,297 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.relational.Database;
import org.hibernate.boot.model.relational.Exportable;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.boot.model.relational.internal.SqlStringGenerationContextImpl;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.internal.FormatStyle;
import org.hibernate.engine.jdbc.internal.Formatter;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.internal.exec.JdbcContext;
import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.SchemaTruncator;
import org.hibernate.tool.schema.spi.TargetDescriptor;
import org.jboss.logging.Logger;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
/**
* @author Gavin King
*/
public class SchemaTruncatorImpl implements SchemaTruncator {
private static final Logger log = Logger.getLogger( SchemaTruncatorImpl.class );
private final HibernateSchemaManagementTool tool;
public SchemaTruncatorImpl(HibernateSchemaManagementTool tool, SchemaFilter truncatorFilter) {
this.tool = tool;
}
@Override
public void doTruncate(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
TargetDescriptor targetDescriptor) {
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final GenerationTarget[] targets = tool.buildGenerationTargets( targetDescriptor, jdbcContext, options.getConfigurationValues(),
true ); //we need autocommit on for DB2 at least
doTruncate( metadata, options, contributableInclusionFilter, jdbcContext.getDialect(), targets );
}
private void doTruncate(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect,
GenerationTarget... targets) {
for ( GenerationTarget target : targets ) {
target.prepare();
}
try {
performTruncate( metadata, options, contributableInclusionFilter, dialect, targets );
}
finally {
for ( GenerationTarget target : targets ) {
try {
target.release();
}
catch (Exception e) {
log.debugf( "Problem releasing GenerationTarget [%s] : %s", target, e.getMessage() );
}
}
}
}
private void performTruncate(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect,
GenerationTarget... targets) {
final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() );
final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter();
truncateFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
}
private void truncateFromMetadata(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect,
Formatter formatter,
GenerationTarget... targets) {
final Database database = metadata.getDatabase();
SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.fromConfigurationMap(
metadata.getDatabase().getJdbcEnvironment(), database, options.getConfigurationValues() );
final Set<String> exportIdentifiers = CollectionHelper.setOfSize( 50 );
for ( Namespace namespace : database.getNamespaces() ) {
if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
disableConstraints( namespace, metadata, formatter, options, sqlStringGenerationContext,
contributableInclusionFilter, targets );
applySqlString( dialect.getTableCleaner().getSqlBeforeString(), formatter, options, targets );
// now it's safe to drop the tables
List<Table> list = new ArrayList<>( namespace.getTables().size() );
for ( Table table : namespace.getTables() ) {
if ( ! table.isPhysicalTable() ) {
continue;
}
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionFilter.matches( table ) ) {
continue;
}
checkExportIdentifier( table, exportIdentifiers );
list.add( table );
}
applySqlStrings( dialect.getTableCleaner().getSqlTruncateStrings( list, metadata,
sqlStringGenerationContext
), formatter, options, targets );
//TODO: reset the sequences?
// for ( Sequence sequence : namespace.getSequences() ) {
// if ( ! options.getSchemaFilter().includeSequence( sequence ) ) {
// continue;
// }
// if ( ! contributableInclusionFilter.matches( sequence ) ) {
// continue;
// }
// checkExportIdentifier( sequence, exportIdentifiers );
//
// applySqlStrings( dialect.getSequenceExporter().getSqlDropStrings( sequence, metadata,
// sqlStringGenerationContext
// ), formatter, options, targets );
// }
applySqlString( dialect.getTableCleaner().getSqlAfterString(), formatter, options, targets );
enableConstraints( namespace, metadata, formatter, options, sqlStringGenerationContext,
contributableInclusionFilter, targets );
}
}
private void disableConstraints(
Namespace namespace,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
SqlStringGenerationContext sqlStringGenerationContext,
ContributableMatcher contributableInclusionFilter,
GenerationTarget... targets) {
final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
for ( Table table : namespace.getTables() ) {
if ( !table.isPhysicalTable() ) {
continue;
}
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionFilter.matches( table ) ) {
continue;
}
for ( ForeignKey foreignKey : table.getForeignKeys().values() ) {
if ( dialect.canDisableConstraints() ) {
applySqlString(
dialect.getTableCleaner().getSqlDisableConstraintString( foreignKey, metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
}
else if ( !dialect.canBatchTruncate() ) {
applySqlStrings(
dialect.getForeignKeyExporter().getSqlDropStrings( foreignKey, metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
}
}
}
}
private void enableConstraints(
Namespace namespace,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
SqlStringGenerationContext sqlStringGenerationContext,
ContributableMatcher contributableInclusionFilter,
GenerationTarget... targets) {
final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
for ( Table table : namespace.getTables() ) {
if ( !table.isPhysicalTable() ) {
continue;
}
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionFilter.matches( table ) ) {
continue;
}
for ( ForeignKey foreignKey : table.getForeignKeys().values() ) {
if ( dialect.canDisableConstraints() ) {
applySqlString(
dialect.getTableCleaner().getSqlEnableConstraintString( foreignKey, metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
}
else if ( !dialect.canBatchTruncate() ) {
applySqlStrings(
dialect.getForeignKeyExporter().getSqlCreateStrings( foreignKey, metadata,
sqlStringGenerationContext
),
formatter,
options,
targets
);
}
}
}
}
private static void checkExportIdentifier(Exportable exportable, Set<String> exportIdentifiers) {
final String exportIdentifier = exportable.getExportIdentifier();
if ( exportIdentifiers.contains( exportIdentifier ) ) {
throw new SchemaManagementException( "SQL strings added more than once for: " + exportIdentifier );
}
exportIdentifiers.add( exportIdentifier );
}
private static void applySqlStrings(
String[] sqlStrings,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( sqlStrings == null ) {
return;
}
for ( String sqlString : sqlStrings ) {
applySqlString( sqlString, formatter, options, targets );
}
}
private static void applySqlString(
String sqlString,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( StringHelper.isEmpty( sqlString ) ) {
return;
}
String sqlStringFormatted = formatter.format( sqlString );
for ( GenerationTarget target : targets ) {
try {
target.accept( sqlStringFormatted );
}
catch (CommandAcceptanceException e) {
options.getExceptionHandler().handleException( e );
}
}
}
}
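For orientation, a hedged sketch of the statement sequence this truncator ends up sending to its targets for an H2-style dialect (session-level constraint toggling, no batch truncation); the table names are hypothetical.

import java.util.List;

public class TruncationOrderSketch {
	public static void main(String[] args) {
		List<String> statements = List.of(
				"set referential_integrity false",  // Cleaner.getSqlBeforeString()
				"truncate table Book",              // one Cleaner.getSqlTruncateStrings(...) entry per table
				"truncate table Author",
				"set referential_integrity true"    // Cleaner.getSqlAfterString()
		);
		statements.forEach( System.out::println );
	}
}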

View File

@ -0,0 +1,68 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.QualifiedNameParser;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.dialect.Dialect;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.spi.Cleaner;
import java.util.Collection;
import java.util.stream.Collectors;
/**
* @author Gavin King
*/
public class StandardTableCleaner implements Cleaner {
protected final Dialect dialect;
public StandardTableCleaner(Dialect dialect) {
this.dialect = dialect;
}
@Override
public String getSqlBeforeString() {
return dialect.getDisableConstraintsStatement();
}
@Override
public String getSqlAfterString() {
return dialect.getEnableConstraintsStatement();
}
@Override
public String[] getSqlTruncateStrings(Collection<Table> tables, Metadata metadata, SqlStringGenerationContext context) {
String[] tableNames = tables.stream()
.map( table -> context.format( getTableName(table) ) )
.collect( Collectors.toList() )
.toArray( ArrayHelper.EMPTY_STRING_ARRAY );
return dialect.getTruncateTableStatements( tableNames );
}
@Override
public String getSqlDisableConstraintString(ForeignKey foreignKey, Metadata metadata, SqlStringGenerationContext context) {
return dialect.getDisableConstraintStatement( context.format( getTableName( foreignKey.getTable() ) ), foreignKey.getName() );
}
@Override
public String getSqlEnableConstraintString(ForeignKey foreignKey, Metadata metadata, SqlStringGenerationContext context) {
return dialect.getEnableConstraintStatement( context.format( getTableName( foreignKey.getTable() ) ), foreignKey.getName() );
}
private static QualifiedNameParser.NameParts getTableName(Table table) {
return new QualifiedNameParser.NameParts(
Identifier.toIdentifier(table.getCatalog(), table.isCatalogQuoted()),
Identifier.toIdentifier(table.getSchema(), table.isSchemaQuoted()),
table.getNameIdentifier()
);
}
}

View File

@ -28,14 +28,20 @@ public class GenerationTargetToDatabase implements GenerationTarget {
private final boolean releaseAfterUse;
private Statement jdbcStatement;
private boolean autocommit;
public GenerationTargetToDatabase(DdlTransactionIsolator ddlTransactionIsolator) {
this( ddlTransactionIsolator, true );
}
public GenerationTargetToDatabase(DdlTransactionIsolator ddlTransactionIsolator, boolean releaseAfterUse) {
this( ddlTransactionIsolator, releaseAfterUse, true );
}
public GenerationTargetToDatabase(DdlTransactionIsolator ddlTransactionIsolator, boolean releaseAfterUse, boolean autocommit) {
this.ddlTransactionIsolator = ddlTransactionIsolator;
this.releaseAfterUse = releaseAfterUse;
this.autocommit = autocommit;
}
@Override
@ -74,7 +80,7 @@ public class GenerationTargetToDatabase implements GenerationTarget {
private Statement jdbcStatement() {
if ( jdbcStatement == null ) {
try {
this.jdbcStatement = ddlTransactionIsolator.getIsolatedConnection().createStatement();
jdbcStatement = ddlTransactionIsolator.getIsolatedConnection( autocommit ).createStatement();
}
catch (SQLException e) {
throw ddlTransactionIsolator.getJdbcContext().getSqlExceptionHelper().convert( e, "Unable to create JDBC Statement for DDL execution" );

View File

@ -0,0 +1,50 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.spi;
import org.hibernate.Incubating;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.Table;
import java.util.Collection;
/**
* An object that produces the SQL required to truncate the tables in a schema.
*
* @author Gavin King
*/
@Incubating
public interface Cleaner {
/**
* A statement to run before beginning the process of truncating tables.
* (Usually to disable foreign key constraint enforcement.)
*/
String getSqlBeforeString();
/**
* A statement to run after ending the process of truncating tables.
* (Usually to re-enable foreign key constraint enforcement.)
*/
String getSqlAfterString();
/**
* A statement that disables the given foreign key constraint.
*/
String getSqlDisableConstraintString(ForeignKey foreignKey, Metadata metadata, SqlStringGenerationContext context);
/**
* A statement that re-enables the given foreign key constraint.
*/
String getSqlEnableConstraintString(ForeignKey foreignKey, Metadata metadata, SqlStringGenerationContext context);
/**
* A statement or statements that truncate the given tables.
*/
String[] getSqlTruncateStrings(Collection<Table> tables, Metadata metadata, SqlStringGenerationContext context);
}

View File

@ -31,6 +31,13 @@ public interface SchemaFilterProvider {
*/
SchemaFilter getDropFilter();
/**
* Get the filter to be applied to {@link SchemaTruncator} processing
*
* @return The {@link SchemaTruncator} filter
*/
SchemaFilter getTruncatorFilter();
/**
* Get the filter to be applied to {@link SchemaMigrator} processing
*

View File

@ -23,6 +23,7 @@ public interface SchemaManagementTool extends Service {
SchemaDropper getSchemaDropper(Map<String,Object> options);
SchemaMigrator getSchemaMigrator(Map<String,Object> options);
SchemaValidator getSchemaValidator(Map<String,Object> options);
SchemaTruncator getSchemaTruncator(Map<String,Object> options);
/**
* This allows to set an alternative implementation for the Database

View File

@ -297,6 +297,18 @@ public class SchemaManagementToolCoordinator {
);
break;
}
case TRUNCATE: {
tool.getSchemaTruncator( executionOptions.getConfigurationValues() ).doTruncate(
metadata,
executionOptions,
contributableInclusionFilter,
buildDatabaseTargetDescriptor(
executionOptions.getConfigurationValues(),
CreateSettingSelector.INSTANCE,
serviceRegistry
)
);
}
}
}

View File

@ -0,0 +1,30 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.spi;
import org.hibernate.Incubating;
import org.hibernate.boot.Metadata;
/**
* Service delegate for handling schema truncation.
*/
@Incubating
public interface SchemaTruncator {
/**
* Perform a schema truncation from the indicated source(s) to the indicated target(s).
* @param metadata Represents the schema to be truncated.
* @param options Options for executing the truncation
* @param contributableInclusionFilter Filter for Contributable instances to use
* @param targetDescriptor description of the target(s) for the truncate commands
*/
void doTruncate(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
TargetDescriptor targetDescriptor);
}

View File

@ -0,0 +1,84 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.orm.test.catalog;
import jakarta.persistence.CascadeType;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.ManyToOne;
import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.testing.orm.junit.DialectFeatureChecks;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.hibernate.testing.orm.junit.SkipForDialect;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/**
* @author Gavin King
*/
@DomainModel(annotatedClasses = {SchemaManagerOracleTest.Book.class, SchemaManagerOracleTest.Author.class})
@SessionFactory(exportSchema = false)
@SkipForDialect(dialectClass = PostgreSQLDialect.class, reason = "doesn't work in the CI")
@RequiresDialectFeature(feature= DialectFeatureChecks.SupportsTruncateTable.class)
public class SchemaManagerOracleTest {
@BeforeEach
public void clean(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().dropMappedObjects(false);
}
private Long countBooks(SessionImplementor s) {
return s.createQuery("select count(*) from BookForTesting", Long.class).getSingleResult();
}
@Test public void test0(SessionFactoryScope scope) {
SessionFactoryImplementor factory = scope.getSessionFactory();
factory.getSchemaManager().exportMappedObjects(true);
scope.inTransaction( s -> s.persist( new Book() ) );
factory.getSchemaManager().validateMappedObjects();
scope.inTransaction( s -> assertEquals( 1, countBooks(s) ) );
factory.getSchemaManager().truncateMappedObjects();
scope.inTransaction( s -> assertEquals( 0, countBooks(s) ) );
factory.getSchemaManager().dropMappedObjects(true);
try {
factory.getSchemaManager().validateMappedObjects();
fail();
}
catch (SchemaManagementException e) {
assertTrue( e.getMessage().contains("ForTesting") );
}
}
@Entity(name="BookForTesting")
static class Book {
@Id String isbn = "xyz123";
String title = "Hibernate in Action";
@ManyToOne(cascade = CascadeType.PERSIST)
Author author = new Author();
{
author.favoriteBook = this;
}
}
@Entity(name="AuthorForTesting")
static class Author {
@Id String name = "Christian & Gavin";
@ManyToOne
public Book favoriteBook;
}
}

View File

@ -0,0 +1,88 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.orm.test.catalog;
import jakarta.persistence.CascadeType;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.ManyToOne;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.testing.orm.junit.DialectFeatureChecks;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.hibernate.testing.orm.junit.Setting;
import org.hibernate.testing.orm.junit.SkipForDialect;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/**
* @author Gavin King
*/
@DomainModel(annotatedClasses = {SchemaManagerTest.Book.class, SchemaManagerTest.Author.class})
@SessionFactory(exportSchema = false)
@ServiceRegistry(settings = @Setting(name = AvailableSettings.DEFAULT_SCHEMA, value = "schema_manager_test"))
@SkipForDialect(dialectClass = OracleDialect.class, reason = "Oracle tests run in the DBO schema")
@RequiresDialectFeature(feature= DialectFeatureChecks.SupportsTruncateTable.class)
public class SchemaManagerTest {
@BeforeEach
public void clean(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().dropMappedObjects(true);
}
private Long countBooks(SessionImplementor s) {
return s.createQuery("select count(*) from BookForTesting", Long.class).getSingleResult();
}
@Test public void test0(SessionFactoryScope scope) {
SessionFactoryImplementor factory = scope.getSessionFactory();
factory.getSchemaManager().exportMappedObjects(true);
scope.inTransaction( s -> s.persist( new Book() ) );
factory.getSchemaManager().validateMappedObjects();
scope.inTransaction( s -> assertEquals( 1, countBooks(s) ) );
factory.getSchemaManager().truncateMappedObjects();
scope.inTransaction( s -> assertEquals( 0, countBooks(s) ) );
factory.getSchemaManager().dropMappedObjects(true);
try {
factory.getSchemaManager().validateMappedObjects();
fail();
}
catch (SchemaManagementException e) {
assertTrue( e.getMessage().contains("ForTesting") );
}
}
@Entity(name="BookForTesting")
static class Book {
@Id String isbn = "xyz123";
String title = "Hibernate in Action";
@ManyToOne(cascade = CascadeType.PERSIST)
Author author = new Author();
{
author.favoriteBook = this;
}
}
@Entity(name="AuthorForTesting")
static class Author {
@Id String name = "Christian & Gavin";
@ManyToOne
public Book favoriteBook;
}
}

View File

@ -30,7 +30,6 @@ import static org.junit.jupiter.api.Assertions.fail;
/**
* @author Emmanuel Bernard
*/
@SuppressWarnings("unchecked")
@Jpa(
annotatedClasses = {
Music.class,

View File

@ -51,6 +51,7 @@ public class SchemaDatabaseFileGenerationFailureTest {
@BeforeEach
public void setUp() throws IOException, SQLException {
connection = Mockito.mock( Connection.class );
when( connection.getAutoCommit() ).thenReturn( true );
Statement statement = Mockito.mock( Statement.class );
when( connection.createStatement() ).thenReturn( statement );
when( statement.execute( anyString() ) ).thenThrow( new SQLException( "Expected" ) );
@ -73,7 +74,7 @@ public class SchemaDatabaseFileGenerationFailureTest {
public void testErrorMessageContainsTheFailingDDLCommand() {
try {
entityManagerFactoryBuilder.generateSchema();
fail( "Should haave thrown IOException" );
fail( "Should have thrown IOException" );
}
catch (Exception e) {
assertTrue( e instanceof PersistenceException );

View File

@ -205,6 +205,11 @@ public class TestExtraPhysicalTableTypes {
return null;
}
@Override
public Connection getIsolatedConnection(boolean autocommit) {
return null;
}
@Override
public void release() {

View File

@ -277,8 +277,7 @@ abstract public class DialectFeatureChecks {
public static class SupportsWithTies implements DialectFeatureCheck {
public boolean apply(Dialect dialect) {
return dialect.supportsFetchClause( FetchClauseType.ROWS_WITH_TIES )
|| dialect.supportsWindowFunctions()
;
|| dialect.supportsWindowFunctions();
}
}
@ -462,4 +461,18 @@ abstract public class DialectFeatureChecks {
return dialect.supportsRecursiveCTE();
}
}
public static class SupportsTruncateTable implements DialectFeatureCheck {
public boolean apply(Dialect dialect) {
return dialect instanceof MySQLDialect
|| dialect instanceof H2Dialect
|| dialect instanceof SQLServerDialect
|| dialect instanceof PostgreSQLDialect
|| dialect instanceof DB2Dialect
|| dialect instanceof OracleDialect
|| dialect instanceof SybaseDialect
|| dialect instanceof DerbyDialect
|| dialect instanceof HSQLDialect;
}
}
}