HHH-10458 - Better encapsulate SchemaManagementTool (OGM) - unify JPA schema generation and hbm2ddl capabilities;

HHH-10487 - Add @Incubating annotation
This commit is contained in:
Steve Ebersole 2016-02-05 13:16:57 -06:00
parent eea9a943b3
commit 17de173cb5
165 changed files with 5744 additions and 3500 deletions

View File

@ -0,0 +1,23 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.PACKAGE;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.CLASS;
/**
 * Marks a package, type or method whose API is still incubating — i.e.
 * newly introduced and potentially subject to change in a later release.
 * (NOTE(review): purpose inferred from the annotation name and the
 * HHH-10487 commit title — confirm against the release notes.)
 * <p>
 * Retention is {@link java.lang.annotation.RetentionPolicy#CLASS}: the
 * annotation is recorded in the class file but is not available for
 * runtime reflection.
 *
 * @author Steve Ebersole
 */
@Target({PACKAGE, TYPE, METHOD})
@Retention(CLASS)
public @interface Incubating {
}

View File

@ -243,11 +243,27 @@ public class ClassLoaderServiceImpl implements ClassLoaderService {
}
try {
return getAggregatedClassLoader().getResource( name );
final URL url = getAggregatedClassLoader().getResource( name );
if ( url != null ) {
return url;
}
}
catch (Exception ignore) {
}
if ( name.startsWith( "/" ) ) {
name = name.substring( 1 );
try {
final URL url = getAggregatedClassLoader().getResource( name );
if ( url != null ) {
return url;
}
}
catch (Exception ignore) {
}
}
return null;
}

View File

@ -7,6 +7,7 @@
package org.hibernate.cfg;
import org.hibernate.boot.MetadataBuilder;
import org.hibernate.tool.schema.SourceType;
/**
* @author Steve Ebersole
@ -560,12 +561,6 @@ public interface AvailableSettings {
*/
String QUERY_STARTUP_CHECKING = "hibernate.query.startup_check";
/**
* Auto export/update schema using hbm2ddl tool. Valid values are <tt>update</tt>,
* <tt>create</tt>, <tt>create-drop</tt> and <tt>validate</tt>.
*/
String HBM2DDL_AUTO = "hibernate.hbm2ddl.auto";
/**
* The {@link org.hibernate.exception.spi.SQLExceptionConverter} to use for converting SQLExceptions
* to Hibernate's JDBCException hierarchy. The default is to use the configured
@ -738,46 +733,6 @@ public interface AvailableSettings {
// Still to categorize
/**
* Comma-separated names of the optional files containing SQL DML statements executed
* during the SessionFactory creation.
* File order matters, the statements of a given file are executed before the statements of the
* following files.
*
* These statements are only executed if the schema is created ie if <tt>hibernate.hbm2ddl.auto</tt>
* is set to <tt>create</tt> or <tt>create-drop</tt>.
*
* The default value is <tt>/import.sql</tt>
*/
String HBM2DDL_IMPORT_FILES = "hibernate.hbm2ddl.import_files";
/**
* {@link String} reference to {@link org.hibernate.tool.hbm2ddl.ImportSqlCommandExtractor} implementation class.
* Referenced implementation is required to provide non-argument constructor.
*
* The default value is <tt>org.hibernate.tool.hbm2ddl.SingleLineSqlCommandExtractor</tt>.
*/
String HBM2DDL_IMPORT_FILES_SQL_EXTRACTOR = "hibernate.hbm2ddl.import_files_sql_extractor";
/**
* Specifies whether to automatically create also the database schema/catalog.
* The default is false.
*
* @since 5.0
*/
String HBM2DLL_CREATE_NAMESPACES = "hibernate.hbm2dll.create_namespaces";
/**
* Used to specify the {@link org.hibernate.tool.schema.spi.SchemaFilterProvider} to be used by
* create, drop, migrate and validate operations on the database schema. SchemaFilterProvider
* provides filters that can be used to limit the scope of these operations to specific namespaces,
* tables and sequences. All objects are included by default.
*
* @since 5.1
*/
String SCHEMA_FILTER_PROVIDER = "hibernate.schema.filter.provider";
/**
* The EntityMode in which set the Session opened from the SessionFactory.
*/
@ -867,6 +822,240 @@ public interface AvailableSettings {
String NON_CONTEXTUAL_LOB_CREATION = "hibernate.jdbc.lob.non_contextual_creation";
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// SchemaManagementTool settings
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* Setting to perform SchemaManagementTool actions automatically as part of
* the SessionFactory lifecycle. Valid options are defined by the
* {@link org.hibernate.tool.schema.Action} enum.
* <p/>
* Interpreted in combination with {@link #HBM2DDL_DATABASE_ACTION} and
* {@link #HBM2DDL_SCRIPTS_ACTION}. If no value is specified, the default
* is "none" ({@link org.hibernate.tool.schema.Action#NONE}).
*
* @see org.hibernate.tool.schema.Action
*/
String HBM2DDL_AUTO = "hibernate.hbm2ddl.auto";
/**
* Setting to perform SchemaManagementTool actions against the database directly via JDBC
* automatically as part of the SessionFactory lifecycle. Valid options are defined by the
* {@link org.hibernate.tool.schema.Action} enum.
* <p/>
* Interpreted in combination with {@link #HBM2DDL_AUTO}. If no value is specified, the default
* is "none" ({@link org.hibernate.tool.schema.Action#NONE}).
*
* @see org.hibernate.tool.schema.Action
*/
String HBM2DDL_DATABASE_ACTION = "javax.persistence.schema-generation.database.action";
/**
* Setting to perform SchemaManagementTool actions writing the commands into a DDL script file.
* Valid options are defined by the {@link org.hibernate.tool.schema.Action} enum.
* <p/>
* Interpreted in combination with {@link #HBM2DDL_AUTO}. If no value is specified, the default
* is "none" ({@link org.hibernate.tool.schema.Action#NONE}).
*
* @see org.hibernate.tool.schema.Action
*/
String HBM2DDL_SCRIPTS_ACTION = "javax.persistence.schema-generation.scripts.action";
/**
* Allows passing a specific {@link java.sql.Connection} instance to be used by SchemaManagementTool.
* <p/>
* May also be used to determine the values for {@value #HBM2DDL_DB_NAME},
* {@value #HBM2DDL_DB_MAJOR_VERSION} and {@value #HBM2DDL_DB_MINOR_VERSION}.
*/
String HBM2DDL_CONNECTION = "javax.persistence.schema-generation-connection";
/**
* Specifies the name of the database provider in cases where a Connection to the underlying database is
* not available (aka, mainly in generating scripts). In such cases, a value for this setting
* *must* be specified.
* <p/>
* The value of this setting is expected to match the value returned by
* {@link java.sql.DatabaseMetaData#getDatabaseProductName()} for the target database.
* <p/>
* Additionally specifying {@value #HBM2DDL_DB_MAJOR_VERSION} and/or {@value #HBM2DDL_DB_MINOR_VERSION}
* may be required to understand exactly how to generate the required schema commands.
*
* @see #HBM2DDL_DB_MAJOR_VERSION
* @see #HBM2DDL_DB_MINOR_VERSION
*/
@SuppressWarnings("JavaDoc")
String HBM2DDL_DB_NAME = "javax.persistence.database-product-name";
/**
* Specifies the major version of the underlying database, as would be returned by
* {@link java.sql.DatabaseMetaData#getDatabaseMajorVersion} for the target database. This value is used to
* help more precisely determine how to perform schema generation tasks for the underlying database in cases
* where {@value #HBM2DDL_DB_NAME} does not provide enough distinction.
* @see #HBM2DDL_DB_NAME
* @see #HBM2DDL_DB_MINOR_VERSION
*/
String HBM2DDL_DB_MAJOR_VERSION = "javax.persistence.database-major-version";
/**
* Specifies the minor version of the underlying database, as would be returned by
* {@link java.sql.DatabaseMetaData#getDatabaseMinorVersion} for the target database. This value is used to
* help more precisely determine how to perform schema generation tasks for the underlying database in cases
* where the combination of {@value #HBM2DDL_DB_NAME} and {@value #HBM2DDL_DB_MAJOR_VERSION} does not provide
* enough distinction.
*
* @see #HBM2DDL_DB_NAME
* @see #HBM2DDL_DB_MAJOR_VERSION
*/
String HBM2DDL_DB_MINOR_VERSION = "javax.persistence.database-minor-version";
/**
* Specifies whether schema generation commands for schema creation are to be determined based on object/relational
* mapping metadata, DDL scripts, or a combination of the two. See {@link SourceType} for valid set of values.
* If no value is specified, a default is assumed as follows:<ul>
* <li>
* if source scripts are specified (per {@value #HBM2DDL_CREATE_SCRIPT_SOURCE}), then "scripts" is assumed
* </li>
* <li>
* otherwise, "metadata" is assumed
* </li>
* </ul>
*
* @see SourceType
*/
String HBM2DDL_CREATE_SOURCE = "javax.persistence.schema-generation.create-source";
/**
* Specifies whether schema generation commands for schema dropping are to be determined based on object/relational
* mapping metadata, DDL scripts, or a combination of the two. See {@link SourceType} for valid set of values.
* If no value is specified, a default is assumed as follows:<ul>
* <li>
* if source scripts are specified (per {@value #HBM2DDL_DROP_SCRIPT_SOURCE}), then "scripts" is assumed
* </li>
* <li>
* otherwise, "metadata" is assumed
* </li>
* </ul>
*
* @see SourceType
*/
String HBM2DDL_DROP_SOURCE = "javax.persistence.schema-generation.drop-source";
/**
* Specifies the CREATE script file as either a {@link java.io.Reader} configured for reading of the DDL script
* file or a string designating a file {@link java.net.URL} for the DDL script.
* <p/>
* Hibernate historically also accepted {@link #HBM2DDL_IMPORT_FILES} for a similar purpose. This setting
* should be preferred over {@link #HBM2DDL_IMPORT_FILES} moving forward
*
* @see #HBM2DDL_CREATE_SOURCE
* @see #HBM2DDL_IMPORT_FILES
*/
String HBM2DDL_CREATE_SCRIPT_SOURCE = "javax.persistence.schema-generation.create-script-source";
/**
* Specifies the DROP script file as either a {@link java.io.Reader} configured for reading of the DDL script
* file or a string designating a file {@link java.net.URL} for the DDL script.
*
* @see #HBM2DDL_DROP_SOURCE
*/
String HBM2DDL_DROP_SCRIPT_SOURCE = "javax.persistence.schema-generation.drop-script-source";
/**
* For cases where the {@value #HBM2DDL_SCRIPTS_ACTION} value indicates that schema creation commands should
* be written to DDL script file, {@value #HBM2DDL_SCRIPTS_CREATE_TARGET} specifies either a
* {@link java.io.Writer} configured for output of the DDL script or a string specifying the file URL for the DDL
* script.
*
* @see #HBM2DDL_SCRIPTS_ACTION
*/
@SuppressWarnings("JavaDoc")
String HBM2DDL_SCRIPTS_CREATE_TARGET = "javax.persistence.schema-generation.scripts.create-target";
/**
* For cases where the {@value #HBM2DDL_SCRIPTS_ACTION} value indicates that schema drop commands should
* be written to DDL script file, {@value #HBM2DDL_SCRIPTS_DROP_TARGET} specifies either a
* {@link java.io.Writer} configured for output of the DDL script or a string specifying the file URL for the DDL
* script.
*
* @see #HBM2DDL_SCRIPTS_ACTION
*/
@SuppressWarnings("JavaDoc")
String HBM2DDL_SCRIPTS_DROP_TARGET = "javax.persistence.schema-generation.scripts.drop-target";
/**
* Comma-separated names of the optional files containing SQL DML statements executed
* during the SessionFactory creation.
* File order matters, the statements of a given file are executed before the statements of the
* following files.
* <p/>
* These statements are only executed if the schema is created ie if <tt>hibernate.hbm2ddl.auto</tt>
* is set to <tt>create</tt> or <tt>create-drop</tt>.
* <p/>
* The default value is <tt>/import.sql</tt>
* <p/>
* {@link #HBM2DDL_CREATE_SCRIPT_SOURCE} / {@link #HBM2DDL_DROP_SCRIPT_SOURCE} should be preferred
* moving forward
*/
String HBM2DDL_IMPORT_FILES = "hibernate.hbm2ddl.import_files";
/**
* JPA variant of {@link #HBM2DDL_IMPORT_FILES}
* <p/>
* Specifies a {@link java.io.Reader} configured for reading of the SQL load script or a string designating the
* file {@link java.net.URL} for the SQL load script.
* <p/>
* A "SQL load script" is a script that performs some database initialization (INSERT, etc).
*/
String HBM2DDL_LOAD_SCRIPT_SOURCE = "javax.persistence.sql-load-script-source";
/**
* Reference to the {@link org.hibernate.tool.hbm2ddl.ImportSqlCommandExtractor} implementation class
* to use for parsing source/import files as defined by {@link #HBM2DDL_CREATE_SCRIPT_SOURCE},
* {@link #HBM2DDL_DROP_SCRIPT_SOURCE} or {@link #HBM2DDL_IMPORT_FILES}.
* <p/>
* Reference may refer to an instance, a Class implementing ImportSqlCommandExtractor, or the FQN
* of the ImportSqlCommandExtractor implementation. If the FQN is given, the implementation
* must provide a no-arg constructor.
* <p/>
* The default value is {@link org.hibernate.tool.hbm2ddl.SingleLineSqlCommandExtractor}.
*/
String HBM2DDL_IMPORT_FILES_SQL_EXTRACTOR = "hibernate.hbm2ddl.import_files_sql_extractor";
/**
* Specifies whether to automatically create also the database schema/catalog.
* The default is false.
*
* @since 5.0
*/
String HBM2DLL_CREATE_NAMESPACES = "hibernate.hbm2dll.create_namespaces";
/**
* The JPA variant of {@link #HBM2DLL_CREATE_NAMESPACES}
* <p/>
* Specifies whether the persistence provider is to create the database schema(s) in addition to creating
* database objects (tables, sequences, constraints, etc). The value of this boolean property should be set
* to {@code true} if the persistence provider is to create schemas in the database or to generate DDL that
* contains "CREATE SCHEMA" commands. If this property is not supplied (or is explicitly {@code false}), the
* provider should not attempt to create database schemas.
*/
String HBM2DLL_CREATE_SCHEMAS = "javax.persistence.create-database-schemas";
/**
* Used to specify the {@link org.hibernate.tool.schema.spi.SchemaFilterProvider} to be used by
* create, drop, migrate and validate operations on the database schema. SchemaFilterProvider
* provides filters that can be used to limit the scope of these operations to specific namespaces,
* tables and sequences. All objects are included by default.
*
* @since 5.1
*/
String HBM2DDL_FILTER_PROVIDER = "hibernate.hbm2ddl.schema_filter_provider";
/**
* Identifies the delimiter to use to separate schema management statements in script outputs
*/
String HBM2DDL_DELIMITER = "hibernate.hbm2ddl.delimiter";
String JMX_ENABLED = "hibernate.jmx.enabled";

View File

@ -24,7 +24,7 @@ public interface JdbcConnectionAccess extends Serializable {
*
* @throws SQLException Indicates a problem getting the connection
*/
public Connection obtainConnection() throws SQLException;
Connection obtainConnection() throws SQLException;
/**
* Release a previously obtained connection
@ -33,7 +33,7 @@ public interface JdbcConnectionAccess extends Serializable {
*
* @throws SQLException Indicates a problem releasing the connection
*/
public void releaseConnection(Connection connection) throws SQLException;
void releaseConnection(Connection connection) throws SQLException;
/**
* Does the underlying provider of connections support aggressive releasing of connections (and re-acquisition
@ -44,5 +44,5 @@ public interface JdbcConnectionAccess extends Serializable {
* @see org.hibernate.engine.jdbc.connections.spi.ConnectionProvider#supportsAggressiveRelease()
* @see org.hibernate.engine.jdbc.connections.spi.MultiTenantConnectionProvider#supportsAggressiveRelease()
*/
public boolean supportsAggressiveRelease();
boolean supportsAggressiveRelease();
}

View File

@ -24,5 +24,5 @@ public interface DialectResolver extends Service {
*
* @return The dialect to use, or null.
*/
public Dialect resolveDialect(DialectResolutionInfo info);
Dialect resolveDialect(DialectResolutionInfo info);
}

View File

@ -28,9 +28,13 @@ public class DDLFormatterImpl implements Formatter {
if ( StringHelper.isEmpty( sql ) ) {
return sql;
}
if ( sql.toLowerCase(Locale.ROOT).startsWith( "create table" ) ) {
return formatCreateTable( sql );
}
else if ( sql.toLowerCase(Locale.ROOT).startsWith( "create" ) ) {
return sql;
}
else if ( sql.toLowerCase(Locale.ROOT).startsWith( "alter table" ) ) {
return formatAlterTable( sql );
}

View File

@ -22,33 +22,35 @@ import org.hibernate.service.Service;
public interface JdbcServices extends Service {
/**
* Obtain the JdbcEnvironment backing this JdbcServices instance.
*
* @return
*/
public JdbcEnvironment getJdbcEnvironment();
JdbcEnvironment getJdbcEnvironment();
public JdbcConnectionAccess getBootstrapJdbcConnectionAccess();
/**
* Obtain a JdbcConnectionAccess usable from bootstrap actions
* (hbm2ddl.auto, Dialect resolution, etc).
*/
JdbcConnectionAccess getBootstrapJdbcConnectionAccess();
/**
* Obtain the dialect of the database.
*
* @return The database dialect.
*/
public Dialect getDialect();
Dialect getDialect();
/**
* Obtain service for logging SQL statements.
*
* @return The SQL statement logger.
*/
public SqlStatementLogger getSqlStatementLogger();
SqlStatementLogger getSqlStatementLogger();
/**
* Obtain service for dealing with exceptions.
*
* @return The exception helper service.
*/
public SqlExceptionHelper getSqlExceptionHelper();
SqlExceptionHelper getSqlExceptionHelper();
/**
* Obtain information about supported behavior reported by the JDBC driver.
@ -57,7 +59,7 @@ public interface JdbcServices extends Service {
*
* @return The extracted database metadata, oddly enough :)
*/
public ExtractedDatabaseMetaData getExtractedMetaDataSupport();
ExtractedDatabaseMetaData getExtractedMetaDataSupport();
/**
* Create an instance of a {@link LobCreator} appropriate for the current environment, mainly meant to account for
@ -66,11 +68,11 @@ public interface JdbcServices extends Service {
* @param lobCreationContext The context in which the LOB is being created
* @return The LOB creator.
*/
public LobCreator getLobCreator(LobCreationContext lobCreationContext);
LobCreator getLobCreator(LobCreationContext lobCreationContext);
/**
* Obtain service for wrapping a {@link java.sql.ResultSet} in a "column name cache" wrapper.
* @return The ResultSet wrapper.
*/
public ResultSetWrapper getResultSetWrapper();
ResultSetWrapper getResultSetWrapper();
}

View File

@ -1357,8 +1357,12 @@ public interface CoreMessageLogger extends BasicLogger {
@Message(value = "Unsuccessful: %s", id = 388)
void unsuccessful(String sql);
/**
* @deprecated Use {@link #unsuccessfulSchemaManagementCommand} instead
*/
@LogMessage(level = ERROR)
@Message(value = "Unsuccessful: %s", id = 389)
@Deprecated
void unsuccessfulCreate(String string);
@LogMessage(level = WARN)
@ -1734,4 +1738,11 @@ public interface CoreMessageLogger extends BasicLogger {
@Message(value = "Executing import script '%s'", id = 476)
void executingImportScript(String scriptName);
@LogMessage(level = INFO)
@Message(value = "Starting delayed drop of schema as part of SessionFactory shut-down'", id = 477)
void startingDelayedSchemaDrop();
@LogMessage(level = ERROR)
@Message(value = "Unsuccessful: %s", id = 478)
void unsuccessfulSchemaManagementCommand(String command);
}

View File

@ -6,8 +6,6 @@
*/
package org.hibernate.internal;
import javax.naming.Reference;
import javax.naming.StringRefAddr;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
@ -27,9 +25,8 @@ import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.hibernate.cache.spi.access.RegionAccessStrategy;
import org.jboss.logging.Logger;
import javax.naming.Reference;
import javax.naming.StringRefAddr;
import org.hibernate.AssertionFailure;
import org.hibernate.Cache;
@ -69,6 +66,7 @@ import org.hibernate.cache.spi.access.AccessType;
import org.hibernate.cache.spi.access.CollectionRegionAccessStrategy;
import org.hibernate.cache.spi.access.EntityRegionAccessStrategy;
import org.hibernate.cache.spi.access.NaturalIdRegionAccessStrategy;
import org.hibernate.cache.spi.access.RegionAccessStrategy;
import org.hibernate.cfg.Environment;
import org.hibernate.cfg.Settings;
import org.hibernate.context.internal.JTASessionContext;
@ -112,7 +110,6 @@ import org.hibernate.integrator.spi.Integrator;
import org.hibernate.integrator.spi.IntegratorService;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.internal.util.config.ConfigurationException;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.RootClass;
@ -135,15 +132,15 @@ import org.hibernate.service.spi.SessionFactoryServiceRegistry;
import org.hibernate.service.spi.SessionFactoryServiceRegistryFactory;
import org.hibernate.stat.Statistics;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.tool.hbm2ddl.ImportSqlCommandExtractor;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.hbm2ddl.SchemaUpdate;
import org.hibernate.tool.hbm2ddl.SchemaValidator;
import org.hibernate.tool.schema.spi.DelayedDropAction;
import org.hibernate.tool.schema.spi.DelayedDropRegistry;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
import org.hibernate.tuple.entity.EntityTuplizer;
import org.hibernate.type.AssociationType;
import org.hibernate.type.Type;
import org.hibernate.type.TypeResolver;
import static org.hibernate.cfg.AvailableSettings.HBM2DLL_CREATE_NAMESPACES;
import org.jboss.logging.Logger;
/**
@ -193,7 +190,6 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {
private final transient Dialect dialect;
private final transient Settings settings;
private final transient Properties properties;
private transient SchemaExport schemaExport;
private final transient CurrentSessionContext currentSessionContext;
private final transient SQLFunctionRegistry sqlFunctionRegistry;
private final transient SessionFactoryObserverChain observer = new SessionFactoryObserverChain();
@ -206,6 +202,8 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {
private final transient SessionFactoryOptions sessionFactoryOptions;
private final transient Map<String, RegionAccessStrategy> cacheAccessStrategiesMap = new HashMap();
private DelayedDropAction delayedDropAction;
public SessionFactoryImpl(final MetadataImplementor metadata, SessionFactoryOptions options) {
LOG.debug( "Building session factory" );
@ -457,25 +455,17 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {
sessionFactoryOptions
);
boolean createDropNamespaces = ConfigurationHelper.getBoolean( HBM2DLL_CREATE_NAMESPACES, properties, false );
if ( settings.isAutoCreateSchema() ) {
new SchemaExport( serviceRegistry, metadata, createDropNamespaces )
.setImportSqlCommandExtractor( serviceRegistry.getService( ImportSqlCommandExtractor.class ) )
.create( false, true );
}
if ( settings.isAutoUpdateSchema() ) {
new SchemaUpdate( serviceRegistry, metadata ).execute( false, true );
}
if ( settings.isAutoValidateSchema() ) {
new SchemaValidator( serviceRegistry, metadata ).validate();
}
if ( settings.isAutoDropSchema() ) {
schemaExport = new SchemaExport( serviceRegistry, metadata, createDropNamespaces )
.setImportSqlCommandExtractor( serviceRegistry.getService( ImportSqlCommandExtractor.class ) );
}
SchemaManagementToolCoordinator.process(
metadata,
serviceRegistry,
properties,
new DelayedDropRegistry() {
@Override
public void registerOnCloseAction(DelayedDropAction action) {
SessionFactoryImpl.this.delayedDropAction = action;
}
}
);
currentSessionContext = buildCurrentSessionContext();
@ -1080,8 +1070,8 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {
queryPlanCache.cleanup();
if ( settings.isAutoDropSchema() ) {
schemaExport.drop( false, true );
if ( delayedDropAction != null ) {
delayedDropAction.perform( serviceRegistry );
}
SessionFactoryRegistry.INSTANCE.removeSessionFactory(

View File

@ -6,6 +6,8 @@
*/
package org.hibernate.internal.log;
import org.hibernate.tool.schema.Action;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.Logger;
import org.jboss.logging.annotations.LogMessage;
@ -216,5 +218,4 @@ public interface DeprecationLogger extends BasicLogger {
"new Ant-task (%s) leveraging that new bytecode enhancement. You should update your build to use the new task explicitly."
)
void logDeprecatedInstrumentTask(Class taskClass, Class newTaskClass);
}

View File

@ -12,7 +12,11 @@ import java.util.StringTokenizer;
/**
* JDBC column metadata
* @author Christoph Sturm
*
* @deprecated Use {@link org.hibernate.tool.schema.extract.spi.ColumnInformation} or
* {@link org.hibernate.tool.schema.extract.internal.ColumnInformationImpl} instead
*/
@Deprecated
public class ColumnMetadata {
private final String name;
private final String typeName;

View File

@ -13,7 +13,11 @@ import java.sql.SQLException;
* hbm2ddl tools.
*
* @author Steve Ebersole
*
* @deprecated Everything in this package has been replaced with
* {@link org.hibernate.tool.schema.spi.SchemaManagementTool} and friends.
*/
@Deprecated
public interface ConnectionHelper {
/**
* Prepare the helper for use.

View File

@ -13,12 +13,18 @@ import java.sql.Statement;
import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
import org.jboss.logging.Logger;
/**
* @author Steve Ebersole
*/
*
*
* @deprecated Everything in this package has been replaced with
* {@link org.hibernate.tool.schema.spi.SchemaManagementTool} and friends.
*/
@Deprecated
class DatabaseExporter implements Exporter {
private static final CoreMessageLogger LOG = Logger.getMessageLogger( CoreMessageLogger.class, DatabaseExporter.class.getName() );

View File

@ -8,7 +8,11 @@ package org.hibernate.tool.hbm2ddl;
/**
* @author Steve Ebersole
*
* @deprecated Everything in this package has been replaced with
* {@link org.hibernate.tool.schema.spi.SchemaManagementTool} and friends.
*/
@Deprecated
interface Exporter {
public boolean acceptsImportScripts();
public void export(String string) throws Exception;

View File

@ -11,7 +11,11 @@ import java.io.IOException;
/**
* @author Steve Ebersole
*
* @deprecated Everything in this package has been replaced with
* {@link org.hibernate.tool.schema.spi.SchemaManagementTool} and friends.
*/
@Deprecated
class FileExporter implements Exporter {
private final FileWriter writer;

View File

@ -19,7 +19,11 @@ import java.util.Map;
* JDBC foreign key metadata
*
* @author Christoph Sturm
*
* @deprecated Everything in this package has been replaced with
* {@link org.hibernate.tool.schema.spi.SchemaManagementTool} and friends.
*/
@Deprecated
public class ForeignKeyMetadata {
private final String name;
private final String refTable;

View File

@ -10,7 +10,11 @@ import org.hibernate.HibernateException;
/**
* @author Steve Ebersole
*
* @deprecated Everything in this package has been replaced with
* {@link org.hibernate.tool.schema.spi.SchemaManagementTool} and friends.
*/
@Deprecated
public class ImportScriptException extends HibernateException {
public ImportScriptException(String s) {
super( s );

View File

@ -13,7 +13,11 @@ import java.util.List;
/**
* JDBC index metadata
* @author Christoph Sturm
*
* @deprecated Everything in this package has been replaced with
* {@link org.hibernate.tool.schema.spi.SchemaManagementTool} and friends.
*/
@Deprecated
public class IndexMetadata {
private final String name;
private final List columns = new ArrayList();

View File

@ -22,7 +22,11 @@ import org.hibernate.internal.util.config.ConfigurationHelper;
* built and managed {@link ConnectionProvider}.
*
* @author Steve Ebersole
*
* @deprecated Everything in this package has been replaced with
* {@link org.hibernate.tool.schema.spi.SchemaManagementTool} and friends.
*/
@Deprecated
class ManagedProviderConnectionHelper implements ConnectionHelper {
private Properties cfgProperties;
private StandardServiceRegistryImpl serviceRegistry;

View File

@ -6,19 +6,16 @@
*/
package org.hibernate.tool.hbm2ddl;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.hibernate.HibernateException;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.MetadataBuilder;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.model.naming.ImplicitNamingStrategy;
@ -27,30 +24,34 @@ import org.hibernate.boot.registry.BootstrapServiceRegistry;
import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Configuration;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.engine.jdbc.internal.FormatStyle;
import org.hibernate.engine.jdbc.internal.Formatter;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.log.DeprecationLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.ExceptionHandlerCollectingImpl;
import org.hibernate.tool.schema.internal.ExceptionHandlerHaltImpl;
import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.internal.Helper;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
import org.hibernate.tool.schema.spi.SourceDescriptor;
import org.hibernate.tool.schema.spi.TargetDescriptor;
/**
* Commandline tool to export table schema to the database. This class may also be called from inside an application.
* Command-line tool for exporting (create and/or drop) a database schema. The export can
* be sent directly to the database, written to script or both.
*
* @author Daniel Bradby
* @author Gavin King
@ -59,12 +60,43 @@ import org.hibernate.tool.schema.spi.SchemaManagementTool;
public class SchemaExport {
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( SchemaExport.class );
private static final String DEFAULT_IMPORT_FILE = "/import.sql";
public static enum Type {
CREATE,
DROP,
CREATE( Action.CREATE ),
DROP( Action.DROP ),
NONE( Action.NONE ),
BOTH( Action.BOTH );
private final Action actionReplacement;
Type(Action actionReplacement) {
this.actionReplacement = actionReplacement;
}
public boolean doCreate() {
return actionReplacement.doCreate();
}
public boolean doDrop() {
return actionReplacement.doDrop();
}
}
public static enum Action {
/**
* None - duh :P
*/
NONE,
/**
* Create only
*/
CREATE,
/**
* Drop only
*/
DROP,
/**
* Drop and then create
*/
BOTH;
public boolean doCreate() {
@ -74,210 +106,46 @@ public class SchemaExport {
public boolean doDrop() {
return this == BOTH || this == DROP;
}
private static Action interpret(boolean justDrop, boolean justCreate) {
if ( justDrop ) {
return Action.DROP;
}
else if ( justCreate ) {
return Action.CREATE;
}
else {
return Action.BOTH;
}
}
public static Action parseCommandLineOption(String actionText) {
if ( actionText.equalsIgnoreCase( "create" ) ) {
return CREATE;
}
else if ( actionText.equalsIgnoreCase( "drop" ) ) {
return DROP;
}
else if ( actionText.equalsIgnoreCase( "drop-and-create" ) ) {
return BOTH;
}
else {
return NONE;
}
}
}
private final ConnectionHelper connectionHelper;
private final SqlStatementLogger sqlStatementLogger;
private final SqlExceptionHelper sqlExceptionHelper;
private final ClassLoaderService classLoaderService;
private final String[] dropSQL;
private final String[] createSQL;
private final String importFiles;
boolean haltOnError = false;
boolean format = false;
boolean manageNamespaces = false;
String delimiter = null;
String outputFile = null;
private String importFiles;
private final List<Exception> exceptions = new ArrayList<Exception>();
private Formatter formatter;
private ImportSqlCommandExtractor importSqlCommandExtractor = ImportSqlCommandExtractorInitiator.DEFAULT_EXTRACTOR;
private String outputFile;
private String delimiter;
private boolean haltOnError;
/**
* Builds a SchemaExport object.
*
* @param metadata The metadata object holding the mapping info to be exported
*/
public SchemaExport(MetadataImplementor metadata) {
this( metadata.getMetadataBuildingOptions().getServiceRegistry(), metadata );
}
/**
* Builds a SchemaExport object.
*
* @param metadata The metadata object holding the mapping info to be exported
*/
public SchemaExport(MetadataImplementor metadata, boolean createNamespaces) {
this( metadata.getMetadataBuildingOptions().getServiceRegistry(), metadata, createNamespaces );
}
/**
* Builds a SchemaExport object.
*
* @param serviceRegistry The registry of services available for use. Should, at a minimum, contain
* the JdbcServices service.
* @param metadata The metadata object holding the mapping info to be exported
*/
public SchemaExport(ServiceRegistry serviceRegistry, MetadataImplementor metadata) {
this(
new SuppliedConnectionProviderConnectionHelper(
serviceRegistry.getService( ConnectionProvider.class )
),
serviceRegistry,
metadata,
false
);
}
/**
* Builds a SchemaExport object.
*
* @param serviceRegistry The registry of services available for use. Should, at a minimum, contain
* the JdbcServices service.
* @param metadata The metadata object holding the mapping info to be exported
*/
public SchemaExport(ServiceRegistry serviceRegistry, MetadataImplementor metadata, boolean createNamespaces) {
this(
new SuppliedConnectionProviderConnectionHelper(
serviceRegistry.getService( ConnectionProvider.class )
),
serviceRegistry,
metadata,
createNamespaces
);
}
private SchemaExport(
ConnectionHelper connectionHelper,
ServiceRegistry serviceRegistry,
MetadataImplementor metadata,
boolean createNamespaces) {
this.connectionHelper = connectionHelper;
this.sqlStatementLogger = serviceRegistry.getService( JdbcServices.class ).getSqlStatementLogger();
this.formatter = ( sqlStatementLogger.isFormat() ? FormatStyle.DDL : FormatStyle.NONE ).getFormatter();
this.sqlExceptionHelper = serviceRegistry.getService( JdbcEnvironment.class ).getSqlExceptionHelper();
this.classLoaderService = serviceRegistry.getService( ClassLoaderService.class );
this.importFiles = ConfigurationHelper.getString(
AvailableSettings.HBM2DDL_IMPORT_FILES,
serviceRegistry.getService( ConfigurationService.class ).getSettings(),
DEFAULT_IMPORT_FILE
);
// uses the schema management tool service to generate the create/drop scripts
// longer term this class should instead just leverage the tool for its execution phase.
// That is part of the larger task to consolidate Hibernate and JPA schema management
SchemaManagementTool schemaManagementTool = serviceRegistry.getService( SchemaManagementTool.class );
final List<String> commands = new ArrayList<String>();
final org.hibernate.tool.schema.spi.Target target = new org.hibernate.tool.schema.spi.Target() {
@Override
public boolean acceptsImportScriptActions() {
return false;
}
@Override
public void prepare() {
commands.clear();
}
@Override
public void accept(String command) {
commands.add( command );
}
@Override
public void release() {
}
};
final Map settings = serviceRegistry.getService( ConfigurationService.class ).getSettings();
schemaManagementTool.getSchemaDropper( settings ).doDrop( metadata, createNamespaces, target );
this.dropSQL = commands.toArray( new String[commands.size()] );
schemaManagementTool.getSchemaCreator( settings ).doCreation( metadata, createNamespaces, target );
this.createSQL = commands.toArray( new String[commands.size()] );
}
/**
* Intended for testing use
*
* @param connectionHelper Access to the JDBC Connection
* @param metadata The metadata object holding the mapping info to be exported
*/
public SchemaExport(
ConnectionHelper connectionHelper,
MetadataImplementor metadata) {
this(
connectionHelper,
metadata.getMetadataBuildingOptions().getServiceRegistry(),
metadata,
false
);
}
/**
* Create a SchemaExport for the given Metadata, using the supplied connection for connectivity.
*
* @param metadata The metadata object holding the mapping info to be exported
* @param connection The JDBC connection to use.
*
* @throws HibernateException Indicates problem preparing for schema export.
*/
public SchemaExport(MetadataImplementor metadata, Connection connection) throws HibernateException {
this( new SuppliedConnectionHelper( connection ), metadata );
}
/**
* @deprecated Use one of the forms accepting {@link MetadataImplementor}, rather
* than {@link Configuration}, instead.
*/
@Deprecated
public SchemaExport(ServiceRegistry serviceRegistry, Configuration configuration) {
throw new UnsupportedOperationException(
"Attempt to use unsupported SchemaExport constructor accepting org.hibernate.cfg.Configuration; " +
"one of the forms accepting org.hibernate.boot.spi.MetadataImplementor should be used instead"
);
}
/**
* @deprecated Use one of the forms accepting {@link MetadataImplementor}, rather
* than {@link Configuration}, instead.
*/
@Deprecated
public SchemaExport(Configuration configuration) {
throw new UnsupportedOperationException(
"Attempt to use unsupported SchemaExport constructor accepting org.hibernate.cfg.Configuration; " +
"one of the forms accepting org.hibernate.boot.spi.MetadataImplementor should be used instead"
);
}
/**
* @deprecated Use one of the forms accepting {@link MetadataImplementor}, rather
* than {@link Configuration}, instead.
*/
@Deprecated
public SchemaExport(Configuration configuration, Connection connection) throws HibernateException {
throw new UnsupportedOperationException(
"Attempt to use unsupported SchemaExport constructor accepting org.hibernate.cfg.Configuration; " +
"one of the forms accepting org.hibernate.boot.spi.MetadataImplementor should be used instead"
);
}
public SchemaExport(
ConnectionHelper connectionHelper,
String[] dropSql,
String[] createSql) {
this.connectionHelper = connectionHelper;
this.dropSQL = dropSql;
this.createSQL = createSql;
this.importFiles = "";
this.sqlStatementLogger = new SqlStatementLogger( false, true );
this.sqlExceptionHelper = new SqlExceptionHelper();
this.classLoaderService = new ClassLoaderServiceImpl();
this.formatter = FormatStyle.DDL.getFormatter();
}
/**
* For generating a export script file, this is the file which will be written.
@ -291,6 +159,18 @@ public class SchemaExport {
return this;
}
	/**
	 * Comma-separated list of resource names to use for database init commands on create.
	 *
	 * @param importFiles The comma-separated list of init file resource names
	 *
	 * @return this, for method chaining
	 */
	public SchemaExport setImportFiles(String importFiles) {
		this.importFiles = importFiles;
		return this;
	}
/**
* Set the end of statement delimiter
*
@ -311,19 +191,7 @@ public class SchemaExport {
* @return this
*/
public SchemaExport setFormat(boolean format) {
this.formatter = ( format ? FormatStyle.DDL : FormatStyle.NONE ).getFormatter();
return this;
}
/**
* Set <i>import.sql</i> command extractor. By default {@link SingleLineSqlCommandExtractor} is used.
*
* @param importSqlCommandExtractor <i>import.sql</i> command extractor.
*
* @return this
*/
public SchemaExport setImportSqlCommandExtractor(ImportSqlCommandExtractor importSqlCommandExtractor) {
this.importSqlCommandExtractor = importSqlCommandExtractor;
this.format = format;
return this;
}
@ -339,231 +207,151 @@ public class SchemaExport {
return this;
}
/**
* Run the schema creation script; drop script is automatically
* executed before running the creation script.
*
* @param script print the DDL to the console
* @param export export the script to the database
*/
public void create(boolean script, boolean export) {
create( Target.interpret( script, export ) );
	/**
	 * Should the export manage the database namespaces as well?  Presumably this
	 * covers creating/dropping schemas and catalogs - TODO confirm against the
	 * schema-tooling execution options.
	 *
	 * @param manageNamespaces {@code true} to also manage namespaces
	 *
	 * @return this, for method chaining
	 */
	public SchemaExport setManageNamespaces(boolean manageNamespaces) {
		this.manageNamespaces = manageNamespaces;
		return this;
	}
/**
* Run the schema creation script; drop script is automatically
* executed before running the creation script.
*
* @param output the target of the script.
*/
public void create(Target output) {
// need to drop tables before creating so need to specify Type.BOTH
execute( output, Type.BOTH );
	/**
	 * Perform the drop portion of the export only.
	 *
	 * @param targetTypes the type(s) of target to send the generated commands to
	 * @param metadata the metadata describing the schema to be dropped
	 */
	public void drop(EnumSet<TargetType> targetTypes, Metadata metadata) {
		execute( targetTypes, Action.DROP, metadata );
	}
/**
* Run the drop schema script.
*
* @param script print the DDL to the console
* @param export export the script to the database
*/
public void drop(boolean script, boolean export) {
drop( Target.interpret( script, export ) );
	/**
	 * Drop and then (re-)create the schema.  Delegates with {@link Action#BOTH},
	 * so the drop commands are executed before the creation commands.
	 *
	 * @param targetTypes the type(s) of target to send the generated commands to
	 * @param metadata the metadata describing the schema to be exported
	 */
	public void create(EnumSet<TargetType> targetTypes, Metadata metadata) {
		execute( targetTypes, Action.BOTH, metadata );
	}
public void drop(Target output) {
execute( output, Type.DROP );
	/**
	 * Perform the creation portion of the export only (no preliminary drop).
	 *
	 * @param targetTypes the type(s) of target to send the generated commands to
	 * @param metadata the metadata describing the schema to be created
	 */
	public void createOnly(EnumSet<TargetType> targetTypes, Metadata metadata) {
		execute( targetTypes, Action.CREATE, metadata );
	}
public void execute(boolean script, boolean export, boolean justDrop, boolean justCreate) {
execute( Target.interpret( script, export ), interpretType( justDrop, justCreate ) );
	/**
	 * Execute the indicated action against the indicated targets, using the
	 * ServiceRegistry associated with the given Metadata.
	 *
	 * @param targetTypes the type(s) of target to send the generated commands to
	 * @param action the action to perform (create, drop, both, none)
	 * @param metadata the metadata describing the schema; must be a MetadataImplementor
	 */
	public void execute(EnumSet<TargetType> targetTypes, Action action, Metadata metadata) {
		execute( targetTypes, action, metadata, ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry() );
	}
private Type interpretType(boolean justDrop, boolean justCreate) {
if ( justDrop ) {
return Type.DROP;
}
else if ( justCreate ) {
return Type.CREATE;
}
else {
return Type.BOTH;
}
}
public void execute(Target output, Type type) {
if ( ( outputFile == null && output == Target.NONE ) || type == SchemaExport.Type.NONE ) {
@SuppressWarnings("unchecked")
public void execute(EnumSet<TargetType> targetTypes, Action action, Metadata metadata, ServiceRegistry serviceRegistry) {
if ( action == Action.NONE ) {
LOG.debug( "Skipping SchemaExport as Action.NONE was passed" );
return;
}
if ( targetTypes.isEmpty() ) {
LOG.debug( "Skipping SchemaExport as no targets were specified" );
return;
}
exceptions.clear();
LOG.runningHbm2ddlSchemaExport();
final List<NamedReader> importFileReaders = new ArrayList<NamedReader>();
for ( String currentFile : importFiles.split( "," ) ) {
final String resourceName = currentFile.trim();
final TargetDescriptor targetDescriptor = buildTargetDescriptor( targetTypes, outputFile, serviceRegistry );
InputStream stream = classLoaderService.locateResourceStream( resourceName );
if ( stream == null ) {
LOG.debugf( "Import file not found: %s", currentFile );
}
else {
importFileReaders.add( new NamedReader( resourceName, stream ) );
}
}
doExecution( action, needsJdbcConnection( targetTypes ), metadata, serviceRegistry, targetDescriptor );
}
public void doExecution(
Action action,
boolean needsJdbc,
Metadata metadata,
ServiceRegistry serviceRegistry,
TargetDescriptor targetDescriptor) {
Map config = new HashMap();
config.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
config.put( AvailableSettings.HBM2DDL_DELIMITER, delimiter );
config.put( AvailableSettings.FORMAT_SQL, format );
config.put( AvailableSettings.HBM2DDL_IMPORT_FILES, importFiles );
final SchemaManagementTool tool = serviceRegistry.getService( SchemaManagementTool.class );
final ExceptionHandler exceptionHandler = haltOnError
? ExceptionHandlerHaltImpl.INSTANCE
: new ExceptionHandlerCollectingImpl();
final ExecutionOptions executionOptions = SchemaManagementToolCoordinator.buildExecutionOptions(
config,
exceptionHandler
);
final SourceDescriptor sourceDescriptor = new SourceDescriptor() {
@Override
public SourceType getSourceType() {
return SourceType.METADATA;
}
@Override
public ScriptSourceInput getScriptSourceInput() {
return null;
}
};
final List<Exporter> exporters = new ArrayList<Exporter>();
try {
// prepare exporters ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if ( output.doScript() ) {
exporters.add( new ScriptExporter() );
}
if ( outputFile != null ) {
exporters.add( new FileExporter( outputFile ) );
}
if ( output.doExport() ) {
exporters.add( new DatabaseExporter( connectionHelper, sqlExceptionHelper ) );
if ( action.doDrop() ) {
tool.getSchemaDropper( config ).doDrop(
metadata,
executionOptions,
sourceDescriptor,
targetDescriptor
);
}
// perform exporters ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if ( type.doDrop() ) {
perform( dropSQL, exporters );
if ( action.doCreate() ) {
tool.getSchemaCreator( config ).doCreation(
metadata,
executionOptions,
sourceDescriptor,
targetDescriptor
);
}
if ( type.doCreate() ) {
perform( createSQL, exporters );
if ( !importFileReaders.isEmpty() ) {
for ( NamedReader namedReader : importFileReaders ) {
LOG.executingImportScript( namedReader.getName() );
importScript( namedReader, exporters );
}
}
}
}
catch (Exception e) {
exceptions.add( e );
LOG.schemaExportUnsuccessful( e );
}
finally {
// release exporters ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
for ( Exporter exporter : exporters ) {
try {
exporter.release();
}
catch (Exception ignore) {
}
}
// release the named readers from import scripts
for ( NamedReader namedReader : importFileReaders ) {
try {
namedReader.getReader().close();
}
catch (Exception ignore) {
}
}
LOG.schemaExportComplete();
}
}
private void perform(String[] sqlCommands, List<Exporter> exporters) {
for ( String sqlCommand : sqlCommands ) {
String formatted = formatter.format( sqlCommand );
if ( delimiter != null ) {
formatted += delimiter;
}
sqlStatementLogger.logStatement( sqlCommand, formatter );
for ( Exporter exporter : exporters ) {
try {
exporter.export( formatted );
}
catch (Exception e) {
if ( haltOnError ) {
throw new HibernateException( "Error during DDL export", e );
}
exceptions.add( e );
LOG.unsuccessfulCreate( sqlCommand );
LOG.error( e.getMessage() );
}
if ( exceptionHandler instanceof ExceptionHandlerCollectingImpl ) {
exceptions.addAll( ( (ExceptionHandlerCollectingImpl) exceptionHandler ).getExceptions() );
}
}
}
private void importScript(NamedReader namedReader, List<Exporter> exporters) throws Exception {
BufferedReader reader = new BufferedReader( namedReader.getReader() );
String[] statements = importSqlCommandExtractor.extractCommands( reader );
if ( statements != null ) {
for ( String statement : statements ) {
if ( statement != null ) {
String trimmedSql = statement.trim();
if ( trimmedSql.endsWith( ";" ) ) {
trimmedSql = trimmedSql.substring( 0, statement.length() - 1 );
}
if ( !StringHelper.isEmpty( trimmedSql ) ) {
try {
for ( Exporter exporter : exporters ) {
if ( exporter.acceptsImportScripts() ) {
exporter.export( trimmedSql );
}
}
}
catch (Exception e) {
if ( haltOnError ) {
throw new ImportScriptException(
"Error during statement execution (file: '"
+ namedReader.getName() + "'): " + trimmedSql, e
);
}
exceptions.add( e );
LOG.unsuccessful( trimmedSql );
LOG.error( e.getMessage() );
}
}
}
}
}
	// A JDBC connection is only needed when exporting directly to the database target.
	private boolean needsJdbcConnection(EnumSet<TargetType> targetTypes) {
		return targetTypes.contains( TargetType.DATABASE );
	}
private static class NamedReader {
private final Reader reader;
private final String name;
public NamedReader(String name, InputStream stream) {
this.name = name;
this.reader = new InputStreamReader( stream );
public static TargetDescriptor buildTargetDescriptor(
EnumSet<TargetType> targetTypes,
String outputFile,
ServiceRegistry serviceRegistry) {
final ScriptTargetOutput scriptTarget;
if ( targetTypes.contains( TargetType.SCRIPT ) ) {
if ( outputFile == null ) {
throw new SchemaManagementException( "Writing to script was requested, but no script file was specified" );
}
scriptTarget = Helper.interpretScriptTargetSetting(
outputFile,
serviceRegistry.getService( ClassLoaderService.class )
);
}
else {
scriptTarget = null;
}
public Reader getReader() {
return reader;
}
return new TargetDescriptorImpl( targetTypes, scriptTarget );
}
public String getName() {
return name;
}
	/**
	 * Intended for testing use only.  Performs the given action, routing all generated
	 * commands to the supplied script target; no JDBC connection is used (note the
	 * {@code false} passed for the {@code needsJdbc} parameter of doExecution).
	 *
	 * @param action the action to perform
	 * @param metadata the metadata describing the schema; must be a MetadataImplementor
	 * @param target the script output to receive the generated commands
	 */
	public void perform(Action action, Metadata metadata, ScriptTargetOutput target) {
		doExecution(
				action,
				false,
				metadata,
				( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry(),
				new TargetDescriptorImpl( EnumSet.of( TargetType.SCRIPT ), target )
		);
	}
public static void main(String[] args) {
try {
final CommandLineArgs commandLineArgs = CommandLineArgs.parseCommandLineArgs( args );
StandardServiceRegistry serviceRegistry = buildStandardServiceRegistry( commandLineArgs );
try {
final MetadataImplementor metadata = buildMetadata( commandLineArgs, serviceRegistry );
SchemaExport schemaExport = new SchemaExport( serviceRegistry, metadata, commandLineArgs.exportSchemas )
.setHaltOnError( commandLineArgs.halt )
.setOutputFile( commandLineArgs.outputFile )
.setDelimiter( commandLineArgs.delimiter )
.setImportSqlCommandExtractor( serviceRegistry.getService( ImportSqlCommandExtractor.class ) )
.setFormat( commandLineArgs.format );
schemaExport.execute(
commandLineArgs.script,
commandLineArgs.export,
commandLineArgs.drop,
commandLineArgs.create
);
}
finally {
StandardServiceRegistryBuilder.destroy( serviceRegistry );
}
execute( commandLineArgs );
}
catch (Exception e) {
LOG.unableToCreateSchema( e );
@ -571,6 +359,25 @@ public class SchemaExport {
}
}
	/**
	 * Perform the export described by the parsed command-line arguments: build the
	 * service registry and mapping metadata, configure a SchemaExport from the
	 * parsed options, and execute it.  The service registry is always destroyed
	 * afterwards, even when the export fails.
	 *
	 * @param commandLineArgs the parsed command-line arguments
	 *
	 * @throws Exception indicates a problem building the metadata or performing the export
	 */
	public static void execute(CommandLineArgs commandLineArgs) throws Exception {
		StandardServiceRegistry serviceRegistry = buildStandardServiceRegistry( commandLineArgs );
		try {
			final MetadataImplementor metadata = buildMetadata( commandLineArgs, serviceRegistry );
			new SchemaExport()
					.setHaltOnError( commandLineArgs.halt )
					.setOutputFile( commandLineArgs.outputFile )
					.setDelimiter( commandLineArgs.delimiter )
					.setFormat( commandLineArgs.format )
					.setManageNamespaces( commandLineArgs.manageNamespaces )
					.setImportFiles( commandLineArgs.importFile )
					.execute( commandLineArgs.targetTypes, commandLineArgs.action, metadata, serviceRegistry );
		}
		finally {
			StandardServiceRegistryBuilder.destroy( serviceRegistry );
		}
	}
private static StandardServiceRegistry buildStandardServiceRegistry(CommandLineArgs commandLineArgs)
throws Exception {
final BootstrapServiceRegistry bsr = new BootstrapServiceRegistryBuilder().build();
@ -586,10 +393,6 @@ public class SchemaExport {
}
ssrBuilder.applySettings( properties );
if ( commandLineArgs.importFile != null ) {
ssrBuilder.applySetting( AvailableSettings.HBM2DDL_IMPORT_FILES, commandLineArgs.importFile );
}
return ssrBuilder.build();
}
@ -653,26 +456,25 @@ public class SchemaExport {
	/**
	 * Returns a List of all Exceptions which occurred during the export.
	 *
	 * @return A List containing the Exceptions which occurred during the export
	 */
	public List getExceptions() {
		return exceptions;
	}
private static class CommandLineArgs {
boolean script = true;
boolean drop = false;
boolean create = false;
EnumSet<TargetType> targetTypes;
Action action;
boolean halt = false;
boolean export = true;
boolean format = false;
boolean exportSchemas = false;
boolean manageNamespaces = false;
String delimiter = null;
String outputFile = null;
String importFile = DEFAULT_IMPORT_FILE;
String importFile = SchemaCreatorImpl.DEFAULT_IMPORT_FILE;
String propertiesFile = null;
String cfgXmlFile = null;
@ -683,28 +485,42 @@ public class SchemaExport {
List<String> jarFiles = new ArrayList<String>();
public static CommandLineArgs parseCommandLineArgs(String[] args) {
String targetText = null;
boolean script = true;
boolean export = true;
String actionText = null;
boolean drop = false;
boolean create = false;
CommandLineArgs parsedArgs = new CommandLineArgs();
for ( String arg : args ) {
if ( arg.startsWith( "--" ) ) {
if ( arg.equals( "--quiet" ) ) {
parsedArgs.script = false;
script = false;
}
else if ( arg.equals( "--text" ) ) {
export = false;
}
else if ( arg.equals( "--drop" ) ) {
parsedArgs.drop = true;
drop = true;
}
else if ( arg.equals( "--create" ) ) {
parsedArgs.create = true;
create = true;
}
else if ( arg.startsWith( "--action=" ) ) {
actionText = arg.substring( 9 );
}
else if ( arg.startsWith( "--target=" ) ) {
targetText = arg.substring( 9 );
}
else if ( arg.equals( "--schemas" ) ) {
parsedArgs.exportSchemas = true;
parsedArgs.manageNamespaces = true;
}
else if ( arg.equals( "--haltonerror" ) ) {
parsedArgs.halt = true;
}
else if ( arg.equals( "--text" ) ) {
parsedArgs.export = false;
}
else if ( arg.startsWith( "--output=" ) ) {
parsedArgs.outputFile = arg.substring( 9 );
}
@ -743,7 +559,50 @@ public class SchemaExport {
}
}
if ( actionText == null ) {
parsedArgs.action = Action.interpret( drop, create );
}
else {
if ( drop || create ) {
LOG.warn( "--drop or --create was used; prefer --action=none|create|drop|drop-and-create instead" );
}
parsedArgs.action = Action.parseCommandLineOption( actionText );
}
if ( targetText == null ) {
parsedArgs.targetTypes = TargetTypeHelper.parseLegacyCommandLineOptions( script, export, parsedArgs.outputFile );
}
else {
if ( !script || !export ) {
LOG.warn( "--text or --quiet was used; prefer --target=none|(stdout|database|script)*" );
}
parsedArgs.targetTypes = TargetTypeHelper.parseCommandLineOptions( targetText );
}
return parsedArgs;
}
}
private static class TargetDescriptorImpl implements TargetDescriptor {
private final EnumSet<TargetType> targetTypes;
private final ScriptTargetOutput scriptTarget;
public TargetDescriptorImpl(
EnumSet<TargetType> targetTypes,
ScriptTargetOutput scriptTarget) {
this.targetTypes = targetTypes;
this.scriptTarget = scriptTarget;
}
@Override
public EnumSet<TargetType> getTargetTypes() {
return targetTypes;
}
@Override
public ScriptTargetOutput getScriptTargetOutput() {
return scriptTarget;
}
}
}

View File

@ -7,12 +7,10 @@
package org.hibernate.tool.hbm2ddl;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.LinkedList;
import java.util.List;
import org.hibernate.HibernateException;
import org.hibernate.boot.MetadataBuilder;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.model.naming.ImplicitNamingStrategy;
@ -23,8 +21,13 @@ import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.internal.log.DeprecationLogger;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.tool.schema.Action;
import org.hibernate.tool.schema.spi.DelayedDropRegistryNotAvailableImpl;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
@ -53,7 +56,6 @@ import org.apache.tools.ant.types.FileSet;
* &lt;/schemaexport&gt;
* </pre>
*
* @see SchemaExport
* @author Rong C Ou
*/
public class SchemaExportTask extends MatchingTask {
@ -181,22 +183,96 @@ public class SchemaExportTask extends MatchingTask {
@Override
public void execute() throws BuildException {
try {
buildSchemaExport().execute( !quiet, !text, drop, create );
doExecution();
}
catch (HibernateException e) {
throw new BuildException("Schema text failed: " + e.getMessage(), e);
}
catch (FileNotFoundException e) {
throw new BuildException("File not found: " + e.getMessage(), e);
}
catch (IOException e) {
throw new BuildException("IOException : " + e.getMessage(), e);
catch (BuildException e) {
throw e;
}
catch (Exception e) {
throw new BuildException(e);
throw new BuildException( "Error performing export : " + e.getMessage(), e );
}
}
private void doExecution() throws Exception {
final BootstrapServiceRegistry bsr = new BootstrapServiceRegistryBuilder().build();
final StandardServiceRegistryBuilder ssrBuilder = new StandardServiceRegistryBuilder( bsr );
final MetadataSources metadataSources = new MetadataSources( bsr );
if ( configurationFile != null ) {
ssrBuilder.configure( configurationFile );
}
if ( propertiesFile != null ) {
ssrBuilder.loadProperties( propertiesFile );
}
ssrBuilder.applySettings( getProject().getProperties() );
for ( String fileName : getFiles() ) {
if ( fileName.endsWith(".jar") ) {
metadataSources.addJar( new File( fileName ) );
}
else {
metadataSources.addFile( fileName );
}
}
ssrBuilder.applySetting( AvailableSettings.HBM2DDL_DELIMITER, delimiter );
ExportType exportType = ExportType.interpret( drop, create );
Target output = Target.interpret( !quiet, !text );
if ( output.doScript() ) {
ssrBuilder.applySetting( AvailableSettings.HBM2DDL_SCRIPTS_ACTION, exportType.getAction() );
final Object scriptTarget;
if ( outputFile == null ) {
scriptTarget = new OutputStreamWriter( System.out );
}
else {
scriptTarget = outputFile;
}
if ( exportType.doCreate() ) {
ssrBuilder.applySetting( AvailableSettings.HBM2DDL_SCRIPTS_CREATE_TARGET, scriptTarget );
}
if ( exportType.doDrop() ) {
ssrBuilder.applySetting( AvailableSettings.HBM2DDL_SCRIPTS_DROP_TARGET, scriptTarget );
}
}
if ( output.doExport() ) {
ssrBuilder.applySetting( AvailableSettings.HBM2DDL_DATABASE_ACTION, exportType.getAction() );
}
final StandardServiceRegistryImpl ssr = (StandardServiceRegistryImpl) ssrBuilder.build();
final MetadataBuilder metadataBuilder = metadataSources.getMetadataBuilder( ssr );
ClassLoaderService classLoaderService = bsr.getService( ClassLoaderService.class );
if ( implicitNamingStrategy != null ) {
metadataBuilder.applyImplicitNamingStrategy(
(ImplicitNamingStrategy) classLoaderService.classForName( implicitNamingStrategy ).newInstance()
);
}
if ( physicalNamingStrategy != null ) {
metadataBuilder.applyPhysicalNamingStrategy(
(PhysicalNamingStrategy) classLoaderService.classForName( physicalNamingStrategy ).newInstance()
);
}
final MetadataImplementor metadata = (MetadataImplementor) metadataBuilder.build();
metadata.validate();
SchemaManagementToolCoordinator.process(
metadata,
ssr,
ssr.getService( ConfigurationService.class ).getSettings(),
DelayedDropRegistryNotAvailableImpl.INSTANCE
);
}
private String[] getFiles() {
List<String> files = new LinkedList<String>();
for ( FileSet fileSet : fileSets ) {
@ -215,49 +291,41 @@ public class SchemaExportTask extends MatchingTask {
return ArrayHelper.toStringArray(files);
}
private SchemaExport buildSchemaExport() throws Exception {
final BootstrapServiceRegistry bsr = new BootstrapServiceRegistryBuilder().build();
public enum ExportType {
CREATE( Action.CREATE_ONLY ),
DROP( Action.DROP ),
NONE( Action.NONE ),
BOTH( Action.CREATE );
final MetadataSources metadataSources = new MetadataSources( bsr );
final StandardServiceRegistryBuilder ssrBuilder = new StandardServiceRegistryBuilder( bsr );
private final Action action;
if ( configurationFile != null ) {
ssrBuilder.configure( configurationFile );
ExportType(Action action) {
this.action = action;
}
if ( propertiesFile != null ) {
ssrBuilder.loadProperties( propertiesFile );
}
ssrBuilder.applySettings( getProject().getProperties() );
for ( String fileName : getFiles() ) {
if ( fileName.endsWith(".jar") ) {
metadataSources.addJar( new File( fileName ) );
public boolean doCreate() {
return this == BOTH || this == CREATE;
}
public boolean doDrop() {
return this == BOTH || this == DROP;
}
public Action getAction() {
return action;
}
public static ExportType interpret(boolean justDrop, boolean justCreate) {
if ( justDrop ) {
return ExportType.DROP;
}
else if ( justCreate ) {
return ExportType.CREATE;
}
else {
metadataSources.addFile( fileName );
return ExportType.BOTH;
}
}
final StandardServiceRegistryImpl ssr = (StandardServiceRegistryImpl) ssrBuilder.build();
final MetadataBuilder metadataBuilder = metadataSources.getMetadataBuilder( ssr );
ClassLoaderService classLoaderService = bsr.getService( ClassLoaderService.class );
if ( implicitNamingStrategy != null ) {
metadataBuilder.applyImplicitNamingStrategy(
(ImplicitNamingStrategy) classLoaderService.classForName( implicitNamingStrategy ).newInstance()
);
}
if ( physicalNamingStrategy != null ) {
metadataBuilder.applyPhysicalNamingStrategy(
(PhysicalNamingStrategy) classLoaderService.classForName( physicalNamingStrategy ).newInstance()
);
}
return new SchemaExport( (MetadataImplementor) metadataBuilder.build() )
.setHaltOnError( haltOnError )
.setOutputFile( outputFile.getPath() )
.setDelimiter( delimiter );
}
}

View File

@ -8,12 +8,14 @@ package org.hibernate.tool.hbm2ddl;
import java.io.File;
import java.io.FileInputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.hibernate.HibernateException;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.MetadataBuilder;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.model.naming.ImplicitNamingStrategy;
@ -24,23 +26,18 @@ import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.engine.jdbc.internal.FormatStyle;
import org.hibernate.engine.jdbc.internal.Formatter;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.log.DeprecationLogger;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.extract.internal.DatabaseInformationImpl;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.internal.TargetDatabaseImpl;
import org.hibernate.tool.schema.internal.TargetFileImpl;
import org.hibernate.tool.schema.internal.TargetStdoutImpl;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.ExceptionHandlerCollectingImpl;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.SchemaMigrator;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
import org.hibernate.tool.schema.spi.TargetDescriptor;
/**
* A commandline tool to update a database schema. May also be called from inside an application.
@ -51,130 +48,70 @@ import org.hibernate.tool.schema.spi.SchemaMigrator;
public class SchemaUpdate {
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( SchemaUpdate.class );
private final MetadataImplementor metadata;
private final ServiceRegistry serviceRegistry;
private final JdbcConnectionAccess jdbcConnectionAccess;
private final List<Exception> exceptions = new ArrayList<Exception>();
private String outputFile;
private String delimiter;
private Formatter formatter;
private boolean format;
/**
* Creates a SchemaUpdate object. This form is intended for use from tooling
*
* @param metadata The metadata defining the schema as it should be after update
*
* @throws HibernateException
*/
public SchemaUpdate(MetadataImplementor metadata) {
this( metadata.getMetadataBuildingOptions().getServiceRegistry(), metadata );
public void execute(EnumSet<TargetType> targetTypes, Metadata metadata) {
execute( targetTypes, metadata, ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry() );
}
/**
* Creates a SchemaUpdate object. This form is intended for use from
* {@code hibernate.hbm2ddl.auto} handling, generally from within the SessionFactory
* ctor.
* <p/>
* Note that the passed ServiceRegistry is expected to be of type
* {@link org.hibernate.service.spi.SessionFactoryServiceRegistry}, although
* any ServiceRegistry type will work as long as it has access to the
* {@link org.hibernate.engine.jdbc.spi.JdbcServices} service.
*
* @param serviceRegistry The ServiceRegistry to use.
* @param metadata The metadata defining the schema as it should be after update
*
* @throws HibernateException
*/
public SchemaUpdate(ServiceRegistry serviceRegistry, MetadataImplementor metadata) throws HibernateException {
this.metadata = metadata;
this.serviceRegistry = serviceRegistry;
this.jdbcConnectionAccess = serviceRegistry.getService( JdbcServices.class ).getBootstrapJdbcConnectionAccess();
}
/**
* Execute the schema updates
*
* @param script print all DDL to the console
*/
public void execute(boolean script, boolean doUpdate) {
execute( Target.interpret( script, doUpdate ) );
}
public void execute(Target target) {
LOG.runningHbm2ddlSchemaUpdate();
@SuppressWarnings("unchecked")
public void execute(EnumSet<TargetType> targetTypes, Metadata metadata, ServiceRegistry serviceRegistry) {
if ( targetTypes.isEmpty() ) {
LOG.debug( "Skipping SchemaExport as no targets were specified" );
return;
}
exceptions.clear();
LOG.runningHbm2ddlSchemaUpdate();
List<org.hibernate.tool.schema.spi.Target> toolTargets = buildToolTargets( target );
Map config = new HashMap();
config.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
config.put( AvailableSettings.HBM2DDL_DELIMITER, delimiter );
config.put( AvailableSettings.FORMAT_SQL, format );
final ConfigurationService cfgService = serviceRegistry.getService( ConfigurationService.class );
final SchemaMigrator schemaMigrator = serviceRegistry.getService( SchemaManagementTool.class )
.getSchemaMigrator( cfgService.getSettings() );
final SchemaManagementTool tool = serviceRegistry.getService( SchemaManagementTool.class );
final JdbcServices jdbcServices = serviceRegistry.getService( JdbcServices.class );
final DatabaseInformation databaseInformation;
try {
databaseInformation = new DatabaseInformationImpl(
serviceRegistry,
serviceRegistry.getService( JdbcEnvironment.class ),
jdbcConnectionAccess,
metadata.getDatabase().getDefaultNamespace().getPhysicalName().getCatalog(),
metadata.getDatabase().getDefaultNamespace().getPhysicalName().getSchema()
);
}
catch (SQLException e) {
throw jdbcServices.getSqlExceptionHelper().convert(
e,
"Error creating DatabaseInformation for schema migration"
);
}
final ExceptionHandlerCollectingImpl exceptionHandler = new ExceptionHandlerCollectingImpl();
final ExecutionOptions executionOptions = SchemaManagementToolCoordinator.buildExecutionOptions(
config,
exceptionHandler
);
final TargetDescriptor targetDescriptor = SchemaExport.buildTargetDescriptor( targetTypes, outputFile, serviceRegistry );
try {
schemaMigrator.doMigration( metadata, databaseInformation, true, toolTargets );
tool.getSchemaMigrator( config ).doMigration( metadata, executionOptions, targetDescriptor );
}
finally {
databaseInformation.cleanup();
exceptions.addAll( exceptionHandler.getExceptions() );
}
}
private List<org.hibernate.tool.schema.spi.Target> buildToolTargets(Target target) {
List<org.hibernate.tool.schema.spi.Target> toolTargets = new ArrayList<org.hibernate.tool.schema.spi.Target>();
if ( target.doScript() ) {
toolTargets.add( new TargetStdoutImpl( delimiter, formatter ) );
}
if ( target.doExport() ) {
toolTargets.add( new TargetDatabaseImpl( jdbcConnectionAccess ) );
}
if ( outputFile != null ) {
LOG.writingGeneratedSchemaToFile( outputFile );
toolTargets.add( new TargetFileImpl( outputFile, delimiter, formatter ) );
}
return toolTargets;
}
/**
* Returns a List of all Exceptions which occurred during the export.
*
* @return A List containig the Exceptions occured during the export
* @return A List containing the Exceptions that occurred during the export
*/
public List getExceptions() {
return exceptions;
}
public void setHaltOnError(boolean haltOnError) {
public SchemaUpdate setHaltOnError(boolean haltOnError) {
return this;
}
public void setFormat(boolean format) {
formatter = (format ? FormatStyle.DDL : FormatStyle.NONE).getFormatter();
public SchemaUpdate setFormat(boolean format) {
this.format = format;
return this;
}
public void setOutputFile(String outputFile) {
public SchemaUpdate setOutputFile(String outputFile) {
this.outputFile = outputFile;
return this;
}
/**
@ -183,8 +120,9 @@ public class SchemaUpdate {
* @param delimiter The delimiter
*
*/
public void setDelimiter(String delimiter) {
public SchemaUpdate setDelimiter(String delimiter) {
this.delimiter = delimiter;
return this;
}
public static void main(String[] args) {
@ -195,10 +133,10 @@ public class SchemaUpdate {
try {
final MetadataImplementor metadata = buildMetadata( parsedArgs, serviceRegistry );
final SchemaUpdate schemaUpdate = new SchemaUpdate( metadata );
schemaUpdate.setOutputFile( parsedArgs.outFile );
schemaUpdate.setDelimiter( parsedArgs.delimiter );
schemaUpdate.execute( parsedArgs.script, parsedArgs.doUpdate );
new SchemaUpdate()
.setOutputFile( parsedArgs.outputFile )
.setDelimiter( parsedArgs.delimiter )
.execute( parsedArgs.targetTypes, metadata, serviceRegistry );
}
finally {
StandardServiceRegistryBuilder.destroy( serviceRegistry );
@ -263,13 +201,11 @@ public class SchemaUpdate {
}
private static class CommandLineArgs {
boolean script = true;
// If true then execute db updates, otherwise just generate and display updates
boolean doUpdate = true;
EnumSet<TargetType> targetTypes;
String propertiesFile = null;
String cfgXmlFile = null;
String outFile = null;
String outputFile = null;
String delimiter = null;
String implicitNamingStrategyImplName = null;
@ -281,10 +217,20 @@ public class SchemaUpdate {
public static CommandLineArgs parseCommandLineArgs(String[] args) {
final CommandLineArgs parsedArgs = new CommandLineArgs();
String targetText = null;
boolean script = true;
boolean doUpdate = true;
for ( String arg : args ) {
if ( arg.startsWith( "--" ) ) {
if ( arg.equals( "--quiet" ) ) {
parsedArgs.script = false;
script = false;
}
else if ( arg.startsWith( "--text" ) ) {
doUpdate = false;
}
else if ( arg.startsWith( "--target=" ) ) {
targetText = arg.substring( 9 );
}
else if ( arg.startsWith( "--properties=" ) ) {
parsedArgs.propertiesFile = arg.substring( 13 );
@ -292,11 +238,8 @@ public class SchemaUpdate {
else if ( arg.startsWith( "--config=" ) ) {
parsedArgs.cfgXmlFile = arg.substring( 9 );
}
else if ( arg.startsWith( "--text" ) ) {
parsedArgs.doUpdate = false;
}
else if ( arg.startsWith( "--output=" ) ) {
parsedArgs.outFile = arg.substring( 9 );
parsedArgs.outputFile = arg.substring( 9 );
}
else if ( arg.startsWith( "--naming=" ) ) {
DeprecationLogger.DEPRECATION_LOGGER.logDeprecatedNamingStrategyArgument();
@ -321,6 +264,16 @@ public class SchemaUpdate {
}
}
if ( targetText == null ) {
parsedArgs.targetTypes = TargetTypeHelper.parseLegacyCommandLineOptions( script, doUpdate, parsedArgs.outputFile );
}
else {
if ( !script || !doUpdate ) {
LOG.warn( "--text or --quiet was used; prefer --target=none|(stdout|database|script)*" );
}
parsedArgs.targetTypes = TargetTypeHelper.parseCommandLineOptions( targetText );
}
return parsedArgs;
}
}

View File

@ -12,7 +12,11 @@ package org.hibernate.tool.hbm2ddl;
*
* @author Brett Meyer
* @author Steve Ebersole
*
* @deprecated Everything in this package has been replaced with
* {@link org.hibernate.tool.schema.spi.SchemaManagementTool} and friends.
*/
@Deprecated
public class SchemaUpdateCommand {
private final String sql;
private final boolean quiet;

View File

@ -182,11 +182,11 @@ public class SchemaUpdateTask extends MatchingTask {
final MetadataImplementor metadata = (MetadataImplementor) metadataBuilder.build();
final SchemaUpdate su = new SchemaUpdate( metadata );
su.setOutputFile( outputFile.getPath() );
su.setDelimiter( delimiter );
su.setHaltOnError( haltOnError );
su.execute( !quiet, !text );
new SchemaUpdate()
.setOutputFile( outputFile.getPath() )
.setDelimiter( delimiter )
.setHaltOnError( haltOnError )
.execute( TargetTypeHelper.parseLegacyCommandLineOptions( !quiet, !text, outputFile.getPath() ), metadata );
}
catch (HibernateException e) {
throw new BuildException( "Schema text failed: " + e.getMessage(), e );

View File

@ -8,11 +8,13 @@ package org.hibernate.tool.hbm2ddl;
import java.io.File;
import java.io.FileInputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.MetadataBuilder;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.model.naming.ImplicitNamingStrategy;
@ -24,17 +26,14 @@ import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.log.DeprecationLogger;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.extract.internal.DatabaseInformationImpl;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.internal.ExceptionHandlerHaltImpl;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.jboss.logging.Logger;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
/**
* A commandline tool to update a database schema. May also be called from
@ -43,56 +42,27 @@ import org.jboss.logging.Logger;
* @author Christoph Sturm
*/
public class SchemaValidator {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(
CoreMessageLogger.class,
SchemaValidator.class.getName()
);
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( SchemaValidator.class );
private final ServiceRegistry serviceRegistry;
private final MetadataImplementor metadata;
private final JdbcConnectionAccess jdbcConnectionAccess;
public SchemaValidator(MetadataImplementor metadata) {
this( metadata.getMetadataBuildingOptions().getServiceRegistry(), metadata );
public void validate(Metadata metadata) {
validate( metadata, ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry() );
}
public SchemaValidator(ServiceRegistry serviceRegistry, MetadataImplementor metadata) {
this.serviceRegistry = serviceRegistry;
this.metadata = metadata;
this.jdbcConnectionAccess = serviceRegistry.getService( JdbcServices.class ).getBootstrapJdbcConnectionAccess();
}
/**
* Perform the validations.
*/
public void validate() {
@SuppressWarnings("unchecked")
public void validate(Metadata metadata, ServiceRegistry serviceRegistry) {
LOG.runningSchemaValidator();
final ConfigurationService cfgService = serviceRegistry.getService( ConfigurationService.class );
final JdbcServices jdbcServices = serviceRegistry.getService( JdbcServices.class );
final DatabaseInformation databaseInformation;
try {
databaseInformation = new DatabaseInformationImpl(
serviceRegistry,
serviceRegistry.getService( JdbcEnvironment.class ),
jdbcConnectionAccess,
metadata.getDatabase().getDefaultNamespace().getPhysicalName().getCatalog(),
metadata.getDatabase().getDefaultNamespace().getPhysicalName().getSchema()
);
}
catch (SQLException e) {
throw jdbcServices.getSqlExceptionHelper().convert(
e,
"Error creating DatabaseInformation for schema validation"
);
}
try {
serviceRegistry.getService( SchemaManagementTool.class ).getSchemaValidator( cfgService.getSettings() )
.doValidation( metadata, databaseInformation );
}
finally {
databaseInformation.cleanup();
}
Map config = new HashMap();
config.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
final SchemaManagementTool tool = serviceRegistry.getService( SchemaManagementTool.class );
final ExecutionOptions executionOptions = SchemaManagementToolCoordinator.buildExecutionOptions(
config,
ExceptionHandlerHaltImpl.INSTANCE
);
tool.getSchemaValidator( config ).doValidation( metadata, executionOptions );
}
public static void main(String[] args) {
@ -102,7 +72,7 @@ public class SchemaValidator {
try {
final MetadataImplementor metadata = buildMetadata( parsedArgs, serviceRegistry );
new SchemaValidator( serviceRegistry, metadata ).validate();
new SchemaValidator().validate( metadata, serviceRegistry );
}
finally {
StandardServiceRegistryBuilder.destroy( serviceRegistry );

View File

@ -128,7 +128,7 @@ public class SchemaValidatorTask extends MatchingTask {
final MetadataImplementor metadata = (MetadataImplementor) metadataBuilder.build();
new SchemaValidator( ssr, metadata ).validate();
new SchemaValidator().validate( metadata, ssr );
}
finally {
StandardServiceRegistryBuilder.destroy( ssr );

View File

@ -8,7 +8,11 @@ package org.hibernate.tool.hbm2ddl;
/**
* @author Steve Ebersole
*
* @deprecated Everything in this package has been replaced with
* {@link org.hibernate.tool.schema.spi.SchemaManagementTool} and friends.
*/
@Deprecated
class ScriptExporter implements Exporter {
@Override
public boolean acceptsImportScripts() {

View File

@ -16,7 +16,11 @@ import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
* connection.
*
* @author Steve Ebersole
*
* @deprecated Everything in this package has been replaced with
* {@link org.hibernate.tool.schema.spi.SchemaManagementTool} and friends.
*/
@Deprecated
class SuppliedConnectionHelper implements ConnectionHelper {
private Connection connection;
private boolean toggleAutoCommit;

View File

@ -19,7 +19,11 @@ import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
* was externally provided to us.
*
* @author Steve Ebersole
*
* @deprecated Everything in this package has been replaced with
* {@link org.hibernate.tool.schema.spi.SchemaManagementTool} and friends.
*/
@Deprecated
class SuppliedConnectionProviderConnectionHelper implements ConnectionHelper {
private ConnectionProvider provider;
private Connection connection;

View File

@ -23,7 +23,11 @@ import static org.hibernate.internal.CoreLogging.messageLogger;
*
* @author Christoph Sturm
* @author Max Rydahl Andersen
*
* @deprecated Everything in this package has been replaced with
* {@link org.hibernate.tool.schema.spi.SchemaManagementTool} and friends.
*/
@Deprecated
public class TableMetadata {
private static final CoreMessageLogger LOG = messageLogger( TableMetadata.class );

View File

@ -7,12 +7,26 @@
package org.hibernate.tool.hbm2ddl;
/**
* Describes the types of targets for create, drop and update actions
*
* @author Steve Ebersole
*/
public enum Target {
/**
* Export to the database.
*/
EXPORT,
/**
* Write to a script file.
*/
SCRIPT,
/**
* Export nowhere.
*/
NONE,
/**
* Do both {@link #EXPORT} and {@link #SCRIPT}
*/
BOTH;
public boolean doExport() {

View File

@ -0,0 +1,55 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.hbm2ddl;
import java.util.EnumSet;
import org.hibernate.tool.schema.TargetType;
/**
* @author Steve Ebersole
*/
public class TargetTypeHelper {
	/**
	 * Interpret the legacy hbm2ddl-style tool arguments (script/export flags plus an
	 * optional output file) into the corresponding set of {@link TargetType}s.
	 *
	 * @param script Should the commands be echoed to stdout?
	 * @param export Should the commands be executed against the database?
	 * @param outputFile The path of a script file to write to, or {@code null} for none
	 *
	 * @return The interpreted target types; never {@code null}
	 */
	public static EnumSet<TargetType> parseLegacyCommandLineOptions(boolean script, boolean export, String outputFile) {
		final EnumSet<TargetType> options = EnumSet.noneOf( TargetType.class );

		// the legacy flags are interpreted through the legacy Target enum
		final Target target = Target.interpret( script, export );

		if ( outputFile != null ) {
			options.add( TargetType.SCRIPT );
		}
		if ( target.doScript() ) {
			options.add( TargetType.STDOUT );
		}
		if ( target.doExport() ) {
			options.add( TargetType.DATABASE );
		}

		return options;
	}

	/**
	 * Parse a {@code --target=...} command-line value - either the special value
	 * {@code none}, or a comma-separated list of {@code database}, {@code stdout}
	 * and {@code script} (case-insensitive) - into the corresponding set of
	 * {@link TargetType}s.
	 *
	 * @param targetTypeText The text to parse
	 *
	 * @return The interpreted target types; never {@code null}
	 *
	 * @throws IllegalArgumentException On any unrecognized target name
	 */
	public static EnumSet<TargetType> parseCommandLineOptions(String targetTypeText) {
		final EnumSet<TargetType> options = EnumSet.noneOf( TargetType.class );

		if ( !targetTypeText.trim().equalsIgnoreCase( "none" ) ) {
			for ( String option : targetTypeText.split( "," ) ) {
				// be lenient about whitespace around the commas ("database, script")
				final String name = option.trim();
				if ( name.equalsIgnoreCase( "database" ) ) {
					options.add( TargetType.DATABASE );
				}
				else if ( name.equalsIgnoreCase( "stdout" ) ) {
					options.add( TargetType.STDOUT );
				}
				else if ( name.equalsIgnoreCase( "script" ) ) {
					options.add( TargetType.SCRIPT );
				}
				else {
					throw new IllegalArgumentException( "Unrecognized --target option : " + name );
				}
			}
		}

		return options;
	}

	private TargetTypeHelper() {
		// disallow direct instantiation of this utility class
	}
}

View File

@ -0,0 +1,191 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema;
import org.hibernate.internal.log.DeprecationLogger;
import org.hibernate.internal.util.StringHelper;
/**
* The allowable actions in terms of schema tooling. Covers the unified JPA and HBM2DDL
* cases.
*
* @author Steve Ebersole
*/
public enum Action {
	/**
	 * No action will be performed.  Valid in JPA; compatible with Hibernate's
	 * hbm2ddl action of the same name.
	 */
	NONE( "none" ),
	/**
	 * Database creation will be generated.  This is an action introduced by JPA.  Hibernate's
	 * legacy hbm2ddl had no such action - its "create" action is actually equivalent to {@link #CREATE}.
	 * <p/>
	 * Corresponds to a call to {@link org.hibernate.tool.schema.spi.SchemaCreator}
	 */
	CREATE_ONLY( "create", "create-only" ),
	/**
	 * Database dropping will be generated.
	 * <p/>
	 * Corresponds to a call to {@link org.hibernate.tool.schema.spi.SchemaDropper}
	 */
	DROP( "drop" ),
	/**
	 * Database dropping will be generated followed by database creation.
	 * <p/>
	 * Corresponds to a call to {@link org.hibernate.tool.schema.spi.SchemaDropper}
	 * followed immediately by a call to {@link org.hibernate.tool.schema.spi.SchemaCreator}
	 */
	CREATE( "drop-and-create", "create" ),
	/**
	 * Drop the schema and recreate it on SessionFactory startup.  Additionally, drop the
	 * schema on SessionFactory shutdown.
	 * <p/>
	 * While this is a valid option for auto schema tooling, it is not a valid action to pass to
	 * SchemaManagementTool; instead it would be expected that the caller to SchemaManagementTool
	 * would split this into 2 separate requests for:<ol>
	 *     <li>{@link #CREATE}</li>
	 *     <li>{@link #DROP}</li>
	 * </ol>
	 */
	CREATE_DROP( null, "create-drop" ),
	/**
	 * "validate" (Hibernate only) - validate the database schema.
	 */
	VALIDATE( null, "validate" ),
	/**
	 * "update" (Hibernate only) - update (alter) the database schema.
	 */
	UPDATE( null, "update" );

	// The name recognized from JPA-style configuration; null when the action is not valid in JPA
	private final String externalJpaName;
	// The name recognized from legacy `hibernate.hbm2ddl.auto` configuration; null when not valid there
	private final String externalHbm2ddlName;

	Action(String externalJpaName) {
		this( externalJpaName, externalJpaName );
	}

	Action(String externalJpaName, String externalHbm2ddlName) {
		this.externalJpaName = externalJpaName;
		this.externalHbm2ddlName = externalHbm2ddlName;
	}

	/**
	 * Is this action exposed to JPA-style configuration (does it define a JPA external name)?
	 */
	public boolean isValidJpaAction() {
		return externalJpaName != null;
	}

	@Override
	public String toString() {
		return getClass().getSimpleName() + "(externalJpaName=" + externalJpaName + ", externalHbm2ddlName=" + externalHbm2ddlName + ")";
	}

	/**
	 * Used when processing JPA configuration to interpret the user config values.  JPA
	 * external names are preferred; legacy hbm2ddl names are accepted as a fallback.
	 *
	 * @param value The encountered config value
	 *
	 * @return The matching enum value.  An empty value will return {@link #NONE}.
	 *
	 * @throws IllegalArgumentException If the incoming value is unrecognized
	 */
	public static Action interpretJpaSetting(Object value) {
		if ( value == null ) {
			return NONE;
		}

		if ( Action.class.isInstance( value ) ) {
			return (Action) value;
		}

		// `name` is never null here (it comes from toString), so a plain isEmpty check suffices
		final String name = value.toString();
		if ( name.isEmpty() || NONE.externalJpaName.equals( name ) ) {
			// default is NONE
			return NONE;
		}

		// prefer JPA external names
		for ( Action action : values() ) {
			if ( action.externalJpaName == null ) {
				continue;
			}
			if ( action.externalJpaName.equals( name ) ) {
				return action;
			}
		}

		// then check hbm2ddl names
		for ( Action action : values() ) {
			if ( action.externalHbm2ddlName == null ) {
				continue;
			}
			if ( action.externalHbm2ddlName.equals( name ) ) {
				return action;
			}
		}

		throw new IllegalArgumentException( "Unrecognized JPA schema generation action value : " + value );
	}

	/**
	 * Used when processing legacy {@code hibernate.hbm2ddl.auto} configuration to interpret
	 * the user config values.  hbm2ddl external names are preferred; JPA names are accepted
	 * as a fallback.
	 *
	 * @param value The encountered config value
	 *
	 * @return The matching enum value.  An empty value will return {@link #NONE}.
	 *
	 * @throws IllegalArgumentException If the incoming value is unrecognized
	 */
	public static Action interpretHbm2ddlSetting(Object value) {
		if ( value == null ) {
			return NONE;
		}

		if ( Action.class.isInstance( value ) ) {
			return (Action) value;
		}

		final String name = value.toString();
		if ( name.isEmpty() || NONE.externalJpaName.equals( name ) ) {
			// default is NONE
			return NONE;
		}

		// prefer hbm2ddl names
		for ( Action action : values() ) {
			if ( action.externalHbm2ddlName == null ) {
				continue;
			}
			if ( action.externalHbm2ddlName.equals( name ) ) {
				return action;
			}
		}

		// then check JPA external names
		for ( Action action : values() ) {
			if ( action.externalJpaName == null ) {
				continue;
			}
			if ( action.externalJpaName.equals( name ) ) {
				return action;
			}
		}

		throw new IllegalArgumentException( "Unrecognized legacy `hibernate.hbm2ddl.auto` value : " + value );
	}
}

View File

@ -0,0 +1,98 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.internal.util.StringHelper;
/**
* Enumeration of the various types of sources understood by various SchemaManagementTooling
* delegates.
*
* @see AvailableSettings#HBM2DDL_CREATE_SOURCE
* @see AvailableSettings#HBM2DDL_DROP_SOURCE
*
* @author Steve Ebersole
*/
public enum SourceType {
	/**
	 * "metadata" - The O/RM metadata is used as the exclusive source for generation.
	 */
	METADATA( "metadata" ),
	/**
	 * "script" - External DDL script(s) are used as the exclusive source for generation.  The scripts for schema
	 * creation and dropping come from different sources.  The creation DDL script is identified by the
	 * {@value AvailableSettings#HBM2DDL_CREATE_SCRIPT_SOURCE} setting; the drop DDL script is identified by the
	 * {@value AvailableSettings#HBM2DDL_DROP_SCRIPT_SOURCE} setting.
	 *
	 * @see AvailableSettings#HBM2DDL_CREATE_SCRIPT_SOURCE
	 * @see AvailableSettings#HBM2DDL_DROP_SCRIPT_SOURCE
	 */
	SCRIPT( "script" ),
	/**
	 * "metadata-then-scripts" - Both the O/RM metadata and external DDL scripts are used as sources for generation,
	 * with the O/RM metadata being applied first.
	 *
	 * @see #METADATA
	 * @see #SCRIPT
	 */
	METADATA_THEN_SCRIPT( "metadata-then-script" ),
	/**
	 * "scripts-then-metadata" - Both the O/RM metadata and external DDL scripts are used as sources for generation,
	 * with the commands from the external DDL script(s) being applied first.
	 *
	 * @see #SCRIPT
	 * @see #METADATA
	 */
	SCRIPT_THEN_METADATA( "script-then-metadata" );

	// The recognized configuration (external) name for this source type
	private final String externalName;

	private SourceType(String externalName) {
		this.externalName = externalName;
	}

	/**
	 * Used when processing JPA configuration to interpret the user config value.
	 *
	 * @param value The encountered user config value
	 * @param defaultValue The value to return for a null/empty {@code value}
	 *
	 * @return The matching enum value.  An empty value will return {@code defaultValue}.
	 *
	 * @throws IllegalArgumentException If the incoming value is unrecognized
	 */
	public static SourceType interpret(Object value, SourceType defaultValue) {
		if ( value == null ) {
			return defaultValue;
		}

		if ( SourceType.class.isInstance( value ) ) {
			return (SourceType) value;
		}

		// `name` is never null here (it comes from toString), so a plain isEmpty check suffices
		final String name = value.toString();
		if ( name.isEmpty() ) {
			// empty is in fact valid as means to interpret default value based on other settings
			return defaultValue;
		}

		// NOTE: the external names must be compared against the String form (`name`), not the
		// raw Object `value` - otherwise any non-String config value would never match and
		// would incorrectly be rejected below.
		for ( SourceType sourceType : values() ) {
			if ( sourceType.externalName.equals( name ) ) {
				return sourceType;
			}
		}

		throw new IllegalArgumentException( "Unrecognized schema generation source-type value : " + value );
	}
}

View File

@ -0,0 +1,25 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema;
/**
* @author Steve Ebersole
*/
public enum TargetType {
	/** Export the commands directly to the database. */
	DATABASE,

	/** Write the commands to a script file. */
	SCRIPT,

	/** Write the commands to {@code System.out}. */
	STDOUT
}

View File

@ -29,13 +29,24 @@ import org.hibernate.tool.schema.extract.spi.TableInformation;
* @author Steve Ebersole
*/
public class DatabaseInformationImpl implements DatabaseInformation, ExtractionContext.DatabaseObjectAccess {
private final InformationExtractor extractor;
private final ExtractionContext extractionContext;
private final JdbcEnvironment jdbcEnvironment;
private final ExtractionContext extractionContext;
private final InformationExtractor extractor;
private final Map<QualifiedSequenceName,SequenceInformation> sequenceInformationMap = new HashMap<QualifiedSequenceName, SequenceInformation>();
public DatabaseInformationImpl(
JdbcEnvironment jdbcEnvironment,
InformationExtractor extractor,
ExtractionContext extractionContext) throws SQLException {
this.jdbcEnvironment = jdbcEnvironment;
this.extractionContext = extractionContext;
this.extractor = extractor;
// legacy code did initialize sequences...
initializeSequences();
}
public DatabaseInformationImpl(
ServiceRegistry serviceRegistry,
JdbcEnvironment jdbcEnvironment,
@ -109,10 +120,6 @@ public class DatabaseInformationImpl implements DatabaseInformation, ExtractionC
);
}
@Override
public void registerTable(TableInformation tableInformation) {
}
@Override
public SequenceInformation getSequenceInformation(
Identifier catalogName,

View File

@ -6,6 +6,7 @@
*/
package org.hibernate.tool.schema.extract.spi;
import org.hibernate.Incubating;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.QualifiedSequenceName;
@ -18,6 +19,7 @@ import org.hibernate.boot.model.relational.QualifiedTableName;
* @author Teodor Danciu
* @author Steve Ebersole
*/
@Incubating
public interface DatabaseInformation {
/**
* Check to see if the given schema already exists.
@ -37,7 +39,7 @@ public interface DatabaseInformation {
*
* @return The table information. May return {@code null} if not found.
*/
public TableInformation getTableInformation(Identifier catalogName, Identifier schemaName, Identifier tableName);
TableInformation getTableInformation(Identifier catalogName, Identifier schemaName, Identifier tableName);
/**
* Obtain reference to the named TableInformation
@ -47,7 +49,7 @@ public interface DatabaseInformation {
*
* @return The table information. May return {@code null} if not found.
*/
public TableInformation getTableInformation(Namespace.Name schemaName, Identifier tableName);
TableInformation getTableInformation(Namespace.Name schemaName, Identifier tableName);
/**
* Obtain reference to the named TableInformation
@ -56,9 +58,7 @@ public interface DatabaseInformation {
*
* @return The table information. May return {@code null} if not found.
*/
public TableInformation getTableInformation(QualifiedTableName tableName);
public void registerTable(TableInformation tableInformation);
TableInformation getTableInformation(QualifiedTableName tableName);
/**
* Obtain reference to the named SequenceInformation
@ -69,7 +69,7 @@ public interface DatabaseInformation {
*
* @return The sequence information. May return {@code null} if not found.
*/
public SequenceInformation getSequenceInformation(
SequenceInformation getSequenceInformation(
Identifier catalogName,
Identifier schemaName,
Identifier sequenceName);
@ -82,7 +82,7 @@ public interface DatabaseInformation {
*
* @return The sequence information. May return {@code null} if not found.
*/
public SequenceInformation getSequenceInformation(Namespace.Name schemaName, Identifier sequenceName);
SequenceInformation getSequenceInformation(Namespace.Name schemaName, Identifier sequenceName);
/**
* Obtain reference to the named SequenceInformation
@ -91,7 +91,7 @@ public interface DatabaseInformation {
*
* @return The sequence information. May return {@code null} if not found.
*/
public SequenceInformation getSequenceInformation(QualifiedSequenceName sequenceName);
SequenceInformation getSequenceInformation(QualifiedSequenceName sequenceName);
/**
* Check to see if the given catalog already exists.

View File

@ -9,6 +9,7 @@ package org.hibernate.tool.schema.extract.spi;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import org.hibernate.Incubating;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.QualifiedSequenceName;
import org.hibernate.boot.model.relational.QualifiedTableName;
@ -21,6 +22,7 @@ import org.hibernate.service.ServiceRegistry;
*
* @author Steve Ebersole
*/
@Incubating
public interface ExtractionContext {
ServiceRegistry getServiceRegistry();
JdbcEnvironment getJdbcEnvironment();
@ -34,6 +36,7 @@ public interface ExtractionContext {
* In conjunction with {@link #getDatabaseObjectAccess()} provides access to
* information about known database objects to the extractor.
*/
@Incubating
interface DatabaseObjectAccess {
TableInformation locateTableInformation(QualifiedTableName tableName);
SequenceInformation locateSequenceInformation(QualifiedSequenceName sequenceName);

View File

@ -6,8 +6,7 @@
*/
package org.hibernate.tool.schema.extract.spi;
import java.util.Collection;
import org.hibernate.Incubating;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.tool.schema.extract.internal.TableInformationImpl;
@ -21,6 +20,7 @@ import org.hibernate.tool.schema.extract.internal.TableInformationImpl;
*
* @author Steve Ebersole
*/
@Incubating
public interface InformationExtractor {
/**
@ -53,7 +53,7 @@ public interface InformationExtractor {
*
* @return table info for the matching table
*/
public TableInformation getTable(Identifier catalog, Identifier schema, Identifier tableName);
TableInformation getTable(Identifier catalog, Identifier schema, Identifier tableName);
/**
* Return information about column for the given table. Typically called from the TableInformation itself
@ -64,7 +64,7 @@ public interface InformationExtractor {
*
* @return The extracted column information
*/
public ColumnInformation getColumn(TableInformation tableInformation, Identifier columnIdentifier);
ColumnInformation getColumn(TableInformation tableInformation, Identifier columnIdentifier);
/**
* Extract information about the given table's primary key.
@ -73,7 +73,7 @@ public interface InformationExtractor {
*
* @return The extracted primary key information
*/
public PrimaryKeyInformation getPrimaryKey(TableInformationImpl tableInformation);
PrimaryKeyInformation getPrimaryKey(TableInformationImpl tableInformation);
/**
* Extract information about indexes defined against the given table. Typically called from the TableInformation
@ -83,7 +83,7 @@ public interface InformationExtractor {
*
* @return The extracted index information
*/
public Iterable<IndexInformation> getIndexes(TableInformation tableInformation);
Iterable<IndexInformation> getIndexes(TableInformation tableInformation);
/**
* Extract information about foreign keys defined on the given table (targeting or point-at other tables).
@ -93,5 +93,5 @@ public interface InformationExtractor {
*
* @return The extracted foreign-key information
*/
public Iterable<ForeignKeyInformation> getForeignKeys(TableInformation tableInformation);
Iterable<ForeignKeyInformation> getForeignKeys(TableInformation tableInformation);
}

View File

@ -0,0 +1,76 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.tool.schema.Action;
/**
* For JPA-style schema-gen handling database and script target handing are
* configured individually. This tuple allows grouping the action for both
* targets.
*
* @author Steve Ebersole
*/
public class ActionGrouping {
	private final Action databaseAction;
	private final Action scriptAction;

	public ActionGrouping(Action databaseAction, Action scriptAction) {
		this.databaseAction = databaseAction;
		this.scriptAction = scriptAction;
	}

	public Action getDatabaseAction() {
		return databaseAction;
	}

	public Action getScriptAction() {
		return scriptAction;
	}

	/**
	 * Does either grouped action require JDBC access?  True when commands are to be
	 * executed against the database, or when existing database metadata must be read
	 * to produce a validate/update script.
	 */
	public boolean needsJdbcAccess() {
		// executing commands against the database always requires a connection
		if ( databaseAction != Action.NONE ) {
			return true;
		}
		// script validate/update must read the existing database metadata
		return scriptAction == Action.VALIDATE || scriptAction == Action.UPDATE;
	}

	/**
	 * Interpret the configuration into an {@link ActionGrouping}: the JPA-style
	 * settings win; the legacy {@code hibernate.hbm2ddl.auto} setting is consulted
	 * (for the database action only) when neither JPA setting was specified.
	 */
	public static ActionGrouping interpret(Map configurationValues) {
		// JPA-style settings take precedence
		Action database = Action.interpretJpaSetting(
				configurationValues.get( AvailableSettings.HBM2DDL_DATABASE_ACTION )
		);
		final Action script = Action.interpretJpaSetting(
				configurationValues.get( AvailableSettings.HBM2DDL_SCRIPTS_ACTION )
		);

		// fall back to the legacy HBM2DDL_AUTO setting only when no JPA setting was given
		if ( database == Action.NONE && script == Action.NONE ) {
			final Action legacy = Action.interpretHbm2ddlSetting(
					configurationValues.get( AvailableSettings.HBM2DDL_AUTO )
			);
			if ( legacy != Action.NONE ) {
				database = legacy;
			}
		}

		return new ActionGrouping( database, script );
	}
}

View File

@ -0,0 +1,32 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal;
import java.util.ArrayList;
import java.util.List;
import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ExceptionHandler;
/**
* @author Steve Ebersole
*/
public class ExceptionHandlerCollectingImpl implements ExceptionHandler {
	// every exception reported so far, in the order it was handled
	private final List<CommandAcceptanceException> collected = new ArrayList<CommandAcceptanceException>();

	public ExceptionHandlerCollectingImpl() {
	}

	@Override
	public void handleException(CommandAcceptanceException exception) {
		// record-and-continue: callers inspect getExceptions() when processing completes
		collected.add( exception );
	}

	/**
	 * Access to the exceptions collected so far (live list, insertion order).
	 */
	public List<CommandAcceptanceException> getExceptions() {
		return collected;
	}
}

View File

@ -0,0 +1,35 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal;
import java.util.Locale;
import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.SchemaManagementException;
/**
* @author Steve Ebersole
*/
public class ExceptionHandlerHaltImpl implements ExceptionHandler {
	/**
	 * Singleton access
	 */
	public static final ExceptionHandlerHaltImpl INSTANCE = new ExceptionHandlerHaltImpl();

	@Override
	public void handleException(CommandAcceptanceException exception) {
		// halt-on-error strategy: wrap and rethrow so schema tooling stops at the
		// first failed command instead of continuing
		final String message = String.format(
				Locale.ROOT,
				"Halting on error : %s",
				exception.getMessage()
		);
		throw new SchemaManagementException( message, exception );
	}
}

View File

@ -0,0 +1,33 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal;
import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.jboss.logging.Logger;
/**
* @author Steve Ebersole
*/
public class ExceptionHandlerLoggedImpl implements ExceptionHandler {
	private static final Logger log = Logger.getLogger( ExceptionHandlerLoggedImpl.class );

	/**
	 * Singleton access
	 */
	public static final ExceptionHandlerLoggedImpl INSTANCE = new ExceptionHandlerLoggedImpl();

	@Override
	public void handleException(CommandAcceptanceException exception) {
		// log-and-continue strategy: the failed command is reported as a warning
		// (with the full exception attached) and processing proceeds
		log.warnf(
				exception,
				"GenerationTarget encountered exception accepting command : %s",
				exception.getMessage()
		);
	}
}

View File

@ -0,0 +1,159 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal;
import java.io.File;
import java.io.Reader;
import java.io.Writer;
import java.net.URL;
import java.sql.SQLException;
import java.util.Map;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.Action;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.internal.exec.ImprovedDatabaseInformationImpl;
import org.hibernate.tool.schema.internal.exec.JdbcConnectionContext;
import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromFile;
import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromReader;
import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromUrl;
import org.hibernate.tool.schema.internal.exec.ScriptTargetOutputToFile;
import org.hibernate.tool.schema.internal.exec.ScriptTargetOutputToUrl;
import org.hibernate.tool.schema.internal.exec.ScriptTargetOutputToWriter;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
import org.jboss.logging.Logger;
/**
* @author Steve Ebersole
*/
public class Helper {
	private static final Logger log = Logger.getLogger( Helper.class );

	/**
	 * Does either action (database or script) of the grouping call for create DDL?
	 */
	public static boolean includesCreate(ActionGrouping actions) {
		return includesCreate( actions.getDatabaseAction() )
				|| includesCreate( actions.getScriptAction() );
	}

	/**
	 * Does the given action call for create DDL?  True for CREATE_ONLY, CREATE
	 * and CREATE_DROP.
	 */
	public static boolean includesCreate(Action action) {
		return action == Action.CREATE_ONLY
				|| action == Action.CREATE
				|| action == Action.CREATE_DROP;
	}

	/**
	 * Does either action (database or script) of the grouping call for drop DDL?
	 */
	private static boolean includesDrop(ActionGrouping actions) {
		return includesDrop( actions.getDatabaseAction() )
				|| includesDrop( actions.getScriptAction() );
	}

	/**
	 * Does the given action call for drop DDL?  True for DROP, CREATE
	 * and CREATE_DROP.
	 */
	public static boolean includesDrop(Action action) {
		return action == Action.DROP
				|| action == Action.CREATE
				|| action == Action.CREATE_DROP;
	}

	/**
	 * Resolve a script "source" setting into a ScriptSourceInput.  The setting may be
	 * a Reader (used directly), or a String naming a URL, classpath resource, or file path.
	 *
	 * NOTE(review): unlike {@link #interpretScriptTargetSetting}, there is no null
	 * guard here — callers are assumed to pass a non-null setting; verify at call sites.
	 *
	 * @param scriptSourceSetting The configured setting value (Reader or name)
	 * @param classLoaderService Used for URL/resource lookup of name-based settings
	 *
	 * @return The resolved source input (never null)
	 */
	public static ScriptSourceInput interpretScriptSourceSetting(Object scriptSourceSetting, ClassLoaderService classLoaderService) {
		if ( Reader.class.isInstance( scriptSourceSetting ) ) {
			return new ScriptSourceInputFromReader( (Reader) scriptSourceSetting );
		}
		else {
			final String scriptSourceSettingString = scriptSourceSetting.toString();
			log.debugf( "Attempting to resolve script source setting : %s", scriptSourceSettingString );

			// setting could be either:
			// 1) string URL representation (i.e., "file://...")
			// 2) relative file path (resource lookup)
			// 3) absolute file path

			log.trace( "Trying as URL..." );
			// ClassLoaderService.locateResource() first tries the given resource name as url form...
			final URL url = classLoaderService.locateResource( scriptSourceSettingString );
			if ( url != null ) {
				return new ScriptSourceInputFromUrl( url );
			}

			// assume it is a File path
			final File file = new File( scriptSourceSettingString );
			return new ScriptSourceInputFromFile( file );
		}
	}

	/**
	 * Resolve a script "target" setting into a ScriptTargetOutput.  The setting may be
	 * null (no script target), a Writer (used directly), or a String naming a URL,
	 * classpath resource, or file path.
	 *
	 * @param scriptTargetSetting The configured setting value (null, Writer or name)
	 * @param classLoaderService Used for URL/resource lookup of name-based settings
	 *
	 * @return The resolved target output, or {@code null} when no setting was given
	 */
	public static ScriptTargetOutput interpretScriptTargetSetting(Object scriptTargetSetting, ClassLoaderService classLoaderService) {
		if ( scriptTargetSetting == null ) {
			return null;
		}
		else if ( Writer.class.isInstance( scriptTargetSetting ) ) {
			return new ScriptTargetOutputToWriter( (Writer) scriptTargetSetting );
		}
		else {
			final String scriptTargetSettingString = scriptTargetSetting.toString();
			log.debugf( "Attempting to resolve script source setting : %s", scriptTargetSettingString );

			// setting could be either:
			// 1) string URL representation (i.e., "file://...")
			// 2) relative file path (resource lookup)
			// 3) absolute file path

			log.trace( "Trying as URL..." );
			// ClassLoaderService.locateResource() first tries the given resource name as url form...
			final URL url = classLoaderService.locateResource( scriptTargetSettingString );
			if ( url != null ) {
				return new ScriptTargetOutputToUrl( url );
			}

			// assume it is a File path
			final File file = new File( scriptTargetSettingString );
			return new ScriptTargetOutputToFile( file );
		}
	}

	/**
	 * Should the tooling also create the catalogs/schemas (namespaces) themselves?
	 * Prefers the JPA-style HBM2DLL_CREATE_SCHEMAS setting, falling back to the
	 * legacy HBM2DLL_CREATE_NAMESPACES setting; defaults to {@code false}.
	 */
	public static boolean interpretNamespaceHandling(Map configurationValues) {
		// prefer the JPA setting...
		return ConfigurationHelper.getBoolean(
				AvailableSettings.HBM2DLL_CREATE_SCHEMAS,
				configurationValues,
				ConfigurationHelper.getBoolean(
						AvailableSettings.HBM2DLL_CREATE_NAMESPACES,
						configurationValues,
						false
				)
		);
	}

	/**
	 * Should generated DDL be formatted?  Driven by the FORMAT_SQL setting;
	 * defaults to {@code false}.
	 */
	public static boolean interpretFormattingEnabled(Map configurationValues) {
		return ConfigurationHelper.getBoolean(
				AvailableSettings.FORMAT_SQL,
				configurationValues,
				false
		);
	}

	/**
	 * Build a DatabaseInformation over the given connection context, converting any
	 * SQLException into the environment's runtime JDBCException form.
	 *
	 * @param serviceRegistry Used to locate the JdbcEnvironment
	 * @param connectionContext Access to the JDBC connection for extraction
	 * @param defaultNamespace The default catalog/schema name pair
	 *
	 * @return The extracted database information
	 */
	public static DatabaseInformation buildDatabaseInformation(
			ServiceRegistry serviceRegistry,
			JdbcConnectionContext connectionContext,
			Namespace.Name defaultNamespace) {
		final JdbcEnvironment jdbcEnvironment = serviceRegistry.getService( JdbcEnvironment.class );
		try {
			return new ImprovedDatabaseInformationImpl(
					serviceRegistry,
					jdbcEnvironment,
					connectionContext,
					defaultNamespace
			);
		}
		catch (SQLException e) {
			throw jdbcEnvironment.getSqlExceptionHelper().convert( e, "Unable to build DatabaseInformation" );
		}
	}
}

View File

@ -6,21 +6,44 @@
*/
package org.hibernate.tool.schema.internal;
import java.sql.Connection;
import java.util.Map;
import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolver;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.service.spi.ServiceRegistryAwareService;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToDatabase;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToScript;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToStdout;
import org.hibernate.tool.schema.internal.exec.JdbcConnectionAccessProvidedConnectionImpl;
import org.hibernate.tool.schema.internal.exec.JdbcConnectionContext;
import org.hibernate.tool.schema.internal.exec.JdbcConnectionContextNonSharedImpl;
import org.hibernate.tool.schema.internal.exec.JdbcContext;
import org.hibernate.tool.schema.spi.SchemaCreator;
import org.hibernate.tool.schema.spi.SchemaDropper;
import org.hibernate.tool.schema.spi.SchemaFilterProvider;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.SchemaMigrator;
import org.hibernate.tool.schema.spi.SchemaValidator;
import org.hibernate.tool.schema.spi.TargetDescriptor;
import org.jboss.logging.Logger;
import static org.hibernate.cfg.AvailableSettings.HBM2DDL_CONNECTION;
import static org.hibernate.cfg.AvailableSettings.HBM2DDL_DELIMITER;
/**
* The standard Hibernate implementation for performing schema management.
@ -28,42 +51,231 @@ import org.hibernate.tool.schema.spi.SchemaValidator;
* @author Steve Ebersole
*/
public class HibernateSchemaManagementTool implements SchemaManagementTool, ServiceRegistryAwareService {
private static final Logger log = Logger.getLogger( HibernateSchemaManagementTool.class );
private ServiceRegistry serviceRegistry;
@Override
public void injectServices(ServiceRegistryImplementor serviceRegistry) {
this.serviceRegistry = serviceRegistry;
}
@Override
public SchemaCreator getSchemaCreator(Map options) {
return new SchemaCreatorImpl( getSchemaFilterProvider( options ).getCreateFilter() );
return new SchemaCreatorImpl( this, getSchemaFilterProvider( options ).getCreateFilter() );
}
@Override
public SchemaDropper getSchemaDropper(Map options) {
return new SchemaDropperImpl( getSchemaFilterProvider( options ).getDropFilter() );
return new SchemaDropperImpl( this, getSchemaFilterProvider( options ).getDropFilter() );
}
@Override
public SchemaMigrator getSchemaMigrator(Map options) {
return new SchemaMigratorImpl( getSchemaFilterProvider( options ).getMigrateFilter() );
return new SchemaMigratorImpl( this, getSchemaFilterProvider( options ).getMigrateFilter() );
}
@Override
public SchemaValidator getSchemaValidator(Map options) {
final Dialect dialect = serviceRegistry.getService( JdbcServices.class ).getDialect();
return new SchemaValidatorImpl( getSchemaFilterProvider( options ).getValidateFilter(), dialect );
return new SchemaValidatorImpl( this, getSchemaFilterProvider( options ).getValidateFilter() );
}
private SchemaFilterProvider getSchemaFilterProvider(Map options) {
final Object configuredOption = (options == null)
? null
: options.get( AvailableSettings.SCHEMA_FILTER_PROVIDER );
: options.get( AvailableSettings.HBM2DDL_FILTER_PROVIDER );
return serviceRegistry.getService( StrategySelector.class ).resolveDefaultableStrategy(
SchemaFilterProvider.class,
configuredOption,
DefaultSchemaFilterProvider.INSTANCE
);
}
@Override
public void injectServices(ServiceRegistryImplementor serviceRegistry) {
this.serviceRegistry = serviceRegistry;
GenerationTarget[] buildGenerationTargets(
TargetDescriptor targetDescriptor,
JdbcContext jdbcContext,
Map options,
boolean needsAutoCommit) {
final String scriptDelimiter = ConfigurationHelper.getString( HBM2DDL_DELIMITER, options );
final GenerationTarget[] targets = new GenerationTarget[ targetDescriptor.getTargetTypes().size() ];
int index = 0;
if ( targetDescriptor.getTargetTypes().contains( TargetType.STDOUT ) ) {
targets[index] = new GenerationTargetToStdout( scriptDelimiter );
index++;
}
if ( targetDescriptor.getTargetTypes().contains( TargetType.SCRIPT ) ) {
if ( targetDescriptor.getScriptTargetOutput() == null ) {
throw new SchemaManagementException( "Writing to script was requested, but no script file was specified" );
}
targets[index] = new GenerationTargetToScript( targetDescriptor.getScriptTargetOutput(), scriptDelimiter );
index++;
}
if ( targetDescriptor.getTargetTypes().contains( TargetType.DATABASE ) ) {
targets[index] = new GenerationTargetToDatabase(
new JdbcConnectionContextNonSharedImpl(
jdbcContext.getJdbcConnectionAccess(),
jdbcContext.getSqlStatementLogger(),
needsAutoCommit
)
);
}
return targets;
}
GenerationTarget[] buildGenerationTargets(
TargetDescriptor targetDescriptor,
JdbcConnectionContext connectionContext,
Map options) {
final String scriptDelimiter = ConfigurationHelper.getString( HBM2DDL_DELIMITER, options );
final GenerationTarget[] targets = new GenerationTarget[ targetDescriptor.getTargetTypes().size() ];
int index = 0;
if ( targetDescriptor.getTargetTypes().contains( TargetType.STDOUT ) ) {
targets[index] = new GenerationTargetToStdout( scriptDelimiter );
index++;
}
if ( targetDescriptor.getTargetTypes().contains( TargetType.SCRIPT ) ) {
if ( targetDescriptor.getScriptTargetOutput() == null ) {
throw new SchemaManagementException( "Writing to script was requested, but no script file was specified" );
}
targets[index] = new GenerationTargetToScript( targetDescriptor.getScriptTargetOutput(), scriptDelimiter );
index++;
}
if ( targetDescriptor.getTargetTypes().contains( TargetType.DATABASE ) ) {
targets[index] = new GenerationTargetToDatabase( connectionContext );
}
return targets;
}
public JdbcContext resolveJdbcContext(Map configurationValues) {
final JdbcContextBuilder jdbcContextBuilder = new JdbcContextBuilder( serviceRegistry );
// see if a specific connection has been provided
final Connection providedConnection = (Connection) configurationValues.get( HBM2DDL_CONNECTION );
if ( providedConnection != null ) {
jdbcContextBuilder.jdbcConnectionAccess = new JdbcConnectionAccessProvidedConnectionImpl( providedConnection );
}
// see if a specific Dialect override has been provided...
final String explicitDbName = (String) configurationValues.get( AvailableSettings.HBM2DDL_DB_NAME );
if ( StringHelper.isNotEmpty( explicitDbName ) ) {
final String explicitDbMajor = (String) configurationValues.get( AvailableSettings.HBM2DDL_DB_MAJOR_VERSION );
final String explicitDbMinor = (String) configurationValues.get( AvailableSettings.HBM2DDL_DB_MINOR_VERSION );
final Dialect indicatedDialect = serviceRegistry.getService( DialectResolver.class ).resolveDialect(
new DialectResolutionInfo() {
@Override
public String getDatabaseName() {
return explicitDbName;
}
@Override
public int getDatabaseMajorVersion() {
return StringHelper.isEmpty( explicitDbMajor )
? NO_VERSION
: Integer.parseInt( explicitDbMajor );
}
@Override
public int getDatabaseMinorVersion() {
return StringHelper.isEmpty( explicitDbMinor )
? NO_VERSION
: Integer.parseInt( explicitDbMinor );
}
@Override
public String getDriverName() {
return null;
}
@Override
public int getDriverMajorVersion() {
return NO_VERSION;
}
@Override
public int getDriverMinorVersion() {
return NO_VERSION;
}
}
);
if ( indicatedDialect == null ) {
log.debugf(
"Unable to resolve indicated Dialect resolution info (%s, %s, %s)",
explicitDbName,
explicitDbMajor,
explicitDbMinor
);
}
else {
jdbcContextBuilder.dialect = indicatedDialect;
}
}
return jdbcContextBuilder.buildJdbcContext();
}
public ServiceRegistry getServiceRegistry() {
return serviceRegistry;
}
private static class JdbcContextBuilder {
private final SqlStatementLogger sqlStatementLogger;
private JdbcConnectionAccess jdbcConnectionAccess;
private Dialect dialect;
public JdbcContextBuilder(ServiceRegistry serviceRegistry) {
final JdbcServices jdbcServices = serviceRegistry.getService( JdbcServices.class );
this.sqlStatementLogger = jdbcServices.getSqlStatementLogger();
this.dialect = jdbcServices.getJdbcEnvironment().getDialect();
this.jdbcConnectionAccess = jdbcServices.getBootstrapJdbcConnectionAccess();
}
public JdbcContext buildJdbcContext() {
return new JdbcContextImpl( jdbcConnectionAccess, dialect, sqlStatementLogger );
}
}
public static class JdbcContextImpl implements JdbcContext {
private final JdbcConnectionAccess jdbcConnectionAccess;
private final Dialect dialect;
private final SqlStatementLogger sqlStatementLogger;
private JdbcContextImpl(
JdbcConnectionAccess jdbcConnectionAccess,
Dialect dialect,
SqlStatementLogger sqlStatementLogger) {
this.jdbcConnectionAccess = jdbcConnectionAccess;
this.dialect = dialect;
this.sqlStatementLogger = sqlStatementLogger;
}
@Override
public JdbcConnectionAccess getJdbcConnectionAccess() {
return jdbcConnectionAccess;
}
@Override
public Dialect getDialect() {
return dialect;
}
@Override
public SqlStatementLogger getSqlStatementLogger() {
return sqlStatementLogger;
}
}
}

View File

@ -6,10 +6,13 @@
*/
package org.hibernate.tool.schema.internal;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.hibernate.boot.Metadata;
@ -20,16 +23,43 @@ import org.hibernate.boot.model.relational.Exportable;
import org.hibernate.boot.model.relational.InitCommand;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.Sequence;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.engine.jdbc.internal.FormatStyle;
import org.hibernate.engine.jdbc.internal.Formatter;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.Index;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.UniqueKey;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.hibernate.tool.hbm2ddl.ImportSqlCommandExtractor;
import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.internal.exec.JdbcContext;
import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromUrl;
import org.hibernate.tool.schema.internal.exec.ScriptSourceInputNonExistentImpl;
import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaCreator;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.Target;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.SourceDescriptor;
import org.hibernate.tool.schema.spi.TargetDescriptor;
import static org.hibernate.cfg.AvailableSettings.HBM2DDL_LOAD_SCRIPT_SOURCE;
import static org.hibernate.tool.schema.internal.Helper.interpretScriptSourceSetting;
/**
* This is functionally nothing more than the creation script from the older SchemaExport class (plus some
@ -38,118 +68,143 @@ import org.hibernate.tool.schema.spi.Target;
* @author Steve Ebersole
*/
public class SchemaCreatorImpl implements SchemaCreator {
private static final CoreMessageLogger log = CoreLogging.messageLogger( SchemaCreatorImpl.class );
private final SchemaFilter filter;
public SchemaCreatorImpl( SchemaFilter filter ) {
this.filter = filter;
public static final String DEFAULT_IMPORT_FILE = "/import.sql";
private final HibernateSchemaManagementTool tool;
private final SchemaFilter schemaFilter;
public SchemaCreatorImpl(HibernateSchemaManagementTool tool) {
this( tool, DefaultSchemaFilter.INSTANCE );
}
public SchemaCreatorImpl() {
this( DefaultSchemaFilter.INSTANCE );
public SchemaCreatorImpl(HibernateSchemaManagementTool tool, SchemaFilter schemaFilter) {
this.tool = tool;
this.schemaFilter = schemaFilter;
}
public SchemaCreatorImpl(ServiceRegistry serviceRegistry) {
this( serviceRegistry, DefaultSchemaFilter.INSTANCE );
}
public SchemaCreatorImpl(ServiceRegistry serviceRegistry, SchemaFilter schemaFilter) {
SchemaManagementTool smt = serviceRegistry.getService( SchemaManagementTool.class );
if ( smt == null || !HibernateSchemaManagementTool.class.isInstance( smt ) ) {
smt = new HibernateSchemaManagementTool();
( (HibernateSchemaManagementTool) smt ).injectServices( (ServiceRegistryImplementor) serviceRegistry );
}
this.tool = (HibernateSchemaManagementTool) smt;
this.schemaFilter = schemaFilter;
}
@Override
public void doCreation(Metadata metadata, boolean createNamespaces, List<Target> targets) throws SchemaManagementException {
doCreation( metadata, createNamespaces, targets.toArray( new Target[ targets.size() ] ) );
public void doCreation(
Metadata metadata,
ExecutionOptions options,
SourceDescriptor sourceDescriptor,
TargetDescriptor targetDescriptor) {
if ( targetDescriptor.getTargetTypes().isEmpty() ) {
return;
}
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final GenerationTarget[] targets = tool.buildGenerationTargets(
targetDescriptor,
jdbcContext,
options.getConfigurationValues(),
true
);
doCreation( metadata, jdbcContext.getDialect(), options, sourceDescriptor, targets );
}
@Override
public void doCreation(Metadata metadata, boolean createNamespaces, Dialect dialect, List<Target> targets) throws SchemaManagementException {
doCreation( metadata, createNamespaces, dialect, targets.toArray( new Target[ targets.size() ] ) );
}
public void doCreation(
Metadata metadata,
Dialect dialect,
ExecutionOptions options,
SourceDescriptor sourceDescriptor,
GenerationTarget... targets) {
for ( GenerationTarget target : targets ) {
target.prepare();
}
/**
* For testing...
*
* @param metadata The metadata for which to generate the creation commands.
*
* @return The generation commands
*/
public List<String> generateCreationCommands(Metadata metadata, boolean createNamespaces) {
final ArrayList<String> commands = new ArrayList<String>();
doCreation(
metadata,
createNamespaces,
new Target() {
@Override
public boolean acceptsImportScriptActions() {
return true;
}
@Override
public void prepare() {
}
@Override
public void accept(String action) {
commands.add( action );
}
@Override
public void release() {
}
try {
performCreation( metadata, dialect, options, sourceDescriptor, targets );
}
finally {
for ( GenerationTarget target : targets ) {
try {
target.release();
}
);
return commands;
}
/**
* For temporary use from JPA schema generation
*
* @param metadata The metadata for which to generate the creation commands.
* @param createNamespaces Should the schema(s)/catalog(s) actually be created as well ({@code CREATE SCHEMA})?
* @param dialect Allow explicitly passing the Dialect to use.
*
* @return The generation commands
*/
public List<String> generateCreationCommands(Metadata metadata, boolean createNamespaces, Dialect dialect) {
final ArrayList<String> commands = new ArrayList<String>();
doCreation(
metadata,
createNamespaces,
dialect,
new Target() {
@Override
public boolean acceptsImportScriptActions() {
return true;
}
@Override
public void prepare() {
}
@Override
public void accept(String action) {
commands.add( action );
}
@Override
public void release() {
}
catch (Exception e) {
log.debugf( "Problem releasing GenerationTarget [%s] : %s", target, e.getMessage() );
}
);
return commands;
}
}
}
@Override
public void doCreation(Metadata metadata, boolean createNamespaces, Target... targets)
throws SchemaManagementException {
doCreation(
metadata,
createNamespaces,
metadata.getDatabase().getJdbcEnvironment().getDialect(),
targets
);
private void performCreation(
Metadata metadata,
Dialect dialect,
ExecutionOptions options,
SourceDescriptor sourceDescriptor,
GenerationTarget... targets) {
final ImportSqlCommandExtractor commandExtractor = tool.getServiceRegistry().getService( ImportSqlCommandExtractor.class );
final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() );
final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter();
switch ( sourceDescriptor.getSourceType() ) {
case SCRIPT: {
createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, options, targets );
break;
}
case METADATA: {
createFromMetadata( metadata, options, dialect, formatter, targets );
break;
}
case METADATA_THEN_SCRIPT: {
createFromMetadata( metadata, options, dialect, formatter, targets );
createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, options, targets );
break;
}
case SCRIPT_THEN_METADATA: {
createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, options, targets );
createFromMetadata( metadata, options, dialect, formatter, targets );
}
}
applyImportSources( options, commandExtractor, format, targets );
}
@Override
public void doCreation(Metadata metadata, boolean createNamespaces, Dialect dialect, Target... targets)
throws SchemaManagementException {
public void createFromScript(
ScriptSourceInput scriptSourceInput,
ImportSqlCommandExtractor commandExtractor,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
scriptSourceInput.prepare();
try {
for ( String command : scriptSourceInput.read( commandExtractor ) ) {
applySqlString( command, formatter, options, targets );
}
}
finally {
scriptSourceInput.release();
}
}
public void createFromMetadata(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
GenerationTarget... targets) {
boolean tryToCreateCatalogs = false;
boolean tryToCreateSchemas = false;
if ( createNamespaces ) {
if ( options.shouldManageNamespaces() ) {
if ( dialect.canCreateSchema() ) {
tryToCreateSchemas = true;
}
@ -161,10 +216,6 @@ public class SchemaCreatorImpl implements SchemaCreator {
final Database database = metadata.getDatabase();
final JdbcEnvironment jdbcEnvironment = database.getJdbcEnvironment();
for ( Target target : targets ) {
target.prepare();
}
final Set<String> exportIdentifiers = new HashSet<String>( 50 );
// first, create each catalog/schema
@ -172,7 +223,7 @@ public class SchemaCreatorImpl implements SchemaCreator {
Set<Identifier> exportedCatalogs = new HashSet<Identifier>();
for ( Namespace namespace : database.getNamespaces() ) {
if ( !filter.includeNamespace( namespace ) ) {
if ( !schemaFilter.includeNamespace( namespace ) ) {
continue;
}
@ -182,8 +233,10 @@ public class SchemaCreatorImpl implements SchemaCreator {
if ( catalogPhysicalName != null && !exportedCatalogs.contains( catalogLogicalName ) ) {
applySqlStrings(
targets,
dialect.getCreateCatalogCommand( catalogPhysicalName.render( dialect ) )
dialect.getCreateCatalogCommand( catalogPhysicalName.render( dialect ) ),
formatter,
options,
targets
);
exportedCatalogs.add( catalogLogicalName );
}
@ -191,10 +244,10 @@ public class SchemaCreatorImpl implements SchemaCreator {
if ( tryToCreateSchemas && namespace.getPhysicalName().getSchema() != null ) {
applySqlStrings(
targets,
dialect.getCreateSchemaCommand(
namespace.getPhysicalName().getSchema().render( dialect )
)
dialect.getCreateSchemaCommand( namespace.getPhysicalName().getSchema().render( dialect ) ),
formatter,
options,
targets
);
}
}
@ -209,11 +262,13 @@ public class SchemaCreatorImpl implements SchemaCreator {
if ( auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
checkExportIdentifier( auxiliaryDatabaseObject, exportIdentifiers );
applySqlStrings(
targets,
dialect.getAuxiliaryDatabaseObjectExporter().getSqlCreateStrings(
auxiliaryDatabaseObject,
metadata
)
),
formatter,
options,
targets
);
}
}
@ -221,23 +276,25 @@ public class SchemaCreatorImpl implements SchemaCreator {
// then, create all schema objects (tables, sequences, constraints, etc) in each schema
for ( Namespace namespace : database.getNamespaces() ) {
if ( !filter.includeNamespace( namespace ) ) {
if ( !schemaFilter.includeNamespace( namespace ) ) {
continue;
}
// sequences
for ( Sequence sequence : namespace.getSequences() ) {
if ( !filter.includeSequence( sequence ) ) {
if ( !schemaFilter.includeSequence( sequence ) ) {
continue;
}
checkExportIdentifier( sequence, exportIdentifiers );
applySqlStrings(
targets,
dialect.getCreateSequenceStrings(
jdbcEnvironment.getQualifiedObjectNameFormatter().format( sequence.getName(), dialect ),
sequence.getInitialValue(),
sequence.getIncrementSize()
)
),
formatter,
options,
targets
);
}
@ -246,13 +303,15 @@ public class SchemaCreatorImpl implements SchemaCreator {
if ( !table.isPhysicalTable() ){
continue;
}
if ( !filter.includeTable( table ) ) {
if ( !schemaFilter.includeTable( table ) ) {
continue;
}
checkExportIdentifier( table, exportIdentifiers );
applySqlStrings(
targets,
dialect.getTableExporter().getSqlCreateStrings( table, metadata )
dialect.getTableExporter().getSqlCreateStrings( table, metadata ),
formatter,
options,
targets
);
}
@ -266,8 +325,10 @@ public class SchemaCreatorImpl implements SchemaCreator {
final Index index = (Index) indexItr.next();
checkExportIdentifier( index, exportIdentifiers );
applySqlStrings(
targets,
dialect.getIndexExporter().getSqlCreateStrings( index, metadata )
dialect.getIndexExporter().getSqlCreateStrings( index, metadata ),
formatter,
options,
targets
);
}
@ -277,8 +338,10 @@ public class SchemaCreatorImpl implements SchemaCreator {
final UniqueKey uniqueKey = (UniqueKey) ukItr.next();
checkExportIdentifier( uniqueKey, exportIdentifiers );
applySqlStrings(
targets,
dialect.getUniqueKeyExporter().getSqlCreateStrings( uniqueKey, metadata )
dialect.getUniqueKeyExporter().getSqlCreateStrings( uniqueKey, metadata ),
formatter,
options,
targets
);
}
}
@ -294,8 +357,10 @@ public class SchemaCreatorImpl implements SchemaCreator {
while ( fkItr.hasNext() ) {
final ForeignKey foreignKey = (ForeignKey) fkItr.next();
applySqlStrings(
targets,
dialect.getForeignKeyExporter().getSqlCreateStrings( foreignKey, metadata )
dialect.getForeignKeyExporter().getSqlCreateStrings( foreignKey, metadata ),
formatter,
options,
targets
);
}
}
@ -303,22 +368,22 @@ public class SchemaCreatorImpl implements SchemaCreator {
// next, create all "after table" auxiliary objects
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) {
if ( auxiliaryDatabaseObject.appliesToDialect( dialect ) && !auxiliaryDatabaseObject.beforeTablesOnCreation() ) {
if ( auxiliaryDatabaseObject.appliesToDialect( dialect )
&& !auxiliaryDatabaseObject.beforeTablesOnCreation() ) {
checkExportIdentifier( auxiliaryDatabaseObject, exportIdentifiers );
applySqlStrings(
targets,
dialect.getAuxiliaryDatabaseObjectExporter().getSqlCreateStrings( auxiliaryDatabaseObject, metadata )
dialect.getAuxiliaryDatabaseObjectExporter().getSqlCreateStrings( auxiliaryDatabaseObject, metadata ),
formatter,
options,
targets
);
}
}
// and finally add all init commands
for ( InitCommand initCommand : database.getInitCommands() ) {
applySqlStrings( targets, initCommand.getInitCommands() );
}
for ( Target target : targets ) {
target.release();
// todo: this should alo probably use the DML formatter...
applySqlStrings( initCommand.getInitCommands(), formatter, options, targets );
}
}
@ -330,15 +395,212 @@ public class SchemaCreatorImpl implements SchemaCreator {
exportIdentifiers.add( exportIdentifier );
}
private static void applySqlStrings(Target[] targets, String... sqlStrings) {
private static void applySqlStrings(
String[] sqlStrings,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( sqlStrings == null ) {
return;
}
for ( Target target : targets ) {
for ( String sqlString : sqlStrings ) {
target.accept( sqlString );
for ( String sqlString : sqlStrings ) {
applySqlString( sqlString, formatter, options, targets );
}
}
private static void applySqlString(
String sqlString,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( StringHelper.isEmpty( sqlString ) ) {
return;
}
try {
for ( GenerationTarget target : targets ) {
target.accept( formatter.format( sqlString ) );
}
}
catch (CommandAcceptanceException e) {
options.getExceptionHandler().handleException( e );
}
}
private void applyImportSources(
ExecutionOptions options,
ImportSqlCommandExtractor commandExtractor,
boolean format,
GenerationTarget... targets) {
final ServiceRegistry serviceRegistry = tool.getServiceRegistry();
final ClassLoaderService classLoaderService = serviceRegistry.getService( ClassLoaderService.class );
// I have had problems applying the formatter to these imported statements.
// and legacy SchemaExport did not format them, so doing same here
//final Formatter formatter = format ? DDLFormatterImpl.INSTANCE : FormatStyle.NONE.getFormatter();
final Formatter formatter = FormatStyle.NONE.getFormatter();
final Object importScriptSetting = options.getConfigurationValues().get( HBM2DDL_LOAD_SCRIPT_SOURCE );
if ( importScriptSetting != null ) {
final ScriptSourceInput importScriptInput = interpretScriptSourceSetting( importScriptSetting, classLoaderService );
log.executingImportScript( importScriptInput.toString() );
importScriptInput.prepare();
try {
for ( String command : importScriptInput.read( commandExtractor ) ) {
applySqlString( command, formatter, options, targets );
}
}
finally {
importScriptInput.release();
}
}
final String importFiles = ConfigurationHelper.getString(
AvailableSettings.HBM2DDL_IMPORT_FILES,
options.getConfigurationValues(),
DEFAULT_IMPORT_FILE
);
for ( String currentFile : importFiles.split( "," ) ) {
final String resourceName = currentFile.trim();
final ScriptSourceInput importScriptInput = interpretLegacyImportScriptSetting( resourceName, classLoaderService );
importScriptInput.prepare();
try {
log.executingImportScript( importScriptInput.toString() );
for ( String command : importScriptInput.read( commandExtractor ) ) {
applySqlString( command, formatter, options, targets );
}
}
finally {
importScriptInput.release();
}
}
}
private ScriptSourceInput interpretLegacyImportScriptSetting(
String resourceName,
ClassLoaderService classLoaderService) {
try {
final URL resourceUrl = classLoaderService.locateResource( resourceName );
if ( resourceUrl == null ) {
return ScriptSourceInputNonExistentImpl.INSTANCE;
}
else {
return new ScriptSourceInputFromUrl( resourceUrl );
}
}
catch (Exception e) {
throw new SchemaManagementException( "Error resolving legacy import resource : " + resourceName, e );
}
}
/**
* For testing...
*
* @param metadata The metadata for which to generate the creation commands.
*
* @return The generation commands
*/
public List<String> generateCreationCommands(Metadata metadata, final boolean manageNamespaces) {
final JournalingGenerationTarget target = new JournalingGenerationTarget();
final ServiceRegistry serviceRegistry = ( (MetadataImplementor) metadata ).getMetadataBuildingOptions()
.getServiceRegistry();
final Dialect dialect = serviceRegistry.getService( JdbcEnvironment.class ).getDialect();
final ExecutionOptions options = new ExecutionOptions() {
@Override
public boolean shouldManageNamespaces() {
return manageNamespaces;
}
@Override
public Map getConfigurationValues() {
return Collections.emptyMap();
}
@Override
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerHaltImpl.INSTANCE;
}
};
createFromMetadata( metadata, options, dialect, FormatStyle.NONE.getFormatter(), target );
return target.commands;
}
public void doCreation(
Metadata metadata,
final boolean manageNamespaces,
GenerationTarget... targets) {
final ServiceRegistry serviceRegistry = ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry();
doCreation(
metadata,
serviceRegistry,
serviceRegistry.getService( ConfigurationService.class ).getSettings(),
manageNamespaces,
targets
);
}
public void doCreation(
Metadata metadata,
final ServiceRegistry serviceRegistry,
final Map settings,
final boolean manageNamespaces,
GenerationTarget... targets) {
doCreation(
metadata,
serviceRegistry.getService( JdbcEnvironment.class ).getDialect(),
new ExecutionOptions() {
@Override
public boolean shouldManageNamespaces() {
return manageNamespaces;
}
@Override
public Map getConfigurationValues() {
return settings;
}
@Override
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
},
new SourceDescriptor() {
@Override
public SourceType getSourceType() {
return SourceType.METADATA;
}
@Override
public ScriptSourceInput getScriptSourceInput() {
return null;
}
},
targets
);
}
private static class JournalingGenerationTarget implements GenerationTarget {
private final ArrayList<String> commands = new ArrayList<String>();
@Override
public void prepare() {
}
@Override
public void accept(String command) {
commands.add( command );
}
@Override
public void release() {
}
}
}

View File

@ -6,10 +6,13 @@
*/
package org.hibernate.tool.schema.internal;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.hibernate.boot.Metadata;
@ -21,13 +24,39 @@ import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.Sequence;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.engine.jdbc.internal.FormatStyle;
import org.hibernate.engine.jdbc.internal.Formatter;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.Table;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.hibernate.tool.hbm2ddl.ImportSqlCommandExtractor;
import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToDatabase;
import org.hibernate.tool.schema.internal.exec.JdbcConnectionAccessConnectionProviderImpl;
import org.hibernate.tool.schema.internal.exec.JdbcConnectionContextNonSharedImpl;
import org.hibernate.tool.schema.internal.exec.JdbcContext;
import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.DelayedDropAction;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaDropper;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.Target;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.SourceDescriptor;
import org.hibernate.tool.schema.spi.TargetDescriptor;
import org.jboss.logging.Logger;
/**
* This is functionally nothing more than the creation script from the older SchemaExport class (plus some
@ -36,91 +65,132 @@ import org.hibernate.tool.schema.spi.Target;
* @author Steve Ebersole
*/
public class SchemaDropperImpl implements SchemaDropper {
private static final Logger log = Logger.getLogger( SchemaDropperImpl.class );
private final SchemaFilter filter;
public SchemaDropperImpl( SchemaFilter filter ) {
this.filter = filter;
}
public SchemaDropperImpl() {
this( DefaultSchemaFilter.INSTANCE );
private final HibernateSchemaManagementTool tool;
private final SchemaFilter schemaFilter;
public SchemaDropperImpl(HibernateSchemaManagementTool tool) {
this( tool, DefaultSchemaFilter.INSTANCE );
}
/**
* Intended for use from JPA schema export code.
*
* @param metadata The metadata for which to generate drop commands
* @param dropNamespaces Should drop schema/catalog command be generated?
* @param dialect Allow explicitly specifying the dialect.
*
* @return The commands
*/
public Iterable<String> generateDropCommands(
MetadataImplementor metadata,
boolean dropNamespaces,
Dialect dialect) {
final ArrayList<String> commands = new ArrayList<String>();
doDrop(
metadata,
dropNamespaces,
dialect,
new Target() {
public SchemaDropperImpl(HibernateSchemaManagementTool tool, SchemaFilter schemaFilter) {
this.tool = tool;
this.schemaFilter = schemaFilter;
}
@Override
public boolean acceptsImportScriptActions() {
return true;
}
public SchemaDropperImpl(ServiceRegistry serviceRegistry) {
this( serviceRegistry, DefaultSchemaFilter.INSTANCE );
}
@Override
public void prepare() {
}
public SchemaDropperImpl(ServiceRegistry serviceRegistry, SchemaFilter schemaFilter) {
SchemaManagementTool smt = serviceRegistry.getService( SchemaManagementTool.class );
if ( smt == null || !HibernateSchemaManagementTool.class.isInstance( smt ) ) {
smt = new HibernateSchemaManagementTool();
( (HibernateSchemaManagementTool) smt ).injectServices( (ServiceRegistryImplementor) serviceRegistry );
}
@Override
public void accept(String action) {
commands.add( action );
}
this.tool = (HibernateSchemaManagementTool) smt;
this.schemaFilter = schemaFilter;
}
@Override
public void release() {
}
@Override
public void doDrop(
Metadata metadata,
ExecutionOptions options,
SourceDescriptor sourceDescriptor,
TargetDescriptor targetDescriptor) {
if ( targetDescriptor.getTargetTypes().isEmpty() ) {
return;
}
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final GenerationTarget[] targets = tool.buildGenerationTargets( targetDescriptor, jdbcContext, options.getConfigurationValues(), true );
doDrop( metadata, options, jdbcContext.getDialect(), sourceDescriptor, targets );
}
public void doDrop(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
SourceDescriptor sourceDescriptor,
GenerationTarget... targets) {
for ( GenerationTarget target : targets ) {
target.prepare();
}
try {
performDrop( metadata, options, dialect, sourceDescriptor, targets );
}
finally {
for ( GenerationTarget target : targets ) {
try {
target.release();
}
);
return commands;
catch (Exception e) {
log.debugf( "Problem releasing GenerationTarget [%s] : %s", target, e.getMessage() );
}
}
}
}
@Override
public void doDrop(Metadata metadata, boolean dropNamespaces, List<Target> targets)
throws SchemaManagementException {
doDrop( metadata, dropNamespaces, targets.toArray( new Target[targets.size()] ) );
private void performDrop(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
SourceDescriptor sourceDescriptor,
GenerationTarget... targets) {
final ImportSqlCommandExtractor commandExtractor = tool.getServiceRegistry().getService( ImportSqlCommandExtractor.class );
final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() );
final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter();
if ( sourceDescriptor.getSourceType() == SourceType.SCRIPT ) {
dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, options, targets );
}
else if ( sourceDescriptor.getSourceType() == SourceType.METADATA ) {
dropFromMetadata( metadata, options, dialect, formatter, targets );
}
else if ( sourceDescriptor.getSourceType() == SourceType.METADATA_THEN_SCRIPT ) {
dropFromMetadata( metadata, options, dialect, formatter, targets );
dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, options, targets );
}
else {
dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, options, targets );
dropFromMetadata( metadata, options, dialect, formatter, targets );
}
}
@Override
public void doDrop(Metadata metadata, boolean dropNamespaces, Dialect dialect, List<Target> targets)
throws SchemaManagementException {
doDrop( metadata, dropNamespaces, dialect, targets.toArray( new Target[targets.size()] ) );
private void dropFromScript(
ScriptSourceInput scriptSourceInput,
ImportSqlCommandExtractor commandExtractor,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
scriptSourceInput.prepare();
try {
for ( String command : scriptSourceInput.read( commandExtractor ) ) {
applySqlString( command, formatter, options, targets );
}
}
finally {
scriptSourceInput.release();
}
}
@Override
public void doDrop(Metadata metadata, boolean dropNamespaces, Target... targets) throws SchemaManagementException {
doDrop(
metadata,
dropNamespaces,
metadata.getDatabase().getJdbcEnvironment().getDialect(),
targets
);
}
@Override
public void doDrop(Metadata metadata, boolean dropNamespaces, Dialect dialect, Target... targets)
throws SchemaManagementException {
private void dropFromMetadata(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
GenerationTarget... targets) {
final Database database = metadata.getDatabase();
final JdbcEnvironment jdbcEnvironment = database.getJdbcEnvironment();
boolean tryToDropCatalogs = false;
boolean tryToDropSchemas = false;
if ( dropNamespaces ) {
if ( options.shouldManageNamespaces() ) {
if ( dialect.canCreateSchema() ) {
tryToDropSchemas = true;
}
@ -129,10 +199,6 @@ public class SchemaDropperImpl implements SchemaDropper {
}
}
for ( Target target : targets ) {
target.prepare();
}
final Set<String> exportIdentifiers = new HashSet<String>( 50 );
// NOTE : init commands are irrelevant for dropping...
@ -146,35 +212,37 @@ public class SchemaDropperImpl implements SchemaDropper {
}
applySqlStrings(
targets,
dialect.getAuxiliaryDatabaseObjectExporter().getSqlDropStrings( auxiliaryDatabaseObject, metadata )
dialect.getAuxiliaryDatabaseObjectExporter().getSqlDropStrings( auxiliaryDatabaseObject, metadata ),
formatter,
options,
targets
);
}
for ( Namespace namespace : database.getNamespaces() ) {
if ( !filter.includeNamespace( namespace ) ) {
if ( !schemaFilter.includeNamespace( namespace ) ) {
continue;
}
// we need to drop all constraints/indexes prior to dropping the tables
applyConstraintDropping( targets, namespace, metadata );
applyConstraintDropping( namespace, metadata, formatter, options, targets );
// now it's safe to drop the tables
for ( Table table : namespace.getTables() ) {
if ( !table.isPhysicalTable() ) {
continue;
}
if ( !filter.includeTable( table ) ) {
if ( !schemaFilter.includeTable( table ) ) {
continue;
}
checkExportIdentifier( table, exportIdentifiers );
applySqlStrings( targets, dialect.getTableExporter().getSqlDropStrings( table, metadata ) );
applySqlStrings( dialect.getTableExporter().getSqlDropStrings( table, metadata ), formatter, options,targets );
}
for ( Sequence sequence : namespace.getSequences() ) {
checkExportIdentifier( sequence, exportIdentifiers );
applySqlStrings( targets, dialect.getSequenceExporter().getSqlDropStrings( sequence, metadata ) );
applySqlStrings( dialect.getSequenceExporter().getSqlDropStrings( sequence, metadata ), formatter, options, targets );
}
}
@ -187,8 +255,10 @@ public class SchemaDropperImpl implements SchemaDropper {
}
applySqlStrings(
targets,
auxiliaryDatabaseObject.sqlDropStrings( jdbcEnvironment.getDialect() )
auxiliaryDatabaseObject.sqlDropStrings( jdbcEnvironment.getDialect() ),
formatter,
options,
targets
);
}
@ -197,15 +267,18 @@ public class SchemaDropperImpl implements SchemaDropper {
for ( Namespace namespace : database.getNamespaces() ) {
if ( !filter.includeNamespace( namespace ) ) {
if ( !schemaFilter.includeNamespace( namespace ) ) {
continue;
}
if ( tryToDropSchemas && namespace.getPhysicalName().getSchema() != null ) {
applySqlStrings(
targets, dialect.getDropSchemaCommand(
dialect.getDropSchemaCommand(
namespace.getPhysicalName().getSchema().render( dialect )
)
),
formatter,
options,
targets
);
}
if ( tryToDropCatalogs ) {
@ -214,23 +287,26 @@ public class SchemaDropperImpl implements SchemaDropper {
if ( catalogPhysicalName != null && !exportedCatalogs.contains( catalogLogicalName ) ) {
applySqlStrings(
targets,
dialect.getDropCatalogCommand(
catalogPhysicalName.render( dialect )
)
),
formatter,
options,
targets
);
exportedCatalogs.add( catalogLogicalName );
}
}
}
}
for ( Target target : targets ) {
target.release();
}
}
private void applyConstraintDropping(Target[] targets, Namespace namespace, Metadata metadata) {
private void applyConstraintDropping(
Namespace namespace,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
if ( !dialect.dropConstraints() ) {
@ -241,7 +317,7 @@ public class SchemaDropperImpl implements SchemaDropper {
if ( !table.isPhysicalTable() ) {
continue;
}
if ( !filter.includeTable( table ) ) {
if ( !schemaFilter.includeTable( table ) ) {
continue;
}
@ -249,8 +325,10 @@ public class SchemaDropperImpl implements SchemaDropper {
while ( fks.hasNext() ) {
final ForeignKey foreignKey = (ForeignKey) fks.next();
applySqlStrings(
targets,
dialect.getForeignKeyExporter().getSqlDropStrings( foreignKey, metadata )
dialect.getForeignKeyExporter().getSqlDropStrings( foreignKey, metadata ),
formatter,
options,
targets
);
}
}
@ -264,14 +342,216 @@ public class SchemaDropperImpl implements SchemaDropper {
exportIdentifiers.add( exportIdentifier );
}
private static void applySqlStrings(Target[] targets, String... sqlStrings) {
private static void applySqlStrings(
String[] sqlStrings,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( sqlStrings == null ) {
return;
}
for ( Target target : targets ) {
for ( String sqlString : sqlStrings ) {
target.accept( sqlString );
for ( String sqlString : sqlStrings ) {
applySqlString( sqlString, formatter, options, targets );
}
}
private static void applySqlString(
String sqlString,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( StringHelper.isEmpty( sqlString ) ) {
return;
}
for ( GenerationTarget target : targets ) {
try {
target.accept( formatter.format( sqlString ) );
}
catch (CommandAcceptanceException e) {
options.getExceptionHandler().handleException( e );
}
}
}
/**
* For testing...
*
* @param metadata The metadata for which to generate the creation commands.
*
* @return The generation commands
*/
public List<String> generateDropCommands(Metadata metadata, final boolean manageNamespaces) {
final JournalingGenerationTarget target = new JournalingGenerationTarget();
final ServiceRegistry serviceRegistry = ( (MetadataImplementor) metadata ).getMetadataBuildingOptions()
.getServiceRegistry();
final Dialect dialect = serviceRegistry.getService( JdbcEnvironment.class ).getDialect();
final ExecutionOptions options = new ExecutionOptions() {
@Override
public boolean shouldManageNamespaces() {
return manageNamespaces;
}
@Override
public Map getConfigurationValues() {
return Collections.emptyMap();
}
@Override
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerHaltImpl.INSTANCE;
}
};
dropFromMetadata( metadata, options, dialect, FormatStyle.NONE.getFormatter(), target );
return target.commands;
}
@Override
public DelayedDropAction buildDelayedAction(
Metadata metadata,
ExecutionOptions options,
SourceDescriptor sourceDescriptor) {
final JournalingGenerationTarget target = new JournalingGenerationTarget();
doDrop( metadata, options, tool.getServiceRegistry().getService( JdbcEnvironment.class ).getDialect(), sourceDescriptor, target );
return new DelayedDropActionImpl( target.commands );
}
/**
* For tests
*/
public void doDrop(Metadata metadata, boolean manageNamespaces, GenerationTarget... targets) {
final ServiceRegistry serviceRegistry = ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry();
doDrop(
metadata,
serviceRegistry,
serviceRegistry.getService( ConfigurationService.class ).getSettings(),
manageNamespaces,
targets
);
}
/**
* For tests
*/
public void doDrop(
Metadata metadata,
final ServiceRegistry serviceRegistry,
final Map settings,
final boolean manageNamespaces,
GenerationTarget... targets) {
if ( targets == null || targets.length == 0 ) {
final JdbcContext jdbcContext = tool.resolveJdbcContext( settings );
targets = new GenerationTarget[] {
new GenerationTargetToDatabase(
new JdbcConnectionContextNonSharedImpl(
jdbcContext.getJdbcConnectionAccess(),
jdbcContext.getSqlStatementLogger(),
true
)
)
};
}
doDrop(
metadata,
new ExecutionOptions() {
@Override
public boolean shouldManageNamespaces() {
return manageNamespaces;
}
@Override
public Map getConfigurationValues() {
return settings;
}
@Override
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
},
serviceRegistry.getService( JdbcEnvironment.class ).getDialect(),
new SourceDescriptor() {
@Override
public SourceType getSourceType() {
return SourceType.METADATA;
}
@Override
public ScriptSourceInput getScriptSourceInput() {
return null;
}
},
targets
);
}
private static class JournalingGenerationTarget implements GenerationTarget {
private final ArrayList<String> commands = new ArrayList<String>();
@Override
public void prepare() {
}
@Override
public void accept(String command) {
commands.add( command );
}
@Override
public void release() {
}
}
private static class DelayedDropActionImpl implements DelayedDropAction, Serializable {
private static final CoreMessageLogger log = CoreLogging.messageLogger( DelayedDropActionImpl.class );
private final ArrayList<String> commands;
public DelayedDropActionImpl(ArrayList<String> commands) {
this.commands = commands;
}
@Override
public void perform(ServiceRegistry serviceRegistry) {
log.startingDelayedSchemaDrop();
final ConnectionProvider connectionProvider = serviceRegistry.getService( ConnectionProvider.class );
if ( connectionProvider == null ) {
// todo : log or error?
throw new SchemaManagementException(
"Could not build JDBC Connection context to drop schema on SessionFactory close"
);
}
final GenerationTargetToDatabase target = new GenerationTargetToDatabase(
new JdbcConnectionContextNonSharedImpl(
new JdbcConnectionAccessConnectionProviderImpl( connectionProvider ),
serviceRegistry.getService( JdbcServices.class ).getSqlStatementLogger(),
true
)
);
target.prepare();
try {
for ( String command : commands ) {
try {
target.accept( command );
}
catch (CommandAcceptanceException e) {
// implicitly we do not "halt on error", but we do want to
// report the problem
log.unsuccessfulSchemaManagementCommand( command );
log.debugf( e, "Error performing delayed DROP command [%s]", command );
}
}
}
finally {
target.release();
}
}
}

View File

@ -8,7 +8,6 @@ package org.hibernate.tool.schema.internal;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.hibernate.boot.Metadata;
@ -19,11 +18,11 @@ import org.hibernate.boot.model.relational.Exportable;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.Sequence;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.config.spi.StandardConverters;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.engine.jdbc.internal.FormatStyle;
import org.hibernate.engine.jdbc.internal.Formatter;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.Constraint;
import org.hibernate.mapping.ForeignKey;
@ -31,62 +30,136 @@ import org.hibernate.mapping.Index;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.UniqueKey;
import org.hibernate.tool.hbm2ddl.UniqueConstraintSchemaUpdateStrategy;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.ForeignKeyInformation;
import org.hibernate.tool.schema.extract.spi.IndexInformation;
import org.hibernate.tool.schema.extract.spi.SequenceInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.internal.exec.JdbcConnectionContextSharedImpl;
import org.hibernate.tool.schema.internal.exec.JdbcContext;
import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.Exporter;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.SchemaMigrator;
import org.hibernate.tool.schema.spi.Target;
import org.hibernate.tool.schema.spi.TargetDescriptor;
import org.jboss.logging.Logger;
import static org.hibernate.cfg.AvailableSettings.UNIQUE_CONSTRAINT_SCHEMA_UPDATE_STRATEGY;
/**
* @author Steve Ebersole
*/
public class SchemaMigratorImpl implements SchemaMigrator {
private final SchemaFilter filter;
private static final Logger log = Logger.getLogger( SchemaMigratorImpl.class );
public SchemaMigratorImpl( SchemaFilter filter ) {
this.filter = filter;
private final HibernateSchemaManagementTool tool;
private final SchemaFilter schemaFilter;
private UniqueConstraintSchemaUpdateStrategy uniqueConstraintStrategy;
public SchemaMigratorImpl(HibernateSchemaManagementTool tool) {
this( tool, DefaultSchemaFilter.INSTANCE );
}
public SchemaMigratorImpl() {
this( DefaultSchemaFilter.INSTANCE );
public SchemaMigratorImpl(HibernateSchemaManagementTool tool, SchemaFilter schemaFilter) {
this.tool = tool;
this.schemaFilter = schemaFilter;
}
/**
* For testing...
*/
public void setUniqueConstraintStrategy(UniqueConstraintSchemaUpdateStrategy uniqueConstraintStrategy) {
this.uniqueConstraintStrategy = uniqueConstraintStrategy;
}
@Override
public void doMigration(
Metadata metadata,
DatabaseInformation existingDatabase,
boolean createNamespaces,
List<Target> targets) throws SchemaManagementException {
for ( Target target : targets ) {
target.prepare();
public void doMigration(Metadata metadata, ExecutionOptions options, TargetDescriptor targetDescriptor) {
if ( targetDescriptor.getTargetTypes().isEmpty() ) {
return;
}
doMigrationToTargets( metadata, existingDatabase, createNamespaces, targets );
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
for ( Target target : targets ) {
target.release();
final JdbcConnectionContextSharedImpl connectionContext = new JdbcConnectionContextSharedImpl(
jdbcContext.getJdbcConnectionAccess(),
jdbcContext.getSqlStatementLogger(),
targetDescriptor.getTargetTypes().contains( TargetType.DATABASE )
);
try {
final DatabaseInformation databaseInformation = Helper.buildDatabaseInformation(
tool.getServiceRegistry(),
connectionContext,
metadata.getDatabase().getDefaultNamespace().getName()
);
final GenerationTarget[] targets = tool.buildGenerationTargets(
targetDescriptor,
connectionContext,
options.getConfigurationValues()
);
try {
doMigration( metadata, databaseInformation, options, jdbcContext.getDialect(), targets );
}
finally {
try {
databaseInformation.cleanup();
}
catch (Exception e) {
log.debug( "Problem releasing DatabaseInformation : " + e.getMessage() );
}
}
}
finally {
connectionContext.reallyRelease();
}
}
protected void doMigrationToTargets(
public void doMigration(
Metadata metadata,
DatabaseInformation existingDatabase,
boolean createNamespaces,
List<Target> targets) {
ExecutionOptions options,
Dialect dialect,
GenerationTarget... targets) {
for ( GenerationTarget target : targets ) {
target.prepare();
}
try {
performMigration( metadata, existingDatabase, options, dialect, targets );
}
finally {
for ( GenerationTarget target : targets ) {
try {
target.release();
}
catch (Exception e) {
log.debugf( "Problem releasing GenerationTarget [%s] : %s", target, e.getMessage() );
}
}
}
}
private void performMigration(
Metadata metadata,
DatabaseInformation existingDatabase,
ExecutionOptions options,
Dialect dialect,
GenerationTarget... targets) {
final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() );
final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter();
final Set<String> exportIdentifiers = new HashSet<String>( 50 );
final Database database = metadata.getDatabase();
final JdbcEnvironment jdbcEnvironment = database.getJdbcEnvironment();
final Dialect dialect = jdbcEnvironment.getDialect();
// Drop all AuxiliaryDatabaseObjects
for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects() ) {
@ -95,9 +168,11 @@ public class SchemaMigratorImpl implements SchemaMigrator {
}
applySqlStrings(
true,
dialect.getAuxiliaryDatabaseObjectExporter().getSqlDropStrings( auxiliaryDatabaseObject, metadata ),
targets,
true
formatter,
options,
targets
);
}
@ -111,27 +186,28 @@ public class SchemaMigratorImpl implements SchemaMigrator {
}
applySqlStrings(
auxiliaryDatabaseObject.sqlCreateStrings( jdbcEnvironment.getDialect() ),
targets,
true
true,
auxiliaryDatabaseObject.sqlCreateStrings( dialect ),
formatter,
options,
targets
);
}
boolean tryToCreateCatalogs = false;
boolean tryToCreateSchemas = false;
if ( createNamespaces ) {
if ( database.getJdbcEnvironment().getDialect().canCreateSchema() ) {
if ( options.shouldManageNamespaces() ) {
if ( dialect.canCreateSchema() ) {
tryToCreateSchemas = true;
}
if ( database.getJdbcEnvironment().getDialect().canCreateCatalog() ) {
if ( dialect.canCreateCatalog() ) {
tryToCreateCatalogs = true;
}
}
Set<Identifier> exportedCatalogs = new HashSet<Identifier>();
for ( Namespace namespace : database.getNamespaces() ) {
if ( !filter.includeNamespace( namespace ) ) {
if ( !schemaFilter.includeNamespace( namespace ) ) {
continue;
}
if ( tryToCreateCatalogs || tryToCreateSchemas ) {
@ -139,16 +215,14 @@ public class SchemaMigratorImpl implements SchemaMigrator {
final Identifier catalogLogicalName = namespace.getName().getCatalog();
final Identifier catalogPhysicalName = namespace.getPhysicalName().getCatalog();
if ( catalogPhysicalName != null && !exportedCatalogs.contains( catalogLogicalName ) && !existingDatabase
.catalogExists( catalogLogicalName ) ) {
if ( catalogPhysicalName != null && !exportedCatalogs.contains( catalogLogicalName )
&& !existingDatabase.catalogExists( catalogLogicalName ) ) {
applySqlStrings(
database.getJdbcEnvironment().getDialect().getCreateCatalogCommand(
catalogPhysicalName.render(
database.getJdbcEnvironment().getDialect()
)
),
targets,
false
false,
dialect.getCreateCatalogCommand( catalogPhysicalName.render( dialect ) ),
formatter,
options,
targets
);
exportedCatalogs.add( catalogLogicalName );
}
@ -158,13 +232,11 @@ public class SchemaMigratorImpl implements SchemaMigrator {
&& namespace.getPhysicalName().getSchema() != null
&& !existingDatabase.schemaExists( namespace.getName() ) ) {
applySqlStrings(
database.getJdbcEnvironment().getDialect().getCreateSchemaCommand(
namespace.getPhysicalName()
.getSchema()
.render( database.getJdbcEnvironment().getDialect() )
),
targets,
false
false,
dialect.getCreateSchemaCommand( namespace.getPhysicalName().getSchema().render( dialect ) ),
formatter,
options,
targets
);
}
}
@ -173,7 +245,7 @@ public class SchemaMigratorImpl implements SchemaMigrator {
if ( !table.isPhysicalTable() ) {
continue;
}
if ( !filter.includeTable( table ) ) {
if ( !schemaFilter.includeTable( table ) ) {
continue;
}
checkExportIdentifier( table, exportIdentifiers );
@ -182,10 +254,10 @@ public class SchemaMigratorImpl implements SchemaMigrator {
continue;
}
if ( tableInformation == null ) {
createTable( table, metadata, targets );
createTable( table, dialect, metadata, formatter, options, targets );
}
else {
migrateTable( table, tableInformation, targets, metadata );
migrateTable( table, tableInformation, dialect, metadata, formatter, options, targets );
}
}
@ -193,7 +265,7 @@ public class SchemaMigratorImpl implements SchemaMigrator {
if ( !table.isPhysicalTable() ) {
continue;
}
if ( !filter.includeTable( table ) ) {
if ( !schemaFilter.includeTable( table ) ) {
continue;
}
@ -202,8 +274,8 @@ public class SchemaMigratorImpl implements SchemaMigrator {
continue;
}
applyIndexes( table, tableInformation, metadata, targets );
applyUniqueKeys( table, tableInformation, metadata, targets );
applyIndexes( table, tableInformation, dialect, metadata, formatter, options, targets );
applyUniqueKeys( table, tableInformation, dialect, metadata, formatter, options, targets );
}
for ( Sequence sequence : namespace.getSequences() ) {
@ -215,12 +287,14 @@ public class SchemaMigratorImpl implements SchemaMigrator {
}
applySqlStrings(
database.getJdbcEnvironment().getDialect().getSequenceExporter().getSqlCreateStrings(
false,
dialect.getSequenceExporter().getSqlCreateStrings(
sequence,
metadata
),
targets,
false
formatter,
options,
targets
);
}
}
@ -232,7 +306,7 @@ public class SchemaMigratorImpl implements SchemaMigrator {
if ( tableInformation != null && !tableInformation.isPhysicalTable() ) {
continue;
}
applyForeignKeys( table, tableInformation, metadata, targets );
applyForeignKeys( table, tableInformation, dialect, metadata, formatter, options, targets );
}
}
@ -246,46 +320,66 @@ public class SchemaMigratorImpl implements SchemaMigrator {
}
applySqlStrings(
auxiliaryDatabaseObject.sqlCreateStrings( jdbcEnvironment.getDialect() ),
targets,
true
true,
auxiliaryDatabaseObject.sqlCreateStrings( dialect ),
formatter,
options,
targets
);
}
}
private void createTable(Table table, Metadata metadata, List<Target> targets) {
private void createTable(
Table table,
Dialect dialect,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
applySqlStrings(
metadata.getDatabase().getDialect().getTableExporter().getSqlCreateStrings( table, metadata ),
targets,
false
false,
dialect.getTableExporter().getSqlCreateStrings( table, metadata ),
formatter,
options,
targets
);
}
private void migrateTable(
Table table,
TableInformation tableInformation,
List<Target> targets,
Metadata metadata) {
Dialect dialect,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
final Database database = metadata.getDatabase();
final JdbcEnvironment jdbcEnvironment = database.getJdbcEnvironment();
final Dialect dialect = jdbcEnvironment.getDialect();
//noinspection unchecked
applySqlStrings(
false,
table.sqlAlterStrings(
dialect,
metadata,
tableInformation,
getDefaultCatalogName( database ),
getDefaultSchemaName( database )
getDefaultCatalogName( database, dialect ),
getDefaultSchemaName( database, dialect )
),
targets,
false
formatter,
options,
targets
);
}
private void applyIndexes(Table table, TableInformation tableInformation, Metadata metadata, List<Target> targets) {
final Exporter<Index> exporter = metadata.getDatabase().getJdbcEnvironment().getDialect().getIndexExporter();
private void applyIndexes(
Table table,
TableInformation tableInformation,
Dialect dialect,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
final Exporter<Index> exporter = dialect.getIndexExporter();
final Iterator<Index> indexItr = table.getIndexIterator();
while ( indexItr.hasNext() ) {
@ -302,9 +396,11 @@ public class SchemaMigratorImpl implements SchemaMigrator {
}
applySqlStrings(
false,
exporter.getSqlCreateStrings( index, metadata ),
targets,
false
formatter,
options,
targets
);
}
}
@ -313,9 +409,14 @@ public class SchemaMigratorImpl implements SchemaMigrator {
return tableInformation.getIndex( Identifier.toIdentifier( index.getName() ) );
}
private UniqueConstraintSchemaUpdateStrategy uniqueConstraintStrategy;
private void applyUniqueKeys(Table table, TableInformation tableInfo, Metadata metadata, List<Target> targets) {
private void applyUniqueKeys(
Table table,
TableInformation tableInfo,
Dialect dialect,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( uniqueConstraintStrategy == null ) {
uniqueConstraintStrategy = determineUniqueConstraintSchemaUpdateStrategy( metadata );
}
@ -324,7 +425,6 @@ public class SchemaMigratorImpl implements SchemaMigrator {
return;
}
final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
final Exporter<Constraint> exporter = dialect.getUniqueKeyExporter();
final Iterator ukItr = table.getUniqueKeyIterator();
@ -342,16 +442,20 @@ public class SchemaMigratorImpl implements SchemaMigrator {
if ( uniqueConstraintStrategy == UniqueConstraintSchemaUpdateStrategy.DROP_RECREATE_QUIETLY ) {
applySqlStrings(
true,
exporter.getSqlDropStrings( uniqueKey, metadata ),
targets,
true
formatter,
options,
targets
);
}
applySqlStrings(
true,
exporter.getSqlCreateStrings( uniqueKey, metadata ),
targets,
true
formatter,
options,
targets
);
}
}
@ -362,19 +466,18 @@ public class SchemaMigratorImpl implements SchemaMigrator {
.getService( ConfigurationService.class );
return UniqueConstraintSchemaUpdateStrategy.interpret(
cfgService.getSetting(
AvailableSettings.UNIQUE_CONSTRAINT_SCHEMA_UPDATE_STRATEGY,
StandardConverters.STRING
)
cfgService.getSetting( UNIQUE_CONSTRAINT_SCHEMA_UPDATE_STRATEGY, StandardConverters.STRING )
);
}
private void applyForeignKeys(
Table table,
TableInformation tableInformation,
Dialect dialect,
Metadata metadata,
List<Target> targets) {
final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( !dialect.hasAlterTable() ) {
return;
}
@ -405,9 +508,11 @@ public class SchemaMigratorImpl implements SchemaMigrator {
// in old SchemaUpdate code, this was the trigger to "create"
applySqlStrings(
false,
exporter.getSqlCreateStrings( foreignKey, metadata ),
targets,
false
formatter,
options,
targets
);
}
}
@ -432,52 +537,67 @@ public class SchemaMigratorImpl implements SchemaMigrator {
exportIdentifiers.add( exportIdentifier );
}
private static void applySqlStrings(String[] sqlStrings, List<Target> targets, boolean quiet) {
private static void applySqlStrings(
boolean quiet,
String[] sqlStrings,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( sqlStrings == null ) {
return;
}
for ( String sqlString : sqlStrings ) {
applySqlString( sqlString, targets, quiet );
applySqlString( quiet, sqlString, formatter, options, targets );
}
}
private static void applySqlString(String sqlString, List<Target> targets, boolean quiet) {
if ( sqlString == null ) {
private static void applySqlString(
boolean quiet,
String sqlString,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( StringHelper.isEmpty( sqlString ) ) {
return;
}
for ( Target target : targets ) {
for ( GenerationTarget target : targets ) {
try {
target.accept( sqlString );
target.accept( formatter.format( sqlString ) );
}
catch (SchemaManagementException e) {
catch (CommandAcceptanceException e) {
if ( !quiet ) {
throw e;
options.getExceptionHandler().handleException( e );
}
// otherwise ignore the exception
}
}
}
private static void applySqlStrings(Iterator<String> sqlStrings, List<Target> targets, boolean quiet) {
private static void applySqlStrings(
boolean quiet,
Iterator<String> sqlStrings,
Formatter formatter,
ExecutionOptions options,
GenerationTarget... targets) {
if ( sqlStrings == null ) {
return;
}
while ( sqlStrings.hasNext() ) {
final String sqlString = sqlStrings.next();
applySqlString( sqlString, targets, quiet );
applySqlString( quiet, sqlString, formatter, options, targets );
}
}
private String getDefaultCatalogName(Database database) {
private String getDefaultCatalogName(Database database, Dialect dialect) {
final Identifier identifier = database.getDefaultNamespace().getPhysicalName().getCatalog();
return identifier == null ? null : identifier.render( database.getJdbcEnvironment().getDialect() );
return identifier == null ? null : identifier.render( dialect );
}
private String getDefaultSchemaName(Database database) {
private String getDefaultSchemaName(Database database, Dialect dialect) {
final Identifier identifier = database.getDefaultNamespace().getPhysicalName().getSchema();
return identifier == null ? null : identifier.render( database.getJdbcEnvironment().getDialect() );
return identifier == null ? null : identifier.render( dialect );
}
}

View File

@ -21,26 +21,66 @@ import org.hibernate.tool.schema.extract.spi.ColumnInformation;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.SequenceInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.exec.JdbcConnectionContextNonSharedImpl;
import org.hibernate.tool.schema.internal.exec.JdbcContext;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.SchemaValidator;
import org.hibernate.type.descriptor.JdbcTypeNameMapper;
import org.jboss.logging.Logger;
/**
* @author Steve Ebersole
*/
public class SchemaValidatorImpl implements SchemaValidator {
private static final Logger log = Logger.getLogger( SchemaValidatorImpl.class );
private final HibernateSchemaManagementTool tool;
private final SchemaFilter schemaFilter;
private final Dialect dialect;
public SchemaValidatorImpl(SchemaFilter schemaFilter, Dialect dialect) {
this.schemaFilter = schemaFilter;
this.dialect = dialect;
public SchemaValidatorImpl(HibernateSchemaManagementTool tool) {
this( tool, DefaultSchemaFilter.INSTANCE );
}
public SchemaValidatorImpl(HibernateSchemaManagementTool tool, SchemaFilter schemaFilter) {
this.tool = tool;
this.schemaFilter = schemaFilter;
}
@Override
public void doValidation(Metadata metadata, DatabaseInformation databaseInformation) {
public void doValidation(Metadata metadata, ExecutionOptions options) {
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final DatabaseInformation databaseInformation = Helper.buildDatabaseInformation(
tool.getServiceRegistry(),
new JdbcConnectionContextNonSharedImpl(
jdbcContext.getJdbcConnectionAccess(),
jdbcContext.getSqlStatementLogger(),
false
),
metadata.getDatabase().getDefaultNamespace().getName()
);
try {
performValidation( metadata, databaseInformation, options, jdbcContext.getDialect() );
}
finally {
try {
databaseInformation.cleanup();
}
catch (Exception e) {
log.debug( "Problem releasing DatabaseInformation : " + e.getMessage() );
}
}
}
public void performValidation(
Metadata metadata,
DatabaseInformation databaseInformation,
ExecutionOptions options,
Dialect dialect) {
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( !schemaFilter.includeNamespace( namespace )) {
continue;
@ -57,7 +97,7 @@ public class SchemaValidatorImpl implements SchemaValidator {
final TableInformation tableInformation = databaseInformation.getTableInformation(
table.getQualifiedTableName()
);
validateTable( table, tableInformation, metadata );
validateTable( table, tableInformation, metadata, options, dialect );
}
}
@ -79,7 +119,12 @@ public class SchemaValidatorImpl implements SchemaValidator {
}
}
protected void validateTable(Table table, TableInformation tableInformation, Metadata metadata) {
protected void validateTable(
Table table,
TableInformation tableInformation,
Metadata metadata,
ExecutionOptions options,
Dialect dialect) {
if ( tableInformation == null ) {
throw new SchemaManagementException(
String.format(
@ -107,7 +152,7 @@ public class SchemaValidatorImpl implements SchemaValidator {
)
);
}
validateColumnType( table, column, existingColumn, metadata );
validateColumnType( table, column, existingColumn, metadata, options, dialect );
}
}
@ -115,7 +160,9 @@ public class SchemaValidatorImpl implements SchemaValidator {
Table table,
Column column,
ColumnInformation columnInformation,
Metadata metadata) {
Metadata metadata,
ExecutionOptions options,
Dialect dialect) {
boolean typesMatch = column.getSqlTypeCode( metadata ) == columnInformation.getTypeCode()
|| column.getSqlType( dialect, metadata ).toLowerCase(Locale.ROOT).startsWith( columnInformation.getTypeName().toLowerCase(Locale.ROOT) );
if ( !typesMatch ) {

View File

@ -1,77 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal;
import java.io.FileWriter;
import java.io.IOException;
import org.hibernate.engine.jdbc.internal.FormatStyle;
import org.hibernate.engine.jdbc.internal.Formatter;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.Target;
/**
* @author Steve Ebersole
*/
/**
 * A {@link Target} that appends each accepted command to a file, optionally
 * running it through a {@link Formatter} and appending a statement delimiter.
 * The file is opened eagerly in the constructor and closed in {@link #release()}.
 *
 * @author Steve Ebersole
 */
public class TargetFileImpl implements Target {
	private final FileWriter fileWriter;
	private final String delimiter;
	private final Formatter formatter;

	public TargetFileImpl(String outputFile, String delimiter) {
		this( outputFile, delimiter, FormatStyle.NONE.getFormatter() );
	}

	public TargetFileImpl(String outputFile, String delimiter, Formatter formatter) {
		this.delimiter = delimiter;
		this.formatter = formatter;
		try {
			this.fileWriter = new FileWriter( outputFile );
		}
		catch (IOException e) {
			throw new SchemaManagementException( "Unable to open FileWriter [" + outputFile + "]", e );
		}
	}

	@Override
	public boolean acceptsImportScriptActions() {
		return true;
	}

	@Override
	public void prepare() {
		// the FileWriter was already opened in the constructor; nothing to do
	}

	@Override
	public void accept(String action) {
		final String text = formatter == null ? action : formatter.format( action );
		try {
			fileWriter.write( text );
			if ( delimiter != null ) {
				fileWriter.write( delimiter );
			}
			fileWriter.write( "\n" );
		}
		catch (IOException e) {
			throw new SchemaManagementException( "Unable to write to FileWriter", e );
		}
	}

	@Override
	public void release() {
		if ( fileWriter == null ) {
			return;
		}
		try {
			fileWriter.close();
		}
		catch (IOException ignore) {
			// best-effort close; nothing actionable on failure
		}
	}
}

View File

@ -0,0 +1,96 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import java.sql.Connection;
import java.sql.SQLException;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.internal.FormatStyle;
import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
import org.hibernate.tool.schema.spi.SchemaManagementException;
/**
* Basic support for JdbcConnectionContext implementations
*
* @author Steve Ebersole
*/
/**
 * Basic support for JdbcConnectionContext implementations.  Manages lazy
 * acquisition of the JDBC {@link Connection}, optional switching of the
 * Connection to auto-commit mode while in use, and restoring/releasing the
 * Connection afterwards.
 *
 * @author Steve Ebersole
 */
public abstract class AbstractJdbcConnectionContextImpl implements JdbcConnectionContext {
	private final JdbcConnectionAccess jdbcConnectionAccess;
	private final SqlStatementLogger sqlStatementLogger;
	// whether the Connection should be forced into auto-commit mode while we use it
	private final boolean needsAutoCommit;

	// lazily obtained on first getConnection() call
	private Connection jdbcConnection;
	// the Connection's auto-commit state before we (possibly) changed it; used to restore on release
	private boolean wasInitiallyAutoCommit;

	public AbstractJdbcConnectionContextImpl(
			JdbcConnectionAccess jdbcConnectionAccess,
			SqlStatementLogger sqlStatementLogger,
			boolean needsAutoCommit) {
		this.jdbcConnectionAccess = jdbcConnectionAccess;
		this.sqlStatementLogger = sqlStatementLogger;
		this.needsAutoCommit = needsAutoCommit;
	}

	/**
	 * Lazily obtains (and caches) the JDBC Connection, switching it to
	 * auto-commit if requested at construction.
	 *
	 * @throws SchemaManagementException if the Connection cannot be obtained
	 * or its auto-commit mode cannot be managed.
	 */
	@Override
	public Connection getConnection() {
		if ( jdbcConnection == null ) {
			try {
				this.jdbcConnection = jdbcConnectionAccess.obtainConnection();
			}
			catch (SQLException e) {
				throw new SchemaManagementException( "Unable to obtain JDBC Connection", e );
			}
			try {
				if ( needsAutoCommit ) {
					// remember the prior state so reallyRelease() can restore it
					wasInitiallyAutoCommit = jdbcConnection.getAutoCommit();
					jdbcConnection.setAutoCommit( true );
				}
			}
			catch (SQLException e) {
				throw new SchemaManagementException( "Unable to manage auto-commit", e );
			}
		}
		return jdbcConnection;
	}

	@Override
	public void logSqlStatement(String sqlStatement) {
		// we explicitly use no formatting here because the statements we get
		// will already be formatted if need be
		sqlStatementLogger.logStatement( sqlStatement, FormatStyle.NONE.getFormatter() );
	}

	/**
	 * Actually releases the held Connection (if one was obtained): commits any
	 * pending work when not in auto-commit mode, restores the Connection's
	 * original auto-commit state if we changed it, then returns the Connection
	 * via the JdbcConnectionAccess.
	 */
	protected void reallyRelease() {
		if ( jdbcConnection != null ) {
			try {
				if ( ! jdbcConnection.getAutoCommit() ) {
					jdbcConnection.commit();
				}
				else {
					// we possibly enabled auto-commit on the Connection, reset if needed
					if ( needsAutoCommit && !wasInitiallyAutoCommit ) {
						jdbcConnection.setAutoCommit( false );
					}
				}
			}
			catch (SQLException e) {
				throw new SchemaManagementException(
						"Unable to reset auto-commit after schema management; may or may not be a problem",
						e
				);
			}
			try {
				jdbcConnectionAccess.releaseConnection( jdbcConnection );
			}
			catch (SQLException e) {
				throw new SchemaManagementException( "Unable to release JDBC Connection", e );
			}
		}
	}
}

View File

@ -0,0 +1,41 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import java.io.Reader;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.hibernate.tool.hbm2ddl.ImportSqlCommandExtractor;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
/**
* @author Steve Ebersole
*/
/**
 * Convenience base class for ScriptSourceInput implementations.  Subclasses
 * supply the {@link Reader}; this class handles extracting the individual
 * commands from it.
 *
 * @author Steve Ebersole
 */
public abstract class AbstractScriptSourceInput implements ScriptSourceInput {
	/**
	 * Access the Reader over the underlying script; invoked once per {@link #read}.
	 */
	protected abstract Reader reader();

	@Override
	public void prepare() {
		// no-op by default; subclasses override as needed
	}

	@Override
	public List<String> read(ImportSqlCommandExtractor commandExtractor) {
		final String[] extracted = commandExtractor.extractCommands( reader() );
		return extracted == null
				? Collections.<String>emptyList()
				: Arrays.asList( extracted );
	}

	@Override
	public void release() {
		// no-op by default; subclasses override as needed
	}
}

View File

@ -0,0 +1,47 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import java.io.IOException;
import java.io.Writer;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
/**
* @author Steve Ebersole
*/
/**
 * Convenience base class for ScriptTargetOutput implementations.  Subclasses
 * supply the {@link Writer}; this class handles writing each command (plus a
 * line separator) to it and flushing after every command.
 *
 * @author Steve Ebersole
 */
public abstract class AbstractScriptTargetOutput implements ScriptTargetOutput {
	// the platform line separator; falls back to "\n" when the system property is unset/empty
	protected static final String NEWLINE;
	static {
		final String systemNewLine = System.getProperty( "line.separator" );
		NEWLINE = StringHelper.isNotEmpty( systemNewLine ) ? systemNewLine : "\n";
	}

	/**
	 * Access the Writer to which commands should be written.
	 */
	protected abstract Writer writer();

	@Override
	public void prepare() {
		// no-op by default; subclasses override as needed
	}

	@Override
	public void accept(String command) {
		// resolve the Writer once per command rather than invoking the
		// (virtual) accessor for each write/flush call
		final Writer writer = writer();
		try {
			writer.write( command );
			writer.write( NEWLINE );
			writer.flush();
		}
		catch (IOException e) {
			throw new CommandAcceptanceException( "Could not write to target script file", e );
		}
	}

	@Override
	public void release() {
		// no-op by default; subclasses override as needed
	}
}

View File

@ -0,0 +1,39 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import org.hibernate.tool.schema.spi.SchemaManagementException;
/**
* Describes a schema generation target
*
* @author Steve Ebersole
*/
/**
 * Describes a schema generation target
 *
 * @author Steve Ebersole
 */
public interface GenerationTarget {
	/**
	 * Prepare for accepting commands
	 *
	 * @throws SchemaManagementException If there is a problem preparing the target.
	 */
	void prepare();

	/**
	 * Accept a command
	 *
	 * @param command The command
	 *
	 * @throws SchemaManagementException If there is a problem accepting the command.
	 */
	void accept(String command);

	/**
	 * Release this target, giving it a chance to release its resources.
	 *
	 * @throws SchemaManagementException If there is a problem releasing the target.
	 */
	void release();
}

View File

@ -0,0 +1,95 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.SchemaManagementException;
/**
* GenerationTarget implementation for handling generation directly to the database
*
* @author Steve Ebersole
*/
/**
 * GenerationTarget implementation for handling generation directly to the
 * database: each accepted command is logged and executed through a lazily
 * created JDBC {@link Statement}.
 *
 * @author Steve Ebersole
 */
public class GenerationTargetToDatabase implements GenerationTarget {
	private static final CoreMessageLogger log = CoreLogging.messageLogger( GenerationTargetToDatabase.class );

	private final SqlExceptionHelper sqlExceptionHelper = new SqlExceptionHelper();

	private final JdbcConnectionContext jdbcConnectionContext;

	// lazily created on first accepted command; closed and nulled in release()
	private Statement jdbcStatement;

	public GenerationTargetToDatabase(JdbcConnectionContext jdbcConnectionContext) {
		this.jdbcConnectionContext = jdbcConnectionContext;
	}

	@Override
	public void prepare() {
		// nothing to do - the JDBC Statement is created lazily
	}

	@Override
	public void accept(String command) {
		try {
			jdbcConnectionContext.logSqlStatement( command );

			// use a distinct local name; previously this local shadowed the `jdbcStatement` field
			final Statement statement = jdbcStatement();
			statement.execute( command );

			try {
				final SQLWarning warnings = statement.getWarnings();
				if ( warnings != null ) {
					sqlExceptionHelper.logAndClearWarnings( statement );
				}
			}
			catch (SQLException e) {
				// a failure to read/log warnings should never stop generation
				log.unableToLogSqlWarnings( e );
			}
		}
		catch (SQLException e) {
			throw new CommandAcceptanceException(
					"Unable to execute command [" + command + "]",
					e
			);
		}
	}

	/**
	 * Lazily create (and cache) the JDBC Statement used to execute commands.
	 *
	 * @throws SchemaManagementException If the Statement cannot be created.
	 */
	protected Statement jdbcStatement() {
		if ( jdbcStatement == null ) {
			try {
				jdbcStatement = jdbcConnectionContext.getConnection().createStatement();
			}
			catch (SQLException e) {
				throw new SchemaManagementException(
						"Unable to create JDBC Statement for schema management target",
						e
				);
			}
		}
		return jdbcStatement;
	}

	@Override
	public void release() {
		if ( jdbcStatement != null ) {
			try {
				jdbcStatement.close();
			}
			catch (SQLException e) {
				// closing failures are only logged; they should not mask the real outcome
				log.debug( "Unable to close JDBC statement after JPA schema generation : " + e.toString() );
			}
		}
		jdbcStatement = null;
		jdbcConnectionContext.release();
	}
}

View File

@ -0,0 +1,47 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
/**
* GenerationTarget implementation for handling generation to scripts
*
* @author Steve Ebersole
*/
/**
 * GenerationTarget implementation for handling generation to scripts: each
 * accepted command is forwarded to the wrapped {@link ScriptTargetOutput},
 * with the configured delimiter (if any) appended first.
 *
 * @author Steve Ebersole
 */
public class GenerationTargetToScript implements GenerationTarget {
	private final ScriptTargetOutput scriptTarget;
	private final String delimiter;

	public GenerationTargetToScript(ScriptTargetOutput scriptTarget, String delimiter) {
		if ( scriptTarget == null ) {
			throw new SchemaManagementException( "ScriptTargetOutput cannot be null" );
		}
		this.scriptTarget = scriptTarget;
		this.delimiter = delimiter;
	}

	@Override
	public void prepare() {
		scriptTarget.prepare();
	}

	@Override
	public void accept(String command) {
		final String toWrite = delimiter == null ? command : command + delimiter;
		scriptTarget.accept( toWrite );
	}

	@Override
	public void release() {
		scriptTarget.release();
	}
}

View File

@ -0,0 +1,42 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
/**
* GenerationTarget implementation for handling generation to System.out
*
* @author Steve Ebersole
*/
/**
 * GenerationTarget implementation for handling generation to System.out: each
 * accepted command is printed on its own line, with the configured delimiter
 * (if any) appended first.
 *
 * @author Steve Ebersole
 */
public class GenerationTargetToStdout implements GenerationTarget {
	private final String delimiter;

	public GenerationTargetToStdout(String delimiter) {
		this.delimiter = delimiter;
	}

	public GenerationTargetToStdout() {
		this( null );
	}

	@Override
	public void prepare() {
		// nothing to do
	}

	@Override
	public void accept(String command) {
		final String output = delimiter == null ? command : command + delimiter;
		System.out.println( output );
	}

	@Override
	public void release() {
		// nothing to release for stdout
	}
}

View File

@ -0,0 +1,153 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.QualifiedSequenceName;
import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.extract.internal.InformationExtractorJdbcDatabaseMetaDataImpl;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.ExtractionContext;
import org.hibernate.tool.schema.extract.spi.InformationExtractor;
import org.hibernate.tool.schema.extract.spi.SequenceInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
/**
* @author Steve Ebersole
*/
public class ImprovedDatabaseInformationImpl
		implements DatabaseInformation, ExtractionContext.DatabaseObjectAccess {
	private final JdbcEnvironment jdbcEnvironment;
	private final ImprovedExtractionContextImpl extractionContext;
	private final InformationExtractor extractor;

	// Cache of sequence info, keyed (per legacy behavior) by the *unqualified*
	// sequence name only - see initializeSequences/locateSequenceInformation
	private final Map<QualifiedSequenceName, SequenceInformation> sequenceInformationMap = new HashMap<QualifiedSequenceName, SequenceInformation>();

	/**
	 * Constructs a DatabaseInformation view over the database accessible through
	 * the given connection context.  Eagerly pre-loads all sequence metadata
	 * (tables are resolved lazily through the extractor).
	 *
	 * @param serviceRegistry Access to services
	 * @param jdbcEnvironment The JDBC environment (dialect, exception helper, ...)
	 * @param connectionContext Access to the JDBC Connection to introspect
	 * @param defaultNamespace The default catalog/schema pair
	 *
	 * @throws SQLException If sequence metadata extraction fails
	 */
	public ImprovedDatabaseInformationImpl(
			ServiceRegistry serviceRegistry,
			JdbcEnvironment jdbcEnvironment,
			JdbcConnectionContext connectionContext,
			Namespace.Name defaultNamespace) throws SQLException {
		this.jdbcEnvironment = jdbcEnvironment;
		// NOTE: ordering matters here - the extraction context must exist before
		// the extractor, and both before initializeSequences() runs
		this.extractionContext = new ImprovedExtractionContextImpl(
				serviceRegistry,
				jdbcEnvironment,
				connectionContext,
				defaultNamespace.getCatalog(),
				defaultNamespace.getSchema(),
				this
		);
		// todo : make this pluggable
		this.extractor = new InformationExtractorJdbcDatabaseMetaDataImpl( extractionContext );
		// because we do not have defined a way to locate sequence info by name
		initializeSequences();
	}

	// Pre-load all sequence metadata via the dialect's extractor, storing each
	// entry under its unqualified name (legacy behavior).
	private void initializeSequences() throws SQLException {
		Iterable<SequenceInformation> itr = jdbcEnvironment.getDialect()
				.getSequenceInformationExtractor()
				.extractMetadata( extractionContext );
		for ( SequenceInformation sequenceInformation : itr ) {
			sequenceInformationMap.put(
					// for now, follow the legacy behavior of storing just the
					// unqualified sequence name.
					new QualifiedSequenceName(
							null,
							null,
							sequenceInformation.getSequenceName().getSequenceName()
					),
					sequenceInformation
			);
		}
	}

	@Override
	public boolean catalogExists(Identifier catalog) {
		return extractor.catalogExists( catalog );
	}

	@Override
	public boolean schemaExists(Namespace.Name namespace) {
		return extractor.schemaExists( namespace.getCatalog(), namespace.getSchema() );
	}

	// Convenience overload; delegates to the QualifiedTableName form.
	@Override
	public TableInformation getTableInformation(
			Identifier catalogName,
			Identifier schemaName,
			Identifier tableName) {
		return getTableInformation( new QualifiedTableName( catalogName, schemaName, tableName ) );
	}

	// Convenience overload; delegates to the QualifiedTableName form.
	@Override
	public TableInformation getTableInformation(
			Namespace.Name namespace,
			Identifier tableName) {
		return getTableInformation( new QualifiedTableName( namespace, tableName ) );
	}

	/**
	 * Resolves table information lazily through the extractor (unlike sequences,
	 * which are pre-loaded).
	 *
	 * @throws IllegalArgumentException If the table name has no object-name part
	 */
	@Override
	public TableInformation getTableInformation(QualifiedTableName tableName) {
		if ( tableName.getObjectName() == null ) {
			throw new IllegalArgumentException( "Passed table name cannot be null" );
		}
		return extractor.getTable(
				tableName.getCatalogName(),
				tableName.getSchemaName(),
				tableName.getTableName()
		);
	}

	// Convenience overload; delegates to the QualifiedSequenceName form.
	@Override
	public SequenceInformation getSequenceInformation(
			Identifier catalogName,
			Identifier schemaName,
			Identifier sequenceName) {
		return getSequenceInformation( new QualifiedSequenceName( catalogName, schemaName, sequenceName ) );
	}

	// Convenience overload; delegates to the QualifiedSequenceName form.
	@Override
	public SequenceInformation getSequenceInformation(Namespace.Name schemaName, Identifier sequenceName) {
		return getSequenceInformation( new QualifiedSequenceName( schemaName, sequenceName ) );
	}

	@Override
	public SequenceInformation getSequenceInformation(QualifiedSequenceName sequenceName) {
		return locateSequenceInformation( sequenceName );
	}

	@Override
	public void cleanup() {
		extractionContext.cleanup();
	}

	// DatabaseObjectAccess impl - table lookup simply reuses the lazy resolution above.
	@Override
	public TableInformation locateTableInformation(QualifiedTableName tableName) {
		return getTableInformation( tableName );
	}

	// DatabaseObjectAccess impl - sequence lookup against the pre-loaded cache.
	@Override
	public SequenceInformation locateSequenceInformation(QualifiedSequenceName sequenceName) {
		// again, follow legacy behavior
		// strip any catalog/schema qualification because the cache is keyed by
		// unqualified name only
		if ( sequenceName.getCatalogName() != null || sequenceName.getSchemaName() != null ) {
			sequenceName = new QualifiedSequenceName( null, null, sequenceName.getSequenceName() );
		}
		return sequenceInformationMap.get( sequenceName );
	}
}

View File

@ -0,0 +1,101 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.extract.spi.ExtractionContext;
/**
* @author Steve Ebersole
*/
/**
 * ExtractionContext implementation which obtains the JDBC
 * {@link DatabaseMetaData} lazily from the associated connection context and
 * caches it until {@link #cleanup()}.
 *
 * @author Steve Ebersole
 */
public class ImprovedExtractionContextImpl implements ExtractionContext {
	private final ServiceRegistry serviceRegistry;
	private final JdbcEnvironment jdbcEnvironment;
	private final JdbcConnectionContext connectionContext;

	private final Identifier defaultCatalog;
	private final Identifier defaultSchema;

	private final DatabaseObjectAccess databaseObjectAccess;

	// lazily resolved from the JDBC Connection; cleared by #cleanup
	private DatabaseMetaData jdbcDatabaseMetaData;

	public ImprovedExtractionContextImpl(
			ServiceRegistry serviceRegistry,
			JdbcEnvironment jdbcEnvironment,
			JdbcConnectionContext connectionContext,
			Identifier defaultCatalog,
			Identifier defaultSchema,
			DatabaseObjectAccess databaseObjectAccess) {
		this.serviceRegistry = serviceRegistry;
		this.jdbcEnvironment = jdbcEnvironment;
		this.connectionContext = connectionContext;
		this.defaultCatalog = defaultCatalog;
		this.defaultSchema = defaultSchema;
		this.databaseObjectAccess = databaseObjectAccess;
	}

	@Override
	public ServiceRegistry getServiceRegistry() {
		return serviceRegistry;
	}

	@Override
	public JdbcEnvironment getJdbcEnvironment() {
		return jdbcEnvironment;
	}

	@Override
	public Connection getJdbcConnection() {
		return connectionContext.getConnection();
	}

	/**
	 * Lazily obtains (and caches) the DatabaseMetaData from the Connection.
	 * SQLExceptions are converted through the environment's SqlExceptionHelper.
	 */
	@Override
	public DatabaseMetaData getJdbcDatabaseMetaData() {
		if ( jdbcDatabaseMetaData != null ) {
			return jdbcDatabaseMetaData;
		}
		try {
			jdbcDatabaseMetaData = getJdbcConnection().getMetaData();
			return jdbcDatabaseMetaData;
		}
		catch (SQLException e) {
			throw jdbcEnvironment.getSqlExceptionHelper().convert(
					e,
					"Unable to obtain JDBC DatabaseMetaData"
			);
		}
	}

	@Override
	public Identifier getDefaultCatalog() {
		return defaultCatalog;
	}

	@Override
	public Identifier getDefaultSchema() {
		return defaultSchema;
	}

	@Override
	public DatabaseObjectAccess getDatabaseObjectAccess() {
		return databaseObjectAccess;
	}

	@Override
	public void cleanup() {
		// drop the cached metadata and hand the connection context back
		jdbcDatabaseMetaData = null;
		connectionContext.release();
	}
}

View File

@ -0,0 +1,106 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import java.sql.Connection;
import java.sql.SQLException;
import javax.persistence.PersistenceException;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.jboss.logging.Logger;
/**
* Implementation of JdbcConnectionAccess for use in cases where we
* leverage a ConnectionProvider for access to JDBC Connections.
*
* @author Steve Ebersole
*/
/**
 * Implementation of JdbcConnectionAccess for use in cases where we
 * leverage a ConnectionProvider for access to JDBC Connections.
 * <p/>
 * A single Connection is obtained up front, forced into auto-commit mode
 * (required for schema generation) and handed out on every
 * {@link #obtainConnection()} call; {@link #releaseConnection} restores the
 * original auto-commit setting and returns it to the provider.
 *
 * @author Steve Ebersole
 */
public class JdbcConnectionAccessConnectionProviderImpl implements JdbcConnectionAccess {
	private static final Logger log = Logger.getLogger( JdbcConnectionAccessConnectionProviderImpl.class );

	private final ConnectionProvider connectionProvider;
	private final Connection jdbcConnection;
	private final boolean wasInitiallyAutoCommit;

	public JdbcConnectionAccessConnectionProviderImpl(ConnectionProvider connectionProvider) {
		this.connectionProvider = connectionProvider;
		try {
			this.jdbcConnection = connectionProvider.getConnection();
		}
		catch (SQLException e) {
			throw new PersistenceException( "Unable to obtain JDBC Connection", e );
		}

		final boolean initiallyAutoCommit = forceToAutoCommit( jdbcConnection );
		log.debugf( "wasInitiallyAutoCommit=%s", initiallyAutoCommit );
		this.wasInitiallyAutoCommit = initiallyAutoCommit;
	}

	/**
	 * Switches the Connection into auto-commit mode if it is not already.
	 *
	 * @return the auto-commit setting the Connection had beforehand
	 * ({@code false} if it could not even be read)
	 */
	private static boolean forceToAutoCommit(Connection connection) {
		try {
			final boolean initiallyAutoCommit = connection.getAutoCommit();
			if ( !initiallyAutoCommit ) {
				try {
					connection.setAutoCommit( true );
				}
				catch (SQLException e) {
					throw new PersistenceException(
							String.format(
									"Could not set provided connection [%s] to auto-commit mode" +
											" (needed for schema generation)",
									connection
							),
							e
					);
				}
			}
			return initiallyAutoCommit;
		}
		catch (SQLException ignore) {
			// could not even read the current setting; assume it was off
			return false;
		}
	}

	@Override
	public Connection obtainConnection() throws SQLException {
		return jdbcConnection;
	}

	@Override
	public void releaseConnection(Connection connection) throws SQLException {
		// sanity check: only the Connection we handed out may come back
		if ( connection != this.jdbcConnection ) {
			throw new PersistenceException(
					String.format(
							"Connection [%s] passed back to %s was not the one obtained [%s] from it",
							connection,
							JdbcConnectionAccessConnectionProviderImpl.class.getName(),
							jdbcConnection
					)
			);
		}

		// Reset auto-commit
		if ( !wasInitiallyAutoCommit ) {
			try {
				if ( jdbcConnection.getAutoCommit() ) {
					jdbcConnection.setAutoCommit( false );
				}
			}
			catch (SQLException e) {
				log.info( "Was unable to reset JDBC connection to no longer be in auto-commit mode" );
			}
		}

		// Release the connection
		connectionProvider.closeConnection( jdbcConnection );
	}

	@Override
	public boolean supportsAggressiveRelease() {
		return false;
	}
}

View File

@ -0,0 +1,84 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import java.sql.Connection;
import java.sql.SQLException;
import javax.persistence.PersistenceException;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.jboss.logging.Logger;
/**
* Implementation of JdbcConnectionAccess for cases where we are provided
* a JDBC Connection to use.
*
* @author Steve Ebersole
*/
/**
 * Implementation of JdbcConnectionAccess for cases where we are provided
 * a JDBC Connection to use.
 * <p/>
 * Since the Connection belongs to the application, we force it into
 * auto-commit mode (required for schema generation), restore its original
 * setting on release, and never close it.
 *
 * @author Steve Ebersole
 */
public class JdbcConnectionAccessProvidedConnectionImpl implements JdbcConnectionAccess {
	private static final Logger log = Logger.getLogger( JdbcConnectionAccessProvidedConnectionImpl.class );

	private final Connection jdbcConnection;
	private final boolean wasInitiallyAutoCommit;

	public JdbcConnectionAccessProvidedConnectionImpl(Connection jdbcConnection) {
		this.jdbcConnection = jdbcConnection;

		final boolean initiallyAutoCommit = forceToAutoCommit( jdbcConnection );
		log.debugf( "wasInitiallyAutoCommit=%s", initiallyAutoCommit );
		this.wasInitiallyAutoCommit = initiallyAutoCommit;
	}

	/**
	 * Switches the Connection into auto-commit mode if it is not already.
	 *
	 * @return the auto-commit setting the Connection had beforehand
	 * ({@code false} if it could not even be read)
	 */
	private static boolean forceToAutoCommit(Connection connection) {
		try {
			final boolean initiallyAutoCommit = connection.getAutoCommit();
			if ( !initiallyAutoCommit ) {
				try {
					connection.setAutoCommit( true );
				}
				catch (SQLException e) {
					throw new PersistenceException(
							String.format(
									"Could not set provided connection [%s] to auto-commit mode" +
											" (needed for schema generation)",
									connection
							),
							e
					);
				}
			}
			return initiallyAutoCommit;
		}
		catch (SQLException ignore) {
			// could not even read the current setting; assume it was off
			return false;
		}
	}

	@Override
	public Connection obtainConnection() throws SQLException {
		return jdbcConnection;
	}

	@Override
	public void releaseConnection(Connection connection) throws SQLException {
		// NOTE : reset auto-commit, but *do not* close the Connection. The application handed us this connection
		if ( wasInitiallyAutoCommit ) {
			return;
		}
		try {
			if ( jdbcConnection.getAutoCommit() ) {
				jdbcConnection.setAutoCommit( false );
			}
		}
		catch (SQLException e) {
			log.info( "Was unable to reset JDBC connection to no longer be in auto-commit mode" );
		}
	}

	@Override
	public boolean supportsAggressiveRelease() {
		return false;
	}
}

View File

@ -0,0 +1,22 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import java.sql.Connection;
/**
* JDBC-based specialization of the DataStoreConnectionContext contract.
*
* @author Steve Ebersole
*/
public interface JdbcConnectionContext {
	/**
	 * Access the JDBC Connection this context manages.
	 *
	 * @return The Connection
	 */
	Connection getConnection();

	/**
	 * Route a SQL statement through the context's statement logging.
	 *
	 * @param sqlStatement The statement text to log
	 */
	void logSqlStatement(String sqlStatement);

	/**
	 * Release this context.  Whether the underlying Connection is actually
	 * released here depends on the implementation (shared vs non-shared).
	 */
	void release();
}

View File

@ -0,0 +1,29 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
/**
* @author Steve Ebersole
*/
public class JdbcConnectionContextNonSharedImpl extends AbstractJdbcConnectionContextImpl {
	/**
	 * Constructs a non-shared connection context.
	 *
	 * @param jdbcConnectionAccess Access to the JDBC Connection
	 * @param sqlStatementLogger The statement logger to use
	 * @param needsAutoCommit Whether the Connection must be in auto-commit mode
	 */
	public JdbcConnectionContextNonSharedImpl(
			JdbcConnectionAccess jdbcConnectionAccess,
			SqlStatementLogger sqlStatementLogger,
			boolean needsAutoCommit) {
		super( jdbcConnectionAccess, sqlStatementLogger, needsAutoCommit );
	}

	@Override
	public void release() {
		// for non-shared JdbcConnectionContext instances it is safe to really
		// release them as part of the normal source/target cleanup call stack
		reallyRelease();
	}
}

View File

@ -0,0 +1,34 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
/**
* @author Steve Ebersole
*/
public class JdbcConnectionContextSharedImpl extends AbstractJdbcConnectionContextImpl {
	/**
	 * Constructs a shared connection context.  The creator is responsible for
	 * eventually calling {@link #reallyRelease()}.
	 *
	 * @param jdbcConnectionAccess Access to the JDBC Connection
	 * @param sqlStatementLogger The statement logger to use
	 * @param needsAutoCommit Whether the Connection must be in auto-commit mode
	 */
	public JdbcConnectionContextSharedImpl(
			JdbcConnectionAccess jdbcConnectionAccess,
			SqlStatementLogger sqlStatementLogger,
			boolean needsAutoCommit) {
		super( jdbcConnectionAccess, sqlStatementLogger, needsAutoCommit );
	}

	@Override
	public void release() {
		// for a shared JdbcConnectionContext do not release it as part of the normal
		// source/target cleanup call stacks. The creator will explicitly close the
		// shared JdbcConnectionContext via #reallyRelease
	}

	// NOTE(review): this override merely delegates - presumably it exists to widen
	// the visibility of the (protected?) super-class method so the creator can call
	// it; confirm against AbstractJdbcConnectionContextImpl before removing.
	@Override
	public void reallyRelease() {
		super.reallyRelease();
	}
}

View File

@ -0,0 +1,20 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
/**
* @author Steve Ebersole
*/
public interface JdbcContext {
	/**
	 * Access to JDBC Connections for schema tooling work.
	 *
	 * @return The JdbcConnectionAccess
	 */
	JdbcConnectionAccess getJdbcConnectionAccess();

	/**
	 * The Dialect in effect.
	 *
	 * @return The Dialect
	 */
	Dialect getDialect();

	/**
	 * The logger through which executed SQL statements are reported.
	 *
	 * @return The SqlStatementLogger
	 */
	SqlStatementLogger getSqlStatementLogger();
}

View File

@ -4,13 +4,15 @@
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.jpa.internal.schemagen;
package org.hibernate.tool.schema.internal.exec;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import javax.persistence.PersistenceException;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.jboss.logging.Logger;
@ -20,26 +22,33 @@ import org.jboss.logging.Logger;
*
* @author Steve Ebersole
*/
public class ScriptSourceInputFromFile extends ScriptSourceInputFromReader implements ScriptSourceInput {
public class ScriptSourceInputFromFile extends AbstractScriptSourceInput implements ScriptSourceInput {
private static final Logger log = Logger.getLogger( ScriptSourceInputFromFile.class );
private final File file;
private Reader reader;
/**
* Constructs a ScriptSourceInputFromFile
*
* @param file The file to read from
*/
public ScriptSourceInputFromFile(File file) {
super( toFileReader( file ) );
this.file = file;
}
@Override
public void release() {
try {
reader().close();
}
catch (IOException e) {
log.warn( "Unable to close file reader for generation script source" );
protected Reader reader() {
if ( reader == null ) {
throw new SchemaManagementException( "Illegal state - reader is null - not prepared" );
}
return reader;
}
@Override
public void prepare() {
super.prepare();
this.reader = toFileReader( file );
}
@SuppressWarnings("ResultOfMethodCallIgnored")
@ -62,11 +71,25 @@ public class ScriptSourceInputFromFile extends ScriptSourceInputFromReader imple
return new FileReader( file );
}
catch (IOException e) {
throw new PersistenceException(
throw new SchemaManagementException(
"Unable to open specified script target file [" + file + "] for reading",
e
);
}
}
@Override
public void release() {
try {
reader.close();
}
catch (IOException e) {
log.warn( "Unable to close file reader for generation script source" );
}
}
@Override
public String toString() {
return "ScriptSourceInputFromFile(" + file.getAbsolutePath() + ")";
}
}

View File

@ -4,20 +4,18 @@
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.jpa.internal.schemagen;
package org.hibernate.tool.schema.internal.exec;
import java.io.Reader;
import java.util.Arrays;
import java.util.Collections;
import org.hibernate.tool.hbm2ddl.ImportSqlCommandExtractor;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
/**
* ScriptSourceInput implementation for explicitly given Readers. The readers are not released by this class.
*
* @author Steve Ebersole
*/
public class ScriptSourceInputFromReader implements ScriptSourceInput {
public class ScriptSourceInputFromReader extends AbstractScriptSourceInput implements ScriptSourceInput {
private final Reader reader;
/**
@ -30,22 +28,12 @@ public class ScriptSourceInputFromReader implements ScriptSourceInput {
}
@Override
public Iterable<String> read(ImportSqlCommandExtractor commandExtractor) {
final String[] commands = commandExtractor.extractCommands( reader );
if ( commands == null ) {
return Collections.emptyList();
}
else {
return Arrays.asList( commands );
}
}
@Override
public void release() {
// nothing to do here
}
protected Reader reader() {
return reader;
}
@Override
public String toString() {
return "ScriptSourceInputFromReader()";
}
}

View File

@ -4,13 +4,15 @@
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.jpa.internal.schemagen;
package org.hibernate.tool.schema.internal.exec;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.URL;
import javax.persistence.PersistenceException;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.jboss.logging.Logger;
@ -21,16 +23,40 @@ import org.jboss.logging.Logger;
* @author Christian Beikov
* @author Steve Ebersole
*/
public class ScriptSourceInputFromUrl extends ScriptSourceInputFromReader implements ScriptSourceInput {
public class ScriptSourceInputFromUrl extends AbstractScriptSourceInput implements ScriptSourceInput {
private static final Logger log = Logger.getLogger( ScriptSourceInputFromFile.class );
private final URL url;
private Reader reader;
/**
* Constructs a ScriptSourceInputFromUrl instance
*
* @param url The url to read from
*/
public ScriptSourceInputFromUrl(URL url) {
super( toReader( url ) );
this.url = url;
}
@Override
protected Reader reader() {
if ( reader == null ) {
throw new SchemaManagementException( "Illegal state - reader is null - not prepared" );
}
return reader;
}
@Override
public void prepare() {
super.prepare();
try {
this.reader = new InputStreamReader( url.openStream() );
}
catch (IOException e) {
throw new SchemaManagementException(
"Unable to open specified script source url [" + url + "] for reading"
);
}
}
@Override
@ -43,15 +69,8 @@ public class ScriptSourceInputFromUrl extends ScriptSourceInputFromReader implem
}
}
private static Reader toReader(URL url) {
try {
return new InputStreamReader( url.openStream() );
}
catch (IOException e) {
throw new PersistenceException(
"Unable to open specified script source url [" + url + "] for reading"
);
}
@Override
public String toString() {
return "ScriptSourceInputFromUrl(" + url.toExternalForm() + ")";
}
}

View File

@ -0,0 +1,38 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import java.util.Collections;
import java.util.List;
import org.hibernate.tool.hbm2ddl.ImportSqlCommandExtractor;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
/**
* Used in cases where a specified source cannot be found
*
* @author Steve Ebersole
*/
public class ScriptSourceInputNonExistentImpl implements ScriptSourceInput {
	/**
	 * Singleton access
	 */
	public static final ScriptSourceInputNonExistentImpl INSTANCE = new ScriptSourceInputNonExistentImpl();

	@Override
	public void prepare() {
		// nothing to prepare; the source does not exist
	}

	/**
	 * Always returns an empty command list since there is no script to read.
	 */
	@Override
	public List<String> read(ImportSqlCommandExtractor commandExtractor) {
		return Collections.emptyList();
	}

	@Override
	public void release() {
		// nothing to release
	}
}

View File

@ -4,15 +4,16 @@
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.jpa.internal.schemagen;
package org.hibernate.tool.schema.internal.exec;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import javax.persistence.PersistenceException;
import org.hibernate.jpa.internal.HEMLogging;
import org.hibernate.internal.CoreLogging;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
import org.jboss.logging.Logger;
@ -21,25 +22,42 @@ import org.jboss.logging.Logger;
*
* @author Steve Ebersole
*/
public class ScriptTargetOutputToFile extends ScriptTargetOutputToWriter implements ScriptTargetOutput {
private static final Logger log = HEMLogging.logger( ScriptTargetOutputToFile.class );
public class ScriptTargetOutputToFile extends AbstractScriptTargetOutput implements ScriptTargetOutput {
private static final Logger log = CoreLogging.logger( ScriptTargetOutputToFile.class );
private final File file;
private Writer writer;
/**
* Constructs a ScriptTargetOutputToFile
*
* @param file The file to write to
*/
public ScriptTargetOutputToFile(File file) {
super( toFileWriter( file ) );
this.file = file;
}
@Override
protected Writer writer() {
if ( writer == null ) {
throw new SchemaManagementException( "Illegal state : writer null - not prepared" );
}
return writer;
}
@Override
public void prepare() {
super.prepare();
this.writer = toFileWriter( this.file );
}
@Override
public void release() {
try {
writer().close();
}
catch (IOException e) {
throw new PersistenceException( "Unable to close file writer : " + e.toString() );
if ( writer != null ) {
try {
writer.close();
}
catch (IOException e) {
throw new SchemaManagementException( "Unable to close file writer : " + e.toString() );
}
finally {
writer = null;
}
}
}
@ -59,10 +77,10 @@ public class ScriptTargetOutputToFile extends ScriptTargetOutputToWriter impleme
log.debug( "Exception calling File#createNewFile : " + e.toString() );
}
try {
return new FileWriter( file );
return new FileWriter( file, true );
}
catch (IOException e) {
throw new PersistenceException( "Unable to open specified script target file for writing : " + file, e );
throw new SchemaManagementException( "Unable to open specified script target file for writing : " + file, e );
}
}
}

View File

@ -0,0 +1,54 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import org.hibernate.tool.schema.spi.SchemaManagementException;
/**
* @author Steve Ebersole
*/
/**
 * ScriptTargetOutput implementation which writes commands to {@code System.out}.
 * <p/>
 * Because the underlying stream is {@code System.out}, {@link #release()} only
 * flushes the writer - it must never close it, since closing an
 * {@link OutputStreamWriter} closes the wrapped stream and would render stdout
 * unusable for the remainder of the JVM.
 *
 * @author Steve Ebersole
 */
public class ScriptTargetOutputToStdout extends AbstractScriptTargetOutput {
	// created in #prepare, flushed and discarded in #release
	private Writer writer;

	@Override
	protected Writer writer() {
		if ( writer == null ) {
			throw new SchemaManagementException( "Illegal state : writer null - not prepared" );
		}
		return writer;
	}

	@Override
	public void prepare() {
		super.prepare();
		this.writer = new OutputStreamWriter( System.out );
	}

	@Override
	public void release() {
		if ( writer != null ) {
			try {
				// flush (rather than close): closing would close System.out itself
				writer.flush();
			}
			catch (IOException e) {
				throw new SchemaManagementException( "Unable to flush stdout writer : " + e.toString() );
			}
			finally {
				writer = null;
			}
		}
	}
}

View File

@ -4,16 +4,17 @@
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.jpa.internal.schemagen;
package org.hibernate.tool.schema.internal.exec;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.net.URISyntaxException;
import java.net.URL;
import javax.persistence.PersistenceException;
import org.hibernate.jpa.internal.HEMLogging;
import org.hibernate.internal.CoreLogging;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
import org.jboss.logging.Logger;
@ -22,16 +23,28 @@ import org.jboss.logging.Logger;
*
* @author Steve Ebersole
*/
public class ScriptTargetOutputToUrl extends ScriptTargetOutputToWriter implements ScriptTargetOutput {
private static final Logger log = HEMLogging.logger( ScriptTargetOutputToUrl.class );
public class ScriptTargetOutputToUrl extends AbstractScriptTargetOutput implements ScriptTargetOutput {
private static final Logger log = CoreLogging.logger( ScriptTargetOutputToUrl.class );
private final URL url;
private Writer writer;
/**
* Constructs a ScriptTargetOutputToUrl
*
* @param url The url to write to
*/
public ScriptTargetOutputToUrl(URL url) {
super( toWriter( url ) );
this.url = url;
}
@Override
protected Writer writer() {
if ( writer == null ) {
throw new SchemaManagementException( "Illegal state : writer null - not prepared" );
}
return writer;
}
@Override
public void prepare() {
super.prepare();
this.writer = toWriter( url );
}
@Override
@ -40,7 +53,7 @@ public class ScriptTargetOutputToUrl extends ScriptTargetOutputToWriter implemen
writer().close();
}
catch (IOException e) {
throw new PersistenceException( "Unable to close file writer : " + e.toString() );
throw new SchemaManagementException( "Unable to close file writer : " + e.toString() );
}
}
@ -53,7 +66,7 @@ public class ScriptTargetOutputToUrl extends ScriptTargetOutputToWriter implemen
return ScriptTargetOutputToFile.toFileWriter( new File( url.toURI() ) );
}
catch (URISyntaxException e) {
throw new PersistenceException(
throw new SchemaManagementException(
String.format(
"Could not convert specified URL[%s] to a File reference",
url

View File

@ -0,0 +1,40 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.internal.exec;
import java.io.Writer;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
/**
* ScriptTargetOutput implementation for supplied Writer references.
* <p/>
* Specifically, we are handed this Writer so we do not want to close it.
*
* @author Steve Ebersole
*/
public class ScriptTargetOutputToWriter extends AbstractScriptTargetOutput implements ScriptTargetOutput {
	// externally supplied; never closed by this class
	private final Writer writer;

	/**
	 * Constructs a ScriptTargetOutputToWriter
	 *
	 * @param writer The writer to write to
	 *
	 * @throws SchemaManagementException If the given writer is {@code null}
	 */
	public ScriptTargetOutputToWriter(Writer writer) {
		if ( writer == null ) {
			throw new SchemaManagementException( "Writer cannot be null" );
		}
		this.writer = writer;
	}

	@Override
	protected Writer writer() {
		return writer;
	}
}

View File

@ -0,0 +1,22 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.spi;
import org.hibernate.HibernateException;
/**
* @author Steve Ebersole
*/
public class CommandAcceptanceException extends HibernateException {
	/**
	 * Constructs a CommandAcceptanceException
	 *
	 * @param message Message explaining why the command could not be accepted
	 */
	public CommandAcceptanceException(String message) {
		super( message );
	}

	/**
	 * Constructs a CommandAcceptanceException
	 *
	 * @param message Message explaining why the command could not be accepted
	 * @param cause The underlying cause
	 */
	public CommandAcceptanceException(String message, Throwable cause) {
		super( message, cause );
	}
}

View File

@ -0,0 +1,27 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.spi;
import org.hibernate.service.ServiceRegistry;
/**
* Memento representing the dropping of a schema as part of create-drop
* hbm2ddl.auto handling. This memento is registered with the
* SessionFactory and executed as the SessionFactory is closing.
* <p/>
* Implementations should be Serializable
*
* @author Steve Ebersole
*/
public interface DelayedDropAction {
	/**
	 * Perform the delayed schema drop.  Called as the SessionFactory with which
	 * this action was registered is closing.
	 *
	 * @param serviceRegistry Access to the ServiceRegistry
	 */
	void perform(ServiceRegistry serviceRegistry);
}

View File

@ -0,0 +1,22 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.spi;
/**
* Callback to allow the built DelayedDropAction, if indicated, to be registered
* back with the SessionFactory (or the thing that will manage its later execution).
*
* @author Steve Ebersole
*/
public interface DelayedDropRegistry {
	/**
	 * Register the built DelayedDropAction
	 *
	 * @param action The delayed schema drop memento to be executed later
	 * (typically as the SessionFactory closes)
	 */
	void registerOnCloseAction(DelayedDropAction action);
}

View File

@ -0,0 +1,27 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.spi;
/**
* Implementation of DelayedDropRegistry for cases when the delayed-drop portion of
* "create-drop" is not valid.
*
* @author Steve Ebersole
*/
public class DelayedDropRegistryNotAvailableImpl implements DelayedDropRegistry {
	/**
	 * Singleton access
	 */
	public static final DelayedDropRegistryNotAvailableImpl INSTANCE = new DelayedDropRegistryNotAvailableImpl();

	/**
	 * Always fails: registering a delayed drop is invalid in this context.
	 *
	 * @throws SchemaManagementException Always
	 */
	@Override
	public void registerOnCloseAction(DelayedDropAction action) {
		throw new SchemaManagementException(
				"DelayedDropRegistry is not available in this context. 'create-drop' action is not valid"
		);
	}
}

View File

@ -0,0 +1,21 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.spi;
/**
* Contract for how CommandAcceptanceException errors should be handled (logged, ignored, etc).
*
* @author Steve Ebersole
*/
public interface ExceptionHandler {
	/**
	 * Handle the CommandAcceptanceException error.  Implementations may log it,
	 * ignore it, or re-throw it to halt processing.
	 *
	 * @param exception The CommandAcceptanceException to handle
	 */
	void handleException(CommandAcceptanceException exception);
}

View File

@ -0,0 +1,23 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.tool.schema.spi;

import java.util.Map;

import org.hibernate.Incubating;

/**
 * Parameter object representing options for schema management tool execution
 *
 * @author Steve Ebersole
 */
@Incubating
public interface ExecutionOptions {
	/**
	 * Access to the configuration/setting values in effect for this execution.
	 * <p/>
	 * NOTE(review): raw {@code Map} is kept for implementer compatibility; presumably
	 * keyed by setting name (String) — confirm against callers before parameterizing.
	 */
	Map getConfigurationValues();

	/**
	 * Should the tooling also create/drop the catalogs and schemas (namespaces)
	 * referenced by the mapping metadata?
	 */
	boolean shouldManageNamespaces();

	/**
	 * The handler to use for any CommandAcceptanceException errors raised while
	 * applying individual commands.
	 */
	ExceptionHandler getExceptionHandler();
}

View File

@ -12,24 +12,24 @@ import org.hibernate.boot.model.relational.Exportable;
/**
* Defines a contract for exporting of database objects (tables, sequences, etc) for use in SQL {@code CREATE} and
* {@code DROP} scripts
* {@code DROP} scripts.
*
* @author Steve Ebersole
*/
public interface Exporter<T extends Exportable> {
public static final String[] NO_COMMANDS = new String[0];
String[] NO_COMMANDS = new String[0];
/**
* Get the commands needed for creation.
*
* @return The commands needed for creation scripting.
*/
public String[] getSqlCreateStrings(T exportable, Metadata metadata);
String[] getSqlCreateStrings(T exportable, Metadata metadata);
/**
* Get the commands needed for dropping.
*
* @return The commands needed for drop scripting.
*/
public String[] getSqlDropStrings(T exportable, Metadata metadata);
String[] getSqlDropStrings(T exportable, Metadata metadata);
}

View File

@ -0,0 +1,17 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.tool.schema.spi;

/**
 * JPA ties the notion of {@link SourceDescriptor} and {@link TargetDescriptor}
 * together: meaning that a SourceDescriptor is specific to a given TargetDescriptor.
 * This contract models that association.
 * <p/>
 * Marker interface only — it adds no members beyond the two parent contracts.
 *
 * @author Steve Ebersole
 */
public interface JpaTargetAndSourceDescriptor extends TargetDescriptor, SourceDescriptor {
}

View File

@ -6,74 +6,27 @@
*/
package org.hibernate.tool.schema.spi;
import java.util.List;
import org.hibernate.Incubating;
import org.hibernate.boot.Metadata;
import org.hibernate.dialect.Dialect;
/**
* Service delegate for handling schema creation.
* <p/>
* The actual contract here is kind of convoluted with the design
* idea of allowing this to work in ORM (JDBC) as well as in non-JDBC
* environments (OGM, e.g.) simultaneously. ExecutionContext allows
*
* @author Steve Ebersole
*/
@Incubating
public interface SchemaCreator {
/**
* Perform the creation to the specified targets
* Perform a schema creation from the indicated source(s) to the indicated target(s).
*
* @param metadata The "compiled" mapping metadata.
* @param createNamespaces Should the schema(s)/catalog(s) actually be created as well ({@code CREATE SCHEMA})?
* @param targets The targets for creation
*
* @throws SchemaManagementException Indicates a problem processing the creation
* @param metadata Represents the schema to be created.
* @param options Options for executing the creation
* @param sourceDescriptor description of the source(s) of creation commands
* @param targetDescriptor description of the target(s) for the creation commands
*/
public void doCreation(
Metadata metadata,
boolean createNamespaces,
Target... targets) throws SchemaManagementException;
/**
* Perform the creation to the specified targets
*
* @param metadata The "compiled" mapping metadata.
* @param createNamespaces Should the schema(s)/catalog(s) actually be created as well ({@code CREATE SCHEMA})?
* @param dialect Allow explicitly passing the Dialect to use.
* @param targets The targets for creation
*
* @throws SchemaManagementException Indicates a problem processing the creation
*/
public void doCreation(
Metadata metadata,
boolean createNamespaces,
Dialect dialect,
Target... targets) throws SchemaManagementException;
/**
* Perform the creation to the specified targets
*
* @param metadata The "compiled" mapping metadata.
* @param createNamespaces Should the schema(s) actually be created as well ({@code CREATE SCHEMA})?
* @param targets The targets for creation
*
* @throws SchemaManagementException Indicates a problem processing the creation
*/
public void doCreation(
Metadata metadata,
boolean createNamespaces,
List<Target> targets) throws SchemaManagementException;
/**
* Perform the creation to the specified targets
*
* @param metadata The "compiled" mapping metadata.
* @param createNamespaces Should the schema(s)/catalog(s) actually be created as well ({@code CREATE SCHEMA})?
* @param dialect Allow explicitly passing the Dialect to use.
* @param targets The targets for creation
*
* @throws SchemaManagementException Indicates a problem processing the creation
*/
public void doCreation(
Metadata metadata,
boolean createNamespaces,
Dialect dialect,
List<Target> targets) throws SchemaManagementException;
void doCreation(Metadata metadata, ExecutionOptions options, SourceDescriptor sourceDescriptor, TargetDescriptor targetDescriptor);
}

View File

@ -6,57 +6,35 @@
*/
package org.hibernate.tool.schema.spi;
import java.util.List;
import org.hibernate.Incubating;
import org.hibernate.boot.Metadata;
import org.hibernate.dialect.Dialect;
/**
* Service delegate for handling schema dropping.
*
* @author Steve Ebersole
*/
@Incubating
public interface SchemaDropper {
/**
* Perform the drop to the specified targets
* Perform a schema drop from the indicated source(s) to the indicated target(s).
*
* @param metadata The "compiled" mapping metadata.
* @param dropNamespaces Should the schema(s)/catalog(s) actually be dropped also ({@code DROP SCHEMA})?
* @param targets The targets for drop
*
* @throws SchemaManagementException Indicates a problem processing the creation
* @param metadata Represents the schema to be dropped.
* @param options Options for executing the drop
* @param sourceDescriptor description of the source(s) of drop commands
* @param targetDescriptor description of the target(s) for the drop commands
*/
public void doDrop(Metadata metadata, boolean dropNamespaces, Target... targets) throws SchemaManagementException;
/**
* Perform the drop to the specified targets
*
* @param metadata The "compiled" mapping metadata.
* @param dropNamespaces Should the schema(s)/catalog(s) actually be dropped also ({@code DROP SCHEMA})?
* @param targets The targets for drop
*
* @throws SchemaManagementException Indicates a problem processing the creation
*/
public void doDrop(Metadata metadata, boolean dropNamespaces, Dialect dialect, Target... targets) throws SchemaManagementException;
void doDrop(Metadata metadata, ExecutionOptions options, SourceDescriptor sourceDescriptor, TargetDescriptor targetDescriptor);
/**
* Perform the drop to the specified targets
* Build a delayed Runnable for performing schema dropping. This implicitly
* targets the underlying data-store.
*
* @param metadata The "compiled" mapping metadata.
* @param dropNamespaces Should the schema(s)/catalog(s) actually be dropped also ({@code DROP SCHEMA})?
* @param targets The targets for drop
* @param metadata The metadata to drop
* @param options The drop options
* @param sourceDescriptor For access to the {@link SourceDescriptor#getScriptSourceInput()}
*
* @throws SchemaManagementException Indicates a problem processing the creation
* @return The Runnable
*/
public void doDrop(Metadata metadata, boolean dropNamespaces, List<Target> targets) throws SchemaManagementException;
/**
* Perform the drop to the specified targets
*
* @param metadata The "compiled" mapping metadata.
* @param dropNamespaces Should the schema(s)/catalog(s) actually be dropped also ({@code DROP SCHEMA})?
* @param targets The targets for drop
*
* @throws SchemaManagementException Indicates a problem processing the creation
*/
public void doDrop(Metadata metadata, boolean dropNamespaces, Dialect dialect, List<Target> targets) throws SchemaManagementException;
DelayedDropAction buildDelayedAction(Metadata metadata, ExecutionOptions options, SourceDescriptor sourceDescriptor);
}

View File

@ -6,13 +6,17 @@
*/
package org.hibernate.tool.schema.spi;
import org.hibernate.Incubating;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.Sequence;
import org.hibernate.mapping.Table;
/**
* Defines a filter for Hibernate's schema tooling.
*
* @since 5.1
*/
@Incubating
public interface SchemaFilter {
/**
* Should the given namespace (catalog+schema) be included? If {@code true}, the

View File

@ -6,6 +6,8 @@
*/
package org.hibernate.tool.schema.spi;
import org.hibernate.Incubating;
/**
* Used to specify the {@link org.hibernate.tool.schema.spi.SchemaFilter}s to be used by create, drop, migrate and validate
* operations on the database schema. These filters can be used to limit the scope of operations to specific namespaces,
@ -13,6 +15,7 @@ package org.hibernate.tool.schema.spi;
*
* @since 5.1
*/
@Incubating
public interface SchemaFilterProvider {
/**
* Get the filter to be applied to {@link SchemaCreator} processing

View File

@ -10,6 +10,10 @@ import org.hibernate.HibernateException;
/**
* Indicates a problem in performing schema management.
* <p/>
* Specifically this represents a problem of an infrastructural nature. For
* representing problems applying a specific command see
* {@link CommandAcceptanceException}
*
* @author Steve Ebersole
*/

View File

@ -8,6 +8,7 @@ package org.hibernate.tool.schema.spi;
import java.util.Map;
import org.hibernate.Incubating;
import org.hibernate.service.Service;
/**
@ -15,9 +16,10 @@ import org.hibernate.service.Service;
*
* @author Steve Ebersole
*/
@Incubating
public interface SchemaManagementTool extends Service {
public SchemaCreator getSchemaCreator(Map options);
public SchemaDropper getSchemaDropper(Map options);
public SchemaMigrator getSchemaMigrator(Map options);
public SchemaValidator getSchemaValidator(Map options);
SchemaCreator getSchemaCreator(Map options);
SchemaDropper getSchemaDropper(Map options);
SchemaMigrator getSchemaMigrator(Map options);
SchemaValidator getSchemaValidator(Map options);
}

View File

@ -0,0 +1,443 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.tool.schema.spi;

import java.util.EnumSet;
import java.util.Map;

import org.hibernate.boot.Metadata;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.Action;
import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.ActionGrouping;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.internal.Helper;

import org.jboss.logging.Logger;

import static org.hibernate.cfg.AvailableSettings.HBM2DDL_CREATE_SCRIPT_SOURCE;
import static org.hibernate.cfg.AvailableSettings.HBM2DDL_CREATE_SOURCE;
import static org.hibernate.cfg.AvailableSettings.HBM2DDL_DROP_SCRIPT_SOURCE;
import static org.hibernate.cfg.AvailableSettings.HBM2DDL_DROP_SOURCE;
import static org.hibernate.cfg.AvailableSettings.HBM2DDL_SCRIPTS_CREATE_TARGET;
import static org.hibernate.cfg.AvailableSettings.HBM2DDL_SCRIPTS_DROP_TARGET;

/**
 * Responsible for coordinating SchemaManagementTool execution(s) for auto-tooling whether
 * from JPA or hbm2ddl.auto.
 * <p/>
 * The main entry point is {@link #process}
 *
 * @author Steve Ebersole
 */
public class SchemaManagementToolCoordinator {
	private static final Logger log = Logger.getLogger( SchemaManagementToolCoordinator.class );

	/**
	 * Main coordination entry point.  Interprets the configured database and script
	 * actions and delegates each to the {@link SchemaManagementTool} service.
	 *
	 * @param metadata The "compiled" mapping metadata describing the schema
	 * @param serviceRegistry Registry used to locate the SchemaManagementTool and ClassLoaderService
	 * @param configurationValues The configuration settings driving source/target/action selection
	 * @param delayedDropRegistry Registry against which the delayed-drop portion of
	 * "create-drop" is registered (invoked when the SessionFactory closes)
	 */
	public static void process(
			final Metadata metadata,
			final ServiceRegistry serviceRegistry,
			final Map configurationValues,
			DelayedDropRegistry delayedDropRegistry) {
		final ActionGrouping actions = ActionGrouping.interpret( configurationValues );

		if ( actions.getDatabaseAction() == Action.NONE && actions.getScriptAction() == Action.NONE ) {
			// no actions specified
			log.debug( "No actions specified; doing nothing" );
			return;
		}

		final SchemaManagementTool tool = serviceRegistry.getService( SchemaManagementTool.class );

		final ExecutionOptions executionOptions = buildExecutionOptions(
				configurationValues,
				ExceptionHandlerLoggedImpl.INSTANCE
		);

		performDatabaseAction( actions.getDatabaseAction(), metadata, tool, serviceRegistry, executionOptions );
		performScriptAction( actions.getScriptAction(), metadata, tool, serviceRegistry, executionOptions );

		if ( actions.getDatabaseAction() == Action.CREATE_DROP ) {
			// the drop half of "create-drop" is deferred until the registry (SessionFactory) closes
			//noinspection unchecked
			delayedDropRegistry.registerOnCloseAction(
					tool.getSchemaDropper( configurationValues ).buildDelayedAction(
							metadata,
							executionOptions,
							buildDatabaseTargetDescriptor(
									configurationValues,
									DropSettingSelector.INSTANCE,
									serviceRegistry
							)
					)
			);
		}
	}

	/**
	 * Build an {@link ExecutionOptions} view over the given configuration values and
	 * exception handler.
	 *
	 * @param configurationValues The configuration settings
	 * @param exceptionHandler The handler for CommandAcceptanceException errors
	 *
	 * @return The options parameter object
	 */
	public static ExecutionOptions buildExecutionOptions(
			final Map configurationValues,
			final ExceptionHandler exceptionHandler) {
		return new ExecutionOptions() {
			@Override
			public boolean shouldManageNamespaces() {
				return Helper.interpretNamespaceHandling( configurationValues );
			}

			@Override
			public Map getConfigurationValues() {
				return configurationValues;
			}

			@Override
			public ExceptionHandler getExceptionHandler() {
				return exceptionHandler;
			}
		};
	}

	/**
	 * Execute the requested action against the database (JDBC) target.
	 */
	@SuppressWarnings("unchecked")
	private static void performDatabaseAction(
			final Action action,
			Metadata metadata,
			SchemaManagementTool tool,
			ServiceRegistry serviceRegistry,
			final ExecutionOptions executionOptions) {

		// IMPL NOTE : JPA binds source and target info..

		switch ( action ) {
			case CREATE_ONLY: {
				final JpaTargetAndSourceDescriptor createDescriptor = buildDatabaseTargetDescriptor(
						executionOptions.getConfigurationValues(),
						CreateSettingSelector.INSTANCE,
						serviceRegistry
				);
				tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation(
						metadata,
						executionOptions,
						createDescriptor,
						createDescriptor
				);
				break;
			}
			case CREATE:
			case CREATE_DROP: {
				// drop first, then create (the delayed drop for CREATE_DROP is handled by the caller)
				final JpaTargetAndSourceDescriptor dropDescriptor = buildDatabaseTargetDescriptor(
						executionOptions.getConfigurationValues(),
						DropSettingSelector.INSTANCE,
						serviceRegistry
				);
				tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop(
						metadata,
						executionOptions,
						dropDescriptor,
						dropDescriptor
				);
				final JpaTargetAndSourceDescriptor createDescriptor = buildDatabaseTargetDescriptor(
						executionOptions.getConfigurationValues(),
						CreateSettingSelector.INSTANCE,
						serviceRegistry
				);
				tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation(
						metadata,
						executionOptions,
						createDescriptor,
						createDescriptor
				);
				break;
			}
			case DROP: {
				final JpaTargetAndSourceDescriptor dropDescriptor = buildDatabaseTargetDescriptor(
						executionOptions.getConfigurationValues(),
						DropSettingSelector.INSTANCE,
						serviceRegistry
				);
				tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop(
						metadata,
						executionOptions,
						dropDescriptor,
						dropDescriptor
				);
				break;
			}
			case UPDATE: {
				final JpaTargetAndSourceDescriptor migrateDescriptor = buildDatabaseTargetDescriptor(
						executionOptions.getConfigurationValues(),
						MigrateSettingSelector.INSTANCE,
						serviceRegistry
				);
				tool.getSchemaMigrator( executionOptions.getConfigurationValues() ).doMigration(
						metadata,
						executionOptions,
						migrateDescriptor
				);
				break;
			}
			case VALIDATE: {
				tool.getSchemaValidator( executionOptions.getConfigurationValues() ).doValidation(
						metadata,
						executionOptions
				);
				break;
			}
		}
	}

	/**
	 * Build a combined source/target descriptor whose target is the database itself.
	 * The source may be the mapping metadata, a script, or both, per the given selector's
	 * settings.
	 */
	private static JpaTargetAndSourceDescriptor buildDatabaseTargetDescriptor(
			Map configurationValues,
			SettingSelector settingSelector,
			ServiceRegistry serviceRegistry) {
		final Object scriptSourceSetting = settingSelector.getScriptSourceSetting( configurationValues );
		final SourceType sourceType = SourceType.interpret(
				settingSelector.getSourceTypeSetting( configurationValues ),
				scriptSourceSetting != null ? SourceType.SCRIPT : SourceType.METADATA
		);

		final ScriptSourceInput scriptSourceInput = interpretScriptSourceInput(
				sourceType,
				scriptSourceSetting,
				serviceRegistry
		);

		return new JpaTargetAndSourceDescriptor() {
			@Override
			public EnumSet<TargetType> getTargetTypes() {
				return EnumSet.of( TargetType.DATABASE );
			}

			@Override
			public ScriptTargetOutput getScriptTargetOutput() {
				// database target - no script output
				return null;
			}

			@Override
			public SourceType getSourceType() {
				return sourceType;
			}

			@Override
			public ScriptSourceInput getScriptSourceInput() {
				return scriptSourceInput;
			}
		};
	}

	/**
	 * Execute the requested action against the script (file/writer) target.
	 */
	@SuppressWarnings("unchecked")
	private static void performScriptAction(
			Action scriptAction,
			Metadata metadata,
			SchemaManagementTool tool,
			ServiceRegistry serviceRegistry,
			ExecutionOptions executionOptions) {
		switch ( scriptAction ) {
			case CREATE_ONLY: {
				final JpaTargetAndSourceDescriptor createDescriptor = buildScriptTargetDescriptor(
						executionOptions.getConfigurationValues(),
						CreateSettingSelector.INSTANCE,
						serviceRegistry
				);
				tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation(
						metadata,
						executionOptions,
						createDescriptor,
						createDescriptor
				);
				break;
			}
			case CREATE:
			case CREATE_DROP: {
				final JpaTargetAndSourceDescriptor dropDescriptor = buildScriptTargetDescriptor(
						executionOptions.getConfigurationValues(),
						DropSettingSelector.INSTANCE,
						serviceRegistry
				);
				tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop(
						metadata,
						executionOptions,
						dropDescriptor,
						dropDescriptor
				);
				final JpaTargetAndSourceDescriptor createDescriptor = buildScriptTargetDescriptor(
						executionOptions.getConfigurationValues(),
						CreateSettingSelector.INSTANCE,
						serviceRegistry
				);
				tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation(
						metadata,
						executionOptions,
						createDescriptor,
						createDescriptor
				);
				break;
			}
			case DROP: {
				final JpaTargetAndSourceDescriptor dropDescriptor = buildScriptTargetDescriptor(
						executionOptions.getConfigurationValues(),
						DropSettingSelector.INSTANCE,
						serviceRegistry
				);
				tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop(
						metadata,
						executionOptions,
						dropDescriptor,
						dropDescriptor
				);
				break;
			}
			case UPDATE: {
				final JpaTargetAndSourceDescriptor migrateDescriptor = buildScriptTargetDescriptor(
						executionOptions.getConfigurationValues(),
						MigrateSettingSelector.INSTANCE,
						serviceRegistry
				);
				tool.getSchemaMigrator( executionOptions.getConfigurationValues() ).doMigration(
						metadata,
						executionOptions,
						migrateDescriptor
				);
				break;
			}
			case VALIDATE: {
				throw new SchemaManagementException( "VALIDATE is not valid SchemaManagementTool action for script output" );
			}
		}
	}

	/**
	 * Build a combined source/target descriptor whose target is a script output.
	 */
	private static JpaTargetAndSourceDescriptor buildScriptTargetDescriptor(
			Map configurationValues,
			SettingSelector settingSelector,
			ServiceRegistry serviceRegistry) {
		final Object scriptSourceSetting = settingSelector.getScriptSourceSetting( configurationValues );
		final SourceType sourceType = SourceType.interpret(
				settingSelector.getSourceTypeSetting( configurationValues ),
				scriptSourceSetting != null ? SourceType.SCRIPT : SourceType.METADATA
		);

		final ScriptSourceInput scriptSourceInput = interpretScriptSourceInput(
				sourceType,
				scriptSourceSetting,
				serviceRegistry
		);

		final ScriptTargetOutput scriptTargetOutput = Helper.interpretScriptTargetSetting(
				settingSelector.getScriptTargetSetting( configurationValues ),
				serviceRegistry.getService( ClassLoaderService.class )
		);

		return new JpaTargetAndSourceDescriptor() {
			@Override
			public EnumSet<TargetType> getTargetTypes() {
				return EnumSet.of( TargetType.SCRIPT );
			}

			@Override
			public ScriptTargetOutput getScriptTargetOutput() {
				return scriptTargetOutput;
			}

			@Override
			public SourceType getSourceType() {
				return sourceType;
			}

			@Override
			public ScriptSourceInput getScriptSourceInput() {
				return scriptSourceInput;
			}
		};
	}

	/**
	 * Shared interpretation of the script-source setting for both database and script
	 * target descriptors.  Returns {@code null} when scripts are not part of the source;
	 * fails when scripts are indicated but no script was named.
	 */
	private static ScriptSourceInput interpretScriptSourceInput(
			SourceType sourceType,
			Object scriptSourceSetting,
			ServiceRegistry serviceRegistry) {
		if ( sourceType == SourceType.METADATA ) {
			return null;
		}
		if ( scriptSourceSetting == null ) {
			// previously this message always said "CREATE scripts" even when interpreting
			// the DROP/MIGRATE settings; keep it action-neutral
			throw new SchemaManagementException(
					"Schema generation configuration indicated to include scripts as a source, but no script was specified"
			);
		}
		return Helper.interpretScriptSourceSetting(
				scriptSourceSetting,
				serviceRegistry.getService( ClassLoaderService.class )
		);
	}

	/**
	 * Strategy for reading the source/script settings that differ between the
	 * CREATE, DROP and MIGRATE actions.
	 */
	private interface SettingSelector {
		Object getSourceTypeSetting(Map configurationValues);
		Object getScriptSourceSetting(Map configurationValues);
		Object getScriptTargetSetting(Map configurationValues);
	}

	private static class CreateSettingSelector implements SettingSelector {
		/**
		 * Singleton access
		 */
		public static final CreateSettingSelector INSTANCE = new CreateSettingSelector();

		@Override
		public Object getSourceTypeSetting(Map configurationValues) {
			return configurationValues.get( HBM2DDL_CREATE_SOURCE );
		}

		@Override
		public Object getScriptSourceSetting(Map configurationValues) {
			return configurationValues.get( HBM2DDL_CREATE_SCRIPT_SOURCE );
		}

		@Override
		public Object getScriptTargetSetting(Map configurationValues) {
			return configurationValues.get( HBM2DDL_SCRIPTS_CREATE_TARGET );
		}
	}

	private static class DropSettingSelector implements SettingSelector {
		/**
		 * Singleton access
		 */
		public static final DropSettingSelector INSTANCE = new DropSettingSelector();

		@Override
		public Object getSourceTypeSetting(Map configurationValues) {
			return configurationValues.get( HBM2DDL_DROP_SOURCE );
		}

		@Override
		public Object getScriptSourceSetting(Map configurationValues) {
			return configurationValues.get( HBM2DDL_DROP_SCRIPT_SOURCE );
		}

		@Override
		public Object getScriptTargetSetting(Map configurationValues) {
			return configurationValues.get( HBM2DDL_SCRIPTS_DROP_TARGET );
		}
	}

	private static class MigrateSettingSelector implements SettingSelector {
		/**
		 * Singleton access
		 */
		public static final MigrateSettingSelector INSTANCE = new MigrateSettingSelector();

		// todo : should this define new migrator-specific settings?
		// for now we reuse the CREATE settings where applicable

		@Override
		public Object getSourceTypeSetting(Map configurationValues) {
			// for now, don't allow script source
			return SourceType.METADATA;
		}

		@Override
		public Object getScriptSourceSetting(Map configurationValues) {
			// for now, don't allow script source
			return null;
		}

		@Override
		public Object getScriptTargetSetting(Map configurationValues) {
			// for now, reuse the CREATE script target setting
			return configurationValues.get( HBM2DDL_SCRIPTS_CREATE_TARGET );
		}
	}
}

View File

@ -6,30 +6,22 @@
*/
package org.hibernate.tool.schema.spi;
import java.util.List;
import org.hibernate.Incubating;
import org.hibernate.boot.Metadata;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
/**
* Service delegate for handling schema migration.
*
* @author Steve Ebersole
*/
@Incubating
public interface SchemaMigrator {
/**
* Perform a migration to the specified targets.
* Perform a schema migration (alteration) from the indicated source(s) to the indicated target(s).
*
* @param metadata The "compiled" mapping metadata.
* @param existingDatabase Access to the information about the existing database.
* @param createNamespaces Should the schema(s)/catalog(s) actually be created?
* @param targets The migration targets
*
* @throws SchemaManagementException
* @param metadata Represents the schema to be altered.
* @param options Options for executing the alteration
* @param targetDescriptor description of the target(s) for the alteration commands
*/
public void doMigration(
Metadata metadata,
DatabaseInformation existingDatabase,
boolean createNamespaces,
List<Target> targets) throws SchemaManagementException;
void doMigration(Metadata metadata, ExecutionOptions options, TargetDescriptor targetDescriptor);
}

View File

@ -6,24 +6,21 @@
*/
package org.hibernate.tool.schema.spi;
import org.hibernate.Incubating;
import org.hibernate.boot.Metadata;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
/**
* Service delegate for handling schema validations
*
* @author Steve Ebersole
*/
@Incubating
public interface SchemaValidator {
/**
* Handle schema validation requests
* Perform the validation of the schema described by Metadata
*
* @param metadata The "compiled" mapping metadata.
* @param databaseInformation Access to the existing database information.
*
* @throws SchemaManagementException
* @param metadata Represents the schema to be validated
* @param options Options for executing the validation
*/
public void doValidation(
Metadata metadata,
DatabaseInformation databaseInformation) throws SchemaManagementException;
void doValidation(Metadata metadata, ExecutionOptions options);
}

View File

@ -4,7 +4,9 @@
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.jpa.internal.schemagen;
package org.hibernate.tool.schema.spi;
import java.util.List;
import org.hibernate.tool.hbm2ddl.ImportSqlCommandExtractor;
@ -15,6 +17,11 @@ import org.hibernate.tool.hbm2ddl.ImportSqlCommandExtractor;
* @author Steve Ebersole
*/
public interface ScriptSourceInput {
/**
* Prepare source for use.
*/
void prepare();
/**
* Read the abstracted script, using the given extractor to split up the input into individual commands.
*
@ -22,10 +29,10 @@ public interface ScriptSourceInput {
*
* @return The scripted commands
*/
public Iterable<String> read(ImportSqlCommandExtractor commandExtractor);
List<String> read(ImportSqlCommandExtractor commandExtractor);
/**
* Release this input.
*/
public void release();
void release();
}

View File

@ -4,24 +4,29 @@
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.jpa.internal.schemagen;
package org.hibernate.tool.schema.spi;
/**
* Contract for hiding the differences between a passed Writer, File or URL in terms of how we write output
* scripts.
* Contract for hiding the differences between a passed Writer, File or URL in
* terms of how we write output scripts.
*
* @author Steve Ebersole
*/
public interface ScriptTargetOutput {
/**
* Prepare the script target to {@link #accept(String) accept} commands
*/
void prepare();
/**
* Accept the given command and write it to the abstracted script
*
* @param command The command
*/
public void accept(String command);
void accept(String command);
/**
* Release this output
*/
public void release();
void release();
}

View File

@ -0,0 +1,39 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.tool.schema.spi;

import org.hibernate.tool.schema.SourceType;

/**
 * Describes a source for schema create, drop and migrate actions.
 * <p/>
 * If {@link #getSourceType} indicates that a script should be a source, then
 * {@link #getScriptSourceInput} identifies the script.
 * <p/>
 * The purpose here is also to allow other back-ends (OGM) by simply describing
 * where to find sources rather than defining the sources themselves. The reason
 * being that ultimately the Java type representing a "DDL command" might be different;
 * e.g., String for JDBC.
 *
 * @author Steve Ebersole
 */
public interface SourceDescriptor {
	/**
	 * The type of source described by this descriptor.
	 *
	 * @return The source type
	 */
	SourceType getSourceType();

	/**
	 * If {@link #getSourceType()} indicates scripts are involved, returns
	 * a representation of the script file to read. Otherwise, returns {@code null}
	 *
	 * @return The script file to read.
	 */
	ScriptSourceInput getScriptSourceInput();
}

View File

@ -1,47 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.tool.schema.spi;
/**
* Describes the target of schema management actions. Typically this would be a stream/writer or the database
* Connection
*
* @author Steve Ebersole
*/
public interface Target {
/**
* Does this target accept actions coming from an import script? If {@code false}, actions are not
* sent to this target's {@link #accept(String)} method
*
* @return {@code true} if import script actions should be sent to this target; {@code false} if they should not.
*/
public boolean acceptsImportScriptActions();
/**
* Prepare for accepting actions
*
* @throws SchemaManagementException If there is a problem preparing the target.
*/
public void prepare();
/**
* Accept a management action. For stream/writer-based targets, this would indicate to write the action; for
* JDBC based targets, it would indicate to execute the action
*
* @param action The action to perform.
*
* @throws SchemaManagementException If there is a problem accepting the action.
*/
public void accept(String action);
/**
* Release the target after all actions have been processed.
*
* @throws SchemaManagementException If there is a problem releasing the target.
*/
public void release();
}

View File

@ -0,0 +1,42 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.tool.schema.spi;

import java.util.EnumSet;

import org.hibernate.tool.schema.TargetType;

/**
 * Describes the target(s) of schema create, drop and migrate actions.
 * <p/>
 * The purpose of this "abstraction" of a target is to enable other
 * back-ends (OGM) by simply describing where to target rather than
 * defining the targets themselves. The reason being that ultimately
 * the Java type representing a "DDL command" sent to these targets
 * might be different (e.g., String for JDBC).
 *
 * @author Steve Ebersole
 */
public interface TargetDescriptor {
	/**
	 * The target type described here.  Multiple types (e.g. both DATABASE
	 * and SCRIPT) may be indicated via the EnumSet.
	 *
	 * @return The target type.
	 */
	EnumSet<TargetType> getTargetTypes();

	/**
	 * If {@link #getTargetTypes()} includes scripts, return a representation
	 * of the script file to write to. Otherwise, returns {@code null}.
	 * <p/>
	 * If {@link #getTargetTypes()} includes scripts, and this method returns
	 * {@code null} an exception will be thrown interpreting this descriptor
	 *
	 * @return The script output target
	 */
	ScriptTargetOutput getScriptTargetOutput();
}

View File

@ -9,6 +9,7 @@ package org.hibernate.test.annotations;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.EnumSet;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.TimeZone;
@ -22,6 +23,7 @@ import org.hibernate.boot.MetadataBuilder;
import org.hibernate.boot.model.naming.ImplicitNamingStrategyJpaCompliantImpl;
import org.hibernate.dialect.Oracle10gDialect;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.testing.SkipForDialect;
@ -431,16 +433,12 @@ public class EntityTest extends BaseNonConfigCoreFunctionalTestCase {
@Before
public void runCreateSchema() {
schemaExport().create( false, true );
}
private SchemaExport schemaExport() {
return new SchemaExport( serviceRegistry(), metadata() );
new SchemaExport().create( EnumSet.of( TargetType.DATABASE ), metadata() );
}
@After
public void runDropSchema() {
schemaExport().drop( false, true );
new SchemaExport().drop( EnumSet.of( TargetType.DATABASE ), metadata() );
}
}

View File

@ -57,7 +57,7 @@ public class JoinColumnOverrideTest extends BaseUnitTestCase {
boolean foundPointyToothCreate = false;
boolean foundTwinkleToesCreate = false;
List<String> commands = new SchemaCreatorImpl().generateCreationCommands( metadata, false );
List<String> commands = new SchemaCreatorImpl( ssr ).generateCreationCommands( metadata, false );
for ( String command : commands ) {
log.debug( command );

View File

@ -58,7 +58,7 @@ public class JoinColumnOverrideTest extends BaseUnitTestCase {
boolean foundPointyToothCreate = false;
boolean foundTwinkleToesCreate = false;
List<String> commands = new SchemaCreatorImpl().generateCreationCommands( metadata, false );
List<String> commands = new SchemaCreatorImpl( ssr ).generateCreationCommands( metadata, false );
for ( String command : commands ) {
log.debug( command );

View File

@ -7,7 +7,6 @@
package org.hibernate.test.annotations.list;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.persistence.Entity;
import javax.persistence.Id;
@ -21,11 +20,11 @@ import org.hibernate.boot.Metadata;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.Column;
import org.hibernate.tool.schema.internal.TargetStdoutImpl;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.Target;
import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToStdout;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.junit.After;
@ -49,6 +48,7 @@ public class ListMappingTest extends BaseUnitTestCase {
@Before
public void before() {
ssr = new StandardServiceRegistryBuilder()
.applySetting( AvailableSettings.FORMAT_SQL, false )
.build();
}
@ -79,7 +79,7 @@ public class ListMappingTest extends BaseUnitTestCase {
// make sure the OrderColumn is part of the collection table
assertTrue( asList.getCollectionTable().containsColumn( positionColumn ) );
class TargetImpl extends TargetStdoutImpl {
class TargetImpl extends GenerationTargetToStdout {
boolean found = false;
@Override
public void accept(String action) {
@ -94,10 +94,10 @@ public class ListMappingTest extends BaseUnitTestCase {
TargetImpl target = new TargetImpl();
ssr.getService( SchemaManagementTool.class ).getSchemaCreator( Collections.emptyMap() ).doCreation(
new SchemaCreatorImpl( ssr ).doCreation(
metadata,
true,
Collections.<Target>singletonList( target )
target
);
assertTrue( target.found );

View File

@ -8,7 +8,6 @@
//$Id: A320.java 14736 2008-06-04 14:23:42Z hardy.ferentschik $
package org.hibernate.test.annotations.onetoone.primarykey;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -18,11 +17,8 @@ import org.hibernate.boot.MetadataSources;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.SQLServerDialect;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.spi.SchemaCreator;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.Target;
import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.testing.ServiceRegistryBuilder;
import org.junit.Assert;
@ -38,7 +34,9 @@ import org.jboss.logging.Logger;
*/
public class NullablePrimaryKeyTest {
private static final Logger log = Logger.getLogger( NullablePrimaryKeyTest.class );
@Test
@Test
@SuppressWarnings("unchecked")
public void testGeneratedSql() {
Map settings = new HashMap();
@ -53,36 +51,7 @@ public class NullablePrimaryKeyTest {
ms.addAnnotatedClass(Person.class);
final Metadata metadata = ms.buildMetadata();
final SchemaCreator schemaCreator = serviceRegistry.getService( SchemaManagementTool.class )
.getSchemaCreator( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
final List<String> commands = new ArrayList<String>();
schemaCreator.doCreation(
metadata,
false,
new Target() {
@Override
public boolean acceptsImportScriptActions() {
return false;
}
@Override
public void prepare() {
}
@Override
public void accept(String action) {
commands.add( action );
}
@Override
public void release() {
}
}
);
final List<String> commands = new SchemaCreatorImpl( serviceRegistry ).generateCreationCommands( metadata, false );
for (String s : commands) {
log.debug( s );
}

Some files were not shown because too many files have changed in this diff Show More