HHH-6444 : Integrate new metamodel into SchemaExport

This commit is contained in:
Gail Badner 2011-07-18 13:13:01 -07:00
parent 55a630b8fd
commit 5b7ee4f11b
9 changed files with 244 additions and 38 deletions

View File

@ -530,8 +530,6 @@ public SessionFactoryImpl(
LOG.debug( "Building session factory" );
// TODO: remove initialization of final variables; just setting to null to make compiler happy
this.name = null;
this.uuid = null;
this.fetchProfiles = null;
this.queryCache = null;
this.updateTimestampsCache = null;
@ -764,6 +762,34 @@ public void sessionFactoryClosed(SessionFactory factory) {
persister.postInstantiate();
}
//JNDI + Serialization:
name = settings.getSessionFactoryName();
try {
uuid = (String) UUID_GENERATOR.generate(null, null);
}
catch (Exception e) {
throw new AssertionFailure("Could not generate UUID");
}
SessionFactoryRegistry.INSTANCE.addSessionFactory( uuid, name, this, serviceRegistry.getService( JndiService.class ) );
LOG.debugf("Instantiated session factory");
if ( settings.isAutoCreateSchema() ) {
new SchemaExport( metadata ).create( false, true );
}
/*
if ( settings.isAutoUpdateSchema() ) {
new SchemaUpdate( metadata ).execute( false, true );
}
if ( settings.isAutoValidateSchema() ) {
new SchemaValidator( metadata ).validate();
}
*/
if ( settings.isAutoDropSchema() ) {
schemaExport = new SchemaExport( metadata );
}
// TODO: implement
}

View File

@ -34,7 +34,7 @@
*
* @author Steve Ebersole
*/
public interface Constraint {
public interface Constraint extends Exportable {
/**
* Obtain the table to which this constraint applies.
*

View File

@ -24,17 +24,26 @@
package org.hibernate.metamodel.relational;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.hibernate.MappingException;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.metamodel.Metadata;
import org.hibernate.metamodel.source.MetadataImplementor;
/**
* Represents a database and manages the named schema/catalog pairs defined within.
*
* @author Steve Ebersole
* @author Gail Badner
*/
public class Database {
private final Schema.Name implicitSchemaName;
@ -93,4 +102,133 @@ public void addAuxiliaryDatabaseObject(AuxiliaryDatabaseObject auxiliaryDatabase
public Iterable<AuxiliaryDatabaseObject> getAuxiliaryDatabaseObjects() {
return auxiliaryDatabaseObjects;
}
/**
 * Generates the full DDL creation script for this database.
 * <p/>
 * Ordering: all table CREATE statements first, then per-table unique keys
 * (only when the dialect cannot inline them into CREATE/ALTER TABLE),
 * indexes, and foreign keys (only when the dialect supports ALTER TABLE),
 * and finally auxiliary database objects applicable to the dialect.
 *
 * @param metadata the metadata, used to resolve the dialect and to render each exportable
 * @return the ordered list of CREATE statements
 * @throws MappingException if the same export identifier is rendered more than once
 */
public String[] generateSchemaCreationScript(MetadataImplementor metadata) {
	Dialect dialect = getDialect( metadata );
	Set<String> exportIdentifiers = new HashSet<String>( 50 );
	List<String> script = new ArrayList<String>( 50 );
	for ( Schema schema : schemaMap.values() ) {
		// TODO: create schema/catalog???
		for ( Table table : schema.getTables() ) {
			addSqlCreateStrings( metadata, exportIdentifiers, script, table );
		}
	}
	for ( Schema schema : schemaMap.values() ) {
		for ( Table table : schema.getTables() ) {
			if ( ! dialect.supportsUniqueConstraintInCreateAlterTable() ) {
				// dialect cannot express unique constraints inline, so emit them separately
				for ( UniqueKey uniqueKey : table.getUniqueKeys() ) {
					addSqlCreateStrings( metadata, exportIdentifiers, script, uniqueKey );
				}
			}
			for ( Index index : table.getIndexes() ) {
				addSqlCreateStrings( metadata, exportIdentifiers, script, index );
			}
			if ( dialect.hasAlterTable() ) {
				for ( ForeignKey foreignKey : table.getForeignKeys() ) {
					// only add the foreign key if its target is a physical table
					// (consistent with the instanceof check in generateDropSchemaScript)
					if ( foreignKey.getTargetTable() instanceof Table ) {
						addSqlCreateStrings( metadata, exportIdentifiers, script, foreignKey );
					}
				}
			}
		}
	}
	// TODO: add sql create strings from PersistentIdentifierGenerator.sqlCreateStrings()
	for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : auxiliaryDatabaseObjects ) {
		if ( auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
			addSqlCreateStrings( metadata, exportIdentifiers, script, auxiliaryDatabaseObject );
		}
	}
	return ArrayHelper.toStringArray( script );
}
/**
 * Generates the full DDL drop script for this database: auxiliary objects
 * first (in reverse registration order), then foreign-key constraints when
 * the dialect requires explicit dropping, then the tables themselves.
 *
 * @param metadata the metadata, used to resolve the dialect and to render each exportable
 * @return the ordered list of DROP statements
 */
public String[] generateDropSchemaScript(MetadataImplementor metadata) {
	final Dialect dialect = getDialect( metadata );
	final Set<String> exportIdentifiers = new HashSet<String>( 50 );
	final List<String> script = new ArrayList<String>( 50 );
	// drop them in reverse order in case db needs it done that way...
	for ( int i = auxiliaryDatabaseObjects.size() - 1 ; i >= 0 ; i-- ) {
		final AuxiliaryDatabaseObject auxiliaryObject = auxiliaryDatabaseObjects.get( i );
		if ( ! auxiliaryObject.appliesToDialect( dialect ) ) {
			continue;
		}
		addSqlDropStrings( metadata, exportIdentifiers, script, auxiliaryObject );
	}
	if ( dialect.dropConstraints() ) {
		for ( Schema schema : schemaMap.values() ) {
			for ( Table table : schema.getTables() ) {
				for ( ForeignKey foreignKey : table.getForeignKeys() ) {
					// only include foreign key if the target table is physical
					if ( foreignKey.getTargetTable() instanceof Table ) {
						addSqlDropStrings( metadata, exportIdentifiers, script, foreignKey );
					}
				}
			}
		}
	}
	for ( Schema schema : schemaMap.values() ) {
		for ( Table table : schema.getTables() ) {
			addSqlDropStrings( metadata, exportIdentifiers, script, table );
		}
	}
	// TODO: add sql drop strings from PersistentIdentifierGenerator.sqlCreateStrings()
	// NOTE(review): presumably this TODO means sqlDropStrings() — confirm against
	// PersistentIdentifierGenerator before implementing
	// TODO: drop schemas/catalogs???
	return ArrayHelper.toStringArray( script );
}
/**
 * Resolves the {@link Dialect} via the JDBC services of the metadata's service registry.
 */
private static Dialect getDialect(MetadataImplementor metadata) {
	final JdbcServices jdbcServices = metadata.getServiceRegistry().getService( JdbcServices.class );
	return jdbcServices.getDialect();
}
/**
 * Renders the drop statements for one exportable and appends them to the
 * script, guarding against duplicate export identifiers.
 */
private static void addSqlDropStrings(
		MetadataImplementor metadata,
		Set<String> exportIdentifiers,
		List<String> script,
		Exportable exportable) {
	final String identifier = exportable.getExportIdentifier();
	final String[] dropStrings = exportable.sqlDropStrings( metadata );
	addSqlStrings( exportIdentifiers, script, identifier, dropStrings );
}
/**
 * Renders the create statements for one exportable and appends them to the
 * script, guarding against duplicate export identifiers.
 */
private static void addSqlCreateStrings(
		MetadataImplementor metadata,
		Set<String> exportIdentifiers,
		List<String> script,
		Exportable exportable) {
	final String identifier = exportable.getExportIdentifier();
	final String[] createStrings = exportable.sqlCreateStrings( metadata );
	addSqlStrings( exportIdentifiers, script, identifier, createStrings );
}
/**
 * Appends the given SQL strings to the script, tracking the export identifier
 * so the same exportable is never rendered twice.
 *
 * @param exportIdentifiers identifiers already rendered into the script
 * @param script the accumulated DDL statements
 * @param exportIdentifier the identifier of the exportable being added
 * @param sqlStrings the statements to append; {@code null} is a no-op
 * @throws MappingException if {@code exportIdentifier} was already added
 */
private static void addSqlStrings(
		Set<String> exportIdentifiers,
		List<String> script,
		String exportIdentifier,
		String[] sqlStrings) {
	if ( sqlStrings == null ) {
		return;
	}
	// Set.add returns false when the element is already present; this replaces
	// the previous contains()-then-add() double lookup
	if ( ! exportIdentifiers.add( exportIdentifier ) ) {
		throw new MappingException(
				"SQL strings added more than once for: " + exportIdentifier
		);
	}
	script.addAll( Arrays.asList( sqlStrings ) );
}
}

View File

@ -38,6 +38,17 @@ protected Index(Table table, String name) {
super( table, name );
}
@Override
public String getExportIdentifier() {
	// identifier is the table qualifier plus ".IDX" plus "_<column>" per indexed column
	final StringBuilder identifier = new StringBuilder( getTable().getLoggableValueQualifier() );
	identifier.append( ".IDX" );
	for ( Column column : getColumns() ) {
		identifier.append( '_' );
		identifier.append( column.getColumnName().getName() );
	}
	return identifier.toString();
}
public String[] sqlCreateStrings(MetadataImplementor metadata) {
return new String[] {
buildSqlCreateIndexString(

View File

@ -66,6 +66,10 @@ public Table locateOrCreateTable(Identifier name) {
return existing;
}
/**
 * Obtain all tables currently known to this schema.
 *
 * @return a live view over the schema's tables
 */
public Iterable<Table> getTables() {
	return tables.values();
}
/**
 * Look up an in-line view by its logical name.
 *
 * @param logicalName the logical name of the in-line view
 * @return the in-line view, or {@code null} if none is registered under that name
 */
public InLineView getInLineView(String logicalName) {
	return inLineViews.get( logicalName );
}

View File

@ -24,13 +24,12 @@
package org.hibernate.metamodel.relational;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Set;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.metamodel.source.MetadataImplementor;
/**
@ -45,10 +44,10 @@ public class Table extends AbstractTableSpecification implements Exportable {
private final ObjectName objectName;
private final String qualifiedName;
private LinkedHashMap<String,Index> indexes;
private LinkedHashMap<String,UniqueKey> uniqueKeys;
private List<CheckConstraint> checkConstraints;
private Set<String> comments;
private final LinkedHashMap<String,Index> indexes = new LinkedHashMap<String,Index>();
private final LinkedHashMap<String,UniqueKey> uniqueKeys = new LinkedHashMap<String,UniqueKey>();
private final List<CheckConstraint> checkConstraints = new ArrayList<CheckConstraint>();
private final List<String> comments = new ArrayList<String>();
public Table(Schema database, String tableName) {
this( database, Identifier.toIdentifier( tableName ) );
@ -91,13 +90,10 @@ public Iterable<Index> getIndexes() {
}
public Index getOrCreateIndex(String name) {
if(indexes!=null && indexes.containsKey( name )){
if( indexes.containsKey( name ) ){
return indexes.get( name );
}
Index index = new Index( this, name );
if ( indexes == null ) {
indexes = new LinkedHashMap<String,Index>();
}
indexes.put(name, index );
return index;
}
@ -108,13 +104,10 @@ public Iterable<UniqueKey> getUniqueKeys() {
}
public UniqueKey getOrCreateUniqueKey(String name) {
if(uniqueKeys!=null && uniqueKeys.containsKey( name )){
if( uniqueKeys.containsKey( name ) ){
return uniqueKeys.get( name );
}
UniqueKey uniqueKey = new UniqueKey( this, name );
if ( uniqueKeys == null ) {
uniqueKeys = new LinkedHashMap<String,UniqueKey>();
}
uniqueKeys.put(name, uniqueKey );
return uniqueKey;
}
@ -126,9 +119,6 @@ public Iterable<CheckConstraint> getCheckConstraints() {
@Override
public void addCheckConstraint(String checkCondition) {
if ( checkConstraints == null ) {
checkConstraints = new ArrayList<CheckConstraint>();
}
//todo ? StringHelper.isEmpty( checkCondition );
//todo default name?
checkConstraints.add( new CheckConstraint( this, "", checkCondition ) );
@ -141,9 +131,6 @@ public Iterable<String> getComments() {
@Override
public void addComment(String comment) {
if ( comments == null ) {
comments = new HashSet<String>();
}
comments.add( comment );
}
@ -194,14 +181,13 @@ public String[] sqlCreateStrings(MetadataImplementor metadata) {
if ( isPrimaryKeyIdentity && colName.equals( pkColName ) ) {
// to support dialects that have their own identity data type
if ( dialect.hasDataTypeInIdentityColumn() ) {
buf.append( col.getDatatype().getTypeName() );
buf.append( getTypeString( col, dialect ) );
}
buf.append( ' ' )
.append( dialect.getIdentityColumnString( col.getDatatype().getTypeCode() ) );
}
else {
buf.append( col.getDatatype().getTypeName() );
buf.append( getTypeString( col, dialect ) );
String defaultValue = col.getDefaultValue();
if ( defaultValue != null ) {
@ -263,21 +249,27 @@ public String[] sqlCreateStrings(MetadataImplementor metadata) {
}
buf.append( ')' );
buf.append( dialect.getTableTypeString() );
if ( comments != null ) {
boolean first = true;
for ( String comment : comments ) {
if ( first ) {
first = false;
}
else {
buf.append( ' ' );
}
buf.append( dialect.getTableComment( comment ) );
}
String[] sqlStrings = new String[ comments.size() + 1 ];
sqlStrings[ 0 ] = buf.toString();
for ( int i = 0 ; i < comments.size(); i++ ) {
sqlStrings[ i + 1 ] = dialect.getTableComment( comments.get( i ) );
}
return new String[] { buf.append( dialect.getTableTypeString() ).toString() };
return sqlStrings;
}
/**
 * Resolves the SQL type string for a column: an explicitly configured
 * {@code sqlType} wins; otherwise the dialect renders a type name from the
 * column's type code and size (length/precision/scale).
 */
private static String getTypeString(Column col, Dialect dialect) {
	final String explicitSqlType = col.getSqlType();
	if ( explicitSqlType != null ) {
		return explicitSqlType;
	}
	return dialect.getTypeName(
			col.getDatatype().getTypeCode(),
			col.getSize().getLength(),
			col.getSize().getPrecision(),
			col.getSize().getScale()
	);
}
@Override

View File

@ -23,6 +23,8 @@
*/
package org.hibernate.metamodel.relational;
import java.util.concurrent.atomic.AtomicInteger;
import org.hibernate.dialect.Dialect;
import org.hibernate.internal.util.StringHelper;
@ -37,6 +39,16 @@ protected UniqueKey(Table table, String name) {
super( table, name );
}
@Override
public String getExportIdentifier() {
	// identifier is the table qualifier plus ".UK" plus "_<column>" per key column
	final StringBuilder identifier = new StringBuilder( getTable().getLoggableValueQualifier() );
	identifier.append( ".UK" );
	for ( Column column : getColumns() ) {
		identifier.append( '_' );
		identifier.append( column.getColumnName().getName() );
	}
	return identifier.toString();
}
public boolean isCreationVetoed(Dialect dialect) {
if ( dialect.supportsNotNullUnique() ) {
return false;

View File

@ -55,8 +55,10 @@
import org.hibernate.internal.util.ConfigHelper;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.metamodel.source.MetadataImplementor;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.service.ServiceRegistryBuilder;
import org.hibernate.service.config.spi.ConfigurationService;
import org.hibernate.service.internal.BasicServiceRegistryImpl;
import org.hibernate.service.jdbc.connections.spi.ConnectionProvider;
@ -120,6 +122,25 @@ public SchemaExport(ServiceRegistry serviceRegistry, Configuration configuration
this.createSQL = configuration.generateSchemaCreationScript( dialect );
}
public SchemaExport(MetadataImplementor metadata) {
ServiceRegistry serviceRegistry = metadata.getServiceRegistry();
this.connectionHelper = new SuppliedConnectionProviderConnectionHelper(
serviceRegistry.getService( ConnectionProvider.class )
);
this.sqlStatementLogger = serviceRegistry.getService( JdbcServices.class ).getSqlStatementLogger();
this.formatter = ( sqlStatementLogger.isFormat() ? FormatStyle.DDL : FormatStyle.NONE ).getFormatter();
this.sqlExceptionHelper = serviceRegistry.getService( JdbcServices.class ).getSqlExceptionHelper();
this.importFiles = ConfigurationHelper.getString(
Environment.HBM2DDL_IMPORT_FILES,
serviceRegistry.getService( ConfigurationService.class ).getSettings(),
DEFAULT_IMPORT_FILE
);
this.dropSQL = metadata.getDatabase().generateDropSchemaScript( metadata );
this.createSQL = metadata.getDatabase().generateSchemaCreationScript( metadata );
}
/**
* Create a schema exporter for the given Configuration
*

View File

@ -66,6 +66,7 @@ public void testBuildSessionFactoryWithDefaultOptions() {
assertTrue( EntityNotFoundDelegate.class.isInstance(
sessionFactory.getSessionFactoryOptions().getEntityNotFoundDelegate()
) );
sessionFactory.close();
}
@Test
@ -83,6 +84,7 @@ public void handleEntityNotFound(String entityName, Serializable id) {
SessionFactory sessionFactory = sessionFactoryBuilder.buildSessionFactory();
assertSame( interceptor, sessionFactory.getSessionFactoryOptions().getInterceptor() );
assertSame( entityNotFoundDelegate, sessionFactory.getSessionFactoryOptions().getEntityNotFoundDelegate() );
sessionFactory.close();
}
private SessionFactoryBuilder getSessionFactoryBuilder() {