HHH-6431 : Add Exportable.sqlCreateStrings() and sqlDropStrings() and implementations

Gail Badner 2011-07-14 10:08:39 -07:00
parent c558583346
commit 53e1a37adf
9 changed files with 523 additions and 14 deletions

org/hibernate/metamodel/relational/AbstractConstraint.java

@@ -26,12 +26,17 @@ package org.hibernate.metamodel.relational;
import java.util.List;
import java.util.ArrayList;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.metamodel.source.spi.MetadataImplementor;
/**
* Support for writing {@link Constraint} implementations
*
* @todo do we need to support defining these on particular schemas/catalogs?
*
* @author Steve Ebersole
* @author Gail Badner
*/
public abstract class AbstractConstraint implements Constraint {
private final TableSpecification table;
@@ -55,6 +60,10 @@ public abstract class AbstractConstraint implements Constraint {
return columns;
}
protected int getColumnSpan() {
return columns.size();
}
protected List<Column> internalColumnAccess() {
return columns;
}
@@ -65,4 +74,46 @@ public abstract class AbstractConstraint implements Constraint {
}
columns.add( column );
}
protected boolean isCreationVetoed(Dialect dialect) {
return false;
}
protected abstract String sqlConstraintStringInAlterTable(Dialect dialect);
public String[] sqlDropStrings(MetadataImplementor metadata) {
Dialect dialect = getDialect( metadata );
if ( isCreationVetoed( dialect ) ) {
return null;
}
else {
return new String[] {
new StringBuffer()
.append( "alter table " )
.append( getTable().getQualifiedName( dialect ) )
.append( " drop constraint " )
.append( dialect.quote( getName() ) )
.toString()
};
}
}
public String[] sqlCreateStrings(MetadataImplementor metadata) {
Dialect dialect = getDialect( metadata );
if ( isCreationVetoed( dialect ) ) {
return null;
}
else {
return new String[] {
new StringBuilder( "alter table " )
.append( getTable().getQualifiedName( dialect ) )
.append( sqlConstraintStringInAlterTable( dialect ) )
.toString()
};
}
}
protected static Dialect getDialect(MetadataImplementor metadata) {
return metadata.getServiceRegistry().getService( JdbcServices.class ).getDialect();
}
}
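
The two new methods make AbstractConstraint a small template: sqlCreateStrings() and sqlDropStrings() build the surrounding "alter table" statement, while subclasses supply only the constraint clause via sqlConstraintStringInAlterTable() and may veto DDL generation per dialect via isCreationVetoed(). A minimal hypothetical subclass sketch follows; it is not part of this commit, and it assumes the protected (TableSpecification, String) constructor implied by the super( table, name ) calls in Index and UniqueKey below, and that sqlConstraintStringInAlterTable() is the only abstract member.

// Hypothetical subclass, for illustration only (not part of this commit).
package org.hibernate.metamodel.relational;

import org.hibernate.dialect.Dialect;

public class ExampleCheckConstraint extends AbstractConstraint {
	private final String condition;

	protected ExampleCheckConstraint(TableSpecification table, String name, String condition) {
		super( table, name );
		this.condition = condition;
	}

	@Override
	protected String sqlConstraintStringInAlterTable(Dialect dialect) {
		// sqlCreateStrings() prepends "alter table <table name>", so only the
		// constraint clause itself is returned here.
		return " add constraint " + dialect.quote( getName() ) + " check ( " + condition + " )";
	}

	@Override
	protected boolean isCreationVetoed(Dialect dialect) {
		// Skip DDL on dialects that cannot express check constraints at all.
		return !dialect.supportsColumnCheck() && !dialect.supportsTableCheck();
	}
}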

org/hibernate/metamodel/relational/BasicAuxiliaryDatabaseObjectImpl.java

@@ -25,13 +25,20 @@ package org.hibernate.metamodel.relational;
import java.util.Set;
import org.hibernate.HibernateException;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.Mapping;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.metamodel.Metadata;
import org.hibernate.metamodel.source.spi.MetadataImplementor;
/**
* @author Steve Ebersole
* @author Gail Badner
*/
public class BasicAuxiliaryDatabaseObjectImpl extends AbstractAuxiliaryDatabaseObject {
private static final String CATALOG_NAME_PLACEHOLDER = "${catalog}";
private static final String SCHEMA_NAME_PLACEHOLDER = "${schema}";
private final String createString;
private final String dropString;
@@ -41,15 +48,20 @@ public class BasicAuxiliaryDatabaseObjectImpl extends AbstractAuxiliaryDatabaseO
this.dropString = dropString;
}
- // TODO: fix this when HHH-6431 is fixed
- //@Override
- public String sqlCreateString(Dialect dialect, Mapping p, String defaultCatalog, String defaultSchema) {
- return createString;
@Override
public String[] sqlCreateStrings(MetadataImplementor metadata) {
return new String[] { injectCatalogAndSchema( createString, metadata.getOptions() ) };
}
- // TODO: fix this when HHH-6431 is fixed
- //@Override
- public String sqlDropString(Dialect dialect, String defaultCatalog, String defaultSchema) {
- return dropString;
@Override
public String[] sqlDropStrings(MetadataImplementor metadata) {
return new String[] { injectCatalogAndSchema( dropString, metadata.getOptions() ) };
}
private String injectCatalogAndSchema(String ddlString, Metadata.Options options) {
String rtn = StringHelper.replace( ddlString, CATALOG_NAME_PLACEHOLDER, options.getDefaultCatalogName() );
rtn = StringHelper.replace( rtn, SCHEMA_NAME_PLACEHOLDER, options.getDefaultSchemaName() );
return rtn;
}
}
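
injectCatalogAndSchema() substitutes the ${catalog} and ${schema} placeholders with the default names from Metadata.Options before the DDL is returned. A self-contained sketch of that substitution, using plain String.replace in place of Hibernate's StringHelper and illustrative names:

// Simplified stand-in for injectCatalogAndSchema(); values are illustrative.
public class PlaceholderSubstitutionDemo {
	public static void main(String[] args) {
		String createString = "create table ${catalog}.${schema}.audit_log ( id bigint not null )";
		String defaultCatalog = "app_catalog"; // would come from Metadata.Options.getDefaultCatalogName()
		String defaultSchema = "app_schema";   // would come from Metadata.Options.getDefaultSchemaName()

		String ddl = createString
				.replace( "${catalog}", defaultCatalog )
				.replace( "${schema}", defaultSchema );

		// Prints: create table app_catalog.app_schema.audit_log ( id bigint not null )
		System.out.println( ddl );
	}
}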

org/hibernate/metamodel/relational/Exportable.java

@@ -23,6 +23,8 @@
*/
package org.hibernate.metamodel.relational;
import org.hibernate.metamodel.source.spi.MetadataImplementor;
/**
* Contract for entities (in the ERD sense) which can be exported via {@code CREATE}, {@code ALTER}, etc
*
@@ -35,4 +37,19 @@ public interface Exportable {
* @return The exporting identifier.
*/
public String getExportIdentifier();
/**
* Gets the SQL strings for creating the database object.
* @param metadata the metadata
* @return the SQL strings for creating the database object.
*/
public String[] sqlCreateStrings(MetadataImplementor metadata);
/**
* Gets the SQL strings for dropping the database object.
* @param metadata the metadata
* @return the SQL strings for dropping the database object.
*/
public String[] sqlDropStrings(MetadataImplementor metadata);
}
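
With both methods on the interface, a schema export routine can treat tables, sequences, constraints and auxiliary database objects uniformly. A minimal sketch of such a consumer, assuming the caller already holds the MetadataImplementor and the Exportable objects to export (the collection method itself is illustrative):

// Illustrative consumer of the Exportable contract; not part of this commit.
import java.util.ArrayList;
import java.util.List;

import org.hibernate.metamodel.relational.Exportable;
import org.hibernate.metamodel.source.spi.MetadataImplementor;

public class SchemaScriptSketch {
	public static List<String> collectCreateStatements(MetadataImplementor metadata, Iterable<Exportable> exportables) {
		List<String> statements = new ArrayList<String>();
		for ( Exportable exportable : exportables ) {
			String[] createStrings = exportable.sqlCreateStrings( metadata );
			// Implementations may return null when creation is vetoed for the dialect.
			if ( createStrings != null ) {
				for ( String sql : createStrings ) {
					statements.add( sql );
				}
			}
		}
		return statements;
	}
}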

org/hibernate/metamodel/relational/ForeignKey.java

@@ -24,12 +24,15 @@
package org.hibernate.metamodel.relational;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.hibernate.MappingException;
import org.hibernate.dialect.Dialect;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.metamodel.source.spi.MetadataImplementor;
import org.jboss.logging.Logger;
import org.jboss.logging.Logger.Level;
/**
* Models the notion of a foreign key.
@@ -43,6 +46,8 @@ import org.jboss.logging.Logger.Level;
public class ForeignKey extends AbstractConstraint implements Constraint, Exportable {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, ForeignKey.class.getName());
private static final String ON_DELETE = " on delete ";
private static final String ON_UPDATE = " on update ";
private final TableSpecification targetTable;
private List<Column> targetColumns;
@@ -129,11 +134,87 @@ public class ForeignKey extends AbstractConstraint implements Constraint, Export
this.updateRule = updateRule;
}
@Override
public String[] sqlDropStrings(MetadataImplementor metadata) {
Dialect dialect = getDialect( metadata );
return new String[] {
"alter table " +
getTable().getQualifiedName( dialect ) +
dialect.getDropForeignKeyString() +
getName()
};
}
public String sqlConstraintStringInAlterTable(Dialect dialect) {
String[] columnNames = new String[ getColumnSpan() ];
String[] targetColumnNames = new String[ getColumnSpan() ];
int i=0;
Iterator<Column> itTargetColumn = getTargetColumns().iterator();
for ( Column column : getColumns() ) {
if ( ! itTargetColumn.hasNext() ) {
throw new MappingException( "More constraint columns that foreign key target columns." );
}
columnNames[i] = column.getColumnName().encloseInQuotesIfQuoted( dialect );
targetColumnNames[i] = ( itTargetColumn.next() ).getColumnName().encloseInQuotesIfQuoted( dialect );
i++;
}
if ( itTargetColumn.hasNext() ) {
throw new MappingException( "More foreign key target columns than constraint columns." );
}
StringBuilder sb =
new StringBuilder(
dialect.getAddForeignKeyConstraintString(
getName(),
columnNames,
targetTable.getQualifiedName( dialect ),
targetColumnNames,
this.targetColumns == null
)
);
// TODO: If a dialect does not support cascade-delete, can it support other actions? (HHH-6428)
// For now, assume not.
if ( dialect.supportsCascadeDelete() ) {
if ( deleteRule != ReferentialAction.NO_ACTION ) {
sb.append( ON_DELETE ).append( deleteRule.getActionString() );
}
if ( updateRule != ReferentialAction.NO_ACTION ) {
sb.append( ON_UPDATE ).append( updateRule.getActionString() );
}
}
return sb.toString();
}
public static enum ReferentialAction {
- NO_ACTION,
- CASCADE,
- SET_NULL,
- SET_DEFAULT,
- RESTRICT
NO_ACTION {
private static final String ACTION_STRING = "no action";
public String getActionString() {
return ACTION_STRING;
}
},
CASCADE {
private static final String ACTION_STRING = "cascade";
public String getActionString() {
return ACTION_STRING;
}
},
SET_NULL {
private static final String ACTION_STRING = "set null";
public String getActionString() {
return ACTION_STRING;
}
},
SET_DEFAULT {
private static final String ACTION_STRING = "set default";
public String getActionString() {
return ACTION_STRING;
}
},
RESTRICT {
private static final String ACTION_STRING = "restrict";
public String getActionString() {
return ACTION_STRING;
}
};
public abstract String getActionString();
}
}
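
On a typical dialect the resulting create string has the shape "alter table <child> add constraint <name> foreign key (<columns>) references <target table> (<target columns>)", followed by the on delete / on update clauses when the dialect reports cascade-delete support. The exact text comes from Dialect.getAddForeignKeyConstraintString(), so the statement in the comment below is only indicative; the sketch itself just prints whatever the ForeignKey produces.

// Illustration only: dumps the DDL a ForeignKey yields for the configured dialect.
import org.hibernate.metamodel.relational.ForeignKey;
import org.hibernate.metamodel.source.spi.MetadataImplementor;

public class ForeignKeyDdlSketch {
	public static void dump(ForeignKey foreignKey, MetadataImplementor metadata) {
		// sqlCreateStrings() is inherited from AbstractConstraint and wraps
		// sqlConstraintStringInAlterTable() in an "alter table" statement.
		for ( String sql : foreignKey.sqlCreateStrings( metadata ) ) {
			// e.g. (dialect dependent):
			// alter table child add constraint FK_child_parent
			//     foreign key (parent_id) references parent (id) on delete cascade
			System.out.println( sql );
		}
	}
}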

org/hibernate/metamodel/relational/Index.java

@@ -23,6 +23,10 @@
*/
package org.hibernate.metamodel.relational;
import org.hibernate.dialect.Dialect;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.metamodel.source.spi.MetadataImplementor;
/**
* Models a SQL <tt>INDEX</tt>
*
@@ -33,4 +37,88 @@ public class Index extends AbstractConstraint implements Constraint {
protected Index(Table table, String name) {
super( table, name );
}
public String[] sqlCreateStrings(MetadataImplementor metadata) {
return new String[] {
buildSqlCreateIndexString(
getDialect( metadata ),
getName(),
getTable(),
getColumns(),
false
)
};
}
/* package-protected */
static String buildSqlDropIndexString(
Dialect dialect,
TableSpecification table,
String name ) {
return "drop index " +
StringHelper.qualify(
table.getQualifiedName( dialect ),
name
);
}
public static String buildSqlCreateIndexString(
Dialect dialect,
String name,
TableSpecification table,
Iterable<Column> columns,
boolean unique
) {
//TODO handle supportsNotNullUnique=false, but such a case does not exist in the wild so far
StringBuilder buf = new StringBuilder( "create" )
.append( unique ?
" unique" :
"" )
.append( " index " )
.append( dialect.qualifyIndexName() ?
name :
StringHelper.unqualify( name ) )
.append( " on " )
.append( table.getQualifiedName( dialect ) )
.append( " (" );
boolean first = true;
for ( Column column : columns ) {
if ( first ) {
first = false;
}
else {
buf.append( ", " );
}
buf.append( ( column.getColumnName().encloseInQuotesIfQuoted( dialect ) ) );
}
buf.append( ")" );
return buf.toString();
}
public String sqlConstraintStringInAlterTable(Dialect dialect) {
StringBuilder buf = new StringBuilder( " index (" );
boolean first = true;
for ( Column column : getColumns() ) {
if ( first ) {
first = false;
}
else {
buf.append( ", " );
}
buf.append( column.getColumnName().encloseInQuotesIfQuoted( dialect ) );
}
return buf.append( ')' ).toString();
}
public String[] sqlDropStrings(MetadataImplementor metadata) {
return new String[] {
new StringBuffer( "drop index " )
.append(
StringHelper.qualify(
getTable().getQualifiedName( getDialect( metadata ) ),
getName()
)
).toString()
};
}
}
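
buildSqlCreateIndexString() is public and usable on its own, for example to generate unique indexes outside of the Index object itself. A short sketch assuming a TableSpecification and its columns are already at hand; the SQL in the comment is indicative, and the index name handling depends on Dialect.qualifyIndexName():

// Illustrative use of the public Index.buildSqlCreateIndexString helper.
import org.hibernate.dialect.Dialect;
import org.hibernate.metamodel.relational.Column;
import org.hibernate.metamodel.relational.Index;
import org.hibernate.metamodel.relational.TableSpecification;

public class IndexDdlSketch {
	public static String createIndexDdl(Dialect dialect, TableSpecification table, Iterable<Column> columns) {
		// For a "customer" table and columns (last_name, first_name) this yields roughly:
		// create index idx_customer_name on customer (last_name, first_name)
		return Index.buildSqlCreateIndexString(
				dialect,
				"idx_customer_name", // illustrative index name
				table,
				columns,
				false                // not unique
		);
	}
}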

org/hibernate/metamodel/relational/PrimaryKey.java

@@ -23,6 +23,8 @@
*/
package org.hibernate.metamodel.relational;
import org.hibernate.dialect.Dialect;
/**
* Models a table's primary key.
* <p/>
@@ -58,4 +60,37 @@ public class PrimaryKey extends AbstractConstraint implements Constraint, Export
public String getExportIdentifier() {
return getTable().getLoggableValueQualifier() + ".PK";
}
public String sqlConstraintStringInCreateTable(Dialect dialect) {
StringBuilder buf = new StringBuilder("primary key (");
boolean first = true;
for ( Column column : getColumns() ) {
if ( first ) {
first = false;
}
else {
buf.append(", ");
}
buf.append( column.getColumnName().encloseInQuotesIfQuoted( dialect ) );
}
return buf.append(')').toString();
}
public String sqlConstraintStringInAlterTable(Dialect dialect) {
StringBuffer buf = new StringBuffer(
dialect.getAddPrimaryKeyConstraintString( getName() )
).append('(');
boolean first = true;
for ( Column column : getColumns() ) {
if ( first ) {
first = false;
}
else {
buf.append(", ");
}
buf.append( column.getColumnName().encloseInQuotesIfQuoted( dialect ) );
}
return buf.append(')').toString();
}
}

org/hibernate/metamodel/relational/Sequence.java

@@ -23,6 +23,11 @@
*/
package org.hibernate.metamodel.relational;
import org.hibernate.MappingException;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.metamodel.source.spi.MetadataImplementor;
/**
* Models a database {@code SEQUENCE}.
*
@@ -66,4 +71,17 @@ public class Sequence implements Exportable {
public int getIncrementSize() {
return incrementSize;
}
public String[] sqlCreateStrings(MetadataImplementor metadata) throws MappingException {
return getDialect( metadata ).getCreateSequenceStrings( name, initialValue, incrementSize );
}
public String[] sqlDropStrings(MetadataImplementor metadata) throws MappingException {
return getDialect( metadata ).getDropSequenceStrings( name );
}
private Dialect getDialect(MetadataImplementor metadata) {
return metadata.getServiceRegistry().getService( JdbcServices.class ).getDialect();
}
}

org/hibernate/metamodel/relational/Table.java

@@ -30,6 +30,8 @@ import java.util.List;
import java.util.Set;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.metamodel.source.spi.MetadataImplementor;
/**
* Models the concept of a relational <tt>TABLE</tt> (or <tt>VIEW</tt>).
@@ -150,6 +152,149 @@ public class Table extends AbstractTableSpecification implements Exportable {
return objectName.toText( dialect );
}
public String[] sqlCreateStrings(MetadataImplementor metadata) {
Dialect dialect = metadata.getServiceRegistry().getService( JdbcServices.class ).getDialect();
boolean hasPrimaryKey = getPrimaryKey().getColumns().iterator().hasNext();
StringBuilder buf =
new StringBuilder(
hasPrimaryKey ? dialect.getCreateTableString() : dialect.getCreateMultisetTableString() )
.append( ' ' )
.append( objectName.toText( dialect ) )
.append( " (" );
// TODO: fix this when identity columns are supported by new metadata (HHH-6436)
// for now, assume false
//boolean identityColumn = idValue != null && idValue.isIdentityColumn( metadata.getIdentifierGeneratorFactory(), dialect );
boolean isPrimaryKeyIdentity = false;
// Try to find out the name of the primary key to create it as identity if the IdentityGenerator is used
String pkColName = null;
if ( hasPrimaryKey && isPrimaryKeyIdentity ) {
Column pkColumn = getPrimaryKey().getColumns().iterator().next();
pkColName = pkColumn.getColumnName().encloseInQuotesIfQuoted( dialect );
}
boolean isFirst = true;
for ( SimpleValue simpleValue : values() ) {
if ( ! Column.class.isInstance( simpleValue ) ) {
continue;
}
if ( isFirst ) {
isFirst = false;
}
else {
buf.append( ", " );
}
Column col = ( Column ) simpleValue;
String colName = col.getColumnName().encloseInQuotesIfQuoted( dialect );
buf.append( colName ).append( ' ' );
if ( isPrimaryKeyIdentity && colName.equals( pkColName ) ) {
// to support dialects that have their own identity data type
if ( dialect.hasDataTypeInIdentityColumn() ) {
buf.append( col.getDatatype().getTypeName() );
}
buf.append( ' ' )
.append( dialect.getIdentityColumnString( col.getDatatype().getTypeCode() ) );
}
else {
buf.append( col.getDatatype().getTypeName() );
String defaultValue = col.getDefaultValue();
if ( defaultValue != null ) {
buf.append( " default " ).append( defaultValue );
}
if ( col.isNullable() ) {
buf.append( dialect.getNullColumnString() );
}
else {
buf.append( " not null" );
}
}
boolean useUniqueConstraint = col.isUnique() &&
( !col.isNullable() || dialect.supportsNotNullUnique() );
if ( useUniqueConstraint ) {
if ( dialect.supportsUnique() ) {
buf.append( " unique" );
}
else {
UniqueKey uk = getOrCreateUniqueKey( col.getColumnName().encloseInQuotesIfQuoted( dialect ) + '_' );
uk.addColumn( col );
}
}
if ( col.getCheckCondition() != null && dialect.supportsColumnCheck() ) {
buf.append( " check (" )
.append( col.getCheckCondition() )
.append( ")" );
}
String columnComment = col.getComment();
if ( columnComment != null ) {
buf.append( dialect.getColumnComment( columnComment ) );
}
}
if ( hasPrimaryKey ) {
buf.append( ", " )
.append( getPrimaryKey().sqlConstraintStringInCreateTable( dialect ) );
}
if ( dialect.supportsUniqueConstraintInCreateAlterTable() ) {
for ( UniqueKey uk : uniqueKeys.values() ) {
String constraint = uk.sqlConstraintStringInCreateTable( dialect );
if ( constraint != null ) {
buf.append( ", " ).append( constraint );
}
}
}
if ( dialect.supportsTableCheck() ) {
for ( CheckConstraint checkConstraint : checkConstraints ) {
buf.append( ", check (" )
.append( checkConstraint )
.append( ')' );
}
}
buf.append( ')' );
if ( comments != null ) {
boolean first = true;
for ( String comment : comments ) {
if ( first ) {
first = false;
}
else {
buf.append( ' ' );
}
buf.append( dialect.getTableComment( comment ) );
}
}
return new String[] { buf.append( dialect.getTableTypeString() ).toString() };
}
@Override
public String[] sqlDropStrings(MetadataImplementor metadata) {
Dialect dialect = metadata.getServiceRegistry().getService( JdbcServices.class ).getDialect();
StringBuilder buf = new StringBuilder( "drop table " );
if ( dialect.supportsIfExistsBeforeTableName() ) {
buf.append( "if exists " );
}
buf.append( getQualifiedName( dialect ) )
.append( dialect.getCascadeConstraintsString() );
if ( dialect.supportsIfExistsAfterTableName() ) {
buf.append( " if exists" );
}
return new String[] { buf.toString() };
}
@Override
public String toString() {
return "Table{name=" + qualifiedName + '}';

org/hibernate/metamodel/relational/UniqueKey.java

@@ -23,6 +23,9 @@
*/
package org.hibernate.metamodel.relational;
import org.hibernate.dialect.Dialect;
import org.hibernate.internal.util.StringHelper;
/**
* Models a SQL <tt>INDEX</tt> defined as UNIQUE
*
@@ -33,4 +36,63 @@ public class UniqueKey extends AbstractConstraint implements Constraint {
protected UniqueKey(Table table, String name) {
super( table, name );
}
public boolean isCreationVetoed(Dialect dialect) {
if ( dialect.supportsNotNullUnique() ) {
return false;
}
for ( Column column : getColumns() ) {
if ( column.isNullable() ) {
return true;
}
}
return false;
}
public String sqlConstraintStringInCreateTable(Dialect dialect) {
StringBuffer buf = new StringBuffer( "unique (" );
boolean hadNullableColumn = false;
boolean first = true;
for ( Column column : getColumns() ) {
if ( first ) {
first = false;
}
else {
buf.append(", ");
}
if ( ! hadNullableColumn && column.isNullable() ) {
hadNullableColumn = true;
}
buf.append( column.getColumnName().encloseInQuotesIfQuoted( dialect ) );
}
//do not add unique constraint on DB not supporting unique and nullable columns
return ! hadNullableColumn || dialect.supportsNotNullUnique() ?
buf.append( ')' ).toString() :
null;
}
public String sqlConstraintStringInAlterTable(Dialect dialect) {
StringBuffer buf = new StringBuffer(
dialect.getAddPrimaryKeyConstraintString( getName() )
).append( '(' );
boolean nullable = false;
boolean first = true;
for ( Column column : getColumns() ) {
if ( first ) {
first = false;
}
else {
buf.append(", ");
}
if ( ! nullable && column.isNullable() ) {
nullable = true;
}
buf.append( column.getColumnName().encloseInQuotesIfQuoted( dialect ) );
}
return ! nullable || dialect.supportsNotNullUnique() ?
StringHelper.replace( buf.append( ')' ).toString(), "primary key", "unique" ) :
//TODO: improve this hack!
null;
}
}
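
The supportsNotNullUnique() handling means a UniqueKey over nullable columns may produce no DDL at all on some dialects: isCreationVetoed() returns true, the constraint-clause methods return null, and the inherited sqlCreateStrings()/sqlDropStrings() return null in turn. A small sketch of a caller that respects that contract:

// Illustration of the veto/null contract around nullable unique columns.
import org.hibernate.dialect.Dialect;
import org.hibernate.metamodel.relational.UniqueKey;

public class UniqueKeyDdlSketch {
	public static String alterTableClauseOrNull(UniqueKey uniqueKey, Dialect dialect) {
		if ( uniqueKey.isCreationVetoed( dialect ) ) {
			// Dialect cannot enforce uniqueness over nullable columns; emit nothing.
			return null;
		}
		// Otherwise the add-constraint clause (with "primary key" rewritten to "unique") is available.
		return uniqueKey.sqlConstraintStringInAlterTable( dialect );
	}
}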