HHH-8737 deprecated UniqueDelegate, created StandardUniqueKeyExporter,
stripped SQL generation from the metamodel.spi.relational constraints, and created applyConstraints/dropConstraints on Dialect

parent 4b945fae4c
commit 8ca6ff21fa
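Not part of the commit itself: a minimal sketch of how schema tooling is expected to consume the two new Dialect hooks. The method names and wiring below are assumptions drawn from the SchemaCreatorImpl/SchemaDropperImpl hunks further down, not the actual Hibernate code.

    // Illustrative sketch only (assumed wiring): per-object exporters still emit table DDL,
    // while index and unique-constraint DDL is produced in one pass by the new
    // Dialect#applyConstraints / Dialect#dropConstraints hooks.
    private void createSchema(Dialect dialect, Schema schema, JdbcEnvironment jdbcEnvironment, Target... targets) {
        for ( Table table : schema.getTables() ) {
            applySqlStrings( targets, dialect.getTableExporter().getSqlCreateStrings( table, jdbcEnvironment ) );
        }
        // the dialect decides whether a unique index becomes a unique constraint, an index, or both
        applySqlStrings( targets, dialect.applyConstraints( schema.getTables(), jdbcEnvironment ) );
    }

    private void dropSchema(Dialect dialect, Schema schema, JdbcEnvironment jdbcEnvironment, Target... targets) {
        // constraints must be dropped before the tables that own them
        applySqlStrings( targets, dialect.dropConstraints( schema.getTables(), jdbcEnvironment ) );
        for ( Table table : schema.getTables() ) {
            applySqlStrings( targets, dialect.getTableExporter().getSqlDropStrings( table, jdbcEnvironment ) );
        }
    }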
@@ -476,6 +476,7 @@ public class DB2Dialect extends Dialect {
    }

    @Override
    @Deprecated
    public UniqueDelegate getUniqueDelegate() {
        return uniqueDelegate;
    }

@@ -32,6 +32,8 @@ import java.sql.NClob;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;

@@ -64,6 +66,7 @@ import org.hibernate.dialect.pagination.LimitHandler;
import org.hibernate.dialect.unique.DefaultUniqueDelegate;
import org.hibernate.dialect.unique.UniqueDelegate;
import org.hibernate.engine.jdbc.LobCreator;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.engine.spi.RowSelection;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.exception.spi.ConversionContext;

@@ -78,13 +81,15 @@ import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.internal.util.io.StreamCopier;
import org.hibernate.mapping.Column;
import org.hibernate.metamodel.spi.TypeContributions;
import org.hibernate.metamodel.spi.relational.AuxiliaryDatabaseObject;
import org.hibernate.metamodel.spi.relational.Column;
import org.hibernate.metamodel.spi.relational.Constraint;
import org.hibernate.metamodel.spi.relational.ForeignKey;
import org.hibernate.metamodel.spi.relational.Index;
import org.hibernate.metamodel.spi.relational.Sequence;
import org.hibernate.metamodel.spi.relational.Table;
import org.hibernate.metamodel.spi.relational.UniqueKey;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.procedure.internal.StandardCallableStatementSupport;
import org.hibernate.procedure.spi.CallableStatementSupport;

@@ -99,12 +104,12 @@ import org.hibernate.tool.schema.internal.StandardForeignKeyExporter;
import org.hibernate.tool.schema.internal.StandardIndexExporter;
import org.hibernate.tool.schema.internal.StandardSequenceExporter;
import org.hibernate.tool.schema.internal.StandardTableExporter;
import org.hibernate.tool.schema.internal.StandardUniqueKeyExporter;
import org.hibernate.tool.schema.internal.TemporaryTableExporter;
import org.hibernate.tool.schema.spi.Exporter;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.sql.ClobTypeDescriptor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;

import org.jboss.logging.Logger;

/**

@@ -1980,6 +1985,7 @@ public abstract class Dialect implements ConversionContext {
    private StandardSequenceExporter sequenceExporter = new StandardSequenceExporter( this );
    private StandardIndexExporter indexExporter = new StandardIndexExporter( this );
    private StandardForeignKeyExporter foreignKeyExporter = new StandardForeignKeyExporter( this );
    private StandardUniqueKeyExporter uniqueKeyExporter = new StandardUniqueKeyExporter( this );
    private StandardAuxiliaryDatabaseObjectExporter auxiliaryObjectExporter = new StandardAuxiliaryDatabaseObjectExporter( this );
    private TemporaryTableExporter temporaryTableExporter = new TemporaryTableExporter( this );

@@ -2003,6 +2009,10 @@ public abstract class Dialect implements ConversionContext {
        return foreignKeyExporter;
    }

    public Exporter<Constraint> getUniqueKeyExporter() {
        return uniqueKeyExporter;
    }

    public Exporter<AuxiliaryDatabaseObject> getAuxiliaryDatabaseObjectExporter() {
        return auxiliaryObjectExporter;
    }

@@ -2684,58 +2694,85 @@ public abstract class Dialect implements ConversionContext {
     * Get the UniqueDelegate supported by this dialect
     *
     * @return The UniqueDelegate
     *
     * @deprecated
     */
    @Deprecated
    public UniqueDelegate getUniqueDelegate() {
        return uniqueDelegate;
    }

    /**
     * Does this dialect support the <tt>UNIQUE</tt> column syntax?
     *
     * @return boolean
     *
     * @deprecated {@link #getUniqueDelegate()} should be overridden instead.
     */
    @Deprecated
    public boolean supportsUnique() {
        return true;
    public String[] applyConstraints(Iterable<Table> tables, JdbcEnvironment jdbcEnvironment) {
        final List<String> sqlStrings = new ArrayList<String>();
        final List<String> uniqueExportIdentifiers = new ArrayList<String>();

        for ( Table table : tables ) {
            if( !table.isPhysicalTable() ){
                continue;
            }

            // TODO: Some Dialects will need to create both the index and unique constraints. Audit them.

            for ( Index index : table.getIndexes() ) {
                if (index.isUnique()) {
                    sqlStrings.addAll(Arrays.asList( getUniqueKeyExporter().getSqlCreateStrings(
                            index, jdbcEnvironment ) ) );
                    uniqueExportIdentifiers.add( index.getColumnExportIdentifier() );
                }
                else {
                    sqlStrings.addAll(Arrays.asList( getIndexExporter().getSqlCreateStrings(
                            index, jdbcEnvironment ) ) );
                }
            }

            for ( UniqueKey uniqueKey : table.getUniqueKeys() ) {
                // A unique Index may have already exported the constraint.
                if (! uniqueExportIdentifiers.contains( uniqueKey.getExportIdentifier() )) {
                    sqlStrings.addAll(Arrays.asList( getUniqueKeyExporter().getSqlCreateStrings(
                            uniqueKey, jdbcEnvironment ) ) );
                }
                uniqueExportIdentifiers.add( uniqueKey.getColumnExportIdentifier() );
            }

        }

        return sqlStrings.toArray( new String[sqlStrings.size()] );
    }

    /**
     * Does this dialect support adding Unique constraints via create and alter table ?
     *
     * @return boolean
     *
     * @deprecated {@link #getUniqueDelegate()} should be overridden instead.
     */
    @Deprecated
    public boolean supportsUniqueConstraintInCreateAlterTable() {
        return true;
    }
    public String[] dropConstraints(Iterable<Table> tables, JdbcEnvironment jdbcEnvironment) {
        final List<String> sqlStrings = new ArrayList<String>();
        final List<String> uniqueExportIdentifiers = new ArrayList<String>();

    /**
     * The syntax used to add a unique constraint to a table.
     *
     * @param constraintName The name of the unique constraint.
     * @return The "add unique" fragment
     *
     * @deprecated {@link #getUniqueDelegate()} should be overridden instead.
     */
    @Deprecated
    public String getAddUniqueConstraintString(String constraintName) {
        return " add constraint " + constraintName + " unique ";
    }
        for ( Table table : tables ) {
            if( !table.isPhysicalTable() ){
                continue;
            }

    /**
     * Is the combination of not-null and unique supported?
     *
     * @return deprecated
     *
     * @deprecated {@link #getUniqueDelegate()} should be overridden instead.
     */
    @Deprecated
    public boolean supportsNotNullUnique() {
        return true;
            if ( dropConstraints() ) {
                for ( Index index : table.getIndexes() ) {
                    if (index.isUnique()) {
                        sqlStrings.addAll(Arrays.asList( getUniqueKeyExporter().getSqlDropStrings(
                                index, jdbcEnvironment ) ) );
                        uniqueExportIdentifiers.add( index.getColumnExportIdentifier() );
                    }
                    else {
                        sqlStrings.addAll(Arrays.asList( getIndexExporter().getSqlDropStrings(
                                index, jdbcEnvironment ) ) );
                    }
                }

                for ( UniqueKey uniqueKey : table.getUniqueKeys() ) {
                    // A unique Index may have already exported the constraint.
                    if (! uniqueExportIdentifiers.contains( uniqueKey.getExportIdentifier() )) {
                        sqlStrings.addAll(Arrays.asList( getUniqueKeyExporter().getSqlDropStrings(
                                uniqueKey, jdbcEnvironment ) ) );
                    }
                    uniqueExportIdentifiers.add( uniqueKey.getColumnExportIdentifier() );
                }
            }
        }

        return sqlStrings.toArray( new String[sqlStrings.size()] );
    }

    /**

@@ -294,6 +294,7 @@ public class InformixDialect extends Dialect {
    }

    @Override
    @Deprecated
    public UniqueDelegate getUniqueDelegate() {
        return uniqueDelegate;
    }

@@ -33,6 +33,7 @@ import org.hibernate.metamodel.spi.relational.UniqueKey;
 *
 * @author Brett Meyer
 */
@Deprecated
public class DB2UniqueDelegate extends DefaultUniqueDelegate {
    /**
     * Constructs a DB2UniqueDelegate

@@ -33,6 +33,7 @@ import org.hibernate.metamodel.spi.relational.UniqueKey;
 *
 * @author Brett Meyer
 */
@Deprecated
public class DefaultUniqueDelegate implements UniqueDelegate {
    protected final Dialect dialect;

@@ -28,6 +28,7 @@ import org.hibernate.metamodel.spi.relational.UniqueKey;
 *
 * @author Brett Meyer
 */
@Deprecated
public class InformixUniqueDelegate extends DefaultUniqueDelegate {

    public InformixUniqueDelegate( Dialect dialect ) {

@@ -46,6 +46,7 @@ import org.hibernate.metamodel.spi.relational.UniqueKey;
 *
 * @author Brett Meyer
 */
@Deprecated
public interface UniqueDelegate {
    /**
     * Get the fragment that can be used to make a column unique as part of its column definition.

@@ -1142,7 +1142,8 @@ public class Binder implements HelperContext {
                columns.add( tableHelper.locateOrCreateColumn( table, columnName,
                        new ColumnNamingStrategyHelper( null, false ) ) );
            }
            tableHelper.createIndex( table, columns, constraintSource.name() );
            tableHelper.createIndex( table, columns, indexConstraintSource.name(),
                    indexConstraintSource.isUnique() );
        }
    }
}

@@ -202,8 +202,9 @@ public class TableHelper {
    public void createIndex(
            final TableSpecification table,
            final List<Column> columns,
            final String name) {
        final Index idx = new Index();
            final String name,
            final boolean isUnique) {
        final Index idx = new Index(isUnique);
        for ( final Column column : columns ) {
            idx.addColumn( column );
        }

@@ -532,12 +532,8 @@ public class EntitySourceImpl implements EntitySource {
                }
            }

            ConstraintSource constraintSource = null;
            if ( isUnique ) {
                constraintSource = new UniqueConstraintSourceImpl( name, tableName, columnNames, orderings );
            } else {
                constraintSource = new IndexConstraintSourceImpl( name, tableName, columnNames, orderings );
            }
            ConstraintSource constraintSource = new IndexConstraintSourceImpl(
                    name, tableName, columnNames, orderings, isUnique );
            constraintSources.add( constraintSource );
        }
    }

@@ -32,8 +32,12 @@ import org.hibernate.metamodel.spi.source.IndexConstraintSource;
 */
class IndexConstraintSourceImpl extends AbstractConstraintSource implements IndexConstraintSource {

    public IndexConstraintSourceImpl(String name, String tableName, List<String> columnNames, List<String> orderings) {
    private final boolean unique;

    public IndexConstraintSourceImpl(String name, String tableName, List<String> columnNames, List<String> orderings,
            boolean unique) {
        super( name, tableName, columnNames, orderings );
        this.unique = unique;
    }

    @Override

@@ -47,6 +51,11 @@ class IndexConstraintSourceImpl extends AbstractConstraintSource implements Inde
        sb.append( '}' );
        return sb.toString();
    }

    @Override
    public boolean isUnique() {
        return unique;
    }
}

@@ -23,8 +23,6 @@
 */
package org.hibernate.metamodel.internal.source.hbm;

import java.util.List;

import org.hibernate.metamodel.spi.source.IndexConstraintSource;

/**

@@ -47,6 +45,12 @@ class IndexConstraintSourceImpl extends AbstractConstraintSource implements Inde
        sb.append( '}' );
        return sb.toString();
    }

    @Override
    public boolean isUnique() {
        // TODO: Is it possible to have a unique index in HBM?
        return false;
    }
}

@@ -24,13 +24,13 @@
package org.hibernate.metamodel.spi.relational;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.hibernate.dialect.Dialect;
import org.hibernate.internal.util.StringHelper;

/**

@@ -45,6 +45,7 @@ import org.hibernate.internal.util.StringHelper;
public abstract class AbstractConstraint implements Constraint {
    private TableSpecification table;
    private String name;
    // For #getExportIdentifier, alphabetical ordering is important.
    private final Map<Identifier, Column> columnMap = new LinkedHashMap<Identifier, Column>();
    private final Map<Column, String> columnOrderMap = new HashMap<Column, String>();

@@ -123,6 +124,18 @@ public abstract class AbstractConstraint implements Constraint {
        return columnMap;
    }

    public String getColumnExportIdentifier() {
        List<Identifier> columnNames = new ArrayList<Identifier>();
        columnNames.addAll( columnMap.keySet() );
        Collections.sort( columnNames );

        StringBuilder sb = new StringBuilder();
        for ( Identifier columnName : columnNames ) {
            sb.append( '_' ).append( columnName.getText() );
        }
        return sb.toString();
    }

    public void addColumn(Column column) {
        internalAddColumn( column );
    }

@@ -154,40 +167,4 @@ public abstract class AbstractConstraint implements Constraint {
    public String getOrdering(Column column) {
        return columnOrderMap.get( column );
    }

    protected boolean isCreationVetoed(Dialect dialect) {
        return false;
    }

    protected abstract String sqlConstraintStringInAlterTable(Dialect dialect);

    public String[] sqlDropStrings(Dialect dialect) {
        if ( isCreationVetoed( dialect ) ) {
            return null;
        }
        else {
            return new String[] {
                    new StringBuilder()
                            .append( "alter table " )
                            .append( getTable().getQualifiedName( dialect ) )
                            .append( " drop constraint " )
                            .append( dialect.quote( name ) )
                            .toString()
            };
        }
    }

    public String[] sqlCreateStrings(Dialect dialect) {
        if ( isCreationVetoed( dialect ) ) {
            return null;
        }
        else {
            return new String[] {
                    new StringBuilder( "alter table " )
                            .append( getTable().getQualifiedName( dialect ) )
                            .append( sqlConstraintStringInAlterTable( dialect ) )
                            .toString()
            };
        }
    }
}

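A quick illustration (not from the commit; plain strings and the column names are invented) of the identifier built by the new getColumnExportIdentifier() above: the column names are sorted before being concatenated, so declaration order does not matter, and that stable string is what Dialect#applyConstraints uses to avoid exporting the same unique constraint twice.

    // Hypothetical example only; java.util imports assumed. Identifier instances are
    // replaced by plain strings to keep the sketch self-contained.
    List<String> columnNames = new ArrayList<String>( Arrays.asList( "producer", "brand" ) );
    Collections.sort( columnNames );

    StringBuilder sb = new StringBuilder();
    for ( String columnName : columnNames ) {
        sb.append( '_' ).append( columnName );
    }
    // sb.toString() is "_brand_producer" for both a unique index and a unique key
    // over the same two columns, regardless of how they were declared.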
@@ -35,6 +35,10 @@ import org.hibernate.sql.Template;
 * @author Steve Ebersole
 */
public class Column extends AbstractValue {
    public static final int DEFAULT_LENGTH = 255;
    public static final int DEFAULT_PRECISION = 19;
    public static final int DEFAULT_SCALE = 2;

    private final Identifier columnName;
    private boolean nullable = true;

@@ -25,13 +25,9 @@ package org.hibernate.metamodel.spi.relational;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

import org.hibernate.AssertionFailure;
import org.hibernate.MappingException;
import org.hibernate.dialect.Dialect;

import org.jboss.logging.Logger;

/**

@@ -87,6 +83,19 @@ public class ForeignKey extends AbstractConstraint {
                : Collections.unmodifiableList( targetColumns );
    }

    public boolean hasTargetColumn(String name) {
        for (Column column : targetColumns) {
            if (column.getColumnName().getText().equals( name )) {
                return true;
            }
        }
        return false;
    }

    public int getTargetColumnSpan() {
        return targetColumns.size();
    }

    protected int generateConstraintColumnListId() {
        return 31 * super.generateConstraintColumnListId() + targetTable.generateColumnListId( getTargetColumns() );
    }

@@ -152,61 +161,6 @@ public class ForeignKey extends AbstractConstraint {
        this.updateRule = updateRule;
    }

    @Override
    public String[] sqlDropStrings(Dialect dialect) {
        final StringBuilder buf = new StringBuilder( "alter table " );
        buf.append( getTable().getQualifiedName( dialect ) );
        buf.append( dialect.getDropForeignKeyString() );
        if ( dialect.supportsIfExistsBeforeConstraintName() ) {
            buf.append( "if exists " );
        }
        buf.append( getName() );
        if ( dialect.supportsIfExistsAfterConstraintName() ) {
            buf.append( " if exists" );
        }
        return new String[] { buf.toString() };
    }

    public String sqlConstraintStringInAlterTable(Dialect dialect) {
        String[] columnNames = new String[ getColumnSpan() ];
        String[] targetColumnNames = new String[ getColumnSpan() ];
        int i=0;
        Iterator<Column> itTargetColumn = getTargetColumns().iterator();
        for ( Column column : getColumns() ) {
            if ( ! itTargetColumn.hasNext() ) {
                throw new MappingException( "More constraint columns that foreign key target columns." );
            }
            columnNames[i] = column.getColumnName().getText( dialect );
            targetColumnNames[i] = ( itTargetColumn.next() ).getColumnName().getText( dialect );
            i++;
        }
        if ( itTargetColumn.hasNext() ) {
            throw new MappingException( "More foreign key target columns than constraint columns." );
        }
        StringBuilder sb =
                new StringBuilder(
                        dialect.getAddForeignKeyConstraintString(
                                getName(),
                                columnNames,
                                targetTable.getQualifiedName( dialect ),
                                targetColumnNames,
                                this.targetColumns == null ||
                                        this.targetColumns.equals( targetTable.getPrimaryKey().getColumns() )
                        )
                );
        // TODO: If a dialect does not support cascade-delete, can it support other actions? (HHH-6428)
        // For now, assume not.
        if ( dialect.supportsCascadeDelete() ) {
            if ( deleteRule != ReferentialAction.NO_ACTION ) {
                sb.append( ON_DELETE ).append( deleteRule.getActionString() );
            }
            if ( updateRule != ReferentialAction.NO_ACTION ) {
                sb.append( ON_UPDATE ).append( updateRule.getActionString() );
            }
        }
        return sb.toString();
    }

    public static enum ReferentialAction {
        NO_ACTION( "no action" ),
        CASCADE( "cascade" ),

@@ -32,7 +32,7 @@ import org.hibernate.internal.util.StringHelper;
 *
 * @author Steve Ebersole
 */
public class Identifier {
public class Identifier implements Comparable<Identifier> {
    private final String text;
    private final boolean isQuoted;

@@ -186,4 +186,9 @@ public class Identifier {
    public int hashCode() {
        return isQuoted ? text.hashCode() : text.toUpperCase().hashCode();
    }

    @Override
    public int compareTo(Identifier o) {
        return text.compareTo( o.getText() );
    }
}

@@ -23,7 +23,6 @@
 */
package org.hibernate.metamodel.spi.relational;

import org.hibernate.dialect.Dialect;

/**
 * Models a SQL <tt>INDEX</tt>

@@ -33,31 +32,15 @@ import org.hibernate.dialect.Dialect;
 */
public class Index extends AbstractConstraint {

    public Index() {
        this( null, null );
    private final boolean unique;

    public Index(boolean unique) {
        this( null, null, unique );
    }

    protected Index(Table table, String name) {
    protected Index(Table table, String name, boolean unique) {
        super( table, name );
    }

    @Override
    public String sqlConstraintStringInAlterTable(Dialect dialect) {
        StringBuilder buf = new StringBuilder( " index (" );
        boolean first = true;
        for ( Column column : getColumns() ) {
            if ( first ) {
                first = false;
            }
            else {
                buf.append( ", " );
            }
            buf.append( column.getColumnName().getText( dialect ) );
            if ( hasOrdering( column ) ) {
                buf.append( " " ).append( getOrdering( column ) );
            }
        }
        return buf.append( ')' ).toString();
        this.unique = unique;
    }

    @Override

@@ -70,4 +53,8 @@ public class Index extends AbstractConstraint {
        }
        return sb.toString();
    }

    public boolean isUnique() {
        return unique;
    }
}

@@ -40,6 +40,7 @@ public class PrimaryKey extends AbstractConstraint {
        super( table, null );
    }

    // TODO: Can this be removed?
    public String sqlConstraintStringInCreateTable(Dialect dialect) {
        StringBuilder buf = new StringBuilder("primary key (");
        boolean first = true;

@@ -55,23 +56,6 @@ public class PrimaryKey extends AbstractConstraint {
        return buf.append(')').toString();
    }

    public String sqlConstraintStringInAlterTable(Dialect dialect) {
        StringBuilder buf = new StringBuilder(
                dialect.getAddPrimaryKeyConstraintString( getName() )
        ).append('(');
        boolean first = true;
        for ( Column column : getColumns() ) {
            if ( first ) {
                first = false;
            }
            else {
                buf.append(", ");
            }
            buf.append( column.getColumnName().getText( dialect ) );
        }
        return buf.append(')').toString();
    }

    @Override
    public String getExportIdentifier() {
        return getTable().getLoggableValueQualifier() + ".PK";

@@ -264,11 +264,6 @@ public class Table extends AbstractTableSpecification implements Exportable {
            String nullablePostfix = column.isNullable() ? dialect.getNullColumnString() : " not null";
            alter.append( nullablePostfix );

            if ( hasUniqueKey(column) ) {
                alter.append( dialect.getUniqueDelegate().getColumnDefinitionUniquenessFragment(
                        column ) );
            }

            final String checkCondition = column.getCheckCondition();
            if ( checkCondition != null && dialect.supportsColumnCheck() ) {
                alter.append( " check(" )

@@ -23,7 +23,6 @@
 */
package org.hibernate.metamodel.spi.relational;

import org.hibernate.dialect.Dialect;

/**
 * Models a SQL <tt>INDEX</tt> defined as UNIQUE

@@ -42,22 +41,6 @@ public class UniqueKey extends AbstractConstraint {
        super( table, name );
    }

    @Override
    public String[] sqlCreateStrings(Dialect dialect) {
        return new String[] { dialect.getUniqueDelegate().getAlterTableToAddUniqueKeyCommand( this ) };
    }

    @Override
    public String[] sqlDropStrings(Dialect dialect) {
        return new String[] { dialect.getUniqueDelegate().getAlterTableToDropUniqueKeyCommand( this ) };
    }

    @Override
    public String sqlConstraintStringInAlterTable(Dialect dialect) {
        // not used
        return "";
    }

    @Override
    public String getExportIdentifier() {
        StringBuilder sb = new StringBuilder( getTable().getLoggableValueQualifier() );

@@ -29,4 +29,5 @@ package org.hibernate.metamodel.spi.source;
 * @author Brett Meyer
 */
public interface IndexConstraintSource extends ConstraintSource {
    public boolean isUnique();
}

@@ -33,12 +33,10 @@ import org.hibernate.metamodel.spi.relational.AuxiliaryDatabaseObject;
import org.hibernate.metamodel.spi.relational.Database;
import org.hibernate.metamodel.spi.relational.Exportable;
import org.hibernate.metamodel.spi.relational.ForeignKey;
import org.hibernate.metamodel.spi.relational.Index;
import org.hibernate.metamodel.spi.relational.InitCommand;
import org.hibernate.metamodel.spi.relational.Schema;
import org.hibernate.metamodel.spi.relational.Sequence;
import org.hibernate.metamodel.spi.relational.Table;
import org.hibernate.metamodel.spi.relational.UniqueKey;
import org.hibernate.tool.schema.spi.SchemaCreator;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.Target;

@@ -100,17 +98,6 @@ public class SchemaCreatorImpl implements SchemaCreator {
            checkExportIdentifier( table, exportIdentifiers );
            applySqlStrings( targets, dialect.getTableExporter().getSqlCreateStrings( table, jdbcEnvironment ) );

            for ( Index index : table.getIndexes() ) {
                checkExportIdentifier( index, exportIdentifiers );
                applySqlStrings( targets, dialect.getIndexExporter().getSqlCreateStrings( index, jdbcEnvironment ) );
            }

            for ( UniqueKey uniqueKey : table.getUniqueKeys() ) {
                checkExportIdentifier( uniqueKey, exportIdentifiers );
                applySqlStrings( targets, dialect.getUniqueDelegate()
                        .getAlterTableToAddUniqueKeyCommand( uniqueKey ) );
            }

        }

        for ( Table table : schema.getTables() ) {

@@ -130,6 +117,8 @@ public class SchemaCreatorImpl implements SchemaCreator {
                }
            }
        }

        applySqlStrings( targets, dialect.applyConstraints( schema.getTables(), jdbcEnvironment ) );
    }

    // next, create all "after table" auxiliary objects

@@ -33,11 +33,9 @@ import org.hibernate.metamodel.spi.relational.AuxiliaryDatabaseObject;
import org.hibernate.metamodel.spi.relational.Database;
import org.hibernate.metamodel.spi.relational.Exportable;
import org.hibernate.metamodel.spi.relational.ForeignKey;
import org.hibernate.metamodel.spi.relational.Index;
import org.hibernate.metamodel.spi.relational.Schema;
import org.hibernate.metamodel.spi.relational.Sequence;
import org.hibernate.metamodel.spi.relational.Table;
import org.hibernate.metamodel.spi.relational.UniqueKey;
import org.hibernate.tool.schema.spi.SchemaDropper;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.Target;

@@ -79,10 +77,14 @@ public class SchemaDropperImpl implements SchemaDropper {
        }

        for ( Schema schema : database.getSchemas() ) {
            // we need to drop constraints prior to dropping table
            applySqlStrings( targets, dialect.dropConstraints( schema.getTables(), jdbcEnvironment ) );

            for ( Table table : schema.getTables() ) {
                if( !table.isPhysicalTable() ){
                    continue;
                }

                if ( dialect.dropConstraints() ) {
                    // we need to drop constraints prior to dropping table

@@ -104,17 +106,6 @@ public class SchemaDropperImpl implements SchemaDropper {
                    }
                }
            }

            for ( UniqueKey uniqueKey : table.getUniqueKeys() ) {
                checkExportIdentifier( uniqueKey, exportIdentifiers );
                applySqlStrings( targets, dialect.getUniqueDelegate()
                        .getAlterTableToDropUniqueKeyCommand( uniqueKey ) );
            }

            for ( Index index : table.getIndexes() ) {
                checkExportIdentifier( index, exportIdentifiers );
                applySqlStrings( targets, dialect.getIndexExporter().getSqlDropStrings( index, jdbcEnvironment ) );
            }
        }

        checkExportIdentifier( table, exportIdentifiers );

@@ -43,9 +43,6 @@ public class StandardIndexExporter implements Exporter<Index> {

    @Override
    public String[] getSqlCreateStrings(Index index, JdbcEnvironment jdbcEnvironment) {
        // todo : pass in JdbcEnvironment so we can access naming helpers...
        // specifically here, StringHelper.unqualify can be incorrect.

        final String tableName = jdbcEnvironment.getQualifiedObjectNameSupport().formatName(
                ( (Table) index.getTable() ).getTableName()
        );

@@ -101,10 +101,6 @@ public class StandardTableExporter implements Exporter<Table> {
                }

            }
            //only create unique constraint for non-pk column
            if ( table.hasUniqueKey( col ) && !colName.equals( pkColName )) {
                buf.append( dialect.getUniqueDelegate().getColumnDefinitionUniquenessFragment( col ) );
            }

            if ( col.getCheckCondition() != null && dialect.supportsColumnCheck() ) {
                buf.append( " check (" )

@@ -0,0 +1,97 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * Copyright (c) 2012, Red Hat Inc. or third-party contributors as
 * indicated by the @author tags or express copyright attribution
 * statements applied by the authors. All third-party contributions are
 * distributed under license by Red Hat Inc.
 *
 * This copyrighted material is made available to anyone wishing to use, modify,
 * copy, or redistribute it subject to the terms and conditions of the GNU
 * Lesser General Public License, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
 * for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this distribution; if not, write to:
 * Free Software Foundation, Inc.
 * 51 Franklin Street, Fifth Floor
 * Boston, MA 02110-1301 USA
 */
package org.hibernate.tool.schema.internal;

import java.util.Iterator;

import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.metamodel.spi.relational.Column;
import org.hibernate.metamodel.spi.relational.Constraint;
import org.hibernate.metamodel.spi.relational.Table;
import org.hibernate.tool.schema.spi.Exporter;

/**
 * Unique constraint Exporter. Note that it's parameterized for Constraint, rather than UniqueKey. This is
 * to allow Dialects to decide whether or not to create unique constraints for unique indexes.
 *
 * @author Brett Meyer
 */
public class StandardUniqueKeyExporter implements Exporter<Constraint> {
    private final Dialect dialect;

    public StandardUniqueKeyExporter(Dialect dialect) {
        this.dialect = dialect;
    }

    @Override
    public String[] getSqlCreateStrings(Constraint constraint, JdbcEnvironment jdbcEnvironment) {
        if ( ! dialect.hasAlterTable() ) {
            return NO_COMMANDS;
        }

        final String tableName = jdbcEnvironment.getQualifiedObjectNameSupport().formatName(
                ( (Table) constraint.getTable() ).getTableName()
        );
        StringBuilder sb = new StringBuilder()
                .append( "alter table " )
                .append( tableName )
                .append( " add constraint " )
                .append( constraint.getName() )
                .append( " unique ( " );

        final Iterator columnIterator = constraint.getColumns().iterator();
        while ( columnIterator.hasNext() ) {
            Column column = (Column) columnIterator.next();
            sb.append( column.getColumnName().getText( dialect ) );
            if ( columnIterator.hasNext() ) {
                sb.append( ", " );
            }
        }
        sb.append( ")" );
        return new String[] { sb.toString() };
    }

    @Override
    public String[] getSqlDropStrings(Constraint constraint, JdbcEnvironment jdbcEnvironment) {
        if ( ! dialect.dropConstraints() ) {
            return NO_COMMANDS;
        }

        final String tableName = jdbcEnvironment.getQualifiedObjectNameSupport().formatName(
                ( (Table) constraint.getTable() ).getTableName()
        );
        final StringBuilder sb = new StringBuilder( "alter table " );
        sb.append( tableName );
        sb.append(" drop constraint " );
        if ( dialect.supportsIfExistsBeforeConstraintName() ) {
            sb.append( "if exists " );
        }
        sb.append( dialect.quote( constraint.getName() ) );
        if ( dialect.supportsIfExistsAfterConstraintName() ) {
            sb.append( " if exists" );
        }
        return new String[] { sb.toString() };
    }
}

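For orientation (not part of the commit; the constraint name, table, and variables in scope are assumed), the exporter above is typically reached through the new Dialect#getUniqueKeyExporter() accessor and yields one ALTER TABLE statement per constraint:

    // Hypothetical usage sketch; dialect, uniqueKey and jdbcEnvironment are assumed to be in scope.
    Exporter<Constraint> exporter = dialect.getUniqueKeyExporter();
    String[] createSql = exporter.getSqlCreateStrings( uniqueKey, jdbcEnvironment );
    // e.g. { "alter table Car add constraint UK_brand_producer unique ( brand, producer)" }
    String[] dropSql = exporter.getSqlDropStrings( uniqueKey, jdbcEnvironment );
    // e.g. { "alter table Car drop constraint UK_brand_producer" }, with "if exists"
    // added before or after the name when the Dialect reports support for it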
@@ -83,8 +83,8 @@ public class UniqueConstraintBindingTest extends BaseAnnotationBindingTestCase {
            assertEquals( ukName, key.getName() );
            assertTrue( table == key.getTable() );
            assertNotNull( key.getColumns() );
            assertEquals( "There should be two columns in the unique constraint", ukNumColumns, key.getColumns().size() );
            assertEquals( "There should be two columns in the unique constraint", ukNumColumns, key.getColumnSpan() );
            assertEquals( "There should be " + ukNumColumns + " columns in the unique constraint", ukNumColumns, key.getColumns().size() );
            assertEquals( "There should be " + ukNumColumns + " columns in the unique constraint", ukNumColumns, key.getColumnSpan() );
        }
        assertEquals( "There should only be one unique constraint", 1, i );
    }

@@ -36,7 +36,6 @@ import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Configuration;
import org.hibernate.internal.SessionFactoryRegistry;
import org.hibernate.internal.util.SerializationHelper;
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.hibernate.type.SerializationException;
import org.junit.Test;

@@ -23,21 +23,21 @@
 */
package org.hibernate.test.annotations.index.jpa;

import java.util.Iterator;

import org.hibernate.internal.util.StringHelper;
import org.hibernate.metamodel.spi.binding.EntityBinding;
import org.hibernate.metamodel.spi.relational.TableSpecification;

import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import org.hibernate.test.util.SchemaUtil;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

import java.util.Iterator;

import org.hibernate.internal.util.StringHelper;
import org.hibernate.metamodel.spi.binding.EntityBinding;
import org.hibernate.metamodel.spi.relational.Column;
import org.hibernate.metamodel.spi.relational.TableSpecification;
import org.hibernate.test.util.SchemaUtil;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import org.junit.Test;

/**
 * @author Strong Liu <stliu@hibernate.org>
 */

@@ -45,28 +45,30 @@ public abstract class AbstractJPAIndexTest extends BaseCoreFunctionalTestCase {
    @Test
    public void testTableIndex() {
        TableSpecification table = SchemaUtil.getTable( Car.class, metadata() );
        Iterator<org.hibernate.metamodel.spi.relational.UniqueKey> uniqueKeys = table.getUniqueKeys().iterator();
        assertTrue( uniqueKeys.hasNext() );
        org.hibernate.metamodel.spi.relational.UniqueKey uk = uniqueKeys.next();
        assertFalse( uniqueKeys.hasNext() );
        assertTrue( StringHelper.isNotEmpty( uk.getName() ) );
        assertEquals( 2, uk.getColumnSpan() );
        org.hibernate.metamodel.spi.relational.Column column = uk.getColumns().get( 0 );
        assertEquals( "brand", column.getColumnName().getText() );
        column = uk.getColumns().get( 1 );
        assertEquals( "producer", column.getColumnName().getText() );
        assertSame( table, uk.getTable() );


        Iterator<org.hibernate.metamodel.spi.relational.Index> indexes = table.getIndexes().iterator();
        assertTrue( indexes.hasNext() );
        org.hibernate.metamodel.spi.relational.Index index = indexes.next();
        assertFalse( indexes.hasNext() );
        assertEquals( "Car_idx", index.getName() );
        assertEquals( 1, index.getColumnSpan() );
        column = index.getColumns().get( 0 );
        assertEquals( "since", column.getColumnName().getText() );
        assertSame( table, index.getTable() );
        int found = 0;
        while (indexes.hasNext()) {
            org.hibernate.metamodel.spi.relational.Index index = indexes.next();
            if (index.isUnique()) {
                found++;
                assertTrue( StringHelper.isNotEmpty( index.getName() ) );
                assertEquals( 2, index.getColumnSpan() );
                Column column = index.getColumns().get( 0 );
                assertEquals( "brand", column.getColumnName().getText() );
                column = index.getColumns().get( 1 );
                assertEquals( "producer", column.getColumnName().getText() );
            }
            else {
                found++;
                assertEquals( "Car_idx", index.getName() );
                assertEquals( 1, index.getColumnSpan() );
                Column column = index.getColumns().get( 0 );
                assertEquals( "since", column.getColumnName().getText() );
            }
            assertSame( table, index.getTable() );
        }
        assertEquals( 2, found );
    }

    @Test

@@ -23,32 +23,6 @@
 */
package org.hibernate.test.annotations.index.jpa;

import java.util.Iterator;

import org.junit.Test;

import static org.junit.Assert.*;

import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.Bag;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Index;
import org.hibernate.mapping.Join;
import org.hibernate.mapping.List;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Property;
import org.hibernate.mapping.Set;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.UniqueKey;
import org.hibernate.mapping.Value;
import org.hibernate.test.annotations.embedded.Address;
import org.hibernate.test.annotations.embedded.AddressType;
import org.hibernate.test.annotations.embedded.Book;
import org.hibernate.test.annotations.embedded.Person;
import org.hibernate.test.annotations.embedded.Summary;
import org.hibernate.test.annotations.embedded.WealthyPerson;
import org.hibernate.test.event.collection.detached.*;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;

/**
 * @author Strong Liu <stliu@hibernate.org>

@@ -45,7 +45,6 @@ import org.junit.Test;
/**
 * @author Sharath Reddy
 */
@FailureExpectedWithNewMetamodel
public class ManyToOneWithFormulaTest extends BaseCoreFunctionalTestCase {
    @Test
    public void testManyToOneFromNonPk() throws Exception {

@@ -20,7 +20,10 @@
 */
package org.hibernate.test.constraint;

import static org.junit.Assert.assertTrue;

import java.util.Set;

import javax.persistence.CollectionTable;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;

@@ -34,9 +37,10 @@ import javax.persistence.OneToOne;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;

import org.hibernate.cfg.NotYetImplementedException;

import org.hibernate.testing.FailureExpected;
import org.hibernate.metamodel.spi.relational.ForeignKey;
import org.hibernate.metamodel.spi.relational.TableSpecification;
import org.hibernate.metamodel.spi.relational.UniqueKey;
import org.hibernate.test.util.SchemaUtil;
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;

@@ -72,88 +76,34 @@ public class ConstraintTest extends BaseCoreFunctionalTestCase {

    @Test
    @TestForIssue( jiraKey = "HHH-7797" )
    @FailureExpectedWithNewMetamodel
    public void testUniqueConstraints() {
        if ( isMetadataUsed() ) {
            throw new NotYetImplementedException( "Test case does not work with new metamodel yet." );
        }
//        Column column = (Column) configuration().getClassMapping( DataPoint.class.getName() )
//                .getProperty( "foo1" ).getColumnIterator().next();
//        assertFalse( column.isNullable() );
//        assertTrue( column.isUnique() );
//
//        column = (Column) configuration().getClassMapping( DataPoint.class.getName() )
//                .getProperty( "foo2" ).getColumnIterator().next();
//        assertTrue( column.isNullable() );
//        assertTrue( column.isUnique() );
//
//        column = (Column) configuration().getClassMapping( DataPoint.class.getName() )
//                .getProperty( "id" ).getColumnIterator().next();
//        assertFalse( column.isNullable() );
//        assertTrue( column.isUnique() );
        TableSpecification table = SchemaUtil.getTable( DataPoint.class, metadata() );
        assertTrue( SchemaUtil.hasUniqueKey( table, "foo" ) );
    }

    @Test
    @FailureExpected(jiraKey = "HHH-8862")
    @FailureExpectedWithNewMetamodel
    public void testConstraintNames() {
        if ( isMetadataUsed() ) {
            throw new NotYetImplementedException( "Test case does not work with new metamodel yet." );
        TableSpecification table1 = SchemaUtil.getTable( DataPoint.class, metadata() );
        assertTrue( SchemaUtil.hasUniqueKey( table1, EXPLICIT_UK_NAME, "explicit" ) );

        TableSpecification table2 = SchemaUtil.getTable( DataPoint.class, metadata() );
        assertTrue( SchemaUtil.hasForeignKey( table2, EXPLICIT_FK_NAME_NATIVE, EXPLICIT_COLUMN_NAME_NATIVE ) );
        assertTrue( SchemaUtil.hasForeignKey( table2, EXPLICIT_FK_NAME_JPA_O2O, EXPLICIT_COLUMN_NAME_JPA_O2O ) );
        assertTrue( SchemaUtil.hasForeignKey( table2, EXPLICIT_FK_NAME_JPA_M2O, EXPLICIT_COLUMN_NAME_JPA_M2O ) );
        assertTrue( SchemaUtil.hasForeignKey( table2, EXPLICIT_FK_NAME_JPA_M2M, EXPLICIT_COLUMN_NAME_JPA_M2M ) );
        assertTrue( SchemaUtil.hasForeignKey( table2, EXPLICIT_FK_NAME_JPA_ELEMENT, EXPLICIT_COLUMN_NAME_JPA_ELEMENT ) );

        testConstraintLength( table1 );
        testConstraintLength( table2 );
    }

    private void testConstraintLength(TableSpecification table) {
        for (UniqueKey uk : table.getUniqueKeys()) {
            assertTrue(uk.getName().length() <= MAX_NAME_LENGTH);
        }
        for (ForeignKey fk : table.getForeignKeys()) {
            assertTrue(fk.getName().length() <= MAX_NAME_LENGTH);
        }
//        Iterator<org.hibernate.mapping.Table> tableItr = configuration().getTableMappings();
//        int foundCount = 0;
//        while (tableItr.hasNext()) {
//            org.hibernate.mapping.Table table = tableItr.next();
//
//            Iterator fkItr = table.getForeignKeyIterator();
//            while (fkItr.hasNext()) {
//                ForeignKey fk = (ForeignKey) fkItr.next();
//                assertTrue( fk.getName().length() <= MAX_NAME_LENGTH );
//
//                // ensure the randomly generated constraint name doesn't
//                // happen if explicitly given
//                Iterator<Column> cItr = fk.columnIterator();
//                while (cItr.hasNext()) {
//                    Column column = cItr.next();
//                    if ( column.getName().equals( EXPLICIT_COLUMN_NAME_NATIVE ) ) {
//                        foundCount++;
//                        assertEquals( fk.getName(), EXPLICIT_FK_NAME_NATIVE );
//                    }
//                    else if ( column.getName().equals( EXPLICIT_COLUMN_NAME_JPA_O2O ) ) {
//                        foundCount++;
//                        assertEquals( fk.getName(), EXPLICIT_FK_NAME_JPA_O2O );
//                    }
//                    else if ( column.getName().equals( EXPLICIT_COLUMN_NAME_JPA_M2O ) ) {
//                        foundCount++;
//                        assertEquals( fk.getName(), EXPLICIT_FK_NAME_JPA_M2O );
//                    }
//                    else if ( column.getName().equals( EXPLICIT_COLUMN_NAME_JPA_M2M ) ) {
//                        foundCount++;
//                        assertEquals( fk.getName(), EXPLICIT_FK_NAME_JPA_M2M );
//                    }
//                    else if ( column.getName().equals( EXPLICIT_COLUMN_NAME_JPA_ELEMENT ) ) {
//                        foundCount++;
//                        assertEquals( fk.getName(), EXPLICIT_FK_NAME_JPA_ELEMENT );
//                    }
//                }
//            }
//
//            Iterator ukItr = table.getUniqueKeyIterator();
//            while (ukItr.hasNext()) {
//                UniqueKey uk = (UniqueKey) ukItr.next();
//                assertTrue( uk.getName().length() <= MAX_NAME_LENGTH );
//
//                // ensure the randomly generated constraint name doesn't
//                // happen if explicitly given
//                Column column = uk.getColumn( 0 );
//                if ( column.getName().equals( "explicit" ) ) {
//                    foundCount++;
//                    assertEquals( uk.getName(), EXPLICIT_UK_NAME );
//                }
//            }
//        }
//
//        assertEquals("Could not find the necessary columns.", 5, foundCount);
    }

    @Entity

@@ -163,14 +113,10 @@ public class ConstraintTest extends BaseCoreFunctionalTestCase {
    public static class DataPoint {
        @Id
        @GeneratedValue
        @javax.persistence.Column( nullable = false, unique = true)
        public long id;

        @javax.persistence.Column( nullable = false, unique = true)
        public String foo1;

        @javax.persistence.Column( nullable = true, unique = true)
        public String foo2;
        @javax.persistence.Column( unique = true)
        public String foo;

        public String explicit;
    }

@@ -109,6 +109,13 @@ public class SchemaUtil {
        return collection.getPluralAttributeKeyBinding().getCollectionTable();
    }

    /**
     * Do all of the given columns have associated UKs?
     *
     * @param table
     * @param columnNames
     * @return
     */
    public static boolean hasUniqueKeys(TableSpecification table, String... columnNames) {
        for ( String columnName : columnNames ) {
            if ( !table.hasUniqueKey( table.locateColumn( columnName ) ) ) {

@@ -127,15 +134,42 @@ public class SchemaUtil {
        return false;
    }

    public static boolean hasForeignKey(TableSpecification table, String keyName) {
        for ( ForeignKey fk : table.getForeignKeys() ) {
            if ( fk.getName().equals( keyName ) ) {
                return true;
    /**
     * Does a unique key exist with the given keyName containing the given columnNames *exclusively*?
     *
     * @param table
     * @param keyName
     * @param columnNames
     * @return
     */
    public static boolean hasUniqueKey(TableSpecification table, String keyName, String... columnNames) {
        for ( UniqueKey uk : table.getUniqueKeys() ) {
            if ( uk.getName().equals( keyName ) ) {
                for (String columnName : columnNames) {
                    if (!uk.hasColumn( columnName )) {
                        return false;
                    }
                return columnNames.length == uk.getColumnSpan();
                }
            }
        }
        return false;
    }

    public static boolean hasForeignKey(TableSpecification table, String keyName) {
        return table.locateForeignKey( keyName ) != null;
    }

    public static boolean hasForeignKey(TableSpecification table, String keyName, String... targetColumnNames) {
        ForeignKey fk = table.locateForeignKey( keyName );
        for (String targetColumnName : targetColumnNames) {
            if (!fk.hasTargetColumn( targetColumnName )) {
                return false;
            }
        }
        return true;
    }

    public static boolean hasIndex(TableSpecification table, String indexName) {
        for ( Index index : table.getIndexes() ) {
            if ( index.getName().equals( indexName ) ) {