HHH-7725 - Make handling multi-table bulk HQL operations more pluggable

(cherry picked from commit 3e69b7bd53)
Steve Ebersole 2012-10-29 16:29:14 -05:00
parent 0ef3a25d15
commit 2729d462c7
17 changed files with 1131 additions and 462 deletions


@@ -72,7 +72,7 @@ public class BulkOperationCleanupAction implements Executable, Serializable {
	 * @param session The session to which this request is tied.
	 * @param affectedQueryables The affected entity persisters.
	 */
-	public BulkOperationCleanupAction(SessionImplementor session, Queryable[] affectedQueryables) {
+	public BulkOperationCleanupAction(SessionImplementor session, Queryable... affectedQueryables) {
 		SessionFactoryImplementor factory = session.getFactory();
 		LinkedHashSet<String> spacesList = new LinkedHashSet<String>();
 		for ( Queryable persister : affectedQueryables ) {


@@ -594,4 +594,6 @@ public interface AvailableSettings {
 	public static final String FORCE_DISCRIMINATOR_IN_SELECTS_BY_DEFAULT = "hibernate.discriminator.force_in_select";
 
 	public static final String ENABLE_LAZY_LOAD_NO_TRANS = "hibernate.enable_lazy_load_no_trans";
+
+	public static final String HQL_BULK_ID_STRATEGY = "hibernate.hql.bulk_id_strategy";
 }
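
As an illustration of how the new setting might be used (this snippet is not part of the patch): the property names a MultiTableBulkIdStrategy implementation and is resolved in SettingsFactory below, falling back to a dialect-driven default when absent. A minimal bootstrap sketch, assuming a Hibernate 4.x Configuration-based setup; the serviceRegistry variable and the choice of PersistentTableBulkIdStrategy are illustrative only:

import org.hibernate.SessionFactory;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Configuration;
import org.hibernate.hql.spi.PersistentTableBulkIdStrategy;
import org.hibernate.service.ServiceRegistry;

public class BulkIdStrategyConfigSketch {
	public static SessionFactory build(ServiceRegistry serviceRegistry) {
		Configuration cfg = new Configuration();
		// Name the strategy class explicitly; when the property is absent,
		// SettingsFactory picks TemporaryTableBulkIdStrategy or PersistentTableBulkIdStrategy
		// based on Dialect#supportsTemporaryTables() (see SettingsFactory below).
		cfg.setProperty( AvailableSettings.HQL_BULK_ID_STRATEGY, PersistentTableBulkIdStrategy.class.getName() );
		return cfg.buildSessionFactory( serviceRegistry );
	}
}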


@@ -30,6 +30,7 @@ import org.hibernate.EntityMode;
 import org.hibernate.MultiTenancyStrategy;
 import org.hibernate.cache.spi.QueryCacheFactory;
 import org.hibernate.cache.spi.RegionFactory;
+import org.hibernate.hql.spi.MultiTableBulkIdStrategy;
 import org.hibernate.hql.spi.QueryTranslatorFactory;
 import org.hibernate.service.jta.platform.spi.JtaPlatform;
 import org.hibernate.tuple.entity.EntityTuplizerFactory;
@@ -89,6 +90,9 @@ public final class Settings {
 
 	private JtaPlatform jtaPlatform;
 
+	private MultiTableBulkIdStrategy multiTableBulkIdStrategy;
+
 	/**
 	 * Package protected constructor
 	 */
@@ -468,4 +472,12 @@ public final class Settings {
 	void setInitializeLazyStateOutsideTransactions(boolean initializeLazyStateOutsideTransactions) {
 		this.initializeLazyStateOutsideTransactions = initializeLazyStateOutsideTransactions;
 	}
 
+	public MultiTableBulkIdStrategy getMultiTableBulkIdStrategy() {
+		return multiTableBulkIdStrategy;
+	}
+
+	void setMultiTableBulkIdStrategy(MultiTableBulkIdStrategy multiTableBulkIdStrategy) {
+		this.multiTableBulkIdStrategy = multiTableBulkIdStrategy;
+	}
 }


@@ -33,6 +33,7 @@ import org.hibernate.ConnectionReleaseMode;
 import org.hibernate.EntityMode;
 import org.hibernate.HibernateException;
 import org.hibernate.MultiTenancyStrategy;
+import org.hibernate.boot.registry.selector.spi.StrategySelector;
 import org.hibernate.cache.internal.NoCachingRegionFactory;
 import org.hibernate.cache.internal.RegionFactoryInitiator;
 import org.hibernate.cache.internal.StandardQueryCacheFactory;
@@ -41,7 +42,10 @@ import org.hibernate.cache.spi.RegionFactory;
 import org.hibernate.engine.jdbc.spi.ExtractedDatabaseMetaData;
 import org.hibernate.engine.jdbc.spi.JdbcServices;
 import org.hibernate.engine.transaction.spi.TransactionFactory;
+import org.hibernate.hql.spi.MultiTableBulkIdStrategy;
+import org.hibernate.hql.spi.PersistentTableBulkIdStrategy;
 import org.hibernate.hql.spi.QueryTranslatorFactory;
+import org.hibernate.hql.spi.TemporaryTableBulkIdStrategy;
 import org.hibernate.internal.CoreMessageLogger;
 import org.hibernate.internal.util.StringHelper;
 import org.hibernate.internal.util.config.ConfigurationHelper;
@@ -97,6 +101,18 @@ public class SettingsFactory implements Serializable {
 		// Transaction settings:
 		settings.setJtaPlatform( serviceRegistry.getService( JtaPlatform.class ) );
 
+		MultiTableBulkIdStrategy multiTableBulkIdStrategy = serviceRegistry.getService( StrategySelector.class )
+				.resolveStrategy(
+						MultiTableBulkIdStrategy.class,
+						properties.getProperty( AvailableSettings.HQL_BULK_ID_STRATEGY )
+				);
+		if ( multiTableBulkIdStrategy == null ) {
+			multiTableBulkIdStrategy = jdbcServices.getDialect().supportsTemporaryTables()
+					? TemporaryTableBulkIdStrategy.INSTANCE
+					: new PersistentTableBulkIdStrategy();
+		}
+		settings.setMultiTableBulkIdStrategy( multiTableBulkIdStrategy );
+
 		boolean flushBeforeCompletion = ConfigurationHelper.getBoolean(AvailableSettings.FLUSH_BEFORE_COMPLETION, properties);
 		if ( debugEnabled ) {
 			LOG.debugf( "Automatic flush during beforeCompletion(): %s", enabledDisabled(flushBeforeCompletion) );


@@ -31,9 +31,12 @@ import java.util.List;
 import antlr.RecognitionException;
 
 import org.hibernate.HibernateException;
+import org.hibernate.action.internal.BulkOperationCleanupAction;
 import org.hibernate.engine.spi.QueryParameters;
 import org.hibernate.engine.spi.RowSelection;
+import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.engine.spi.SessionImplementor;
+import org.hibernate.event.spi.EventSource;
 import org.hibernate.hql.internal.ast.HqlSqlWalker;
 import org.hibernate.hql.internal.ast.QuerySyntaxException;
 import org.hibernate.hql.internal.ast.SqlGenerator;
@@ -45,17 +48,17 @@ import org.hibernate.persister.entity.Queryable;
  *
  * @author Steve Ebersole
  */
-public class BasicExecutor extends AbstractStatementExecutor {
+public class BasicExecutor implements StatementExecutor {
+	private final SessionFactoryImplementor factory;
 	private final Queryable persister;
 	private final String sql;
 	private final List parameterSpecifications;
 
 	public BasicExecutor(HqlSqlWalker walker, Queryable persister) {
-		super(walker, null);
+		this.factory = walker.getSessionFactoryHelper().getFactory();
 		this.persister = persister;
 		try {
-			SqlGenerator gen = new SqlGenerator( getFactory() );
+			SqlGenerator gen = new SqlGenerator( factory );
 			gen.statement( walker.getAST() );
 			sql = gen.getSQL();
 			gen.getParseErrorHandler().throwQueryException();
@@ -71,8 +74,13 @@ public class BasicExecutor extends AbstractStatementExecutor {
 	}
 
 	public int execute(QueryParameters parameters, SessionImplementor session) throws HibernateException {
-		coordinateSharedCacheCleanup( session );
+		BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, persister );
+		if ( session.isEventSource() ) {
+			( (EventSource) session ).getActionQueue().addAction( action );
+		}
+		else {
+			action.getAfterTransactionCompletionProcess().doAfterTransactionCompletion( true, session );
+		}
 
 		PreparedStatement st = null;
 		RowSelection selection = parameters.getRowSelection();
@@ -101,16 +109,7 @@ public class BasicExecutor extends AbstractStatementExecutor {
 			}
 		}
 		catch( SQLException sqle ) {
-			throw getFactory().getSQLExceptionHelper().convert(
-					sqle,
-					"could not execute update query",
-					sql
-			);
+			throw factory.getSQLExceptionHelper().convert( sqle, "could not execute update query", sql );
 		}
 	}
-
-	@Override
-	protected Queryable[] getAffectedQueryables() {
-		return new Queryable[] { persister };
-	}
 }
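
For orientation (an editorial example, not taken from the patch): these executors back Query#executeUpdate() for bulk HQL. A statement whose target entity maps to a single table runs through BasicExecutor, while one whose target spans several tables (joined inheritance, for example) is routed to the multi-table executors below, which now delegate to the configured MultiTableBulkIdStrategy. A usage sketch with hypothetical Region and PremiumCustomer entities:

import org.hibernate.Session;
import org.hibernate.SessionFactory;

public class BulkHqlUsageSketch {
	public static void run(SessionFactory sessionFactory) {
		Session session = sessionFactory.openSession();
		session.beginTransaction();
		try {
			// single-table entity -> BasicExecutor
			session.createQuery( "update Region set active = false where id = :id" )
					.setParameter( "id", 1L )
					.executeUpdate();
			// joined-inheritance entity -> MultiTableDeleteExecutor / the strategy's DeleteHandler
			session.createQuery( "delete PremiumCustomer where loyaltyPoints < :min" )
					.setParameter( "min", 100L )
					.executeUpdate();
			session.getTransaction().commit();
		}
		finally {
			session.close();
		}
	}
}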


@@ -1,10 +1,10 @@
 /*
  * Hibernate, Relational Persistence for Idiomatic Java
  *
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2012, Red Hat Inc. or third-party contributors as
  * indicated by the @author tags or express copyright attribution
  * statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
  *
  * This copyrighted material is made available to anyone wishing to use, modify,
  * copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,147 +20,46 @@
  * Free Software Foundation, Inc.
  * 51 Franklin Street, Fifth Floor
  * Boston, MA 02110-1301 USA
- *
  */
 package org.hibernate.hql.internal.ast.exec;
 
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-import java.util.Iterator;
-
-import org.jboss.logging.Logger;
-
 import org.hibernate.HibernateException;
+import org.hibernate.action.internal.BulkOperationCleanupAction;
 import org.hibernate.engine.spi.QueryParameters;
 import org.hibernate.engine.spi.SessionImplementor;
+import org.hibernate.event.spi.EventSource;
 import org.hibernate.hql.internal.ast.HqlSqlWalker;
-import org.hibernate.hql.internal.ast.tree.DeleteStatement;
-import org.hibernate.hql.internal.ast.tree.FromElement;
-import org.hibernate.internal.CoreMessageLogger;
-import org.hibernate.internal.util.StringHelper;
-import org.hibernate.param.ParameterSpecification;
-import org.hibernate.persister.entity.Queryable;
-import org.hibernate.sql.Delete;
+import org.hibernate.hql.spi.MultiTableBulkIdStrategy;
 
 /**
  * Implementation of MultiTableDeleteExecutor.
  *
  * @author Steve Ebersole
  */
-public class MultiTableDeleteExecutor extends AbstractStatementExecutor {
-	private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class,
-			MultiTableDeleteExecutor.class.getName());
-
-	private final Queryable persister;
-	private final String idInsertSelect;
-	private final String[] deletes;
+public class MultiTableDeleteExecutor implements StatementExecutor {
+	private final MultiTableBulkIdStrategy.DeleteHandler deleteHandler;
 
 	public MultiTableDeleteExecutor(HqlSqlWalker walker) {
-		super(walker, null);
-
-		if ( !walker.getSessionFactoryHelper().getFactory().getDialect().supportsTemporaryTables() ) {
-			throw new HibernateException( "cannot doAfterTransactionCompletion multi-table deletes using dialect not supporting temp tables" );
-		}
-
-		DeleteStatement deleteStatement = ( DeleteStatement ) walker.getAST();
-		FromElement fromElement = deleteStatement.getFromClause().getFromElement();
-		String bulkTargetAlias = fromElement.getTableAlias();
-		this.persister = fromElement.getQueryable();
-
-		this.idInsertSelect = generateIdInsertSelect( persister, bulkTargetAlias, deleteStatement.getWhereClause() );
-		LOG.tracev( "Generated ID-INSERT-SELECT SQL (multi-table delete) : {0}", idInsertSelect );
-
-		String[] tableNames = persister.getConstraintOrderedTableNameClosure();
-		String[][] columnNames = persister.getContraintOrderedTableKeyColumnClosure();
-		String idSubselect = generateIdSubselect( persister );
-
-		deletes = new String[tableNames.length];
-		for ( int i = tableNames.length - 1; i >= 0; i-- ) {
-			// TODO : an optimization here would be to consider cascade deletes and not gen those delete statements;
-			//      the difficulty is the ordering of the tables here vs the cascade attributes on the persisters ->
-			//          the table info gotten here should really be self-contained (i.e., a class representation
-			//          defining all the needed attributes), then we could then get an array of those
-			final Delete delete = new Delete()
-					.setTableName( tableNames[i] )
-					.setWhere( "(" + StringHelper.join( ", ", columnNames[i] ) + ") IN (" + idSubselect + ")" );
-			if ( getFactory().getSettings().isCommentsEnabled() ) {
-				delete.setComment( "bulk delete" );
-			}
-
-			deletes[i] = delete.toStatementString();
-		}
+		MultiTableBulkIdStrategy strategy = walker.getSessionFactoryHelper()
+				.getFactory()
+				.getSettings()
+				.getMultiTableBulkIdStrategy();
+		this.deleteHandler = strategy.buildDeleteHandler( walker.getSessionFactoryHelper().getFactory(), walker );
 	}
 
 	public String[] getSqlStatements() {
-		return deletes;
+		return deleteHandler.getSqlStatements();
 	}
 
 	public int execute(QueryParameters parameters, SessionImplementor session) throws HibernateException {
-		coordinateSharedCacheCleanup( session );
-
-		createTemporaryTableIfNecessary( persister, session );
-
-		try {
-			// First, save off the pertinent ids, saving the number of pertinent ids for return
-			PreparedStatement ps = null;
-			int resultCount = 0;
-			try {
-				try {
-					ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( idInsertSelect, false );
-					Iterator paramSpecifications = getIdSelectParameterSpecifications().iterator();
-					int pos = 1;
-					while ( paramSpecifications.hasNext() ) {
-						final ParameterSpecification paramSpec = ( ParameterSpecification ) paramSpecifications.next();
-						pos += paramSpec.bind( ps, parameters, session, pos );
-					}
-					resultCount = ps.executeUpdate();
-				}
-				finally {
-					if ( ps != null ) {
-						ps.close();
-					}
-				}
-			}
-			catch( SQLException e ) {
-				throw getFactory().getSQLExceptionHelper().convert(
-						e,
-						"could not insert/select ids for bulk delete",
-						idInsertSelect
-				);
-			}
-
-			// Start performing the deletes
-			for ( int i = 0; i < deletes.length; i++ ) {
-				try {
-					try {
-						ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( deletes[i], false );
-						ps.executeUpdate();
-					}
-					finally {
-						if ( ps != null ) {
-							ps.close();
-						}
-					}
-				}
-				catch( SQLException e ) {
-					throw getFactory().getSQLExceptionHelper().convert(
-							e,
-							"error performing bulk delete",
-							deletes[i]
-					);
-				}
-			}
-
-			return resultCount;
-		}
-		finally {
-			dropTemporaryTableIfNecessary( persister, session );
-		}
-	}
-
-	@Override
-	protected Queryable[] getAffectedQueryables() {
-		return new Queryable[] { persister };
+		BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, deleteHandler.getTargetedQueryable() );
+		if ( session.isEventSource() ) {
+			( (EventSource) session ).getActionQueue().addAction( action );
+		}
+		else {
+			action.getAfterTransactionCompletionProcess().doAfterTransactionCompletion( true, session );
+		}
+
+		return deleteHandler.execute( session, parameters );
 	}
 }


@@ -24,178 +24,44 @@
  */
 package org.hibernate.hql.internal.ast.exec;
 
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-import org.jboss.logging.Logger;
-
 import org.hibernate.HibernateException;
+import org.hibernate.action.internal.BulkOperationCleanupAction;
 import org.hibernate.engine.spi.QueryParameters;
 import org.hibernate.engine.spi.SessionImplementor;
+import org.hibernate.event.spi.EventSource;
 import org.hibernate.hql.internal.ast.HqlSqlWalker;
-import org.hibernate.hql.internal.ast.tree.AssignmentSpecification;
-import org.hibernate.hql.internal.ast.tree.FromElement;
-import org.hibernate.hql.internal.ast.tree.UpdateStatement;
-import org.hibernate.internal.CoreMessageLogger;
-import org.hibernate.internal.util.StringHelper;
-import org.hibernate.param.ParameterSpecification;
-import org.hibernate.persister.entity.Queryable;
-import org.hibernate.sql.Update;
+import org.hibernate.hql.spi.MultiTableBulkIdStrategy;
 
 /**
  * Implementation of MultiTableUpdateExecutor.
  *
  * @author Steve Ebersole
 */
-public class MultiTableUpdateExecutor extends AbstractStatementExecutor {
-	private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class,
-			MultiTableUpdateExecutor.class.getName());
-
-	private final Queryable persister;
-	private final String idInsertSelect;
-	private final String[] updates;
-	private final ParameterSpecification[][] hqlParameters;
+public class MultiTableUpdateExecutor implements StatementExecutor {
+	private final MultiTableBulkIdStrategy.UpdateHandler updateHandler;
 
 	public MultiTableUpdateExecutor(HqlSqlWalker walker) {
-		super(walker, null);
-
-		if ( !walker.getSessionFactoryHelper().getFactory().getDialect().supportsTemporaryTables() ) {
-			throw new HibernateException( "cannot doAfterTransactionCompletion multi-table updates using dialect not supporting temp tables" );
-		}
-
-		UpdateStatement updateStatement = ( UpdateStatement ) walker.getAST();
-		FromElement fromElement = updateStatement.getFromClause().getFromElement();
-		String bulkTargetAlias = fromElement.getTableAlias();
-		this.persister = fromElement.getQueryable();
-
-		this.idInsertSelect = generateIdInsertSelect( persister, bulkTargetAlias, updateStatement.getWhereClause() );
-		LOG.tracev( "Generated ID-INSERT-SELECT SQL (multi-table update) : {0}", idInsertSelect );
-
-		String[] tableNames = persister.getConstraintOrderedTableNameClosure();
-		String[][] columnNames = persister.getContraintOrderedTableKeyColumnClosure();
-		String idSubselect = generateIdSubselect( persister );
-		List assignmentSpecifications = walker.getAssignmentSpecifications();
-
-		updates = new String[tableNames.length];
-		hqlParameters = new ParameterSpecification[tableNames.length][];
-		for ( int tableIndex = 0; tableIndex < tableNames.length; tableIndex++ ) {
-			boolean affected = false;
-			List parameterList = new ArrayList();
-			Update update = new Update( getFactory().getDialect() )
-					.setTableName( tableNames[tableIndex] )
-					.setWhere( "(" + StringHelper.join( ", ", columnNames[tableIndex] ) + ") IN (" + idSubselect + ")" );
-			if ( getFactory().getSettings().isCommentsEnabled() ) {
-				update.setComment( "bulk update" );
-			}
-			final Iterator itr = assignmentSpecifications.iterator();
-			while ( itr.hasNext() ) {
-				final AssignmentSpecification specification = ( AssignmentSpecification ) itr.next();
-				if ( specification.affectsTable( tableNames[tableIndex] ) ) {
-					affected = true;
-					update.appendAssignmentFragment( specification.getSqlAssignmentFragment() );
-					if ( specification.getParameters() != null ) {
-						for ( int paramIndex = 0; paramIndex < specification.getParameters().length; paramIndex++ ) {
-							parameterList.add( specification.getParameters()[paramIndex] );
-						}
-					}
-				}
-			}
-			if ( affected ) {
-				updates[tableIndex] = update.toStatementString();
-				hqlParameters[tableIndex] = ( ParameterSpecification[] ) parameterList.toArray( new ParameterSpecification[0] );
-			}
-		}
-	}
-
-	public Queryable getAffectedQueryable() {
-		return persister;
+		MultiTableBulkIdStrategy strategy = walker.getSessionFactoryHelper()
+				.getFactory()
+				.getSettings()
+				.getMultiTableBulkIdStrategy();
+		this.updateHandler = strategy.buildUpdateHandler( walker.getSessionFactoryHelper().getFactory(), walker );
 	}
 
 	public String[] getSqlStatements() {
-		return updates;
+		return updateHandler.getSqlStatements();
 	}
 
 	public int execute(QueryParameters parameters, SessionImplementor session) throws HibernateException {
-		coordinateSharedCacheCleanup( session );
-
-		createTemporaryTableIfNecessary( persister, session );
-
-		try {
-			// First, save off the pertinent ids, as the return value
-			PreparedStatement ps = null;
-			int resultCount = 0;
-			try {
-				try {
-					ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( idInsertSelect, false );
-//					int parameterStart = getWalker().getNumberOfParametersInSetClause();
-//					List allParams = getIdSelectParameterSpecifications();
-//					Iterator whereParams = allParams.subList( parameterStart, allParams.size() ).iterator();
-					Iterator whereParams = getIdSelectParameterSpecifications().iterator();
-					int sum = 1; // jdbc params are 1-based
-					while ( whereParams.hasNext() ) {
-						sum += ( ( ParameterSpecification ) whereParams.next() ).bind( ps, parameters, session, sum );
-					}
-					resultCount = ps.executeUpdate();
-				}
-				finally {
-					if ( ps != null ) {
-						ps.close();
-					}
-				}
-			}
-			catch( SQLException e ) {
-				throw getFactory().getSQLExceptionHelper().convert(
-						e,
-						"could not insert/select ids for bulk update",
-						idInsertSelect
-				);
-			}
-
-			// Start performing the updates
-			for ( int i = 0; i < updates.length; i++ ) {
-				if ( updates[i] == null ) {
-					continue;
-				}
-				try {
-					try {
-						ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( updates[i], false );
-						if ( hqlParameters[i] != null ) {
-							int position = 1; // jdbc params are 1-based
-							for ( int x = 0; x < hqlParameters[i].length; x++ ) {
-								position += hqlParameters[i][x].bind( ps, parameters, session, position );
-							}
-						}
-						ps.executeUpdate();
-					}
-					finally {
-						if ( ps != null ) {
-							ps.close();
-						}
-					}
-				}
-				catch( SQLException e ) {
-					throw getFactory().getSQLExceptionHelper().convert(
-							e,
-							"error performing bulk update",
-							updates[i]
-					);
-				}
-			}
-
-			return resultCount;
-		}
-		finally {
-			dropTemporaryTableIfNecessary( persister, session );
-		}
-	}
-
-	@Override
-	protected Queryable[] getAffectedQueryables() {
-		return new Queryable[] { persister };
+		BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, updateHandler.getTargetedQueryable() );
+		if ( session.isEventSource() ) {
+			( (EventSource) session ).getActionQueue().addAction( action );
+		}
+		else {
+			action.getAfterTransactionCompletionProcess().doAfterTransactionCompletion( true, session );
+		}
+
+		return updateHandler.execute( session, parameters );
 	}
 }


@@ -0,0 +1,173 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.hql.spi;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import antlr.RecognitionException;
import antlr.collections.AST;
import org.hibernate.HibernateException;
import org.hibernate.JDBCException;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.hql.internal.ast.HqlSqlWalker;
import org.hibernate.hql.internal.ast.SqlGenerator;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.param.ParameterSpecification;
import org.hibernate.persister.entity.Queryable;
import org.hibernate.sql.InsertSelect;
import org.hibernate.sql.Select;
import org.hibernate.sql.SelectFragment;
/**
* @author Steve Ebersole
*/
public class AbstractTableBasedBulkIdHandler {
private final SessionFactoryImplementor sessionFactory;
private final HqlSqlWalker walker;
public AbstractTableBasedBulkIdHandler(SessionFactoryImplementor sessionFactory, HqlSqlWalker walker) {
this.sessionFactory = sessionFactory;
this.walker = walker;
}
protected SessionFactoryImplementor factory() {
return sessionFactory;
}
protected HqlSqlWalker walker() {
return walker;
}
protected JDBCException convert(SQLException e, String message, String sql) {
throw factory().getSQLExceptionHelper().convert( e, message, sql );
}
protected static class ProcessedWhereClause {
public static final ProcessedWhereClause NO_WHERE_CLAUSE = new ProcessedWhereClause();
private final String userWhereClauseFragment;
private final List<ParameterSpecification> idSelectParameterSpecifications;
private ProcessedWhereClause() {
this( "", Collections.<ParameterSpecification>emptyList() );
}
public ProcessedWhereClause(String userWhereClauseFragment, List<ParameterSpecification> idSelectParameterSpecifications) {
this.userWhereClauseFragment = userWhereClauseFragment;
this.idSelectParameterSpecifications = idSelectParameterSpecifications;
}
public String getUserWhereClauseFragment() {
return userWhereClauseFragment;
}
public List<ParameterSpecification> getIdSelectParameterSpecifications() {
return idSelectParameterSpecifications;
}
}
@SuppressWarnings("unchecked")
protected ProcessedWhereClause processWhereClause(AST whereClause) {
if ( whereClause.getNumberOfChildren() != 0 ) {
// If a where clause was specified in the update/delete query, use it to limit the
// returned ids here...
try {
SqlGenerator sqlGenerator = new SqlGenerator( sessionFactory );
sqlGenerator.whereClause( whereClause );
String userWhereClause = sqlGenerator.getSQL().substring( 7 ); // strip the " where "
List<ParameterSpecification> idSelectParameterSpecifications = sqlGenerator.getCollectedParameters();
return new ProcessedWhereClause( userWhereClause, idSelectParameterSpecifications );
}
catch ( RecognitionException e ) {
throw new HibernateException( "Unable to generate id select for DML operation", e );
}
}
else {
return ProcessedWhereClause.NO_WHERE_CLAUSE;
}
}
protected String generateIdInsertSelect(Queryable persister, String tableAlias, ProcessedWhereClause whereClause) {
Select select = new Select( sessionFactory.getDialect() );
SelectFragment selectFragment = new SelectFragment()
.addColumns( tableAlias, persister.getIdentifierColumnNames(), persister.getIdentifierColumnNames() );
select.setSelectClause( selectFragment.toFragmentString().substring( 2 ) + extraIdSelectValues() );
String rootTableName = persister.getTableName();
String fromJoinFragment = persister.fromJoinFragment( tableAlias, true, false );
String whereJoinFragment = persister.whereJoinFragment( tableAlias, true, false );
select.setFromClause( rootTableName + ' ' + tableAlias + fromJoinFragment );
if ( whereJoinFragment == null ) {
whereJoinFragment = "";
}
else {
whereJoinFragment = whereJoinFragment.trim();
if ( whereJoinFragment.startsWith( "and" ) ) {
whereJoinFragment = whereJoinFragment.substring( 4 );
}
}
if ( whereClause.userWhereClauseFragment.length() > 0 ) {
if ( whereJoinFragment.length() > 0 ) {
whereJoinFragment += " and ";
}
}
select.setWhereClause( whereJoinFragment + whereClause.userWhereClauseFragment );
InsertSelect insert = new InsertSelect( sessionFactory.getDialect() );
if ( sessionFactory.getSettings().isCommentsEnabled() ) {
insert.setComment( "insert-select for " + persister.getEntityName() + " ids" );
}
insert.setTableName( determineIdTableName( persister ) );
insert.setSelect( select );
return insert.toStatementString();
}
protected String extraIdSelectValues() {
return "";
}
protected String determineIdTableName(Queryable persister) {
return persister.getTemporaryIdTableName();
}
protected String generateIdSubselect(Queryable persister) {
return "select " + StringHelper.join( ", ", persister.getIdentifierColumnNames() ) +
" from " + determineIdTableName( persister );
}
protected void prepareForUse(Queryable persister, SessionImplementor session) {
}
protected void releaseFromUse(Queryable persister, SessionImplementor session) {
}
}
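
To make the two kinds of generated SQL concrete (an illustration, not output captured from Hibernate): for a hypothetical PremiumCustomer subclass of Customer mapped with joined inheritance, generateIdInsertSelect() and generateIdSubselect() would yield statements roughly of this shape. Table, alias and column names are invented; the id table name comes from Queryable#getTemporaryIdTableName(), conventionally the entity table name prefixed with HT_:

// id collection step: capture the ids of all matching rows into the id table
String idInsertSelect =
		"insert into HT_PremiumCustomer select pc.id from PremiumCustomer pc"
		+ " inner join Customer c on pc.id=c.id where c.loyaltyPoints<?";

// per-table statements then restrict on the captured ids via the id subselect
String deleteSubclassTable =
		"delete from PremiumCustomer where (id) IN (select id from HT_PremiumCustomer)";
String deleteRootTable =
		"delete from Customer where (id) IN (select id from HT_PremiumCustomer)";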


@@ -0,0 +1,105 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.hql.spi;
import java.util.Map;
import org.hibernate.cfg.Mappings;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcConnectionAccess;
import org.hibernate.engine.spi.Mapping;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.hql.internal.ast.HqlSqlWalker;
import org.hibernate.persister.entity.Queryable;
/**
* Generalized strategy contract for handling multi-table bulk HQL operations.
*
* @author Steve Ebersole
*/
public interface MultiTableBulkIdStrategy {
/**
* Prepare the strategy. Called as the SessionFactory is being built. Intended patterns here include:<ul>
 * <li>Adding tables to the passed Mappings, to be picked up by "schema management tools"</li>
* <li>Manually creating the tables immediately through the passed JDBC Connection access</li>
* </ul>
*
* @param dialect The dialect
* @param connectionAccess Access to the JDBC Connection
* @param mappings The Hibernate Mappings object, for access to O/RM mapping information
* @param mapping The Hibernate Mapping contract, mainly for use in DDL generation
* @param settings Configuration settings
*/
public void prepare(Dialect dialect, JdbcConnectionAccess connectionAccess, Mappings mappings, Mapping mapping, Map settings);
/**
* Release the strategy. Called as the SessionFactory is being shut down.
*
* @param dialect The dialect
* @param connectionAccess Access to the JDBC Connection
*/
public void release(Dialect dialect, JdbcConnectionAccess connectionAccess);
/**
* Handler for dealing with multi-table HQL bulk update statements.
*/
public static interface UpdateHandler {
public Queryable getTargetedQueryable();
public String[] getSqlStatements();
public int execute(SessionImplementor session, QueryParameters queryParameters);
}
/**
* Build a handler capable of handling the bulk update indicated by the given walker.
*
* @param factory The SessionFactory
* @param walker The AST walker, representing the update query
*
* @return The handler
*/
public UpdateHandler buildUpdateHandler(SessionFactoryImplementor factory, HqlSqlWalker walker);
/**
* Handler for dealing with multi-table HQL bulk delete statements.
*/
public static interface DeleteHandler {
public Queryable getTargetedQueryable();
public String[] getSqlStatements();
public int execute(SessionImplementor session, QueryParameters queryParameters);
}
/**
* Build a handler capable of handling the bulk delete indicated by the given walker.
*
* @param factory The SessionFactory
* @param walker The AST walker, representing the delete query
*
* @return The handler
*/
public DeleteHandler buildDeleteHandler(SessionFactoryImplementor factory, HqlSqlWalker walker);
}
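
Because the strategy is resolved from hibernate.hql.bulk_id_strategy, plugging in custom handling only requires implementing this interface. A bare skeleton follows (hypothetical class and package names; handler construction is deliberately left unimplemented), selectable via hibernate.hql.bulk_id_strategy=com.example.hql.CustomBulkIdStrategy:

package com.example.hql; // hypothetical user package

import java.util.Map;

import org.hibernate.cfg.Mappings;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcConnectionAccess;
import org.hibernate.engine.spi.Mapping;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.hql.internal.ast.HqlSqlWalker;
import org.hibernate.hql.spi.MultiTableBulkIdStrategy;

public class CustomBulkIdStrategy implements MultiTableBulkIdStrategy {
	@Override
	public void prepare(Dialect dialect, JdbcConnectionAccess connectionAccess, Mappings mappings, Mapping mapping, Map settings) {
		// e.g. add id tables to the Mappings for schema tooling, or create them here via connectionAccess
	}

	@Override
	public void release(Dialect dialect, JdbcConnectionAccess connectionAccess) {
		// undo whatever prepare() did, if anything
	}

	@Override
	public UpdateHandler buildUpdateHandler(SessionFactoryImplementor factory, HqlSqlWalker walker) {
		// return an UpdateHandler that collects the matching ids and runs the per-table updates
		throw new UnsupportedOperationException( "not implemented in this sketch" );
	}

	@Override
	public DeleteHandler buildDeleteHandler(SessionFactoryImplementor factory, HqlSqlWalker walker) {
		// return a DeleteHandler that collects the matching ids and runs the per-table deletes
		throw new UnsupportedOperationException( "not implemented in this sketch" );
	}
}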


@@ -0,0 +1,241 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.hql.spi;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.jboss.logging.Logger;
import org.hibernate.HibernateException;
import org.hibernate.cfg.Mappings;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcConnectionAccess;
import org.hibernate.engine.spi.Mapping;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.hql.internal.ast.HqlSqlWalker;
import org.hibernate.internal.AbstractSessionImpl;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Table;
import org.hibernate.persister.entity.Queryable;
import org.hibernate.type.UUIDCharType;
/**
* @author Steve Ebersole
*/
public class PersistentTableBulkIdStrategy implements MultiTableBulkIdStrategy {
private static final Logger log = Logger.getLogger( PersistentTableBulkIdStrategy.class );
public static final String CLEAN_UP_ID_TABLES = "hibernate.hql.bulk_id_strategy.persistent.clean_up";
private boolean cleanUpTables;
private List<String> tableCleanUpDdl;
@Override
public void prepare(
Dialect dialect,
JdbcConnectionAccess connectionAccess,
Mappings mappings,
Mapping mapping,
Map settings) {
cleanUpTables = ConfigurationHelper.getBoolean( CLEAN_UP_ID_TABLES, settings, false );
final Iterator<PersistentClass> entityMappings = mappings.iterateClasses();
final List<Table> idTableDefinitions = new ArrayList<Table>();
while ( entityMappings.hasNext() ) {
final PersistentClass entityMapping = entityMappings.next();
final Table idTableDefinition = generateIdTableDefinition( entityMapping );
idTableDefinitions.add( idTableDefinition );
}
exportTableDefinitions( idTableDefinitions, dialect, connectionAccess, mappings, mapping );
}
protected Table generateIdTableDefinition(PersistentClass entityMapping) {
Table idTable = new Table( entityMapping.getTemporaryIdTableName() );
Iterator itr = entityMapping.getIdentityTable().getPrimaryKey().getColumnIterator();
while( itr.hasNext() ) {
Column column = (Column) itr.next();
idTable.addColumn( column.clone() );
}
Column sessionIdColumn = new Column( "hib_sess_id" );
sessionIdColumn.setSqlType( "CHAR(36)" );
sessionIdColumn.setComment( "Used to hold the Hibernate Session identifier" );
idTable.addColumn( sessionIdColumn );
idTable.setComment( "Used to hold id values for the " + entityMapping.getEntityName() + " class" );
return idTable;
}
protected void exportTableDefinitions(
List<Table> idTableDefinitions,
Dialect dialect,
JdbcConnectionAccess connectionAccess,
Mappings mappings,
Mapping mapping) {
try {
Connection connection = connectionAccess.obtainConnection();
try {
Statement statement = connection.createStatement();
for ( Table idTableDefinition : idTableDefinitions ) {
if ( cleanUpTables ) {
if ( tableCleanUpDdl == null ) {
tableCleanUpDdl = new ArrayList<String>();
}
tableCleanUpDdl.add( idTableDefinition.sqlDropString( dialect, null, null ) );
}
try {
statement.execute( idTableDefinition.sqlCreateString( dialect, mapping, null, null ) );
}
catch (SQLException e) {
log.debugf( "Error attempting to export id-table [%s] : %s", idTableDefinition.getName(), e.getMessage() );
}
}
}
catch (SQLException e) {
log.error( "Unable to use JDBC Connection to create Statement", e );
}
finally {
try {
connectionAccess.releaseConnection( connection );
}
catch (SQLException ignore) {
}
}
}
catch (SQLException e) {
log.error( "Unable obtain JDBC Connection", e );
}
}
@Override
public void release(Dialect dialect, JdbcConnectionAccess connectionAccess) {
if ( ! cleanUpTables ) {
return;
}
try {
Connection connection = connectionAccess.obtainConnection();
try {
Statement statement = connection.createStatement();
for ( String cleanupDdl : tableCleanUpDdl ) {
try {
statement.execute( cleanupDdl );
}
catch (SQLException e) {
log.debugf( "Error attempting to cleanup id-table : [%s]", e.getMessage() );
}
}
}
catch (SQLException e) {
log.error( "Unable to use JDBC Connection to create Statement", e );
}
finally {
try {
connectionAccess.releaseConnection( connection );
}
catch (SQLException ignore) {
}
}
}
catch (SQLException e) {
log.error( "Unable obtain JDBC Connection", e );
}
}
@Override
public UpdateHandler buildUpdateHandler(SessionFactoryImplementor factory, HqlSqlWalker walker) {
return new TableBasedUpdateHandlerImpl( factory, walker ) {
@Override
protected String extraIdSelectValues() {
return "cast(? as char)";
}
@Override
protected String generateIdSubselect(Queryable persister) {
return super.generateIdSubselect( persister ) + " where hib_sess_id=?";
}
@Override
protected int handlePrependedParametersOnIdSelection(PreparedStatement ps, SessionImplementor session, int pos) throws SQLException {
if ( ! AbstractSessionImpl.class.isInstance( session ) ) {
throw new HibernateException( "Only available on SessionImpl instances" );
}
UUIDCharType.INSTANCE.set( ps, ( (AbstractSessionImpl) session ).getSessionIdentifier(), pos, session );
return 1;
}
@Override
protected void handleAddedParametersOnUpdate(PreparedStatement ps, SessionImplementor session, int position) throws SQLException {
if ( ! AbstractSessionImpl.class.isInstance( session ) ) {
throw new HibernateException( "Only available on SessionImpl instances" );
}
UUIDCharType.INSTANCE.set( ps, ( (AbstractSessionImpl) session ).getSessionIdentifier(), position, session );
}
};
}
@Override
public DeleteHandler buildDeleteHandler(SessionFactoryImplementor factory, HqlSqlWalker walker) {
return new TableBasedDeleteHandlerImpl( factory, walker ) {
@Override
protected String extraIdSelectValues() {
return "cast(? as char)";
}
@Override
protected String generateIdSubselect(Queryable persister) {
return super.generateIdSubselect( persister ) + " where hib_sess_id=?";
}
@Override
protected int handlePrependedParametersOnIdSelection(PreparedStatement ps, SessionImplementor session, int pos) throws SQLException {
if ( ! AbstractSessionImpl.class.isInstance( session ) ) {
throw new HibernateException( "Only available on SessionImpl instances" );
}
UUIDCharType.INSTANCE.set( ps, ( (AbstractSessionImpl) session ).getSessionIdentifier(), pos, session );
return 1;
}
@Override
protected void handleAddedParametersOnDelete(PreparedStatement ps, SessionImplementor session) throws SQLException {
if ( ! AbstractSessionImpl.class.isInstance( session ) ) {
throw new HibernateException( "Only available on SessionImpl instances" );
}
UUIDCharType.INSTANCE.set( ps, ( (AbstractSessionImpl) session ).getSessionIdentifier(), 1, session );
}
};
}
}
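
The persistent-table variant creates its id tables up front in prepare() (directly through the JDBC connection) and tags each inserted row with the owning session's identifier in the hib_sess_id column; with hibernate.hql.bulk_id_strategy.persistent.clean_up enabled, release() drops those tables again at SessionFactory shutdown. A configuration sketch, continuing the earlier hypothetical bootstrap (imports as in that example):

Configuration cfg = new Configuration();
cfg.setProperty( AvailableSettings.HQL_BULK_ID_STRATEGY, PersistentTableBulkIdStrategy.class.getName() );
// also drop the generated id tables when the SessionFactory is closed
cfg.setProperty( PersistentTableBulkIdStrategy.CLEAN_UP_ID_TABLES, "true" );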


@@ -0,0 +1,164 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.hql.spi;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;
import org.jboss.logging.Logger;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.hql.internal.ast.HqlSqlWalker;
import org.hibernate.hql.internal.ast.tree.DeleteStatement;
import org.hibernate.hql.internal.ast.tree.FromElement;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.param.ParameterSpecification;
import org.hibernate.persister.entity.Queryable;
import org.hibernate.sql.Delete;
/**
* @author Steve Ebersole
*/
class TableBasedDeleteHandlerImpl
extends AbstractTableBasedBulkIdHandler
implements MultiTableBulkIdStrategy.DeleteHandler {
private static final Logger log = Logger.getLogger( TableBasedDeleteHandlerImpl.class );
private final Queryable targetedPersister;
private final String idInsertSelect;
private final List<ParameterSpecification> idSelectParameterSpecifications;
private final String[] deletes;
TableBasedDeleteHandlerImpl(SessionFactoryImplementor factory, HqlSqlWalker walker) {
super( factory, walker );
DeleteStatement deleteStatement = ( DeleteStatement ) walker.getAST();
FromElement fromElement = deleteStatement.getFromClause().getFromElement();
this.targetedPersister = fromElement.getQueryable();
final String bulkTargetAlias = fromElement.getTableAlias();
final ProcessedWhereClause processedWhereClause = processWhereClause( deleteStatement.getWhereClause() );
this.idSelectParameterSpecifications = processedWhereClause.getIdSelectParameterSpecifications();
this.idInsertSelect = generateIdInsertSelect( targetedPersister, bulkTargetAlias, processedWhereClause );
log.tracev( "Generated ID-INSERT-SELECT SQL (multi-table delete) : {0}", idInsertSelect );
String[] tableNames = targetedPersister.getConstraintOrderedTableNameClosure();
String[][] columnNames = targetedPersister.getContraintOrderedTableKeyColumnClosure();
String idSubselect = generateIdSubselect( targetedPersister );
deletes = new String[tableNames.length];
for ( int i = tableNames.length - 1; i >= 0; i-- ) {
// TODO : an optimization here would be to consider cascade deletes and not gen those delete statements;
// the difficulty is the ordering of the tables here vs the cascade attributes on the persisters ->
// the table info gotten here should really be self-contained (i.e., a class representation
// defining all the needed attributes), then we could then get an array of those
final Delete delete = new Delete()
.setTableName( tableNames[i] )
.setWhere( "(" + StringHelper.join( ", ", columnNames[i] ) + ") IN (" + idSubselect + ")" );
if ( factory().getSettings().isCommentsEnabled() ) {
delete.setComment( "bulk delete" );
}
deletes[i] = delete.toStatementString();
}
}
@Override
public Queryable getTargetedQueryable() {
return targetedPersister;
}
@Override
public String[] getSqlStatements() {
return deletes;
}
@Override
public int execute(SessionImplementor session, QueryParameters queryParameters) {
prepareForUse( targetedPersister, session );
try {
PreparedStatement ps = null;
int resultCount = 0;
try {
try {
ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( idInsertSelect, false );
int pos = 1;
pos += handlePrependedParametersOnIdSelection( ps, session, pos );
for ( ParameterSpecification parameterSpecification : idSelectParameterSpecifications ) {
pos += parameterSpecification.bind( ps, queryParameters, session, pos );
}
resultCount = ps.executeUpdate();
}
finally {
if ( ps != null ) {
ps.close();
}
}
}
catch( SQLException e ) {
throw convert( e, "could not insert/select ids for bulk delete", idInsertSelect );
}
// Start performing the deletes
for ( String delete : deletes ) {
try {
try {
ps = session.getTransactionCoordinator()
.getJdbcCoordinator()
.getStatementPreparer()
.prepareStatement( delete, false );
handleAddedParametersOnDelete( ps, session );
ps.executeUpdate();
}
finally {
if ( ps != null ) {
ps.close();
}
}
}
catch (SQLException e) {
throw convert( e, "error performing bulk delete", delete );
}
}
return resultCount;
}
finally {
releaseFromUse( targetedPersister, session );
}
}
protected int handlePrependedParametersOnIdSelection(PreparedStatement ps, SessionImplementor session, int pos) throws SQLException {
return 0;
}
protected void handleAddedParametersOnDelete(PreparedStatement ps, SessionImplementor session) throws SQLException {
}
}


@@ -0,0 +1,190 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.hql.spi;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.jboss.logging.Logger;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.hql.internal.ast.HqlSqlWalker;
import org.hibernate.hql.internal.ast.tree.AssignmentSpecification;
import org.hibernate.hql.internal.ast.tree.FromElement;
import org.hibernate.hql.internal.ast.tree.UpdateStatement;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.param.ParameterSpecification;
import org.hibernate.persister.entity.Queryable;
import org.hibernate.sql.Update;
/**
* @author Steve Ebersole
*/
public class TableBasedUpdateHandlerImpl
extends AbstractTableBasedBulkIdHandler
implements MultiTableBulkIdStrategy.UpdateHandler {
private static final Logger log = Logger.getLogger( TableBasedUpdateHandlerImpl.class );
private final Queryable targetedPersister;
private final String idInsertSelect;
private final List<ParameterSpecification> idSelectParameterSpecifications;
private final String[] updates;
private final ParameterSpecification[][] assignmentParameterSpecifications;
@SuppressWarnings("unchecked")
TableBasedUpdateHandlerImpl(SessionFactoryImplementor factory, HqlSqlWalker walker) {
super( factory, walker );
UpdateStatement updateStatement = ( UpdateStatement ) walker.getAST();
FromElement fromElement = updateStatement.getFromClause().getFromElement();
this.targetedPersister = fromElement.getQueryable();
final String bulkTargetAlias = fromElement.getTableAlias();
final ProcessedWhereClause processedWhereClause = processWhereClause( updateStatement.getWhereClause() );
this.idSelectParameterSpecifications = processedWhereClause.getIdSelectParameterSpecifications();
this.idInsertSelect = generateIdInsertSelect( targetedPersister, bulkTargetAlias, processedWhereClause );
log.tracev( "Generated ID-INSERT-SELECT SQL (multi-table update) : {0}", idInsertSelect );
String[] tableNames = targetedPersister.getConstraintOrderedTableNameClosure();
String[][] columnNames = targetedPersister.getContraintOrderedTableKeyColumnClosure();
String idSubselect = generateIdSubselect( targetedPersister );
updates = new String[tableNames.length];
assignmentParameterSpecifications = new ParameterSpecification[tableNames.length][];
for ( int tableIndex = 0; tableIndex < tableNames.length; tableIndex++ ) {
boolean affected = false;
final List<ParameterSpecification> parameterList = new ArrayList<ParameterSpecification>();
final Update update = new Update( factory().getDialect() )
.setTableName( tableNames[tableIndex] )
.setWhere( "(" + StringHelper.join( ", ", columnNames[tableIndex] ) + ") IN (" + idSubselect + ")" );
if ( factory().getSettings().isCommentsEnabled() ) {
update.setComment( "bulk update" );
}
final List<AssignmentSpecification> assignmentSpecifications = walker.getAssignmentSpecifications();
for ( AssignmentSpecification assignmentSpecification : assignmentSpecifications ) {
if ( assignmentSpecification.affectsTable( tableNames[tableIndex] ) ) {
affected = true;
update.appendAssignmentFragment( assignmentSpecification.getSqlAssignmentFragment() );
if ( assignmentSpecification.getParameters() != null ) {
for ( int paramIndex = 0; paramIndex < assignmentSpecification.getParameters().length; paramIndex++ ) {
parameterList.add( assignmentSpecification.getParameters()[paramIndex] );
}
}
}
}
if ( affected ) {
updates[tableIndex] = update.toStatementString();
assignmentParameterSpecifications[tableIndex] = parameterList.toArray( new ParameterSpecification[parameterList.size()] );
}
}
}
@Override
public Queryable getTargetedQueryable() {
return targetedPersister;
}
@Override
public String[] getSqlStatements() {
return updates;
}
@Override
public int execute(SessionImplementor session, QueryParameters queryParameters) {
prepareForUse( targetedPersister, session );
try {
// First, save off the pertinent ids, as the return value
PreparedStatement ps = null;
int resultCount = 0;
try {
try {
ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( idInsertSelect, false );
int sum = 1;
sum += handlePrependedParametersOnIdSelection( ps, session, sum );
for ( ParameterSpecification parameterSpecification : idSelectParameterSpecifications ) {
sum += parameterSpecification.bind( ps, queryParameters, session, sum );
}
resultCount = ps.executeUpdate();
}
finally {
if ( ps != null ) {
ps.close();
}
}
}
catch( SQLException e ) {
throw convert( e, "could not insert/select ids for bulk update", idInsertSelect );
}
// Start performing the updates
for ( int i = 0; i < updates.length; i++ ) {
if ( updates[i] == null ) {
continue;
}
try {
try {
ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( updates[i], false );
if ( assignmentParameterSpecifications[i] != null ) {
int position = 1; // jdbc params are 1-based
for ( int x = 0; x < assignmentParameterSpecifications[i].length; x++ ) {
position += assignmentParameterSpecifications[i][x].bind( ps, queryParameters, session, position );
}
handleAddedParametersOnUpdate( ps, session, position );
}
ps.executeUpdate();
}
finally {
if ( ps != null ) {
ps.close();
}
}
}
catch( SQLException e ) {
throw convert( e, "error performing bulk update", updates[i] );
}
}
return resultCount;
}
finally {
releaseFromUse( targetedPersister, session );
}
}
protected int handlePrependedParametersOnIdSelection(PreparedStatement ps, SessionImplementor session, int pos) throws SQLException {
return 0;
}
protected void handleAddedParametersOnUpdate(PreparedStatement ps, SessionImplementor session, int position) throws SQLException {
//To change body of created methods use File | Settings | File Templates.
}
}


@ -1,7 +1,7 @@
 /*
  * Hibernate, Relational Persistence for Idiomatic Java
  *
- * Copyright (c) 2010, Red Hat Inc. or third-party contributors as
+ * Copyright (c) 2012, Red Hat Inc. or third-party contributors as
  * indicated by the @author tags or express copyright attribution
  * statements applied by the authors. All third-party contributions are
  * distributed under license by Red Hat Inc.
@ -21,122 +21,155 @@
  * 51 Franklin Street, Fifth Floor
  * Boston, MA 02110-1301 USA
  */
-package org.hibernate.hql.internal.ast.exec;
+package org.hibernate.hql.spi;

 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.SQLWarning;
 import java.sql.Statement;
-import java.util.Collections;
-import java.util.List;
-
-import antlr.RecognitionException;
-import antlr.collections.AST;

 import org.jboss.logging.Logger;

-import org.hibernate.HibernateException;
-import org.hibernate.action.internal.BulkOperationCleanupAction;
+import org.hibernate.cfg.Mappings;
+import org.hibernate.dialect.Dialect;
+import org.hibernate.engine.jdbc.spi.JdbcConnectionAccess;
 import org.hibernate.engine.jdbc.spi.JdbcServices;
 import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
+import org.hibernate.engine.spi.Mapping;
 import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.engine.spi.SessionImplementor;
-import org.hibernate.event.spi.EventSource;
 import org.hibernate.hql.internal.ast.HqlSqlWalker;
-import org.hibernate.hql.internal.ast.SqlGenerator;
 import org.hibernate.internal.CoreMessageLogger;
-import org.hibernate.internal.util.StringHelper;
 import org.hibernate.jdbc.AbstractWork;
 import org.hibernate.persister.entity.Queryable;
-import org.hibernate.sql.InsertSelect;
-import org.hibernate.sql.Select;
-import org.hibernate.sql.SelectFragment;

 /**
- * Implementation of AbstractStatementExecutor.
- *
  * @author Steve Ebersole
  */
-public abstract class AbstractStatementExecutor implements StatementExecutor {
-	private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class,
-			AbstractStatementExecutor.class.getName());
-
-	private final HqlSqlWalker walker;
-	private List idSelectParameterSpecifications = Collections.EMPTY_LIST;
-
-	public AbstractStatementExecutor( HqlSqlWalker walker,
-	                                  CoreMessageLogger log ) {
-		this.walker = walker;
-	}
-
-	protected HqlSqlWalker getWalker() {
-		return walker;
-	}
-
-	protected SessionFactoryImplementor getFactory() {
-		return walker.getSessionFactoryHelper().getFactory();
-	}
-
-	protected List getIdSelectParameterSpecifications() {
-		return idSelectParameterSpecifications;
-	}
-
-	protected abstract Queryable[] getAffectedQueryables();
-
-	protected String generateIdInsertSelect(Queryable persister, String tableAlias, AST whereClause) {
-		Select select = new Select( getFactory().getDialect() );
-		SelectFragment selectFragment = new SelectFragment()
-				.addColumns( tableAlias, persister.getIdentifierColumnNames(), persister.getIdentifierColumnNames() );
-		select.setSelectClause( selectFragment.toFragmentString().substring( 2 ) );
-
-		String rootTableName = persister.getTableName();
-		String fromJoinFragment = persister.fromJoinFragment( tableAlias, true, false );
-		String whereJoinFragment = persister.whereJoinFragment( tableAlias, true, false );
-
-		select.setFromClause( rootTableName + ' ' + tableAlias + fromJoinFragment );
-
-		if ( whereJoinFragment == null ) {
-			whereJoinFragment = "";
-		}
-		else {
-			whereJoinFragment = whereJoinFragment.trim();
-			if ( whereJoinFragment.startsWith( "and" ) ) {
-				whereJoinFragment = whereJoinFragment.substring( 4 );
-			}
-		}
-
-		String userWhereClause = "";
-		if ( whereClause.getNumberOfChildren() != 0 ) {
-			// If a where clause was specified in the update/delete query, use it to limit the
-			// returned ids here...
-			try {
-				SqlGenerator sqlGenerator = new SqlGenerator( getFactory() );
-				sqlGenerator.whereClause( whereClause );
-				userWhereClause = sqlGenerator.getSQL().substring( 7 ); // strip the " where "
-				idSelectParameterSpecifications = sqlGenerator.getCollectedParameters();
-			}
-			catch ( RecognitionException e ) {
-				throw new HibernateException( "Unable to generate id select for DML operation", e );
-			}
-			if ( whereJoinFragment.length() > 0 ) {
-				whereJoinFragment += " and ";
-			}
-		}
-
-		select.setWhereClause( whereJoinFragment + userWhereClause );
-
-		InsertSelect insert = new InsertSelect( getFactory().getDialect() );
-		if ( getFactory().getSettings().isCommentsEnabled() ) {
-			insert.setComment( "insert-select for " + persister.getEntityName() + " ids" );
-		}
-		insert.setTableName( persister.getTemporaryIdTableName() );
-		insert.setSelect( select );
-		return insert.toStatementString();
-	}
-
-	protected String generateIdSubselect(Queryable persister) {
-		return "select " + StringHelper.join( ", ", persister.getIdentifierColumnNames() ) +
-				" from " + persister.getTemporaryIdTableName();
-	}
+public class TemporaryTableBulkIdStrategy implements MultiTableBulkIdStrategy {
+	public static final TemporaryTableBulkIdStrategy INSTANCE = new TemporaryTableBulkIdStrategy();
+
+	private static final CoreMessageLogger log = Logger.getMessageLogger(
+			CoreMessageLogger.class,
+			TemporaryTableBulkIdStrategy.class.getName()
+	);
+
+	@Override
+	public void prepare(Dialect dialect, JdbcConnectionAccess connectionAccess, Mappings mappings, Mapping mapping) {
+		// nothing to do
+	}
+
+	@Override
+	public void release(Dialect dialect, JdbcConnectionAccess connectionAccess) {
+		// nothing to do
+	}
+
+	@Override
+	public UpdateHandler buildUpdateHandler(SessionFactoryImplementor factory, HqlSqlWalker walker) {
+		return new TableBasedUpdateHandlerImpl( factory, walker ) {
+			@Override
+			protected void prepareForUse(Queryable persister, SessionImplementor session) {
+				createTempTable( persister, session );
+			}
+
+			@Override
+			protected void releaseFromUse(Queryable persister, SessionImplementor session) {
+				releaseTempTable( persister, session );
+			}
+		};
+	}
+
+	@Override
+	public DeleteHandler buildDeleteHandler(SessionFactoryImplementor factory, HqlSqlWalker walker) {
+		return new TableBasedDeleteHandlerImpl( factory, walker ) {
+			@Override
+			protected void prepareForUse(Queryable persister, SessionImplementor session) {
+				createTempTable( persister, session );
+			}
+
+			@Override
+			protected void releaseFromUse(Queryable persister, SessionImplementor session) {
+				releaseTempTable( persister, session );
+			}
+		};
+	}
+
+	protected void createTempTable(Queryable persister, SessionImplementor session) {
+		// Don't really know all the codes required to adequately decipher returned jdbc exceptions here.
+		// simply allow the failure to be eaten and the subsequent insert-selects/deletes should fail
+		TemporaryTableCreationWork work = new TemporaryTableCreationWork( persister );
+		if ( shouldIsolateTemporaryTableDDL( session ) ) {
+			session.getTransactionCoordinator()
+					.getTransaction()
+					.createIsolationDelegate()
+					.delegateWork( work, session.getFactory().getSettings().isDataDefinitionInTransactionSupported() );
+		}
+		else {
+			final Connection connection = session.getTransactionCoordinator()
+					.getJdbcCoordinator()
+					.getLogicalConnection()
+					.getShareableConnectionProxy();
+			work.execute( connection );
+			session.getTransactionCoordinator()
+					.getJdbcCoordinator()
+					.getLogicalConnection()
+					.afterStatementExecution();
+		}
+	}
+
+	protected void releaseTempTable(Queryable persister, SessionImplementor session) {
+		if ( session.getFactory().getDialect().dropTemporaryTableAfterUse() ) {
+			TemporaryTableDropWork work = new TemporaryTableDropWork( persister, session );
+			if ( shouldIsolateTemporaryTableDDL( session ) ) {
+				session.getTransactionCoordinator()
+						.getTransaction()
+						.createIsolationDelegate()
+						.delegateWork( work, session.getFactory().getSettings().isDataDefinitionInTransactionSupported() );
+			}
+			else {
+				final Connection connection = session.getTransactionCoordinator()
+						.getJdbcCoordinator()
+						.getLogicalConnection()
+						.getShareableConnectionProxy();
+				work.execute( connection );
+				session.getTransactionCoordinator()
+						.getJdbcCoordinator()
+						.getLogicalConnection()
+						.afterStatementExecution();
+			}
+		}
+		else {
+			// at the very least cleanup the data :)
+			PreparedStatement ps = null;
+			try {
+				final String sql = "delete from " + persister.getTemporaryIdTableName();
+				ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( sql, false );
+				ps.executeUpdate();
+			}
+			catch( Throwable t ) {
+				log.unableToCleanupTemporaryIdTable(t);
+			}
+			finally {
+				if ( ps != null ) {
+					try {
+						ps.close();
+					}
+					catch( Throwable ignore ) {
+						// ignore
+					}
+				}
+			}
+		}
+	}
+
+	@SuppressWarnings({ "UnnecessaryUnboxing" })
+	protected boolean shouldIsolateTemporaryTableDDL(SessionImplementor session) {
+		Boolean dialectVote = session.getFactory().getDialect().performTemporaryTableDDLInIsolation();
+		if ( dialectVote != null ) {
+			return dialectVote.booleanValue();
+		}
+		return session.getFactory().getSettings().isDataDefinitionImplicitCommit();
+	}

 	private static class TemporaryTableCreationWork extends AbstractWork {
@ -168,46 +201,24 @@ public abstract class AbstractStatementExecutor implements StatementExecutor {
 			}
 		}
 		catch( Exception e ) {
-			LOG.debug( "unable to create temporary id table [" + e.getMessage() + "]" );
+			log.debug( "unable to create temporary id table [" + e.getMessage() + "]" );
 		}
 	}
 }

-	protected void createTemporaryTableIfNecessary(final Queryable persister, final SessionImplementor session) {
-		// Don't really know all the codes required to adequately decipher returned jdbc exceptions here.
-		// simply allow the failure to be eaten and the subsequent insert-selects/deletes should fail
-		TemporaryTableCreationWork work = new TemporaryTableCreationWork( persister );
-		if ( shouldIsolateTemporaryTableDDL() ) {
-			session.getTransactionCoordinator()
-					.getTransaction()
-					.createIsolationDelegate()
-					.delegateWork( work, getFactory().getSettings().isDataDefinitionInTransactionSupported() );
-		}
-		else {
-			final Connection connection = session.getTransactionCoordinator()
-					.getJdbcCoordinator()
-					.getLogicalConnection()
-					.getShareableConnectionProxy();
-			work.execute( connection );
-			session.getTransactionCoordinator()
-					.getJdbcCoordinator()
-					.getLogicalConnection()
-					.afterStatementExecution();
-		}
-	}
-
 	private static SqlExceptionHelper.WarningHandler CREATION_WARNING_HANDLER = new SqlExceptionHelper.WarningHandlerLoggingSupport() {
 		public boolean doProcess() {
-			return LOG.isDebugEnabled();
+			return log.isDebugEnabled();
 		}

 		public void prepare(SQLWarning warning) {
-			LOG.warningsCreatingTempTable( warning );
+			log.warningsCreatingTempTable( warning );
 		}

 		@Override
 		protected void logWarning(String description, String message) {
-			LOG.debug( description );
-			LOG.debug( message );
+			log.debug( description );
+			log.debug( message );
 		}
 	};
@ -240,71 +251,9 @@ public abstract class AbstractStatementExecutor implements StatementExecutor {
 			}
 		}
 		catch( Exception e ) {
-			LOG.warn( "unable to drop temporary id table after use [" + e.getMessage() + "]" );
+			log.warn( "unable to drop temporary id table after use [" + e.getMessage() + "]" );
 		}
 	}
 }

-	protected void dropTemporaryTableIfNecessary(final Queryable persister, final SessionImplementor session) {
-		if ( getFactory().getDialect().dropTemporaryTableAfterUse() ) {
-			TemporaryTableDropWork work = new TemporaryTableDropWork( persister, session );
-			if ( shouldIsolateTemporaryTableDDL() ) {
-				session.getTransactionCoordinator()
-						.getTransaction()
-						.createIsolationDelegate()
-						.delegateWork( work, getFactory().getSettings().isDataDefinitionInTransactionSupported() );
-			}
-			else {
-				final Connection connection = session.getTransactionCoordinator()
-						.getJdbcCoordinator()
-						.getLogicalConnection()
-						.getShareableConnectionProxy();
-				work.execute( connection );
-				session.getTransactionCoordinator()
-						.getJdbcCoordinator()
-						.getLogicalConnection()
-						.afterStatementExecution();
-			}
-		}
-		else {
-			// at the very least cleanup the data :)
-			PreparedStatement ps = null;
-			try {
-				final String sql = "delete from " + persister.getTemporaryIdTableName();
-				ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( sql, false );
-				ps.executeUpdate();
-			}
-			catch( Throwable t ) {
-				LOG.unableToCleanupTemporaryIdTable(t);
-			}
-			finally {
-				if ( ps != null ) {
-					try {
-						ps.close();
-					}
-					catch( Throwable ignore ) {
-						// ignore
-					}
-				}
-			}
-		}
-	}
-
-	protected void coordinateSharedCacheCleanup(SessionImplementor session) {
-		BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, getAffectedQueryables() );
-
-		if ( session.isEventSource() ) {
-			( ( EventSource ) session ).getActionQueue().addAction( action );
-		}
-		else {
-			action.getAfterTransactionCompletionProcess().doAfterTransactionCompletion( true, session );
-		}
-	}
-
-	@SuppressWarnings({ "UnnecessaryUnboxing" })
-	protected boolean shouldIsolateTemporaryTableDDL() {
-		Boolean dialectVote = getFactory().getDialect().performTemporaryTableDDLInIsolation();
-		if ( dialectVote != null ) return dialectVote.booleanValue();
-		return getFactory().getSettings().isDataDefinitionImplicitCommit();
-	}
 }
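
Because the strategy above is only one implementation of MultiTableBulkIdStrategy, and because its buildUpdateHandler and buildDeleteHandler call createTempTable and releaseTempTable through the outer instance, a deployment whose dialect cannot issue temporary-table DDL on the fly could swap in a variant that leaves table creation to the DBA. The sketch below is illustrative only; the class name and behaviour are assumptions, not part of this commit, and it reuses only calls already visible in this diff.

// Hypothetical strategy sketch: id tables are created up front (e.g. by a DBA),
// so the per-statement hooks never issue DDL and only clear rows afterwards.
// Assumes the id table is not shared by concurrent sessions, or is isolated by other means.
import java.sql.PreparedStatement;

import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.hql.spi.TemporaryTableBulkIdStrategy;
import org.hibernate.persister.entity.Queryable;

public class PreCreatedIdTableBulkIdStrategy extends TemporaryTableBulkIdStrategy {
	@Override
	protected void createTempTable(Queryable persister, SessionImplementor session) {
		// assumption: the table named by persister.getTemporaryIdTableName() already exists
	}

	@Override
	protected void releaseTempTable(Queryable persister, SessionImplementor session) {
		// never drop the externally managed table; just clear the rows this statement collected
		PreparedStatement ps = null;
		try {
			final String sql = "delete from " + persister.getTemporaryIdTableName();
			ps = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( sql, false );
			ps.executeUpdate();
		}
		catch( Throwable t ) {
			// best-effort cleanup; stale rows only waste space until the next run
		}
		finally {
			if ( ps != null ) {
				try {
					ps.close();
				}
				catch( Throwable ignore ) {
					// ignore
				}
			}
		}
	}
}

Overriding just these two hooks is enough to change the DDL behaviour while keeping the table-based update and delete handlers untouched.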

View File

@ -27,6 +27,7 @@ import java.io.Serializable;
 import java.sql.Connection;
 import java.sql.SQLException;
 import java.util.List;
+import java.util.UUID;

 import org.hibernate.HibernateException;
 import org.hibernate.MappingException;
@ -51,6 +52,7 @@ import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.engine.spi.SessionImplementor;
 import org.hibernate.engine.transaction.spi.TransactionContext;
 import org.hibernate.engine.transaction.spi.TransactionEnvironment;
+import org.hibernate.id.uuid.StandardRandomStrategy;
 import org.hibernate.jdbc.WorkExecutor;
 import org.hibernate.jdbc.WorkExecutorVisitable;
 import org.hibernate.persister.entity.EntityPersister;
@ -269,6 +271,15 @@ public abstract class AbstractSessionImpl implements Serializable, SharedSession
 		return jdbcConnectionAccess;
 	}

+	private UUID sessionIdentifier;
+
+	public UUID getSessionIdentifier() {
+		if ( sessionIdentifier == null ) {
+			sessionIdentifier = StandardRandomStrategy.INSTANCE.generateUUID( this );
+		}
+		return sessionIdentifier;
+	}
+
 	private static class NonContextualJdbcConnectionAccess implements JdbcConnectionAccess, Serializable {
 		private final ConnectionProvider connectionProvider;

View File

@ -29,6 +29,7 @@ import java.io.ObjectInputStream;
 import java.io.ObjectOutputStream;
 import java.io.Serializable;
 import java.sql.Connection;
+import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@ -53,6 +54,7 @@ import org.hibernate.EntityNameResolver;
 import org.hibernate.HibernateException;
 import org.hibernate.Interceptor;
 import org.hibernate.MappingException;
+import org.hibernate.MultiTenancyStrategy;
 import org.hibernate.ObjectNotFoundException;
 import org.hibernate.QueryException;
 import org.hibernate.Session;
@ -91,6 +93,8 @@ import org.hibernate.dialect.Dialect;
 import org.hibernate.dialect.function.SQLFunction;
 import org.hibernate.dialect.function.SQLFunctionRegistry;
 import org.hibernate.engine.ResultSetMappingDefinition;
+import org.hibernate.engine.jdbc.connections.spi.MultiTenantConnectionProvider;
+import org.hibernate.engine.jdbc.spi.JdbcConnectionAccess;
 import org.hibernate.engine.jdbc.spi.JdbcServices;
 import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
 import org.hibernate.engine.profile.Association;
@ -483,6 +487,15 @@ public final class SessionFactoryImpl
 		LOG.debug( "Instantiated session factory" );

+		settings.getMultiTableBulkIdStrategy().prepare(
+				dialect,
+				buildLocalConnectionAccess(),
+				cfg.createMappings(),
+				cfg.buildMapping(),
+				properties
+		);
+
 		if ( settings.isAutoCreateSchema() ) {
 			new SchemaExport( serviceRegistry, cfg )
 					.setImportSqlCommandExtractor( serviceRegistry.getService( ImportSqlCommandExtractor.class ) )
@ -558,6 +571,32 @@ public final class SessionFactoryImpl
 		this.observer.sessionFactoryCreated( this );
 	}

+	private JdbcConnectionAccess buildLocalConnectionAccess() {
+		return new JdbcConnectionAccess() {
+			@Override
+			public Connection obtainConnection() throws SQLException {
+				return settings.getMultiTenancyStrategy() == MultiTenancyStrategy.NONE
+						? serviceRegistry.getService( ConnectionProvider.class ).getConnection()
+						: serviceRegistry.getService( MultiTenantConnectionProvider.class ).getAnyConnection();
+			}
+
+			@Override
+			public void releaseConnection(Connection connection) throws SQLException {
+				if ( settings.getMultiTenancyStrategy() == MultiTenancyStrategy.NONE ) {
+					serviceRegistry.getService( ConnectionProvider.class ).closeConnection( connection );
+				}
+				else {
+					serviceRegistry.getService( MultiTenantConnectionProvider.class ).releaseAnyConnection( connection );
+				}
+			}
+
+			@Override
+			public boolean supportsAggressiveRelease() {
+				return false;
+			}
+		};
+	}
+
 	@SuppressWarnings({ "unchecked" })
 	private CustomEntityDirtinessStrategy determineCustomEntityDirtinessStrategy() {
 		CustomEntityDirtinessStrategy defaultValue = new CustomEntityDirtinessStrategy() {
@ -1321,6 +1360,8 @@ public final class SessionFactoryImpl
 		isClosed = true;

+		settings.getMultiTableBulkIdStrategy().release( dialect, buildLocalConnectionAccess() );
+
 		Iterator iter = entityPersisters.values().iterator();
 		while ( iter.hasNext() ) {
 			EntityPersister p = (EntityPersister) iter.next();

View File

@ -343,7 +343,8 @@ public class Column implements Selectable, Serializable, Cloneable {
 	/**
 	 * Shallow copy, the value is not copied
 	 */
-	protected Object clone() {
+	@Override
+	public Column clone() {
 		Column copy = new Column();
 		copy.setLength( length );
 		copy.setScale( scale );

View File

@ -768,14 +768,14 @@ public abstract class PersistentClass implements Serializable, Filterable, MetaA
 	}

 	public void prepareTemporaryTables(Mapping mapping, Dialect dialect) {
-		if ( dialect.supportsTemporaryTables() ) {
 		temporaryIdTableName = dialect.generateTemporaryTableName( getTable().getName() );
+		if ( dialect.supportsTemporaryTables() ) {
 			Table table = new Table();
 			table.setName( temporaryIdTableName );
 			Iterator itr = getTable().getPrimaryKey().getColumnIterator();
 			while( itr.hasNext() ) {
 				Column column = (Column) itr.next();
-				table.addColumn( (Column) column.clone() );
+				table.addColumn( column.clone() );
 			}
 			temporaryIdTableDDL = table.sqlTemporaryTableCreateString( dialect, mapping );
 		}