HHH-4034 - Update org.hibernate.action.BulkOperationCleanupAction to use new Region cache APIs

git-svn-id: https://svn.jboss.org/repos/hibernate/core/trunk@17084 1b8cb986-b30d-0410-93ca-fae66ebed9b2
This commit is contained in:
Steve Ebersole 2009-07-13 23:33:02 +00:00
parent b339e33121
commit a41506f404
8 changed files with 206 additions and 101 deletions

View File

@ -53,7 +53,11 @@ public abstract class CacheTestCaseBase extends FunctionalTestCase {
} }
public String[] getMappings() { public String[] getMappings() {
return new String[] { "cache/jbc2/functional/Item.hbm.xml", "cache/jbc2/functional/Customer.hbm.xml", "cache/jbc2/functional/Contact.hbm.xml" }; return new String[] {
"cache/jbc/functional/Item.hbm.xml",
"cache/jbc/functional/Customer.hbm.xml",
"cache/jbc/functional/Contact.hbm.xml"
};
} }
public void configure(Configuration cfg) { public void configure(Configuration cfg) {
@ -96,7 +100,7 @@ public abstract class CacheTestCaseBase extends FunctionalTestCase {
/** /**
* Apply any region-factory specific configurations. * Apply any region-factory specific configurations.
* *
* @param the Configuration to update. * @param cfg the Configuration to update.
*/ */
protected abstract void configureCacheFactory(Configuration cfg); protected abstract void configureCacheFactory(Configuration cfg);

View File

@ -118,7 +118,7 @@ public class PessimisticSessionRefreshTest extends DualNodeTestCaseBase
@Override @Override
public String[] getMappings() public String[] getMappings()
{ {
return new String[] { "cache/jbc2/functional/classloader/Account.hbm.xml" }; return new String[] { "cache/jbc/functional/classloader/Account.hbm.xml" };
} }
@Override @Override

View File

@ -105,7 +105,7 @@ extends DualNodeTestCaseBase
@Override @Override
public String[] getMappings() public String[] getMappings()
{ {
return new String[] { "cache/jbc2/functional/classloader/Account.hbm.xml" }; return new String[] { "cache/jbc/functional/classloader/Account.hbm.xml" };
} }
@Override @Override

View File

@ -31,8 +31,8 @@ import org.hibernate.test.util.SelectedClassnameClassLoaderTestSetup;
public class IsolatedCacheTestSetup extends SelectedClassnameClassLoaderTestSetup public class IsolatedCacheTestSetup extends SelectedClassnameClassLoaderTestSetup
{ {
public static final String DEF_CACHE_FACTORY_RESOURCE = "org/hibernate/cache/jbc2/builder/jbc2-configs.xml"; public static final String DEF_CACHE_FACTORY_RESOURCE = "org/hibernate/cache/jbc/builder/jbc2-configs.xml";
public static final String DEF_JGROUPS_RESOURCE = "org/hibernate/cache/jbc2/builder/jgroups-stacks.xml"; public static final String DEF_JGROUPS_RESOURCE = "org/hibernate/cache/jbc/builder/jgroups-stacks.xml";
private String[] isolatedClasses; private String[] isolatedClasses;
private String cacheConfig; private String cacheConfig;

View File

@ -36,8 +36,8 @@ import org.jgroups.JChannelFactory;
*/ */
public class CacheManagerTestSetup extends TestSetup public class CacheManagerTestSetup extends TestSetup
{ {
public static final String DEF_CACHE_FACTORY_RESOURCE = "org/hibernate/cache/jbc2/builder/jbc2-configs.xml"; public static final String DEF_CACHE_FACTORY_RESOURCE = "org/hibernate/cache/jbc/builder/jbc2-configs.xml";
public static final String DEF_JGROUPS_RESOURCE = "org/hibernate/cache/jbc2/builder/jgroups-stacks.xml"; public static final String DEF_JGROUPS_RESOURCE = "org/hibernate/cache/jbc/builder/jgroups-stacks.xml";
private final String jbcConfig; private final String jbcConfig;
private final String jgConfig; private final String jgConfig;

View File

@ -25,8 +25,12 @@
package org.hibernate.action; package org.hibernate.action;
import org.hibernate.HibernateException; import org.hibernate.HibernateException;
import org.hibernate.cache.access.SoftLock;
import org.hibernate.cache.access.EntityRegionAccessStrategy;
import org.hibernate.cache.access.CollectionRegionAccessStrategy;
import org.hibernate.persister.entity.EntityPersister; import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.entity.Queryable; import org.hibernate.persister.entity.Queryable;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.engine.SessionFactoryImplementor; import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.engine.SessionImplementor; import org.hibernate.engine.SessionImplementor;
@ -36,49 +40,84 @@ import java.util.Set;
import java.util.Iterator; import java.util.Iterator;
import java.util.HashSet; import java.util.HashSet;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
/** /**
* Implementation of BulkOperationCleanupAction. * An {@link org.hibernate.engine.ActionQueue} {@link Executable} for ensuring
* shared cache cleanup in relation to performed bulk HQL queries.
* <p/>
* NOTE: currently this executes for <tt>INSERT</tt> queries as well as
* <tt>UPDATE</tt> and <tt>DELETE</tt> queries. For <tt>INSERT</tt> it is
* really not needed as we'd have no invalid entity/collection data to
cleanup (we'd still need to invalidate the appropriate update-timestamps
* regions) as a result of this query.
* *
* @author Steve Ebersole * @author Steve Ebersole
*/ */
public class BulkOperationCleanupAction implements Executable, Serializable { public class BulkOperationCleanupAction implements Executable, Serializable {
private final Serializable[] affectedTableSpaces;
private final SessionImplementor session; private final Set entityCleanups = new HashSet();
private final Set collectionCleanups = new HashSet();
private final Set affectedEntityNames = new HashSet(); /**
private final Set affectedCollectionRoles = new HashSet(); * Constructs an action to cleanup "affected cache regions" based on the
private final Serializable[] spaces; * affected entity persisters. The affected regions are defined as the
* region (if any) of the entity persisters themselves, plus the
public BulkOperationCleanupAction(SessionImplementor session, Queryable[] affectedQueryables) { * collection regions for any collection in which those entity
this.session = session; * persisters participate as elements/keys/etc.
// TODO : probably better to calculate these and pass them in, as it'll be more performant *
* @param session The session to which this request is tied.
* @param affectedQueryables The affected entity persisters.
*/
public BulkOperationCleanupAction(
SessionImplementor session,
Queryable[] affectedQueryables) {
SessionFactoryImplementor factory = session.getFactory();
ArrayList tmpSpaces = new ArrayList(); ArrayList tmpSpaces = new ArrayList();
for ( int i = 0; i < affectedQueryables.length; i++ ) { for ( int i = 0; i < affectedQueryables.length; i++ ) {
tmpSpaces.addAll( Arrays.asList( affectedQueryables[i].getQuerySpaces() ) );
if ( affectedQueryables[i].hasCache() ) { if ( affectedQueryables[i].hasCache() ) {
affectedEntityNames.add( affectedQueryables[i].getEntityName() ); entityCleanups.add(
new EntityCleanup(
affectedQueryables[i].getCacheAccessStrategy()
)
);
} }
Set roles = session.getFactory().getCollectionRolesByEntityParticipant( affectedQueryables[i].getEntityName() ); Set roles = factory.getCollectionRolesByEntityParticipant( affectedQueryables[i].getEntityName() );
if ( roles != null ) { if ( roles != null ) {
affectedCollectionRoles.addAll( roles ); Iterator itr = roles.iterator();
} while ( itr.hasNext() ) {
for ( int y = 0; y < affectedQueryables[i].getQuerySpaces().length; y++ ) { String role = ( String ) itr.next();
tmpSpaces.add( affectedQueryables[i].getQuerySpaces()[y] ); CollectionPersister collectionPersister = factory.getCollectionPersister( role );
if ( collectionPersister.hasCache() ) {
collectionCleanups.add(
new CollectionCleanup(
collectionPersister.getCacheAccessStrategy()
)
);
}
}
} }
} }
this.spaces = new Serializable[ tmpSpaces.size() ];
for ( int i = 0; i < tmpSpaces.size(); i++ ) { this.affectedTableSpaces = ( Serializable[] ) tmpSpaces.toArray( new Serializable[ tmpSpaces.size() ] );
this.spaces[i] = ( Serializable ) tmpSpaces.get( i );
}
} }
/** Create an action that will evict collection and entity regions based on queryspaces (table names). /**
* TODO: cache the autodetected information and pass it in instead. * Constructs an action to cleanup "affected cache regions" based on a
**/ * set of affected table spaces. This differs from {@link #BulkOperationCleanupAction(SessionImplementor, Queryable[])}
public BulkOperationCleanupAction(SessionImplementor session, Set querySpaces) { * in that here we have the affected <strong>table names</strong>. From those
this.session = session; * we deduce the entity persisters which are affected based on the defined
* {@link EntityPersister#getQuerySpaces() table spaces}; and from there, we
Set tmpSpaces = new HashSet(querySpaces); * determine the affected collection regions based on any collections
* in which those entity persisters participate as elements/keys/etc.
*
* @param session The session to which this request is tied.
* @param tableSpaces The table spaces.
*/
public BulkOperationCleanupAction(SessionImplementor session, Set tableSpaces) {
Set tmpSpaces = new HashSet(tableSpaces);
SessionFactoryImplementor factory = session.getFactory(); SessionFactoryImplementor factory = session.getFactory();
Iterator iterator = factory.getAllClassMetadata().entrySet().iterator(); Iterator iterator = factory.getAllClassMetadata().entrySet().iterator();
while ( iterator.hasNext() ) { while ( iterator.hasNext() ) {
@ -87,41 +126,66 @@ public class BulkOperationCleanupAction implements Executable, Serializable {
EntityPersister persister = factory.getEntityPersister( entityName ); EntityPersister persister = factory.getEntityPersister( entityName );
Serializable[] entitySpaces = persister.getQuerySpaces(); Serializable[] entitySpaces = persister.getQuerySpaces();
if (affectedEntity( querySpaces, entitySpaces )) { if ( affectedEntity( tableSpaces, entitySpaces ) ) {
tmpSpaces.addAll( Arrays.asList( entitySpaces ) );
if ( persister.hasCache() ) { if ( persister.hasCache() ) {
affectedEntityNames.add( persister.getEntityName() ); entityCleanups.add(
new EntityCleanup(
persister.getCacheAccessStrategy()
)
);
} }
Set roles = session.getFactory().getCollectionRolesByEntityParticipant( persister.getEntityName() ); Set roles = session.getFactory().getCollectionRolesByEntityParticipant( persister.getEntityName() );
if ( roles != null ) { if ( roles != null ) {
affectedCollectionRoles.addAll( roles ); Iterator itr = roles.iterator();
} while ( itr.hasNext() ) {
for ( int y = 0; y < entitySpaces.length; y++ ) { String role = ( String ) itr.next();
tmpSpaces.add( entitySpaces[y] ); CollectionPersister collectionPersister = factory.getCollectionPersister( role );
if ( collectionPersister.hasCache() ) {
collectionCleanups.add(
new CollectionCleanup(
collectionPersister.getCacheAccessStrategy()
)
);
}
}
} }
} }
} }
this.spaces = (Serializable[]) tmpSpaces.toArray( new Serializable[tmpSpaces.size()] );
this.affectedTableSpaces = ( Serializable[] ) tmpSpaces.toArray( new Serializable[ tmpSpaces.size() ] );
} }
/** returns true if no queryspaces or if there are a match */ /**
private boolean affectedEntity(Set querySpaces, Serializable[] entitySpaces) { * Check to determine whether the table spaces reported by an entity
if(querySpaces==null || querySpaces.isEmpty()) { * persister match against the defined affected table spaces.
*
* @param affectedTableSpaces The table spaces reported to be affected by
* the query.
* @param checkTableSpaces The table spaces (from the entity persister)
* to check against the affected table spaces.
*
* @return True if there are affected table spaces and any of the incoming
* check table spaces occur in that set.
*/
private boolean affectedEntity(
Set affectedTableSpaces,
Serializable[] checkTableSpaces) {
if ( affectedTableSpaces == null || affectedTableSpaces.isEmpty() ) {
return true; return true;
} }
for ( int i = 0; i < entitySpaces.length; i++ ) { for ( int i = 0; i < checkTableSpaces.length; i++ ) {
if ( querySpaces.contains( entitySpaces[i] ) ) { if ( affectedTableSpaces.contains( checkTableSpaces[i] ) ) {
return true; return true;
} }
} }
return false; return false;
} }
public void init() { public Serializable[] getPropertySpaces() {
evictEntityRegions(); return affectedTableSpaces;
evictCollectionRegions();
} }
public boolean hasAfterTransactionCompletion() { public boolean hasAfterTransactionCompletion() {
@ -129,12 +193,17 @@ public class BulkOperationCleanupAction implements Executable, Serializable {
} }
public void afterTransactionCompletion(boolean success) throws HibernateException { public void afterTransactionCompletion(boolean success) throws HibernateException {
evictEntityRegions(); Iterator itr = entityCleanups.iterator();
evictCollectionRegions(); while ( itr.hasNext() ) {
} final EntityCleanup cleanup = ( EntityCleanup ) itr.next();
cleanup.release();
}
public Serializable[] getPropertySpaces() { itr = collectionCleanups.iterator();
return spaces; while ( itr.hasNext() ) {
final CollectionCleanup cleanup = ( CollectionCleanup ) itr.next();
cleanup.release();
}
} }
public void beforeExecutions() throws HibernateException { public void beforeExecutions() throws HibernateException {
@ -145,23 +214,33 @@ public class BulkOperationCleanupAction implements Executable, Serializable {
// nothing to do // nothing to do
} }
private void evictEntityRegions() { private static class EntityCleanup {
if ( affectedEntityNames != null ) { private final EntityRegionAccessStrategy cacheAccess;
Iterator itr = affectedEntityNames.iterator(); private final SoftLock cacheLock;
while ( itr.hasNext() ) {
final String entityName = ( String ) itr.next(); private EntityCleanup(EntityRegionAccessStrategy cacheAccess) {
session.getFactory().evictEntity( entityName ); this.cacheAccess = cacheAccess;
} this.cacheLock = cacheAccess.lockRegion();
cacheAccess.removeAll();
}
private void release() {
cacheAccess.unlockRegion( cacheLock );
} }
} }
private void evictCollectionRegions() { private static class CollectionCleanup {
if ( affectedCollectionRoles != null ) { private final CollectionRegionAccessStrategy cacheAccess;
Iterator itr = affectedCollectionRoles.iterator(); private final SoftLock cacheLock;
while ( itr.hasNext() ) {
final String roleName = ( String ) itr.next(); private CollectionCleanup(CollectionRegionAccessStrategy cacheAccess) {
session.getFactory().evictCollection( roleName ); this.cacheAccess = cacheAccess;
} this.cacheLock = cacheAccess.lockRegion();
cacheAccess.removeAll();
}
private void release() {
cacheAccess.unlockRegion( cacheLock );
} }
} }
} }

View File

@ -95,18 +95,28 @@ public class NativeSQLQueryPlan implements Serializable {
} }
/** /**
* Bind positional parameter values to the <tt>PreparedStatement</tt> * Perform binding of all the JDBC bind parameter values based on the user-defined
* (these are parameters specified by a JDBC-style ?). * positional query parameters (these are the '?'-style hibernate query
* params) into the JDBC {@link PreparedStatement}.
*
* @param st The prepared statement to which to bind the parameter values.
* @param queryParameters The query parameters specified by the application.
* @param start JDBC parameter binds are positional, so this is the position
* from which to start binding.
* @param session The session from which the query originated.
*
* @return The number of JDBC bind positions accounted for during execution.
*
* @throws SQLException Some form of JDBC error binding the values.
* @throws HibernateException Generally indicates a mapping problem or type mismatch.
*/ */
private int bindPositionalParameters(final PreparedStatement st, private int bindPositionalParameters(
final QueryParameters queryParameters, final int start, final PreparedStatement st,
final SessionImplementor session) throws SQLException, final QueryParameters queryParameters,
HibernateException { final int start,
final SessionImplementor session) throws SQLException {
final Object[] values = queryParameters final Object[] values = queryParameters.getFilteredPositionalParameterValues();
.getFilteredPositionalParameterValues(); final Type[] types = queryParameters.getFilteredPositionalParameterTypes();
final Type[] types = queryParameters
.getFilteredPositionalParameterTypes();
int span = 0; int span = 0;
for (int i = 0; i < values.length; i++) { for (int i = 0; i < values.length; i++) {
types[i].nullSafeSet( st, values[i], start + span, session ); types[i].nullSafeSet( st, values[i], start + span, session );
@ -116,15 +126,25 @@ public class NativeSQLQueryPlan implements Serializable {
} }
/** /**
* Bind named parameters to the <tt>PreparedStatement</tt>. This has an * Perform binding of all the JDBC bind parameter values based on the user-defined
* empty implementation on this superclass and should be implemented by * named query parameters into the JDBC {@link PreparedStatement}.
* subclasses (queries) which allow named parameters. *
* @param ps The prepared statement to which to bind the parameter values.
* @param namedParams The named query parameters specified by the application.
* @param start JDBC parameter binds are positional, so this is the position
* from which to start binding.
* @param session The session from which the query originated.
*
* @return The number of JDBC bind positions accounted for during execution.
*
* @throws SQLException Some form of JDBC error binding the values.
* @throws HibernateException Generally indicates a mapping problem or type mismatch.
*/ */
private int bindNamedParameters(final PreparedStatement ps, private int bindNamedParameters(
final Map namedParams, final int start, final PreparedStatement ps,
final SessionImplementor session) throws SQLException, final Map namedParams,
HibernateException { final int start,
final SessionImplementor session) throws SQLException {
if ( namedParams != null ) { if ( namedParams != null ) {
// assumes that types are all of span 1 // assumes that types are all of span 1
Iterator iter = namedParams.entrySet().iterator(); Iterator iter = namedParams.entrySet().iterator();
@ -155,11 +175,12 @@ public class NativeSQLQueryPlan implements Serializable {
protected void coordinateSharedCacheCleanup(SessionImplementor session) { protected void coordinateSharedCacheCleanup(SessionImplementor session) {
BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, getCustomQuery().getQuerySpaces() ); BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, getCustomQuery().getQuerySpaces() );
action.init();
if ( session.isEventSource() ) { if ( session.isEventSource() ) {
( ( EventSource ) session ).getActionQueue().addAction( action ); ( ( EventSource ) session ).getActionQueue().addAction( action );
} }
else {
action.afterTransactionCompletion( true );
}
} }
public int performExecuteUpdate(QueryParameters queryParameters, public int performExecuteUpdate(QueryParameters queryParameters,

View File

@ -240,11 +240,12 @@ public abstract class AbstractStatementExecutor implements StatementExecutor {
protected void coordinateSharedCacheCleanup(SessionImplementor session) { protected void coordinateSharedCacheCleanup(SessionImplementor session) {
BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, getAffectedQueryables() ); BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, getAffectedQueryables() );
action.init();
if ( session.isEventSource() ) { if ( session.isEventSource() ) {
( ( EventSource ) session ).getActionQueue().addAction( action ); ( ( EventSource ) session ).getActionQueue().addAction( action );
} }
else {
action.afterTransactionCompletion( true );
}
} }
protected boolean shouldIsolateTemporaryTableDDL() { protected boolean shouldIsolateTemporaryTableDDL() {