HHH-5697 - Support for multi-tenancy
This commit is contained in:
parent fe8c7183d1
commit 47abaf12fa
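At a high level, the change threads a tenant identifier through Hibernate's cache-key and entity-key handling and moves key construction behind the new SessionImplementor.generateCacheKey(...) method, as the hunks below show file by file. The following sketch is illustrative only (the class and its fields are invented for this summary and are not part of the commit); it captures the property the diff is after: keys that differ only in tenant must never compare equal, so one tenant's second-level cache entries cannot leak to another.

import java.io.Serializable;

// Simplified, hypothetical model of a tenant-aware cache key (not the real org.hibernate.cache.CacheKey).
public final class TenantAwareKey implements Serializable {
	private final Serializable id;
	private final String entityOrRoleName;
	private final String tenantId;      // may be null when multi-tenancy is not in use
	private final int hashCode;

	public TenantAwareKey(Serializable id, String entityOrRoleName, String tenantId) {
		this.id = id;
		this.entityOrRoleName = entityOrRoleName;
		this.tenantId = tenantId;
		this.hashCode = id.hashCode();  // the real CacheKey delegates hashing to the Hibernate Type
	}

	@Override
	public boolean equals(Object other) {
		if ( !(other instanceof TenantAwareKey) ) {
			return false;
		}
		TenantAwareKey that = (TenantAwareKey) other;
		return entityOrRoleName.equals( that.entityOrRoleName )
				&& id.equals( that.id )
				&& ( tenantId == null ? that.tenantId == null : tenantId.equals( that.tenantId ) );
	}

	@Override
	public int hashCode() {
		return hashCode;
	}
}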
@@ -85,19 +85,14 @@ public abstract class CollectionAction implements Executable, Serializable, Comp
@Override
public final void beforeExecutions() throws CacheException {
// we need to obtain the lock before any actions are
// executed, since this may be an inverse="true"
// bidirectional association and it is one of the
// earlier entity actions which actually updates
// the database (this action is resposible for
// second-level cache invalidation only)
// we need to obtain the lock before any actions are executed, since this may be an inverse="true"
// bidirectional association and it is one of the earlier entity actions which actually updates
// the database (this action is responsible for second-level cache invalidation only)
if ( persister.hasCache() ) {
final CacheKey ck = new CacheKey(
final CacheKey ck = session.generateCacheKey(
key,
persister.getKeyType(),
persister.getRole(),
session.getEntityMode(),
session.getFactory()
persister.getRole()
);
final SoftLock lock = persister.getCacheAccessStrategy().lockItem( ck, null );
// the old behavior used key as opposed to getKey()

@@ -145,13 +140,11 @@ public abstract class CollectionAction implements Executable, Serializable, Comp
protected final void evict() throws CacheException {
if ( persister.hasCache() ) {
CacheKey ck = new CacheKey(
CacheKey ck = session.generateCacheKey(
key,
persister.getKeyType(),
persister.getRole(),
session.getEntityMode(),
session.getFactory()
);
persister.getRole()
);
persister.getCacheAccessStrategy().remove( ck );
}
}

@@ -190,12 +183,10 @@ public abstract class CollectionAction implements Executable, Serializable, Comp
@Override
public void doAfterTransactionCompletion(boolean success, SessionImplementor session) {
final CacheKey ck = new CacheKey(
final CacheKey ck = session.generateCacheKey(
key,
persister.getKeyType(),
persister.getRole(),
session.getEntityMode(),
session.getFactory()
persister.getRole()
);
persister.getCacheAccessStrategy().unlockItem( ck, lock );
}
@@ -79,13 +79,7 @@ public final class EntityDeleteAction extends EntityAction {
final CacheKey ck;
if ( persister.hasCache() ) {
ck = new CacheKey(
id,
persister.getIdentifierType(),
persister.getRootEntityName(),
session.getEntityMode(),
session.getFactory()
);
ck = session.generateCacheKey( id, persister.getIdentifierType(), persister.getRootEntityName() );
lock = persister.getCacheAccessStrategy().lockItem( ck, version );
}
else {

@@ -170,12 +164,10 @@ public final class EntityDeleteAction extends EntityAction {
@Override
public void doAfterTransactionCompletion(boolean success, SessionImplementor session) throws HibernateException {
if ( getPersister().hasCache() ) {
final CacheKey ck = new CacheKey(
final CacheKey ck = getSession().generateCacheKey(
getId(),
getPersister().getIdentifierType(),
getPersister().getRootEntityName(),
getSession().getEntityMode(),
getSession().getFactory()
getPersister().getRootEntityName()
);
getPersister().getCacheAccessStrategy().unlockItem( ck, lock );
}
@@ -110,13 +110,7 @@ public final class EntityInsertAction extends EntityAction {
);

cacheEntry = persister.getCacheEntryStructure().structure(ce);
final CacheKey ck = new CacheKey(
id,
persister.getIdentifierType(),
persister.getRootEntityName(),
session.getEntityMode(),
session.getFactory()
);
final CacheKey ck = session.generateCacheKey( id, persister.getIdentifierType(), persister.getRootEntityName() );
boolean put = persister.getCacheAccessStrategy().insert( ck, cacheEntry, version );

if ( put && factory.getStatistics().isStatisticsEnabled() ) {

@@ -185,13 +179,7 @@ public final class EntityInsertAction extends EntityAction {
public void doAfterTransactionCompletion(boolean success, SessionImplementor session) throws HibernateException {
EntityPersister persister = getPersister();
if ( success && isCachePutEnabled( persister, getSession() ) ) {
final CacheKey ck = new CacheKey(
getId(),
persister.getIdentifierType(),
persister.getRootEntityName(),
getSession().getEntityMode(),
getSession().getFactory()
);
final CacheKey ck = getSession().generateCacheKey( getId(), persister.getIdentifierType(), persister.getRootEntityName() );
boolean put = persister.getCacheAccessStrategy().afterInsert( ck, cacheEntry, version );

if ( put && getSession().getFactory().getStatistics().isStatisticsEnabled() ) {
@@ -97,12 +97,10 @@ public final class EntityUpdateAction extends EntityAction {
final CacheKey ck;
if ( persister.hasCache() ) {
ck = new CacheKey(
ck = session.generateCacheKey(
id,
persister.getIdentifierType(),
persister.getRootEntityName(),
session.getEntityMode(),
session.getFactory()
persister.getRootEntityName()
);
lock = persister.getCacheAccessStrategy().lockItem( ck, previousVersion );
}

@@ -251,12 +249,10 @@ public final class EntityUpdateAction extends EntityAction {
EntityPersister persister = getPersister();
if ( persister.hasCache() ) {

final CacheKey ck = new CacheKey(
final CacheKey ck = getSession().generateCacheKey(
getId(),
persister.getIdentifierType(),
persister.getRootEntityName(),
getSession().getEntityMode(),
getSession().getFactory()
persister.getRootEntityName()
);

if ( success && cacheEntry!=null /*!persister.isCacheInvalidationRequired()*/ ) {
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU

@@ -20,12 +20,14 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.cache;

import java.io.Serializable;

import org.hibernate.EntityMode;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.internal.util.compare.EqualsHelper;
import org.hibernate.type.Type;

/**

@@ -40,6 +42,7 @@ public class CacheKey implements Serializable {
private final Type type;
private final String entityOrRoleName;
private final EntityMode entityMode;
private final String tenantId;
private final int hashCode;

/**

@@ -50,7 +53,8 @@ public class CacheKey implements Serializable {
* @param id The identifier associated with the cached data
* @param type The Hibernate type mapping
* @param entityOrRoleName The entity or collection-role name.
* @param entityMode The entiyt mode of the originating session
* @param entityMode The entity mode of the originating session
* @param tenantId The tenant identifier associated this data.
* @param factory The session factory for which we are caching
*/
public CacheKey(

@@ -58,26 +62,34 @@ public class CacheKey implements Serializable {
final Type type,
final String entityOrRoleName,
final EntityMode entityMode,
final String tenantId,
final SessionFactoryImplementor factory) {
this.key = id;
this.type = type;
this.entityOrRoleName = entityOrRoleName;
this.entityMode = entityMode;
hashCode = type.getHashCode( key, entityMode, factory );
this.tenantId = tenantId;
this.hashCode = type.getHashCode( key, entityMode, factory );
}

//Mainly for OSCache
@Override
public String toString() {
// Mainly for OSCache
return entityOrRoleName + '#' + key.toString();//"CacheKey#" + type.toString(key, sf);
}

@Override
public boolean equals(Object other) {
if ( !(other instanceof CacheKey) ) return false;
if ( !(other instanceof CacheKey) ) {
return false;
}
CacheKey that = (CacheKey) other;
return entityOrRoleName.equals( that.entityOrRoleName )
&& type.isEqual( key, that.key, entityMode );
return entityOrRoleName.equals( that.entityOrRoleName ) &&
type.isEqual( key, that.key, entityMode ) &&
EqualsHelper.equals( tenantId, that.tenantId );
}

@Override
public int hashCode() {
return hashCode;
}
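The tenant id can legitimately be null (a session without multi-tenancy), which is why the comparison above goes through EqualsHelper.equals rather than calling tenantId.equals(...) directly. Roughly, that helper behaves like the null-safe check below (a paraphrase for readers unfamiliar with it, not the actual Hibernate source):

// Null-safe equality: two null tenant ids compare equal; a null and a non-null id do not.
static boolean nullSafeEquals(Object x, Object y) {
	return x == y || ( x != null && x.equals( y ) );
}

The practical effect is that two keys built for the same id and role but different tenants are never equal, so they map to distinct second-level cache entries.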
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU

@@ -20,20 +20,21 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.engine;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

import org.hibernate.EntityMode;
import org.hibernate.cache.CacheKey;
import org.hibernate.collection.PersistentCollection;
import org.hibernate.internal.util.MarkerObject;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.internal.util.MarkerObject;

/**
* Tracks entity and collection keys that are available for batch

@@ -198,7 +199,7 @@ public class BatchFetchQueue {
end = i;
//checkForEnd = false;
}
else if ( !isCached( ce.getLoadedKey(), collectionPersister, entityMode ) ) {
else if ( !isCached( ce.getLoadedKey(), collectionPersister ) ) {
keys[i++] = ce.getLoadedKey();
//count++;
}

@@ -248,7 +249,7 @@ public class BatchFetchQueue {
end = i;
}
else {
if ( !isCached( key, persister, entityMode ) ) {
if ( !isCached( key, persister ) ) {
ids[i++] = key.getIdentifier();
}
}

@@ -261,34 +262,24 @@ public class BatchFetchQueue {
return ids; //we ran out of ids to try
}

private boolean isCached(
EntityKey entityKey,
EntityPersister persister,
EntityMode entityMode) {
private boolean isCached(EntityKey entityKey, EntityPersister persister) {
if ( persister.hasCache() ) {
CacheKey key = new CacheKey(
CacheKey key = context.getSession().generateCacheKey(
entityKey.getIdentifier(),
persister.getIdentifierType(),
entityKey.getEntityName(),
entityMode,
context.getSession().getFactory()
entityKey.getEntityName()
);
return persister.getCacheAccessStrategy().get( key, context.getSession().getTimestamp() ) != null;
}
return false;
}

private boolean isCached(
Serializable collectionKey,
CollectionPersister persister,
EntityMode entityMode) {
private boolean isCached(Serializable collectionKey, CollectionPersister persister) {
if ( persister.hasCache() ) {
CacheKey cacheKey = new CacheKey(
CacheKey cacheKey = context.getSession().generateCacheKey(
collectionKey,
persister.getKeyType(),
persister.getRole(),
entityMode,
context.getSession().getFactory()
persister.getRole()
);
return persister.getCacheAccessStrategy().get( cacheKey, context.getSession().getTimestamp() ) != null;
}
@@ -340,17 +340,17 @@ public final class EntityEntry implements Serializable {
* @throws IOException If a stream error occurs
*/
void serialize(ObjectOutputStream oos) throws IOException {
oos.writeUTF( entityName );
oos.writeObject( entityName );
oos.writeObject( id );
oos.writeUTF( entityMode.toString() );
oos.writeUTF( tenantId );
oos.writeUTF( status.toString() );
oos.writeUTF( ( previousStatus == null ? "" : previousStatus.toString() ) );
oos.writeObject( entityMode.toString() );
oos.writeObject( tenantId );
oos.writeObject( status.toString() );
oos.writeObject( (previousStatus == null ? "" : previousStatus.toString()) );
// todo : potentially look at optimizing these two arrays
oos.writeObject( loadedState );
oos.writeObject( deletedState );
oos.writeObject( version );
oos.writeUTF( lockMode.toString() );
oos.writeObject( lockMode.toString() );
oos.writeBoolean( existsInDatabase );
oos.writeBoolean( isBeingReplicated );
oos.writeBoolean( loadedWithLazyPropertiesUnfetched );
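The switch from writeUTF to writeObject in this hunk is presumably motivated by the newly serialized tenantId, which may be null: DataOutput.writeUTF rejects null, while writeObject accepts it. A minimal JDK-only demonstration of that difference (illustrative, not Hibernate code):

import java.io.ByteArrayOutputStream;
import java.io.ObjectOutputStream;

public class NullSerializationDemo {
	public static void main(String[] args) throws Exception {
		ObjectOutputStream oos = new ObjectOutputStream( new ByteArrayOutputStream() );
		oos.writeObject( null );    // legal: the matching readObject() simply returns null
		oos.writeUTF( null );       // throws NullPointerException - null has no UTF encoding
	}
}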
@@ -375,11 +375,11 @@ public final class EntityEntry implements Serializable {
String previousStatusString = null;
return new EntityEntry(
( session == null ? null : session.getFactory() ),
ois.readUTF(),
(String) ois.readObject(),
( Serializable ) ois.readObject(),
EntityMode.parse( ois.readUTF() ),
ois.readUTF(),
Status.parse( ois.readUTF() ),
EntityMode.parse( (String) ois.readObject() ),
(String) ois.readObject(),
Status.parse( (String) ois.readObject() ),
( ( previousStatusString = ( String ) ois.readObject() ).length() == 0 ?
null :
Status.parse( previousStatusString )

@@ -387,7 +387,7 @@ public final class EntityEntry implements Serializable {
( Object[] ) ois.readObject(),
( Object[] ) ois.readObject(),
ois.readObject(),
LockMode.parse( ois.readUTF() ),
LockMode.parse( (String) ois.readObject() ),
ois.readBoolean(),
ois.readBoolean(),
ois.readBoolean()
@@ -30,6 +30,7 @@ import java.io.Serializable;
import org.hibernate.AssertionFailure;
import org.hibernate.EntityMode;
import org.hibernate.internal.util.compare.EqualsHelper;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.type.Type;

@@ -136,8 +137,9 @@ public final class EntityKey implements Serializable {
@Override
public boolean equals(Object other) {
EntityKey otherKey = (EntityKey) other;
return otherKey.rootEntityName.equals(this.rootEntityName) &&
identifierType.isEqual(otherKey.identifier, this.identifier, entityMode, factory);
return otherKey.rootEntityName.equals(this.rootEntityName) &&
identifierType.isEqual(otherKey.identifier, this.identifier, entityMode, factory) &&
EqualsHelper.equals( tenantId, otherKey.tenantId );
}

@Override

@@ -161,12 +163,12 @@ public final class EntityKey implements Serializable {
*/
void serialize(ObjectOutputStream oos) throws IOException {
oos.writeObject( identifier );
oos.writeUTF( rootEntityName );
oos.writeUTF( entityName );
oos.writeObject( rootEntityName );
oos.writeObject( entityName );
oos.writeObject( identifierType );
oos.writeBoolean( isBatchLoadable );
oos.writeUTF( entityMode.toString() );
oos.writeUTF( tenantId );
oos.writeObject( entityMode.toString() );
oos.writeObject( tenantId );
}

/**

@@ -186,13 +188,13 @@ public final class EntityKey implements Serializable {
SessionImplementor session) throws IOException, ClassNotFoundException {
return new EntityKey(
( Serializable ) ois.readObject(),
ois.readUTF(),
ois.readUTF(),
(String) ois.readObject(),
(String) ois.readObject(),
( Type ) ois.readObject(),
ois.readBoolean(),
( session == null ? null : session.getFactory() ),
EntityMode.parse( ois.readUTF() ),
ois.readUTF()
EntityMode.parse( (String) ois.readObject() ),
(String) ois.readObject()
);
}
}
@@ -36,6 +36,7 @@ import org.hibernate.Interceptor;
import org.hibernate.Query;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.cache.CacheKey;
import org.hibernate.collection.PersistentCollection;
import org.hibernate.engine.jdbc.LobCreationContext;
import org.hibernate.engine.jdbc.spi.JdbcConnectionAccess;

@@ -80,6 +81,17 @@ public interface SessionImplementor extends Serializable, LobCreationContext {
*/
public EntityKey generateEntityKey(Serializable id, EntityPersister persister);

/**
* Hide the changing requirements of cache key creation.
*
* @param id The entity identifier or collection key.
* @param type The type
* @param entityOrRoleName The entity name or collection role.
*
* @return The cache key
*/
public CacheKey generateCacheKey(Serializable id, final Type type, final String entityOrRoleName);

/**
* Retrieves the interceptor currently in use by this event source.
*
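For reference, the call sites changed elsewhere in this commit adopt the new method in the shape shown below (a condensed fragment rather than a compilable unit; id, version, session and persister come from the surrounding action or listener class):

// Before: callers built a CacheKey themselves, passing entity mode and session factory.
// After: the session builds the key, so it can also fold in its tenant identifier.
final CacheKey ck = session.generateCacheKey(
		id,
		persister.getIdentifierType(),
		persister.getRootEntityName()
);
final SoftLock lock = persister.getCacheAccessStrategy().lockItem( ck, version );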
@@ -22,26 +22,29 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.engine;

import java.io.Serializable;

import org.jboss.logging.Logger;

import org.hibernate.AssertionFailure;
import org.hibernate.CacheMode;
import org.hibernate.HibernateException;
import org.hibernate.HibernateLogger;
import org.hibernate.LockMode;
import org.hibernate.bytecode.instrumentation.spi.LazyPropertyInitializer;
import org.hibernate.cache.CacheKey;
import org.hibernate.cache.entry.CacheEntry;
import org.hibernate.event.PostLoadEvent;
import org.hibernate.event.PostLoadEventListener;
import org.hibernate.event.PreLoadEvent;
import org.hibernate.event.PreLoadEventListener;
import org.hibernate.bytecode.instrumentation.spi.LazyPropertyInitializer;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.property.BackrefPropertyAccessor;
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.type.Type;
import org.hibernate.type.TypeHelper;
import org.jboss.logging.Logger;

/**
* Functionality relating to Hibernate's two-phase loading process,

@@ -161,13 +164,7 @@ public final class TwoPhaseLoad {
session,
entity
);
CacheKey cacheKey = new CacheKey(
id,
persister.getIdentifierType(),
persister.getRootEntityName(),
session.getEntityMode(),
session.getFactory()
);
CacheKey cacheKey = session.generateCacheKey( id, persister.getIdentifierType(), persister.getRootEntityName() );

// explicit handling of caching for rows just inserted and then somehow forced to be read
// from the database *within the same transaction*. usually this is done by
@@ -1,347 +1,343 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* distributed under license by Red Hat Middleware LLC.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
* copy, or redistribute it subject to the terms and conditions of the GNU
|
||||
* Lesser General Public License, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
* for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with this distribution; if not, write to:
|
||||
* Free Software Foundation, Inc.
|
||||
* 51 Franklin Street, Fifth Floor
|
||||
* Boston, MA 02110-1301 USA
|
||||
*
|
||||
*/
|
||||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* distributed under license by Red Hat Inc.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
* copy, or redistribute it subject to the terms and conditions of the GNU
|
||||
* Lesser General Public License, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
* for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with this distribution; if not, write to:
|
||||
* Free Software Foundation, Inc.
|
||||
* 51 Franklin Street, Fifth Floor
|
||||
* Boston, MA 02110-1301 USA
|
||||
*/
|
||||
package org.hibernate.engine.loading;
|
||||
import java.io.Serializable;
|
||||
import java.sql.ResultSet;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import org.hibernate.CacheMode;
|
||||
import org.hibernate.EntityMode;
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.HibernateLogger;
|
||||
import org.hibernate.cache.CacheKey;
|
||||
import org.hibernate.cache.entry.CollectionCacheEntry;
|
||||
import org.hibernate.collection.PersistentCollection;
|
||||
import org.hibernate.engine.CollectionEntry;
|
||||
import org.hibernate.engine.CollectionKey;
|
||||
import org.hibernate.engine.SessionFactoryImplementor;
|
||||
import org.hibernate.engine.SessionImplementor;
|
||||
import org.hibernate.engine.Status;
|
||||
import org.hibernate.persister.collection.CollectionPersister;
|
||||
import org.hibernate.pretty.MessageHelper;
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
/**
|
||||
* Represents state associated with the processing of a given {@link ResultSet}
|
||||
* in regards to loading collections.
|
||||
* <p/>
|
||||
* Another implementation option to consider is to not expose {@link ResultSet}s
|
||||
* directly (in the JDBC redesign) but to always "wrap" them and apply a
|
||||
* [series of] context[s] to that wrapper.
|
||||
*
|
||||
* @author Steve Ebersole
|
||||
*/
|
||||
public class CollectionLoadContext {
|
||||
|
||||
private static final HibernateLogger LOG = Logger.getMessageLogger(HibernateLogger.class, CollectionLoadContext.class.getName());
|
||||
|
||||
private final LoadContexts loadContexts;
|
||||
private final ResultSet resultSet;
|
||||
private Set localLoadingCollectionKeys = new HashSet();
|
||||
|
||||
/**
|
||||
* Creates a collection load context for the given result set.
|
||||
*
|
||||
* @param loadContexts Callback to other collection load contexts.
|
||||
* @param resultSet The result set this is "wrapping".
|
||||
*/
|
||||
public CollectionLoadContext(LoadContexts loadContexts, ResultSet resultSet) {
|
||||
this.loadContexts = loadContexts;
|
||||
this.resultSet = resultSet;
|
||||
}
|
||||
|
||||
public ResultSet getResultSet() {
|
||||
return resultSet;
|
||||
}
|
||||
|
||||
public LoadContexts getLoadContext() {
|
||||
return loadContexts;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the collection that is being loaded as part of processing this
|
||||
* result set.
|
||||
* <p/>
|
||||
* Basically, there are two valid return values from this method:<ul>
|
||||
* <li>an instance of {@link PersistentCollection} which indicates to
|
||||
* continue loading the result set row data into that returned collection
|
||||
* instance; this may be either an instance already associated and in the
|
||||
* midst of being loaded, or a newly instantiated instance as a matching
|
||||
* associated collection was not found.</li>
|
||||
* <li><i>null</i> indicates to ignore the corresponding result set row
|
||||
* data relating to the requested collection; this indicates that either
|
||||
* the collection was found to already be associated with the persistence
|
||||
* context in a fully loaded state, or it was found in a loading state
|
||||
* associated with another result set processing context.</li>
|
||||
* </ul>
|
||||
*
|
||||
* @param persister The persister for the collection being requested.
|
||||
* @param key The key of the collection being requested.
|
||||
*
|
||||
* @return The loading collection (see discussion above).
|
||||
*/
|
||||
public PersistentCollection getLoadingCollection(final CollectionPersister persister, final Serializable key) {
|
||||
final EntityMode em = loadContexts.getPersistenceContext().getSession().getEntityMode();
|
||||
final CollectionKey collectionKey = new CollectionKey( persister, key, em );
|
||||
if (LOG.isTraceEnabled()) LOG.trace("Starting attempt to find loading collection ["
|
||||
+ MessageHelper.collectionInfoString(persister.getRole(), key) + "]");
|
||||
final LoadingCollectionEntry loadingCollectionEntry = loadContexts.locateLoadingCollectionEntry( collectionKey );
|
||||
if ( loadingCollectionEntry == null ) {
|
||||
// look for existing collection as part of the persistence context
|
||||
PersistentCollection collection = loadContexts.getPersistenceContext().getCollection( collectionKey );
|
||||
if ( collection != null ) {
|
||||
if ( collection.wasInitialized() ) {
|
||||
LOG.trace("Collection already initialized; ignoring");
|
||||
return null; // ignore this row of results! Note the early exit
|
||||
}
|
||||
LOG.trace("Collection not yet initialized; initializing");
|
||||
}
|
||||
else {
|
||||
Object owner = loadContexts.getPersistenceContext().getCollectionOwner( key, persister );
|
||||
final boolean newlySavedEntity = owner != null
|
||||
&& loadContexts.getPersistenceContext().getEntry( owner ).getStatus() != Status.LOADING
|
||||
&& em != EntityMode.DOM4J;
|
||||
if ( newlySavedEntity ) {
|
||||
// important, to account for newly saved entities in query
|
||||
// todo : some kind of check for new status...
|
||||
LOG.trace("Owning entity already loaded; ignoring");
|
||||
return null;
|
||||
}
|
||||
// create one
|
||||
LOG.trace("Instantiating new collection [key=" + key + ", rs=" + resultSet + "]");
|
||||
collection = persister.getCollectionType().instantiate(loadContexts.getPersistenceContext().getSession(),
|
||||
persister,
|
||||
key);
|
||||
}
|
||||
collection.beforeInitialize( persister, -1 );
|
||||
collection.beginRead();
|
||||
localLoadingCollectionKeys.add( collectionKey );
|
||||
loadContexts.registerLoadingCollectionXRef( collectionKey, new LoadingCollectionEntry( resultSet, persister, key, collection ) );
|
||||
return collection;
|
||||
}
|
||||
if (loadingCollectionEntry.getResultSet() == resultSet) {
|
||||
LOG.trace("Found loading collection bound to current result set processing; reading row");
|
||||
return loadingCollectionEntry.getCollection();
|
||||
}
|
||||
// ignore this row, the collection is in process of
|
||||
// being loaded somewhere further "up" the stack
|
||||
LOG.trace("Collection is already being initialized; ignoring row");
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Finish the process of collection-loading for this bound result set. Mainly this
|
||||
* involves cleaning up resources and notifying the collections that loading is
|
||||
* complete.
|
||||
*
|
||||
* @param persister The persister for which to complete loading.
|
||||
*/
|
||||
public void endLoadingCollections(CollectionPersister persister) {
|
||||
SessionImplementor session = getLoadContext().getPersistenceContext().getSession();
|
||||
if ( !loadContexts.hasLoadingCollectionEntries()
|
||||
&& localLoadingCollectionKeys.isEmpty() ) {
|
||||
return;
|
||||
}
|
||||
|
||||
// in an effort to avoid concurrent-modification-exceptions (from
|
||||
// potential recursive calls back through here as a result of the
|
||||
// eventual call to PersistentCollection#endRead), we scan the
|
||||
// internal loadingCollections map for matches and store those matches
|
||||
// in a temp collection. the temp collection is then used to "drive"
|
||||
// the #endRead processing.
|
||||
List matches = null;
|
||||
Iterator iter = localLoadingCollectionKeys.iterator();
|
||||
while ( iter.hasNext() ) {
|
||||
final CollectionKey collectionKey = (CollectionKey) iter.next();
|
||||
final LoadingCollectionEntry lce = loadContexts.locateLoadingCollectionEntry( collectionKey );
|
||||
if (lce == null) LOG.loadingCollectionKeyNotFound(collectionKey);
|
||||
else if ( lce.getResultSet() == resultSet && lce.getPersister() == persister ) {
|
||||
if ( matches == null ) {
|
||||
matches = new ArrayList();
|
||||
}
|
||||
matches.add( lce );
|
||||
if ( lce.getCollection().getOwner() == null ) {
|
||||
session.getPersistenceContext().addUnownedCollection(
|
||||
new CollectionKey( persister, lce.getKey(), session.getEntityMode() ),
|
||||
lce.getCollection()
|
||||
);
|
||||
}
|
||||
LOG.trace("Removing collection load entry [" + lce + "]");
|
||||
|
||||
// todo : i'd much rather have this done from #endLoadingCollection(CollectionPersister,LoadingCollectionEntry)...
|
||||
loadContexts.unregisterLoadingCollectionXRef( collectionKey );
|
||||
iter.remove();
|
||||
}
|
||||
}
|
||||
|
||||
endLoadingCollections( persister, matches );
|
||||
if ( localLoadingCollectionKeys.isEmpty() ) {
|
||||
// todo : hack!!!
|
||||
// NOTE : here we cleanup the load context when we have no more local
|
||||
// LCE entries. This "works" for the time being because really
|
||||
// only the collection load contexts are implemented. Long term,
|
||||
// this cleanup should become part of the "close result set"
|
||||
// processing from the (sandbox/jdbc) jdbc-container code.
|
||||
loadContexts.cleanup( resultSet );
|
||||
}
|
||||
}
|
||||
|
||||
private void endLoadingCollections(CollectionPersister persister, List matchedCollectionEntries) {
|
||||
if ( matchedCollectionEntries == null ) {
|
||||
LOG.debugf("No collections were found in result set for role: %s", persister.getRole());
|
||||
return;
|
||||
}
|
||||
|
||||
final int count = matchedCollectionEntries.size();
|
||||
LOG.debugf("%s collections were found in result set for role: %s", count, persister.getRole());
|
||||
|
||||
for ( int i = 0; i < count; i++ ) {
|
||||
LoadingCollectionEntry lce = ( LoadingCollectionEntry ) matchedCollectionEntries.get( i );
|
||||
endLoadingCollection( lce, persister );
|
||||
}
|
||||
|
||||
LOG.debugf("%s collections initialized for role: %s", count, persister.getRole());
|
||||
}
|
||||
|
||||
private void endLoadingCollection(LoadingCollectionEntry lce, CollectionPersister persister) {
|
||||
LOG.trace("Ending loading collection [" + lce + "]");
|
||||
final SessionImplementor session = getLoadContext().getPersistenceContext().getSession();
|
||||
final EntityMode em = session.getEntityMode();
|
||||
|
||||
boolean hasNoQueuedAdds = lce.getCollection().endRead(); // warning: can cause a recursive calls! (proxy initialization)
|
||||
|
||||
if ( persister.getCollectionType().hasHolder( em ) ) {
|
||||
getLoadContext().getPersistenceContext().addCollectionHolder( lce.getCollection() );
|
||||
}
|
||||
|
||||
CollectionEntry ce = getLoadContext().getPersistenceContext().getCollectionEntry( lce.getCollection() );
|
||||
if ( ce == null ) {
|
||||
ce = getLoadContext().getPersistenceContext().addInitializedCollection( persister, lce.getCollection(), lce.getKey() );
|
||||
}
|
||||
else {
|
||||
ce.postInitialize( lce.getCollection() );
|
||||
}
|
||||
|
||||
boolean addToCache = hasNoQueuedAdds && // there were no queued additions
|
||||
persister.hasCache() && // and the role has a cache
|
||||
session.getCacheMode().isPutEnabled() &&
|
||||
!ce.isDoremove(); // and this is not a forced initialization during flush
|
||||
if (addToCache) addCollectionToCache(lce, persister);
|
||||
|
||||
if (LOG.isDebugEnabled()) LOG.debugf("Collection fully initialized: %s",
|
||||
MessageHelper.collectionInfoString(persister, lce.getKey(), session.getFactory()));
|
||||
if (session.getFactory().getStatistics().isStatisticsEnabled()) session.getFactory().getStatisticsImplementor().loadCollection(persister.getRole());
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the collection to the second-level cache
|
||||
*
|
||||
* @param lce The entry representing the collection to add
|
||||
* @param persister The persister
|
||||
*/
|
||||
private void addCollectionToCache(LoadingCollectionEntry lce, CollectionPersister persister) {
|
||||
final SessionImplementor session = getLoadContext().getPersistenceContext().getSession();
|
||||
final SessionFactoryImplementor factory = session.getFactory();
|
||||
|
||||
if (LOG.isDebugEnabled()) LOG.debugf("Caching collection: %s",
|
||||
MessageHelper.collectionInfoString(persister, lce.getKey(), factory));
|
||||
|
||||
if ( !session.getEnabledFilters().isEmpty() && persister.isAffectedByEnabledFilters( session ) ) {
|
||||
// some filters affecting the collection are enabled on the session, so do not do the put into the cache.
|
||||
LOG.debugf("Refusing to add to cache due to enabled filters");
|
||||
// todo : add the notion of enabled filters to the CacheKey to differentiate filtered collections from non-filtered;
|
||||
// but CacheKey is currently used for both collections and entities; would ideally need to define two seperate ones;
|
||||
// currently this works in conjuction with the check on
|
||||
// DefaultInitializeCollectionEventHandler.initializeCollectionFromCache() (which makes sure to not read from
|
||||
// cache with enabled filters).
|
||||
return; // EARLY EXIT!!!!!
|
||||
}
|
||||
|
||||
final Object version;
|
||||
if ( persister.isVersioned() ) {
|
||||
Object collectionOwner = getLoadContext().getPersistenceContext().getCollectionOwner( lce.getKey(), persister );
|
||||
if ( collectionOwner == null ) {
|
||||
// generally speaking this would be caused by the collection key being defined by a property-ref, thus
|
||||
// the collection key and the owner key would not match up. In this case, try to use the key of the
|
||||
// owner instance associated with the collection itself, if one. If the collection does already know
|
||||
// about its owner, that owner should be the same instance as associated with the PC, but we do the
|
||||
// resolution against the PC anyway just to be safe since the lookup should not be costly.
|
||||
if ( lce.getCollection() != null ) {
|
||||
Object linkedOwner = lce.getCollection().getOwner();
|
||||
if ( linkedOwner != null ) {
|
||||
final Serializable ownerKey = persister.getOwnerEntityPersister().getIdentifier( linkedOwner, session );
|
||||
collectionOwner = getLoadContext().getPersistenceContext().getCollectionOwner( ownerKey, persister );
|
||||
}
|
||||
}
|
||||
if ( collectionOwner == null ) {
|
||||
throw new HibernateException(
|
||||
"Unable to resolve owner of loading collection [" +
|
||||
MessageHelper.collectionInfoString( persister, lce.getKey(), factory ) +
|
||||
"] for second level caching"
|
||||
);
|
||||
}
|
||||
}
|
||||
version = getLoadContext().getPersistenceContext().getEntry( collectionOwner ).getVersion();
|
||||
}
|
||||
else {
|
||||
version = null;
|
||||
}
|
||||
|
||||
CollectionCacheEntry entry = new CollectionCacheEntry( lce.getCollection(), persister );
|
||||
CacheKey cacheKey = new CacheKey(
|
||||
lce.getKey(),
|
||||
persister.getKeyType(),
|
||||
persister.getRole(),
|
||||
session.getEntityMode(),
|
||||
session.getFactory()
|
||||
);
|
||||
boolean put = persister.getCacheAccessStrategy().putFromLoad(
|
||||
cacheKey,
|
||||
persister.getCacheEntryStructure().structure(entry),
|
||||
session.getTimestamp(),
|
||||
version,
|
||||
factory.getSettings().isMinimalPutsEnabled() && session.getCacheMode()!= CacheMode.REFRESH
|
||||
);
|
||||
|
||||
if ( put && factory.getStatistics().isStatisticsEnabled() ) {
|
||||
factory.getStatisticsImplementor().secondLevelCachePut( persister.getCacheAccessStrategy().getRegion().getName() );
|
||||
}
|
||||
}
|
||||
|
||||
void cleanup() {
|
||||
if (!localLoadingCollectionKeys.isEmpty()) LOG.localLoadingCollectionKeysCount(localLoadingCollectionKeys.size());
|
||||
loadContexts.cleanupCollectionXRefs( localLoadingCollectionKeys );
|
||||
localLoadingCollectionKeys.clear();
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return super.toString() + "<rs=" + resultSet + ">";
|
||||
}
|
||||
}
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.sql.ResultSet;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
import org.hibernate.CacheMode;
|
||||
import org.hibernate.EntityMode;
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.HibernateLogger;
|
||||
import org.hibernate.cache.CacheKey;
|
||||
import org.hibernate.cache.entry.CollectionCacheEntry;
|
||||
import org.hibernate.collection.PersistentCollection;
|
||||
import org.hibernate.engine.CollectionEntry;
|
||||
import org.hibernate.engine.CollectionKey;
|
||||
import org.hibernate.engine.SessionFactoryImplementor;
|
||||
import org.hibernate.engine.SessionImplementor;
|
||||
import org.hibernate.engine.Status;
|
||||
import org.hibernate.persister.collection.CollectionPersister;
|
||||
import org.hibernate.pretty.MessageHelper;
|
||||
|
||||
/**
|
||||
* Represents state associated with the processing of a given {@link ResultSet}
|
||||
* in regards to loading collections.
|
||||
* <p/>
|
||||
* Another implementation option to consider is to not expose {@link ResultSet}s
|
||||
* directly (in the JDBC redesign) but to always "wrap" them and apply a
|
||||
* [series of] context[s] to that wrapper.
|
||||
*
|
||||
* @author Steve Ebersole
|
||||
*/
|
||||
public class CollectionLoadContext {
|
||||
|
||||
private static final HibernateLogger LOG = Logger.getMessageLogger(HibernateLogger.class, CollectionLoadContext.class.getName());
|
||||
|
||||
private final LoadContexts loadContexts;
|
||||
private final ResultSet resultSet;
|
||||
private Set localLoadingCollectionKeys = new HashSet();
|
||||
|
||||
/**
|
||||
* Creates a collection load context for the given result set.
|
||||
*
|
||||
* @param loadContexts Callback to other collection load contexts.
|
||||
* @param resultSet The result set this is "wrapping".
|
||||
*/
|
||||
public CollectionLoadContext(LoadContexts loadContexts, ResultSet resultSet) {
|
||||
this.loadContexts = loadContexts;
|
||||
this.resultSet = resultSet;
|
||||
}
|
||||
|
||||
public ResultSet getResultSet() {
|
||||
return resultSet;
|
||||
}
|
||||
|
||||
public LoadContexts getLoadContext() {
|
||||
return loadContexts;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the collection that is being loaded as part of processing this
|
||||
* result set.
|
||||
* <p/>
|
||||
* Basically, there are two valid return values from this method:<ul>
|
||||
* <li>an instance of {@link PersistentCollection} which indicates to
|
||||
* continue loading the result set row data into that returned collection
|
||||
* instance; this may be either an instance already associated and in the
|
||||
* midst of being loaded, or a newly instantiated instance as a matching
|
||||
* associated collection was not found.</li>
|
||||
* <li><i>null</i> indicates to ignore the corresponding result set row
|
||||
* data relating to the requested collection; this indicates that either
|
||||
* the collection was found to already be associated with the persistence
|
||||
* context in a fully loaded state, or it was found in a loading state
|
||||
* associated with another result set processing context.</li>
|
||||
* </ul>
|
||||
*
|
||||
* @param persister The persister for the collection being requested.
|
||||
* @param key The key of the collection being requested.
|
||||
*
|
||||
* @return The loading collection (see discussion above).
|
||||
*/
|
||||
public PersistentCollection getLoadingCollection(final CollectionPersister persister, final Serializable key) {
|
||||
final EntityMode em = loadContexts.getPersistenceContext().getSession().getEntityMode();
|
||||
final CollectionKey collectionKey = new CollectionKey( persister, key, em );
|
||||
if (LOG.isTraceEnabled()) LOG.trace("Starting attempt to find loading collection ["
|
||||
+ MessageHelper.collectionInfoString(persister.getRole(), key) + "]");
|
||||
final LoadingCollectionEntry loadingCollectionEntry = loadContexts.locateLoadingCollectionEntry( collectionKey );
|
||||
if ( loadingCollectionEntry == null ) {
|
||||
// look for existing collection as part of the persistence context
|
||||
PersistentCollection collection = loadContexts.getPersistenceContext().getCollection( collectionKey );
|
||||
if ( collection != null ) {
|
||||
if ( collection.wasInitialized() ) {
|
||||
LOG.trace("Collection already initialized; ignoring");
|
||||
return null; // ignore this row of results! Note the early exit
|
||||
}
|
||||
LOG.trace("Collection not yet initialized; initializing");
|
||||
}
|
||||
else {
|
||||
Object owner = loadContexts.getPersistenceContext().getCollectionOwner( key, persister );
|
||||
final boolean newlySavedEntity = owner != null
|
||||
&& loadContexts.getPersistenceContext().getEntry( owner ).getStatus() != Status.LOADING
|
||||
&& em != EntityMode.DOM4J;
|
||||
if ( newlySavedEntity ) {
|
||||
// important, to account for newly saved entities in query
|
||||
// todo : some kind of check for new status...
|
||||
LOG.trace("Owning entity already loaded; ignoring");
|
||||
return null;
|
||||
}
|
||||
// create one
|
||||
LOG.trace("Instantiating new collection [key=" + key + ", rs=" + resultSet + "]");
|
||||
collection = persister.getCollectionType().instantiate(loadContexts.getPersistenceContext().getSession(),
|
||||
persister,
|
||||
key);
|
||||
}
|
||||
collection.beforeInitialize( persister, -1 );
|
||||
collection.beginRead();
|
||||
localLoadingCollectionKeys.add( collectionKey );
|
||||
loadContexts.registerLoadingCollectionXRef( collectionKey, new LoadingCollectionEntry( resultSet, persister, key, collection ) );
|
||||
return collection;
|
||||
}
|
||||
if (loadingCollectionEntry.getResultSet() == resultSet) {
|
||||
LOG.trace("Found loading collection bound to current result set processing; reading row");
|
||||
return loadingCollectionEntry.getCollection();
|
||||
}
|
||||
// ignore this row, the collection is in process of
|
||||
// being loaded somewhere further "up" the stack
|
||||
LOG.trace("Collection is already being initialized; ignoring row");
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Finish the process of collection-loading for this bound result set. Mainly this
|
||||
* involves cleaning up resources and notifying the collections that loading is
|
||||
* complete.
|
||||
*
|
||||
* @param persister The persister for which to complete loading.
|
||||
*/
|
||||
public void endLoadingCollections(CollectionPersister persister) {
|
||||
SessionImplementor session = getLoadContext().getPersistenceContext().getSession();
|
||||
if ( !loadContexts.hasLoadingCollectionEntries()
|
||||
&& localLoadingCollectionKeys.isEmpty() ) {
|
||||
return;
|
||||
}
|
||||
|
||||
// in an effort to avoid concurrent-modification-exceptions (from
|
||||
// potential recursive calls back through here as a result of the
|
||||
// eventual call to PersistentCollection#endRead), we scan the
|
||||
// internal loadingCollections map for matches and store those matches
|
||||
// in a temp collection. the temp collection is then used to "drive"
|
||||
// the #endRead processing.
|
||||
List matches = null;
|
||||
Iterator iter = localLoadingCollectionKeys.iterator();
|
||||
while ( iter.hasNext() ) {
|
||||
final CollectionKey collectionKey = (CollectionKey) iter.next();
|
||||
final LoadingCollectionEntry lce = loadContexts.locateLoadingCollectionEntry( collectionKey );
|
||||
if (lce == null) LOG.loadingCollectionKeyNotFound(collectionKey);
|
||||
else if ( lce.getResultSet() == resultSet && lce.getPersister() == persister ) {
|
||||
if ( matches == null ) {
|
||||
matches = new ArrayList();
|
||||
}
|
||||
matches.add( lce );
|
||||
if ( lce.getCollection().getOwner() == null ) {
|
||||
session.getPersistenceContext().addUnownedCollection(
|
||||
new CollectionKey( persister, lce.getKey(), session.getEntityMode() ),
|
||||
lce.getCollection()
|
||||
);
|
||||
}
|
||||
LOG.trace("Removing collection load entry [" + lce + "]");
|
||||
|
||||
// todo : i'd much rather have this done from #endLoadingCollection(CollectionPersister,LoadingCollectionEntry)...
|
||||
loadContexts.unregisterLoadingCollectionXRef( collectionKey );
|
||||
iter.remove();
|
||||
}
|
||||
}
|
||||
|
||||
endLoadingCollections( persister, matches );
|
||||
if ( localLoadingCollectionKeys.isEmpty() ) {
|
||||
// todo : hack!!!
|
||||
// NOTE : here we cleanup the load context when we have no more local
|
||||
// LCE entries. This "works" for the time being because really
|
||||
// only the collection load contexts are implemented. Long term,
|
||||
// this cleanup should become part of the "close result set"
|
||||
// processing from the (sandbox/jdbc) jdbc-container code.
|
||||
loadContexts.cleanup( resultSet );
|
||||
}
|
||||
}
|
||||
|
||||
private void endLoadingCollections(CollectionPersister persister, List matchedCollectionEntries) {
|
||||
if ( matchedCollectionEntries == null ) {
|
||||
LOG.debugf("No collections were found in result set for role: %s", persister.getRole());
|
||||
return;
|
||||
}
|
||||
|
||||
final int count = matchedCollectionEntries.size();
|
||||
LOG.debugf("%s collections were found in result set for role: %s", count, persister.getRole());
|
||||
|
||||
for ( int i = 0; i < count; i++ ) {
|
||||
LoadingCollectionEntry lce = ( LoadingCollectionEntry ) matchedCollectionEntries.get( i );
|
||||
endLoadingCollection( lce, persister );
|
||||
}
|
||||
|
||||
LOG.debugf("%s collections initialized for role: %s", count, persister.getRole());
|
||||
}
|
||||
|
||||
private void endLoadingCollection(LoadingCollectionEntry lce, CollectionPersister persister) {
|
||||
LOG.trace("Ending loading collection [" + lce + "]");
|
||||
final SessionImplementor session = getLoadContext().getPersistenceContext().getSession();
|
||||
final EntityMode em = session.getEntityMode();
|
||||
|
||||
boolean hasNoQueuedAdds = lce.getCollection().endRead(); // warning: can cause a recursive calls! (proxy initialization)
|
||||
|
||||
if ( persister.getCollectionType().hasHolder( em ) ) {
|
||||
getLoadContext().getPersistenceContext().addCollectionHolder( lce.getCollection() );
|
||||
}
|
||||
|
||||
CollectionEntry ce = getLoadContext().getPersistenceContext().getCollectionEntry( lce.getCollection() );
|
||||
if ( ce == null ) {
|
||||
ce = getLoadContext().getPersistenceContext().addInitializedCollection( persister, lce.getCollection(), lce.getKey() );
|
||||
}
|
||||
else {
|
||||
ce.postInitialize( lce.getCollection() );
|
||||
}
|
||||
|
||||
boolean addToCache = hasNoQueuedAdds && // there were no queued additions
|
||||
persister.hasCache() && // and the role has a cache
|
||||
session.getCacheMode().isPutEnabled() &&
|
||||
!ce.isDoremove(); // and this is not a forced initialization during flush
|
||||
if (addToCache) addCollectionToCache(lce, persister);
|
||||
|
||||
if (LOG.isDebugEnabled()) LOG.debugf("Collection fully initialized: %s",
|
||||
MessageHelper.collectionInfoString(persister, lce.getKey(), session.getFactory()));
|
||||
if (session.getFactory().getStatistics().isStatisticsEnabled()) session.getFactory().getStatisticsImplementor().loadCollection(persister.getRole());
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the collection to the second-level cache
|
||||
*
|
||||
* @param lce The entry representing the collection to add
|
||||
* @param persister The persister
|
||||
*/
|
||||
private void addCollectionToCache(LoadingCollectionEntry lce, CollectionPersister persister) {
|
||||
final SessionImplementor session = getLoadContext().getPersistenceContext().getSession();
|
||||
final SessionFactoryImplementor factory = session.getFactory();
|
||||
|
||||
if (LOG.isDebugEnabled()) LOG.debugf("Caching collection: %s",
|
||||
MessageHelper.collectionInfoString(persister, lce.getKey(), factory));
|
||||
|
||||
if ( !session.getEnabledFilters().isEmpty() && persister.isAffectedByEnabledFilters( session ) ) {
|
||||
// some filters affecting the collection are enabled on the session, so do not do the put into the cache.
|
||||
LOG.debugf("Refusing to add to cache due to enabled filters");
|
||||
// todo : add the notion of enabled filters to the CacheKey to differentiate filtered collections from non-filtered;
|
||||
// but CacheKey is currently used for both collections and entities; would ideally need to define two seperate ones;
|
||||
// currently this works in conjuction with the check on
|
||||
// DefaultInitializeCollectionEventHandler.initializeCollectionFromCache() (which makes sure to not read from
|
||||
// cache with enabled filters).
|
||||
return; // EARLY EXIT!!!!!
|
||||
}
|
||||
|
||||
final Object version;
|
||||
if ( persister.isVersioned() ) {
|
||||
Object collectionOwner = getLoadContext().getPersistenceContext().getCollectionOwner( lce.getKey(), persister );
|
||||
if ( collectionOwner == null ) {
|
||||
// generally speaking this would be caused by the collection key being defined by a property-ref, thus
|
||||
// the collection key and the owner key would not match up. In this case, try to use the key of the
|
||||
// owner instance associated with the collection itself, if one. If the collection does already know
|
||||
// about its owner, that owner should be the same instance as associated with the PC, but we do the
|
||||
// resolution against the PC anyway just to be safe since the lookup should not be costly.
|
||||
if ( lce.getCollection() != null ) {
|
||||
Object linkedOwner = lce.getCollection().getOwner();
|
||||
if ( linkedOwner != null ) {
|
||||
final Serializable ownerKey = persister.getOwnerEntityPersister().getIdentifier( linkedOwner, session );
|
||||
collectionOwner = getLoadContext().getPersistenceContext().getCollectionOwner( ownerKey, persister );
|
||||
}
|
||||
}
|
||||
if ( collectionOwner == null ) {
|
||||
throw new HibernateException(
|
||||
"Unable to resolve owner of loading collection [" +
|
||||
MessageHelper.collectionInfoString( persister, lce.getKey(), factory ) +
|
||||
"] for second level caching"
|
||||
);
|
||||
}
|
||||
}
|
||||
version = getLoadContext().getPersistenceContext().getEntry( collectionOwner ).getVersion();
|
||||
}
|
||||
else {
|
||||
version = null;
|
||||
}
|
||||
|
||||
CollectionCacheEntry entry = new CollectionCacheEntry( lce.getCollection(), persister );
|
||||
CacheKey cacheKey = session.generateCacheKey( lce.getKey(), persister.getKeyType(), persister.getRole() );
|
||||
boolean put = persister.getCacheAccessStrategy().putFromLoad(
|
||||
cacheKey,
|
||||
persister.getCacheEntryStructure().structure(entry),
|
||||
session.getTimestamp(),
|
||||
version,
|
||||
factory.getSettings().isMinimalPutsEnabled() && session.getCacheMode()!= CacheMode.REFRESH
|
||||
);
|
||||
|
||||
if ( put && factory.getStatistics().isStatisticsEnabled() ) {
|
||||
factory.getStatisticsImplementor().secondLevelCachePut( persister.getCacheAccessStrategy().getRegion().getName() );
|
||||
}
|
||||
}
|
||||
|
||||
void cleanup() {
|
||||
if (!localLoadingCollectionKeys.isEmpty()) LOG.localLoadingCollectionKeysCount(localLoadingCollectionKeys.size());
|
||||
loadContexts.cleanupCollectionXRefs( localLoadingCollectionKeys );
|
||||
localLoadingCollectionKeys.clear();
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return super.toString() + "<rs=" + resultSet + ">";
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU

@@ -20,9 +20,11 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.event.def;

import org.jboss.logging.Logger;

import org.hibernate.HibernateLogger;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;

@@ -34,7 +36,6 @@ import org.hibernate.engine.Status;
import org.hibernate.event.EventSource;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.pretty.MessageHelper;
import org.jboss.logging.Logger;

/**
* A convenience base class for listeners that respond to requests to perform a

@@ -79,13 +80,7 @@ public class AbstractLockUpgradeEventListener extends AbstractReassociateEventLi
final SoftLock lock;
final CacheKey ck;
if ( persister.hasCache() ) {
ck = new CacheKey(
entry.getId(),
persister.getIdentifierType(),
persister.getRootEntityName(),
source.getEntityMode(),
source.getFactory()
);
ck = source.generateCacheKey( entry.getId(), persister.getIdentifierType(), persister.getRootEntityName() );
lock = persister.getCacheAccessStrategy().lockItem( ck, entry.getVersion() );
}
else {
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU

@@ -20,10 +20,13 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.event.def;

import java.io.Serializable;

import org.jboss.logging.Logger;

import org.hibernate.HibernateException;
import org.hibernate.HibernateLogger;
import org.hibernate.cache.CacheKey;

@@ -37,7 +40,6 @@ import org.hibernate.event.InitializeCollectionEvent;
import org.hibernate.event.InitializeCollectionEventListener;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.pretty.MessageHelper;
import org.jboss.logging.Logger;

/**
* @author Gavin King

@@ -115,22 +117,23 @@ public class DefaultInitializeCollectionEventListener implements InitializeColle
final SessionFactoryImplementor factory = source.getFactory();

final CacheKey ck = new CacheKey(id, persister.getKeyType(), persister.getRole(), source.getEntityMode(),
source.getFactory());
final CacheKey ck = source.generateCacheKey( id, persister.getKeyType(), persister.getRole() );
Object ce = persister.getCacheAccessStrategy().get(ck, source.getTimestamp());

if (factory.getStatistics().isStatisticsEnabled()) {
if ( factory.getStatistics().isStatisticsEnabled() ) {
if (ce == null) {
factory.getStatisticsImplementor().secondLevelCacheMiss(persister.getCacheAccessStrategy().getRegion().getName());
} else {
factory.getStatisticsImplementor().secondLevelCacheHit(persister.getCacheAccessStrategy().getRegion().getName()
);
factory.getStatisticsImplementor()
.secondLevelCacheMiss( persister.getCacheAccessStrategy().getRegion().getName() );
}
else {
factory.getStatisticsImplementor()
.secondLevelCacheHit( persister.getCacheAccessStrategy().getRegion().getName() );
}

}

if (ce == null) return false;
if ( ce == null ) {
return false;
}

CollectionCacheEntry cacheEntry = (CollectionCacheEntry)persister.getCacheEntryStructure().destructure(ce, factory);
@@ -351,12 +351,10 @@ public class DefaultLoadEventListener extends AbstractLockUpgradeEventListener i
SoftLock lock = null;
final CacheKey ck;
if ( persister.hasCache() ) {
ck = new CacheKey(
ck = source.generateCacheKey(
event.getEntityId(),
persister.getIdentifierType(),
persister.getRootEntityName(),
source.getEntityMode(),
source.getFactory()
persister.getRootEntityName()
);
lock = persister.getCacheAccessStrategy().lockItem( ck, null );
}

@@ -535,12 +533,10 @@ public class DefaultLoadEventListener extends AbstractLockUpgradeEventListener i

final SessionFactoryImplementor factory = source.getFactory();

final CacheKey ck = new CacheKey(
final CacheKey ck = source.generateCacheKey(
event.getEntityId(),
persister.getIdentifierType(),
persister.getRootEntityName(),
source.getEntityMode(),
source.getFactory()
persister.getRootEntityName()
);
Object ce = persister.getCacheAccessStrategy().get( ck, source.getTimestamp() );
if ( factory.getStatistics().isStatisticsEnabled() ) {

@@ -127,12 +127,10 @@ public class DefaultRefreshEventListener implements RefreshEventListener {
}

if ( persister.hasCache() ) {
final CacheKey ck = new CacheKey(
final CacheKey ck = source.generateCacheKey(
id,
persister.getIdentifierType(),
persister.getRootEntityName(),
source.getEntityMode(),
source.getFactory()
persister.getRootEntityName()
);
persister.getCacheAccessStrategy().evict( ck );
}

@@ -36,6 +36,7 @@ import org.hibernate.SQLQuery;
import org.hibernate.ScrollableResults;
import org.hibernate.SessionException;
import org.hibernate.SharedSessionContract;
import org.hibernate.cache.CacheKey;
import org.hibernate.engine.EntityKey;
import org.hibernate.engine.NamedQueryDefinition;
import org.hibernate.engine.NamedSQLQueryDefinition;

@@ -54,6 +55,7 @@ import org.hibernate.jdbc.WorkExecutorVisitable;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.service.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.service.jdbc.connections.spi.MultiTenantConnectionProvider;
import org.hibernate.type.Type;

/**
* Functionality common to stateless and stateful sessions

@@ -234,6 +236,11 @@ public abstract class AbstractSessionImpl implements Serializable, SharedSession
return new EntityKey( id, persister, getEntityMode(), getTenantIdentifier() );
}

@Override
public CacheKey generateCacheKey(Serializable id, Type type, String entityOrRoleName) {
return new CacheKey( id, type, entityOrRoleName, getEntityMode(), getTenantIdentifier(), getFactory() );
}

private transient JdbcConnectionAccess jdbcConnectionAccess;

@Override

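The generateCacheKey implementation above hands getTenantIdentifier() to the CacheKey constructor. The second-level cache region itself stays shared across tenants, so isolation relies on the tenant id taking part in key equality and hashing. A simplified, hypothetical key class, not Hibernate's actual CacheKey, just the idea (the real key also carries the Hibernate Type and entity mode, omitted here to keep the sketch short):

import java.io.Serializable;

// Hypothetical illustration: because the tenant identifier participates in equals()
// and hashCode(), id 1 for tenant "jboss" and id 1 for tenant "acme" land in
// distinct slots of a shared cache region instead of overwriting each other.
final class TenantAwareKey implements Serializable {
	private final Serializable id;
	private final String entityOrRoleName;
	private final String tenantId; // null when multi-tenancy is not in use
	private final int cachedHashCode;

	TenantAwareKey(Serializable id, String entityOrRoleName, String tenantId) {
		this.id = id;
		this.entityOrRoleName = entityOrRoleName;
		this.tenantId = tenantId;
		int result = id.hashCode();
		result = 31 * result + entityOrRoleName.hashCode();
		result = 31 * result + ( tenantId == null ? 0 : tenantId.hashCode() );
		this.cachedHashCode = result;
	}

	@Override
	public boolean equals(Object other) {
		if ( this == other ) {
			return true;
		}
		if ( !( other instanceof TenantAwareKey ) ) {
			return false;
		}
		TenantAwareKey that = (TenantAwareKey) other;
		return id.equals( that.id )
				&& entityOrRoleName.equals( that.entityOrRoleName )
				&& ( tenantId == null ? that.tenantId == null : tenantId.equals( that.tenantId ) );
	}

	@Override
	public int hashCode() {
		return cachedHashCode;
	}
}
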
@@ -23,6 +23,9 @@
*/
package org.hibernate.impl;

import javax.naming.NamingException;
import javax.naming.Reference;
import javax.naming.StringRefAddr;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;

@@ -41,9 +44,9 @@ import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.naming.NamingException;
import javax.naming.Reference;
import javax.naming.StringRefAddr;

import org.jboss.logging.Logger;

import org.hibernate.AssertionFailure;
import org.hibernate.Cache;
import org.hibernate.ConnectionReleaseMode;

@@ -53,7 +56,6 @@ import org.hibernate.HibernateException;
import org.hibernate.HibernateLogger;
import org.hibernate.Interceptor;
import org.hibernate.MappingException;
import org.hibernate.MultiTenancyStrategy;
import org.hibernate.ObjectNotFoundException;
import org.hibernate.QueryException;
import org.hibernate.Session;

@@ -119,8 +121,8 @@ import org.hibernate.service.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.service.jta.platform.spi.JtaPlatform;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.hibernate.service.spi.SessionFactoryServiceRegistryFactory;
import org.hibernate.stat.internal.ConcurrentStatisticsImpl;
import org.hibernate.stat.Statistics;
import org.hibernate.stat.internal.ConcurrentStatisticsImpl;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.hbm2ddl.SchemaUpdate;

@@ -129,7 +131,6 @@ import org.hibernate.tuple.entity.EntityTuplizer;
import org.hibernate.type.AssociationType;
import org.hibernate.type.Type;
import org.hibernate.type.TypeResolver;
import org.jboss.logging.Logger;


/**

@@ -1006,7 +1007,8 @@ public final class SessionFactoryImpl
identifier,
p.getIdentifierType(),
p.getRootEntityName(),
EntityMode.POJO,
EntityMode.POJO, // we have to assume POJO
null, // and also assume non tenancy
SessionFactoryImpl.this
);
}

@@ -1051,7 +1053,8 @@ public final class SessionFactoryImpl
ownerIdentifier,
p.getKeyType(),
p.getRole(),
EntityMode.POJO,
EntityMode.POJO, // we have to assume POJO
null, // and also assume non tenancy
SessionFactoryImpl.this
);
}

@@ -42,6 +42,9 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.jboss.logging.Logger;

import org.hibernate.CacheMode;
import org.hibernate.ConnectionReleaseMode;
import org.hibernate.Criteria;

@@ -122,10 +125,10 @@ import org.hibernate.event.ReplicateEventListener;
import org.hibernate.event.SaveOrUpdateEvent;
import org.hibernate.event.SaveOrUpdateEventListener;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.jdbc.WorkExecutorVisitable;
import org.hibernate.jdbc.ReturningWork;
import org.hibernate.jdbc.Work;
import org.hibernate.jdbc.WorkExecutor;
import org.hibernate.jdbc.WorkExecutorVisitable;
import org.hibernate.loader.criteria.CriteriaLoader;
import org.hibernate.loader.custom.CustomLoader;
import org.hibernate.loader.custom.CustomQuery;

@@ -139,7 +142,6 @@ import org.hibernate.stat.SessionStatistics;
import org.hibernate.stat.internal.SessionStatisticsImpl;
import org.hibernate.type.SerializationException;
import org.hibernate.type.Type;
import org.jboss.logging.Logger;

/**
* Concrete implementation of a Session.

@@ -155,10 +157,7 @@ import org.jboss.logging.Logger;
*/
public final class SessionImpl
extends AbstractSessionImpl
implements EventSource,
org.hibernate.Session,
TransactionContext,
LobCreationContext {
implements EventSource, org.hibernate.Session, TransactionContext, LobCreationContext {

// todo : need to find a clean way to handle the "event source" role
// a separate class responsible for generating/dispatching events just duplicates most of the Session methods...

@@ -227,13 +227,7 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
// }

if ( persister.hasCache() ) {
final CacheKey ck = new CacheKey(
id,
persister.getIdentifierType(),
persister.getRootEntityName(),
this.getEntityMode(),
this.getFactory()
);
final CacheKey ck = generateCacheKey( id, persister.getIdentifierType(), persister.getRootEntityName() );
persister.getCacheAccessStrategy().evict( ck );
}

@@ -807,11 +807,16 @@ public abstract class AbstractEntityPersister
throw new HibernateException( "entity is not associated with the session: " + id );
}

if (LOG.isTraceEnabled()) LOG.trace("Initializing lazy properties of: " + MessageHelper.infoString(this, id, getFactory())
+ ", field access: " + fieldName);
if ( LOG.isTraceEnabled() ) {
LOG.trace(
"Initializing lazy properties of: " +
MessageHelper.infoString( this, id, getFactory() ) +
", field access: " + fieldName
);
}

if ( hasCache() ) {
CacheKey cacheKey = new CacheKey(id, getIdentifierType(), getEntityName(), session.getEntityMode(), getFactory() );
CacheKey cacheKey = session.generateCacheKey( id, getIdentifierType(), getEntityName() );
Object ce = getCacheAccessStrategy().get( cacheKey, session.getTimestamp() );
if (ce!=null) {
CacheEntry cacheEntry = (CacheEntry) getCacheEntryStructure().destructure(ce, factory);

@@ -3574,13 +3579,7 @@ public abstract class AbstractEntityPersister

// check to see if it is in the second-level cache
if ( hasCache() ) {
CacheKey ck = new CacheKey(
id,
getIdentifierType(),
getRootEntityName(),
session.getEntityMode(),
session.getFactory()
);
CacheKey ck = session.generateCacheKey( id, getIdentifierType(), getRootEntityName() );
if ( getCacheAccessStrategy().get( ck, session.getTimestamp() ) != null ) {
return Boolean.FALSE;
}

@@ -112,8 +112,12 @@ public class DynamicFilterTest extends BaseCoreFunctionalTestCase {
Hibernate.initialize( sp.getOrders() );
CollectionPersister persister = sessionFactory().getCollectionPersister( Salesperson.class.getName() + ".orders" );
assertTrue( "No cache for collection", persister.hasCache() );
CollectionCacheEntry cachedData = ( CollectionCacheEntry ) persister.getCacheAccessStrategy()
.get( new CacheKey( testData.steveId, persister.getKeyType(), persister.getRole(), EntityMode.POJO, sessionFactory() ), ts );
CacheKey cacheKey = ( (SessionImplementor) session ).generateCacheKey(
testData.steveId,
persister.getKeyType(),
persister.getRole()
);
CollectionCacheEntry cachedData = ( CollectionCacheEntry ) persister.getCacheAccessStrategy().get( cacheKey, ts );
assertNotNull( "collection was not in cache", cachedData );

session.close();

@@ -126,8 +130,12 @@ public class DynamicFilterTest extends BaseCoreFunctionalTestCase {
.uniqueResult();
assertEquals( "Filtered-collection not bypassing 2L-cache", 1, sp.getOrders().size() );

CollectionCacheEntry cachedData2 = ( CollectionCacheEntry ) persister.getCacheAccessStrategy()
.get( new CacheKey( testData.steveId, persister.getKeyType(), persister.getRole(), EntityMode.POJO, sessionFactory() ), ts );
CacheKey cacheKey2 = ( (SessionImplementor) session ).generateCacheKey(
testData.steveId,
persister.getKeyType(),
persister.getRole()
);
CollectionCacheEntry cachedData2 = ( CollectionCacheEntry ) persister.getCacheAccessStrategy().get( cacheKey2, ts );
assertNotNull( "collection no longer in cache!", cachedData2 );
assertSame( "Different cache values!", cachedData, cachedData2 );

@@ -24,7 +24,6 @@
package org.hibernate.test.multitenancy.schema;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;

/**

@@ -38,12 +37,12 @@ public class Customer {
public Customer() {
}

public Customer(String name) {
public Customer(Long id, String name) {
this.id = id;
this.name = name;
}

@Id
@GeneratedValue
public Long getId() {
return id;
}

@@ -30,8 +30,12 @@ import org.hibernate.HibernateException;
import org.hibernate.MultiTenancyStrategy;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cache.HashtableCacheProvider;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.RootClass;
import org.hibernate.service.internal.BasicServiceRegistryImpl;
import org.hibernate.service.jdbc.connections.internal.DriverManagerConnectionProviderImpl;
import org.hibernate.service.jdbc.connections.spi.AbstractMultiTenantConnectionProvider;

@@ -58,7 +62,7 @@ public class SchemaBasedMultiTenancyTest extends BaseUnitTestCase {

private ServiceRegistryImplementor serviceRegistry;

private SessionFactory sessionFactory;
private SessionFactoryImplementor sessionFactory;

@Before
public void setUp() {

@@ -84,9 +88,13 @@ public class SchemaBasedMultiTenancyTest extends BaseUnitTestCase {

Configuration cfg = new Configuration();
cfg.getProperties().put( Environment.MULTI_TENANT, MultiTenancyStrategy.DATABASE );
cfg.setProperty( Environment.CACHE_PROVIDER, HashtableCacheProvider.class.getName() );
cfg.setProperty( Environment.GENERATE_STATISTICS, "true" );
cfg.addAnnotatedClass( Customer.class );

cfg.buildMappings();
RootClass meta = (RootClass) cfg.getClassMapping( Customer.class.getName() );
meta.setCacheConcurrencyStrategy( "read-write" );

// do the acme export
new SchemaExport(

@@ -147,7 +155,7 @@ public class SchemaBasedMultiTenancyTest extends BaseUnitTestCase {
serviceRegistry = new BasicServiceRegistryImpl( cfg.getProperties() );
serviceRegistry.registerService( MultiTenantConnectionProvider.class, multiTenantConnectionProvider );

sessionFactory = cfg.buildSessionFactory( serviceRegistry );
sessionFactory = (SessionFactoryImplementor) cfg.buildSessionFactory( serviceRegistry );
}

@After

@@ -175,7 +183,7 @@ public class SchemaBasedMultiTenancyTest extends BaseUnitTestCase {
Session session = openSession();
session.setTenantIdentifier( "jboss" );
session.beginTransaction();
Customer steve = new Customer( "steve" );
Customer steve = new Customer( 1L, "steve" );
session.save( steve );
session.getTransaction().commit();
session.close();

@@ -200,4 +208,97 @@ public class SchemaBasedMultiTenancyTest extends BaseUnitTestCase {
session.close();
}

@Test
public void testSameIdentifiers() {
// create a customer 'steve' in jboss
Session session = openSession();
session.setTenantIdentifier( "jboss" );
session.beginTransaction();
Customer steve = new Customer( 1L, "steve" );
session.save( steve );
session.getTransaction().commit();
session.close();

// now, create a customer 'john' in acme
session = openSession();
session.setTenantIdentifier( "acme" );
session.beginTransaction();
Customer john = new Customer( 1L, "john" );
session.save( john );
session.getTransaction().commit();
session.close();

sessionFactory.getStatisticsImplementor().clear();

// make sure we get the correct people back, from cache
// first, jboss
{
session = openSession();
session.setTenantIdentifier( "jboss" );
session.beginTransaction();
Customer customer = (Customer) session.load( Customer.class, 1L );
Assert.assertEquals( "steve", customer.getName() );
// also, make sure this came from second level
Assert.assertEquals( 1, sessionFactory.getStatisticsImplementor().getSecondLevelCacheHitCount() );
session.getTransaction().commit();
session.close();
}
sessionFactory.getStatisticsImplementor().clear();
// then, acme
{
session = openSession();
session.setTenantIdentifier( "acme" );
session.beginTransaction();
Customer customer = (Customer) session.load( Customer.class, 1L );
Assert.assertEquals( "john", customer.getName() );
// also, make sure this came from second level
Assert.assertEquals( 1, sessionFactory.getStatisticsImplementor().getSecondLevelCacheHitCount() );
session.getTransaction().commit();
session.close();
}

// make sure the same works from datastore too
sessionFactory.getStatisticsImplementor().clear();
sessionFactory.getCache().evictEntityRegions();
// first jboss
{
session = openSession();
session.setTenantIdentifier( "jboss" );
session.beginTransaction();
Customer customer = (Customer) session.load( Customer.class, 1L );
Assert.assertEquals( "steve", customer.getName() );
// also, make sure this came from second level
Assert.assertEquals( 0, sessionFactory.getStatisticsImplementor().getSecondLevelCacheHitCount() );
session.getTransaction().commit();
session.close();
}
sessionFactory.getStatisticsImplementor().clear();
// then, acme
{
session = openSession();
session.setTenantIdentifier( "acme" );
session.beginTransaction();
Customer customer = (Customer) session.load( Customer.class, 1L );
Assert.assertEquals( "john", customer.getName() );
// also, make sure this came from second level
Assert.assertEquals( 0, sessionFactory.getStatisticsImplementor().getSecondLevelCacheHitCount() );
session.getTransaction().commit();
session.close();
}

session = openSession();
session.setTenantIdentifier( "jboss" );
session.beginTransaction();
session.delete( steve );
session.getTransaction().commit();
session.close();

session = openSession();
session.setTenantIdentifier( "acme" );
session.beginTransaction();
session.delete( john );
session.getTransaction().commit();
session.close();
}

}

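The test above registers a MultiTenantConnectionProvider that is built elsewhere in the commit; those hunks are not shown on this page. As a rough sketch only: the abstract method names below are assumed from the shape Hibernate's SPI later settled on, and the two ConnectionProvider fields stand in for the per-tenant JDBC pools the real setup code builds.

// Hypothetical sketch, not the commit's actual provider.
private ConnectionProvider acmeProvider;   // assumed: pool pointing at the "acme" database/schema
private ConnectionProvider jbossProvider;  // assumed: pool pointing at the "jboss" database/schema

private final AbstractMultiTenantConnectionProvider multiTenantConnectionProvider =
		new AbstractMultiTenantConnectionProvider() {
			@Override
			protected ConnectionProvider getAnyConnectionProvider() {
				// used when no tenant is in play (e.g. schema export); any pool will do here
				return acmeProvider;
			}

			@Override
			protected ConnectionProvider selectConnectionProvider(String tenantIdentifier) {
				if ( "acme".equals( tenantIdentifier ) ) {
					return acmeProvider;
				}
				if ( "jboss".equals( tenantIdentifier ) ) {
					return jbossProvider;
				}
				throw new HibernateException( "Unknown tenant identifier: " + tenantIdentifier );
			}
		};
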
@@ -37,6 +37,7 @@ import org.hibernate.Interceptor;
import org.hibernate.Query;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.cache.CacheKey;
import org.hibernate.collection.PersistentCollection;
import org.hibernate.engine.EntityKey;
import org.hibernate.engine.LoadQueryInfluencers;

@@ -88,6 +89,11 @@ public abstract class AbstractDelegateSessionImplementor implements SessionImple
return delegate.generateEntityKey( id, persister );
}

@Override
public CacheKey generateCacheKey(Serializable id, Type type, String entityOrRoleName) {
return delegate.generateCacheKey( id, type, entityOrRoleName );
}

@Override
public <T> T execute(Callback<T> callback) {
return delegate.execute( callback );