cleanups in event listeners
Signed-off-by: Gavin King <gavin@hibernate.org>
parent ec3be767e4
commit 9c75adcffc
@@ -38,6 +38,7 @@ import org.hibernate.internal.FastSessionServices;
 import org.hibernate.jpa.event.spi.CallbackRegistry;
 import org.hibernate.jpa.event.spi.CallbackRegistryConsumer;
 import org.hibernate.jpa.event.spi.CallbackType;
+import org.hibernate.metamodel.spi.MappingMetamodelImplementor;
 import org.hibernate.persister.collection.CollectionPersister;
 import org.hibernate.persister.entity.EntityPersister;
 import org.hibernate.pretty.MessageHelper;
@@ -463,12 +464,12 @@ public class DefaultDeleteEventListener implements DeleteEventListener, Callback
 
 		final String[] propertyNames = persister.getPropertyNames();
 		final BytecodeEnhancementMetadata enhancementMetadata = persister.getBytecodeEnhancementMetadata();
+		final MappingMetamodelImplementor metamodel = persister.getFactory().getMappingMetamodel();
 		for ( int i = 0; i < types.length; i++) {
 			if ( types[i] instanceof CollectionType collectionType
 					&& !enhancementMetadata.isAttributeLoaded( parent, propertyNames[i] ) ) {
 				final CollectionPersister collectionDescriptor =
-						persister.getFactory().getMappingMetamodel()
-								.getCollectionDescriptor( collectionType.getRole() );
+						metamodel.getCollectionDescriptor( collectionType.getRole() );
 				if ( collectionDescriptor.needsRemove() || collectionDescriptor.hasCache() ) {
 					final Object keyOfOwner = collectionType.getKeyOfOwner( parent, eventSource.getSession() );
 					// This will make sure that a CollectionEntry exists
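The delete-listener hunk above hoists the repeated `persister.getFactory().getMappingMetamodel()` navigation into a single `metamodel` local that the loop then reuses. A minimal standalone sketch of the same idiom, assuming invented stand-in types (`Factory`, `Registry`, `Descriptor` are not Hibernate classes):

```java
// Sketch only: hoist a repeated navigation chain into one final local.
// Factory, Registry and Descriptor are invented stand-ins, not Hibernate types.
class HoistLookupSketch {

	static class Descriptor {
		final String role;
		Descriptor(String role) { this.role = role; }
	}

	static class Registry {
		Descriptor getDescriptor(String role) { return new Descriptor( role ); }
	}

	static class Factory {
		private final Registry registry = new Registry();
		Registry getRegistry() { return registry; }
	}

	// After the cleanup: resolve the registry once, reuse it inside the loop
	static void process(Factory factory, String[] roles) {
		final Registry registry = factory.getRegistry();
		for ( String role : roles ) {
			final Descriptor descriptor = registry.getDescriptor( role );
			System.out.println( descriptor.role );
		}
	}

	public static void main(String[] args) {
		process( new Factory(), new String[] { "Order.items", "Order.payments" } );
	}
}
```

Looking the registry up once keeps the loop body focused on the per-role work instead of re-walking the same accessor chain on each iteration.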
@@ -247,8 +247,8 @@ public class DefaultMergeEventListener
 			CompositeType compositeType,
 			EventSource session,
 			MergeContext mergeContext) {
-		final SessionFactoryImplementor sessionFactory = session.getSessionFactory();
-		final Object idCopy = compositeType.deepCopy( id, sessionFactory );
+		final SessionFactoryImplementor factory = session.getSessionFactory();
+		final Object idCopy = compositeType.deepCopy( id, factory );
 		final Type[] subtypes = compositeType.getSubtypes();
 		final Object[] propertyValues = compositeType.getPropertyValues( id );
 		final Object[] copyValues = compositeType.getPropertyValues( idCopy );
@@ -256,22 +256,17 @@ public class DefaultMergeEventListener
 			final Type subtype = subtypes[i];
 			if ( subtype instanceof EntityType ) {
 				// the value of the copy in the MergeContext has the id assigned
-				final Object o = mergeContext.get( propertyValues[i] );
-				if ( o != null ) {
-					copyValues[i] = o;
-				}
-				else {
-					copyValues[i] = subtype.deepCopy( propertyValues[i], sessionFactory );
-				}
+				final Object object = mergeContext.get( propertyValues[i] );
+				copyValues[i] = object == null ? subtype.deepCopy( propertyValues[i], factory ) : object;
 			}
-			else if ( subtype instanceof AnyType ) {
-				copyValues[i] = copyCompositeTypeId( propertyValues[i], (AnyType) subtype, session, mergeContext );
+			else if ( subtype instanceof AnyType anyType ) {
+				copyValues[i] = copyCompositeTypeId( propertyValues[i], anyType, session, mergeContext );
 			}
-			else if ( subtype instanceof ComponentType ) {
-				copyValues[i] = copyCompositeTypeId( propertyValues[i], (ComponentType) subtype, session, mergeContext );
+			else if ( subtype instanceof ComponentType componentType ) {
+				copyValues[i] = copyCompositeTypeId( propertyValues[i], componentType, session, mergeContext );
 			}
 			else {
-				copyValues[i] = subtype.deepCopy( propertyValues[i], sessionFactory );
+				copyValues[i] = subtype.deepCopy( propertyValues[i], factory );
 			}
 		}
 		return compositeType.replacePropertyValues( idCopy, copyValues, session );
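The `AnyType` and `ComponentType` branches above move from an `instanceof` check followed by a cast to Java 16+ pattern matching for `instanceof`, which binds a typed variable in the condition itself. A minimal sketch of the idiom, assuming an invented `Shape` hierarchy rather than Hibernate's `Type` classes:

```java
// Sketch only (Java 16+): the pattern variable replaces the explicit cast.
// The Shape/Circle/Square types are invented for illustration.
class InstanceofPatternSketch {

	interface Shape {}
	record Circle(double radius) implements Shape {}
	record Square(double side) implements Shape {}

	// Before: test the type, then cast
	static double areaWithCast(Shape shape) {
		if ( shape instanceof Circle ) {
			final Circle circle = (Circle) shape;
			return Math.PI * circle.radius() * circle.radius();
		}
		else if ( shape instanceof Square ) {
			final Square square = (Square) shape;
			return square.side() * square.side();
		}
		return 0.0;
	}

	// After: bind the pattern variable directly, as in the AnyType/ComponentType branches
	static double area(Shape shape) {
		if ( shape instanceof Circle circle ) {
			return Math.PI * circle.radius() * circle.radius();
		}
		else if ( shape instanceof Square square ) {
			return square.side() * square.side();
		}
		return 0.0;
	}

	public static void main(String[] args) {
		System.out.println( areaWithCast( new Circle( 1.0 ) ) );
		System.out.println( area( new Square( 2.0 ) ) );
	}
}
```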
@@ -326,14 +321,16 @@ public class DefaultMergeEventListener
 			event.setResult( copy );
 
 			if ( isPersistentAttributeInterceptable( copy ) ) {
-				final PersistentAttributeInterceptor interceptor = asPersistentAttributeInterceptable( copy ).$$_hibernate_getInterceptor();
+				final PersistentAttributeInterceptor interceptor =
+						asPersistentAttributeInterceptable( copy ).$$_hibernate_getInterceptor();
 				if ( interceptor == null ) {
 					persister.getBytecodeEnhancementMetadata().injectInterceptor( copy, id, session );
 				}
 			}
 		}
 
-	private static Object copyEntity(MergeContext copyCache, Object entity, EventSource session, EntityPersister persister, Object id) {
+	private static Object copyEntity(
+			MergeContext copyCache, Object entity, EventSource session, EntityPersister persister, Object id) {
 		final Object existingCopy = copyCache.get( entity );
 		if ( existingCopy != null ) {
 			persister.setIdentifier( existingCopy, id, session );
@@ -352,15 +349,14 @@ public class DefaultMergeEventListener
 			super( entity, id, session );
 		}
 		@Override
-		protected Object processCollection(Object collection, CollectionType collectionType) throws HibernateException {
+		protected Object processCollection(Object collection, CollectionType collectionType) {
 			if ( collection instanceof PersistentCollection ) {
 				final PersistentCollection<?> coll = (PersistentCollection<?>) collection;
-				final CollectionPersister persister = getSession().getFactory()
-						.getRuntimeMetamodels()
-						.getMappingMetamodel()
-						.getCollectionDescriptor( collectionType.getRole() );
-				final CollectionEntry collectionEntry = getSession().getPersistenceContextInternal()
-						.getCollectionEntry( coll );
+				final CollectionPersister persister =
+						getSession().getFactory().getMappingMetamodel()
+								.getCollectionDescriptor( collectionType.getRole() );
+				final CollectionEntry collectionEntry =
+						getSession().getPersistenceContextInternal().getCollectionEntry( coll );
 				if ( !coll.equalsSnapshot( persister ) ) {
 					collectionEntry.resetStoredSnapshot( coll, coll.getSnapshot( persister ) );
 				}
@@ -368,7 +364,7 @@ public class DefaultMergeEventListener
 			return null;
 		}
 		@Override
-		Object processEntity(Object value, EntityType entityType) throws HibernateException {
+		Object processEntity(Object value, EntityType entityType) {
 			return null;
 		}
 	}
@@ -400,13 +396,9 @@ public class DefaultMergeEventListener
 		if ( originalId == null ) {
 			originalId = persister.getIdentifier( entity, source );
 		}
-		final Object clonedIdentifier;
-		if ( copiedId == null ) {
-			clonedIdentifier = persister.getIdentifierType().deepCopy( originalId, event.getFactory() );
-		}
-		else {
-			clonedIdentifier = copiedId;
-		}
+		final Object clonedIdentifier = copiedId == null
+				? persister.getIdentifierType().deepCopy( originalId, event.getFactory() )
+				: copiedId;
 		final Object id = getDetachedEntityId( event, originalId, persister );
 		// we must clone embedded composite identifiers, or we will get back the same instance that we pass in
 		// apply the special MERGE fetch profile and perform the resolution (Session#get)
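The `clonedIdentifier` change collapses a mutable local plus an `if`/`else` null check into a single `final` local initialized by a conditional expression. A minimal sketch of the same pattern, with purely illustrative names:

```java
// Sketch only: a null fallback expressed as one conditional expression
// instead of a blank final assigned in two branches. Names are illustrative.
import java.util.UUID;

class NullFallbackSketch {

	// Before: blank final assigned in each branch
	static String resolveIdBefore(String copiedId) {
		final String id;
		if ( copiedId == null ) {
			id = UUID.randomUUID().toString();
		}
		else {
			id = copiedId;
		}
		return id;
	}

	// After: a single final local, mirroring the clonedIdentifier cleanup
	static String resolveIdAfter(String copiedId) {
		final String id = copiedId == null
				? UUID.randomUUID().toString()
				: copiedId;
		return id;
	}

	public static void main(String[] args) {
		System.out.println( resolveIdBefore( null ) );
		System.out.println( resolveIdAfter( "42" ) );
	}
}
```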
@@ -482,8 +474,7 @@ public class DefaultMergeEventListener
 		}
 		else {
 			// check that entity id = requestedId
-			final Object entityId = originalId;
-			if ( !persister.getIdentifierType().isEqual( id, entityId, source.getFactory() ) ) {
+			if ( !persister.getIdentifierType().isEqual( id, originalId, source.getFactory() ) ) {
 				throw new HibernateException( "merge requested with id not matching id of passed entity" );
 			}
 			return id;
@@ -507,8 +498,8 @@ public class DefaultMergeEventListener
 		final PersistentAttributeInterceptor managedInterceptor =
 				asPersistentAttributeInterceptable( managed ).$$_hibernate_getInterceptor();
 
-		// todo - do we need to specially handle the case where both `incoming` and `managed` are initialized, but
-		//		with different attributes initialized?
+		// todo - do we need to specially handle the case where both `incoming` and `managed`
+		//		are initialized, but with different attributes initialized?
 		//		- for now, assume we do not...
 
 		// if the managed entity is not a proxy, we can just return it
@@ -533,9 +524,9 @@ public class DefaultMergeEventListener
 		if ( isSelfDirtinessTracker( entity ) && isSelfDirtinessTracker( target ) ) {
 			// clear, because setting the embedded attributes dirties them
 			final ManagedEntity managedEntity = asManagedEntity( target );
-			final boolean useTracker = asManagedEntity( entity ).$$_hibernate_useTracker();
 			final SelfDirtinessTracker selfDirtinessTrackerTarget = asSelfDirtinessTracker( target );
-			if ( !selfDirtinessTrackerTarget.$$_hibernate_hasDirtyAttributes() && !useTracker ) {
+			if ( !selfDirtinessTrackerTarget.$$_hibernate_hasDirtyAttributes()
+					&& !asManagedEntity( entity ).$$_hibernate_useTracker() ) {
 				managedEntity.$$_hibernate_setUseTracker( false );
 			}
 			else {
@@ -560,10 +551,9 @@ public class DefaultMergeEventListener
 			// an entity to be merged during the same transaction
 			// (though during a separate operation) in which it was
 			// originally persisted/saved
-			boolean changed = !persister.getVersionType().isSame(
-					persister.getVersion( target ),
-					persister.getVersion( entity )
-			);
+			final boolean changed =
+					!persister.getVersionType()
+							.isSame( persister.getVersion( target ), persister.getVersion( entity ) );
 			// TODO : perhaps we should additionally require that the incoming entity
 			// version be equivalent to the defined unsaved-value?
 			return changed && existsInDatabase( target, source, persister );
@@ -577,7 +567,7 @@ public class DefaultMergeEventListener
 		final PersistenceContext persistenceContext = source.getPersistenceContextInternal();
 		EntityEntry entry = persistenceContext.getEntry( entity );
 		if ( entry == null ) {
-			Object id = persister.getIdentifier( entity, source );
+			final Object id = persister.getIdentifier( entity, source );
 			if ( id != null ) {
 				final EntityKey key = source.generateEntityKey( id, persister );
 				final Object managedEntity = persistenceContext.getEntity( key );
@@ -606,7 +596,6 @@ public class DefaultMergeEventListener
 					target,
 					copyCache
 			);
-
 			persister.setValues( target, copiedValues );
 		}
 	}
@@ -618,9 +607,7 @@ public class DefaultMergeEventListener
 			final SessionImplementor source,
 			final MergeContext copyCache,
 			final ForeignKeyDirection foreignKeyDirection) {
-
 		final Object[] copiedValues;
-
 		if ( foreignKeyDirection == ForeignKeyDirection.TO_PARENT ) {
 			// this is the second pass through on a merge op, so here we limit the
 			// replacement to associations types (value types were already replaced
@@ -646,7 +633,6 @@ public class DefaultMergeEventListener
 					foreignKeyDirection
 			);
 		}
-
 		persister.setValues( target, copiedValues );
 	}
 
@@ -17,7 +17,6 @@ import org.hibernate.cache.spi.access.EntityDataAccess;
 import org.hibernate.cache.spi.access.SoftLock;
 import org.hibernate.engine.internal.Cascade;
 import org.hibernate.engine.internal.CascadePoint;
-import org.hibernate.engine.spi.ActionQueue;
 import org.hibernate.engine.spi.CascadingActions;
 import org.hibernate.engine.spi.EntityEntry;
 import org.hibernate.engine.spi.PersistenceContext;
@@ -32,14 +31,13 @@ import org.hibernate.loader.ast.spi.CascadingFetchProfile;
 import org.hibernate.metamodel.spi.MappingMetamodelImplementor;
 import org.hibernate.persister.collection.CollectionPersister;
 import org.hibernate.persister.entity.EntityPersister;
-import org.hibernate.proxy.HibernateProxy;
 import org.hibernate.proxy.LazyInitializer;
 import org.hibernate.type.CollectionType;
 import org.hibernate.type.ComponentType;
 import org.hibernate.type.CompositeType;
 import org.hibernate.type.Type;
 
 import static org.hibernate.pretty.MessageHelper.infoString;
 import static org.hibernate.proxy.HibernateProxy.extractLazyInitializer;
 
 /**
  * Defines the default refresh event listener used by hibernate for refreshing entities
@@ -71,14 +69,13 @@ public class DefaultRefreshEventListener implements RefreshEventListener {
 		// cascade refresh and the refresh of the parent will take care of initializing the lazy
 		// entity and setting the correct lock on it, this is needed only when the refresh is called directly on a lazy entity
 		if ( refreshedAlready.isEmpty() ) {
-			final LazyInitializer lazyInitializer = HibernateProxy.extractLazyInitializer( object );
+			final LazyInitializer lazyInitializer = extractLazyInitializer( object );
 			final EntityPersister persister;
 			if ( lazyInitializer != null ) {
 				persister = source.getEntityPersister( lazyInitializer.getEntityName(), object );
 			}
 			else if ( !isTransient ) {
-				final EntityEntry entry = persistenceContext.getEntry( object );
-				persister = entry.getPersister();
+				persister = persistenceContext.getEntry( object ).getPersister();
 			}
 			else {
 				persister = source.getEntityPersister( source.guessEntityName( object ), object );
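The refresh-listener hunk above relies on the existing static import of `HibernateProxy.extractLazyInitializer`, dropping the qualifying class name, and also collapses the intermediate `entry` local into a chained `getEntry( object ).getPersister()` call. A minimal sketch of the static-import side of that cleanup, using only `java.util.Objects` (the `lookup` method is an invented example, not Hibernate API):

```java
// Sketch only: an unqualified call to a statically imported helper,
// mirroring extractLazyInitializer( object ). Uses java.util.Objects only.
import static java.util.Objects.requireNonNull;

import java.util.Map;

class StaticImportSketch {

	static String lookup(Map<String, String> registry, String key) {
		// no Objects.requireNonNull(...) qualifier needed once the method is statically imported
		return requireNonNull( registry.get( key ), "no entry for " + key );
	}

	public static void main(String[] args) {
		System.out.println( lookup( Map.of( "a", "1" ), "a" ) );
	}
}
```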
@@ -275,7 +272,7 @@ public class DefaultRefreshEventListener implements RefreshEventListener {
 		if ( result != null ) {
 			// apply `postRefreshLockMode`, if needed
 			if ( postRefreshLockMode != null ) {
-				// if we get here, there was a previous entry, and we need to re-set its lock-mode
+				// if we get here, there was a previous entry, and we need to reset its lock mode
 				// - however, the refresh operation actually creates a new entry, so get it
 				persistenceContext.getEntry( result ).setLockMode( postRefreshLockMode );
 			}
@@ -312,13 +309,12 @@ public class DefaultRefreshEventListener implements RefreshEventListener {
 
 	private static void evictCachedCollections(Type[] types, Object id, EventSource source)
 			throws HibernateException {
-		final ActionQueue actionQueue = source.getActionQueue();
 		final SessionFactoryImplementor factory = source.getFactory();
-		final MappingMetamodelImplementor metamodel = factory.getRuntimeMetamodels().getMappingMetamodel();
+		final MappingMetamodelImplementor metamodel = factory.getMappingMetamodel();
 		for ( Type type : types ) {
-			if ( type instanceof CollectionType ) {
-				final String role = ((CollectionType) type).getRole();
-				final CollectionPersister collectionPersister = metamodel.getCollectionDescriptor( role );
+			if ( type instanceof CollectionType collectionType ) {
+				final CollectionPersister collectionPersister =
+						metamodel.getCollectionDescriptor( collectionType.getRole() );
 				if ( collectionPersister.hasCache() ) {
 					final CollectionDataAccess cache = collectionPersister.getCacheAccessStrategy();
 					final Object ck = cache.generateCacheKey(
@@ -329,12 +325,11 @@ public class DefaultRefreshEventListener implements RefreshEventListener {
 					);
 					final SoftLock lock = cache.lockItem( source, ck, null );
 					cache.remove( source, ck );
-					actionQueue.registerProcess( (success, session) -> cache.unlockItem( session, ck, lock ) );
+					source.getActionQueue().registerProcess( (success, session) -> cache.unlockItem( session, ck, lock ) );
 				}
 			}
-			else if ( type instanceof ComponentType ) {
+			else if ( type instanceof ComponentType compositeType ) {
 				// Only components can contain collections
-				ComponentType compositeType = (ComponentType) type;
 				evictCachedCollections( compositeType.getSubtypes(), id, source );
 			}
 		}
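Taken together, the last two refresh-listener hunks drop the single-use `actionQueue` local (the call site now reads `source.getActionQueue().registerProcess( ... )`) and remove the now-unneeded `ActionQueue` import; this is the opposite of the hoisting done in the delete listener, where the looked-up value is reused inside a loop. A minimal sketch of inlining a single-use local, with invented names (`Source` and its `getActionQueue` accessor are stand-ins, not Hibernate API):

```java
// Sketch only: a local used exactly once can be inlined at its call site.
// Source and its accessor are invented stand-ins, not Hibernate API.
import java.util.ArrayDeque;

class InlineSingleUseLocalSketch {

	static class Source {
		private final ArrayDeque<Runnable> queue = new ArrayDeque<>();
		ArrayDeque<Runnable> getActionQueue() { return queue; }
	}

	// After the cleanup: no intermediate local, the accessor is called where it is needed
	static void register(Source source, Runnable process) {
		source.getActionQueue().add( process );
	}

	public static void main(String[] args) {
		final Source source = new Source();
		register( source, () -> System.out.println( "unlock" ) );
		source.getActionQueue().forEach( Runnable::run );
	}
}
```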