HHH-13496 Identified and simplified some unnecessary repetition of getter invocations

Sanne Grinovero 2019-07-03 11:41:06 +01:00
parent e7165f2d43
commit 6c44ef12e6
40 changed files with 426 additions and 371 deletions
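The pattern applied across these files is the same throughout: a getter chain that is evaluated repeatedly inside a method or loop is read once into a final local variable, and the local is reused. A minimal illustrative sketch of the before/after shape follows; the helper class and method names here are hypothetical and not part of the patch, and it only assumes the SessionFactoryImplementor and StatisticsImplementor calls already visible in the hunks below.

import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.stat.spi.StatisticsImplementor;

final class GetterHoistingExample {

	// Before: the same getter chain is evaluated twice per call
	static void reportBefore(SessionFactoryImplementor factory, String queryString, long microseconds) {
		if ( factory.getStatistics().isStatisticsEnabled() ) {
			factory.getStatistics().queryCompiled( queryString, microseconds );
		}
	}

	// After: the chain is read once into a final local and reused
	static void reportAfter(SessionFactoryImplementor factory, String queryString, long microseconds) {
		final StatisticsImplementor statistics = factory.getStatistics();
		if ( statistics.isStatisticsEnabled() ) {
			statistics.queryCompiled( queryString, microseconds );
		}
	}
}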

View File

@ -22,6 +22,7 @@ import org.hibernate.cache.spi.access.NaturalIdDataAccess;
import org.hibernate.cache.spi.access.SoftLock;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.metamodel.spi.MetamodelImplementor;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.entity.Queryable;
@ -111,7 +112,8 @@ public class BulkOperationCleanupAction implements Executable, Serializable {
spacesList.addAll( tableSpaces );
final SessionFactoryImplementor factory = session.getFactory();
for ( EntityPersister persister : factory.getMetamodel().entityPersisters().values() ) {
final MetamodelImplementor metamodel = factory.getMetamodel();
for ( EntityPersister persister : metamodel.entityPersisters().values() ) {
final String[] entitySpaces = (String[]) persister.getQuerySpaces();
if ( affectedEntity( tableSpaces, entitySpaces ) ) {
spacesList.addAll( Arrays.asList( entitySpaces ) );
@ -123,10 +125,10 @@ public class BulkOperationCleanupAction implements Executable, Serializable {
naturalIdCleanups.add( new NaturalIdCleanup( persister.getNaturalIdCacheAccessStrategy(), session ) );
}
final Set<String> roles = session.getFactory().getMetamodel().getCollectionRolesByEntityParticipant( persister.getEntityName() );
final Set<String> roles = metamodel.getCollectionRolesByEntityParticipant( persister.getEntityName() );
if ( roles != null ) {
for ( String role : roles ) {
final CollectionPersister collectionPersister = factory.getMetamodel().collectionPersister( role );
final CollectionPersister collectionPersister = metamodel.collectionPersister( role );
if ( collectionPersister.hasCache() ) {
collectionCleanups.add(
new CollectionCleanup( collectionPersister.getCacheAccessStrategy(), session )

View File

@ -95,7 +95,7 @@ public final class CollectionUpdateAction extends CollectionAction {
final StatisticsImplementor statistics = session.getFactory().getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.updateCollection( getPersister().getRole() );
statistics.updateCollection( persister.getRole() );
}
}

View File

@ -222,8 +222,9 @@ public final class EntityInsertAction extends AbstractEntityInsertAction {
final Object ck = cache.generateCacheKey( getId(), persister, factory, session.getTenantIdentifier() );
final boolean put = cacheAfterInsert( cache, ck );
if ( put && factory.getStatistics().isStatisticsEnabled() ) {
factory.getStatistics().entityCachePut(
final StatisticsImplementor statistics = factory.getStatistics();
if ( put && statistics.isStatisticsEnabled() ) {
statistics.entityCachePut(
StatsHelper.INSTANCE.getRootEntityRole( persister ),
cache.getRegion().getName()
);

View File

@ -320,10 +320,11 @@ public final class EntityUpdateAction extends EntityAction {
final EntityPersister persister = getPersister();
if ( persister.canWriteToCache() ) {
final EntityDataAccess cache = persister.getCacheAccessStrategy();
final SessionFactoryImplementor factory = session.getFactory();
final Object ck = cache.generateCacheKey(
getId(),
persister,
session.getFactory(),
factory,
session.getTenantIdentifier()
);
@ -334,15 +335,16 @@ public final class EntityUpdateAction extends EntityAction {
session.getCacheMode().isPutEnabled() ) {
final boolean put = cacheAfterUpdate( cache, ck );
if ( put && getSession().getFactory().getStatistics().isStatisticsEnabled() ) {
session.getFactory().getStatistics().entityCachePut(
final StatisticsImplementor statistics = factory.getStatistics();
if ( put && statistics.isStatisticsEnabled() ) {
statistics.entityCachePut(
StatsHelper.INSTANCE.getRootEntityRole( persister ),
getPersister().getCacheAccessStrategy().getRegion().getName()
cache.getRegion().getName()
);
}
}
else {
cache.unlockItem(session, ck, lock );
cache.unlockItem( session, ck, lock );
}
}
postCommitUpdate( success );

View File

@ -13,6 +13,8 @@ import org.hibernate.HibernateException;
import org.hibernate.action.internal.CollectionAction;
import org.hibernate.action.spi.AfterTransactionCompletionProcess;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.spi.SessionFactoryOptions;
import org.hibernate.cache.spi.access.CollectionDataAccess;
import org.hibernate.cache.spi.access.SoftLock;
import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.spi.SessionFactoryImplementor;
@ -27,9 +29,11 @@ import org.hibernate.event.spi.PostInsertEventListener;
import org.hibernate.event.spi.PostUpdateEvent;
import org.hibernate.event.spi.PostUpdateEventListener;
import org.hibernate.integrator.spi.Integrator;
import org.hibernate.metamodel.spi.MetamodelImplementor;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.service.spi.SessionFactoryServiceRegistry;
import org.hibernate.tuple.entity.EntityMetamodel;
import org.jboss.logging.Logger;
@ -82,11 +86,12 @@ public class CollectionCacheInvalidator
}
private void integrate(SessionFactoryServiceRegistry serviceRegistry, SessionFactoryImplementor sessionFactory) {
if ( !sessionFactory.getSessionFactoryOptions().isAutoEvictCollectionCache() ) {
final SessionFactoryOptions sessionFactoryOptions = sessionFactory.getSessionFactoryOptions();
if ( !sessionFactoryOptions.isAutoEvictCollectionCache() ) {
// feature is disabled
return;
}
if ( !sessionFactory.getSessionFactoryOptions().isSecondLevelCacheEnabled() ) {
if ( !sessionFactoryOptions.isSecondLevelCacheEnabled() ) {
// Nothing to do, if caching is disabled
return;
}
@ -100,12 +105,14 @@ public class CollectionCacheInvalidator
try {
SessionFactoryImplementor factory = persister.getFactory();
Set<String> collectionRoles = factory.getMetamodel().getCollectionRolesByEntityParticipant( persister.getEntityName() );
final MetamodelImplementor metamodel = factory.getMetamodel();
Set<String> collectionRoles = metamodel.getCollectionRolesByEntityParticipant( persister.getEntityName() );
if ( collectionRoles == null || collectionRoles.isEmpty() ) {
return;
}
final EntityMetamodel entityMetamodel = persister.getEntityMetamodel();
for ( String role : collectionRoles ) {
final CollectionPersister collectionPersister = factory.getMetamodel().collectionPersister( role );
final CollectionPersister collectionPersister = metamodel.collectionPersister( role );
if ( !collectionPersister.hasCache() ) {
// ignore collection if no caching is used
continue;
@ -114,7 +121,7 @@ public class CollectionCacheInvalidator
String mappedBy = collectionPersister.getMappedByProperty();
if ( !collectionPersister.isManyToMany() &&
mappedBy != null && !mappedBy.isEmpty() ) {
int i = persister.getEntityMetamodel().getPropertyIndex( mappedBy );
int i = entityMetamodel.getPropertyIndex( mappedBy );
Serializable oldId = null;
if ( oldState != null ) {
// in case of updating an entity we perhaps have to decache 2 entity collections, this is the
@ -136,9 +143,10 @@ public class CollectionCacheInvalidator
}
else {
LOG.debug( "Evict CollectionRegion " + role );
final SoftLock softLock = collectionPersister.getCacheAccessStrategy().lockRegion();
final CollectionDataAccess cacheAccessStrategy = collectionPersister.getCacheAccessStrategy();
final SoftLock softLock = cacheAccessStrategy.lockRegion();
session.getActionQueue().registerProcess( (success, session1) -> {
collectionPersister.getCacheAccessStrategy().unlockRegion( softLock );
cacheAccessStrategy.unlockRegion( softLock );
} );
}
}

View File

@ -11,8 +11,10 @@ import java.io.Serializable;
import org.hibernate.cache.spi.RegionFactory;
import org.hibernate.cache.spi.TimestampsRegion;
import org.hibernate.cache.spi.TimestampsCache;
import org.hibernate.engine.spi.SessionEventListenerManager;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.jboss.logging.Logger;
@ -42,28 +44,32 @@ public class TimestampsCacheEnabledImpl implements TimestampsCache {
final SessionFactoryImplementor factory = session.getFactory();
final RegionFactory regionFactory = factory.getCache().getRegionFactory();
final boolean stats = factory.getStatistics().isStatisticsEnabled();
final StatisticsImplementor statistics = factory.getStatistics();
final boolean stats = statistics.isStatisticsEnabled();
final Long ts = regionFactory.nextTimestamp() + regionFactory.getTimeout();
final SessionEventListenerManager eventListenerManager = session.getEventListenerManager();
final boolean debugEnabled = log.isDebugEnabled();
for ( Serializable space : spaces ) {
if ( log.isDebugEnabled() ) {
if ( debugEnabled ) {
log.debugf( "Pre-invalidating space [%s], timestamp: %s", space, ts );
}
try {
session.getEventListenerManager().cachePutStart();
eventListenerManager.cachePutStart();
//put() has nowait semantics, is this really appropriate?
//note that it needs to be async replication, never local or sync
timestampsRegion.putIntoCache( space, ts, session );
}
finally {
session.getEventListenerManager().cachePutEnd();
eventListenerManager.cachePutEnd();
}
if ( stats ) {
factory.getStatistics().updateTimestampsCachePut();
statistics.updateTimestampsCachePut();
}
}
}
@ -72,24 +78,27 @@ public class TimestampsCacheEnabledImpl implements TimestampsCache {
public void invalidate(
String[] spaces,
SharedSessionContractImplementor session) {
final boolean stats = session.getFactory().getStatistics().isStatisticsEnabled();
final StatisticsImplementor statistics = session.getFactory().getStatistics();
final boolean stats = statistics.isStatisticsEnabled();
final Long ts = session.getFactory().getCache().getRegionFactory().nextTimestamp();
final boolean debugEnabled = log.isDebugEnabled();
for (Serializable space : spaces) {
if ( log.isDebugEnabled() ) {
for ( Serializable space : spaces ) {
if ( debugEnabled ) {
log.debugf( "Invalidating space [%s], timestamp: %s", space, ts );
}
final SessionEventListenerManager eventListenerManager = session.getEventListenerManager();
try {
session.getEventListenerManager().cachePutStart();
eventListenerManager.cachePutStart();
timestampsRegion.putIntoCache( space, ts, session );
}
finally {
session.getEventListenerManager().cachePutEnd();
eventListenerManager.cachePutEnd();
if ( stats ) {
session.getFactory().getStatistics().updateTimestampsCachePut();
statistics.updateTimestampsCachePut();
}
}
}
@ -100,7 +109,10 @@ public class TimestampsCacheEnabledImpl implements TimestampsCache {
String[] spaces,
Long timestamp,
SharedSessionContractImplementor session) {
final boolean stats = session.getFactory().getStatistics().isStatisticsEnabled();
final StatisticsImplementor statistics = session.getFactory().getStatistics();
final boolean stats = statistics.isStatisticsEnabled();
final boolean debugEnabled = log.isDebugEnabled();
for ( Serializable space : spaces ) {
final Long lastUpdate = getLastUpdateTimestampForSpace( space, session );
@ -108,11 +120,11 @@ public class TimestampsCacheEnabledImpl implements TimestampsCache {
// the last update timestamp for the given space was evicted from the
// cache or there have been no writes to it since startup
if ( stats ) {
session.getFactory().getStatistics().updateTimestampsCacheMiss();
statistics.updateTimestampsCacheMiss();
}
}
else {
if ( log.isDebugEnabled() ) {
if ( debugEnabled ) {
log.debugf(
"[%s] last update timestamp: %s",
space,
@ -120,7 +132,7 @@ public class TimestampsCacheEnabledImpl implements TimestampsCache {
);
}
if ( stats ) {
session.getFactory().getStatistics().updateTimestampsCacheHit();
statistics.updateTimestampsCacheHit();
}
if ( lastUpdate >= timestamp ) {
return false;

View File

@ -1,47 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.cfg;
import java.util.Set;
import org.hibernate.internal.util.xml.XmlDocument;
/**
* Represents a mapping queued for delayed processing to await
* processing of an extends entity upon which it depends.
*
* @author Steve Ebersole
*/
public class ExtendsQueueEntry {
private final String explicitName;
private final String mappingPackage;
private final XmlDocument metadataXml;
private final Set<String> entityNames;
public ExtendsQueueEntry(String explicitName, String mappingPackage, XmlDocument metadataXml, Set<String> entityNames) {
this.explicitName = explicitName;
this.mappingPackage = mappingPackage;
this.metadataXml = metadataXml;
this.entityNames = entityNames;
}
public String getExplicitName() {
return explicitName;
}
public String getMappingPackage() {
return mappingPackage;
}
public XmlDocument getMetadataXml() {
return metadataXml;
}
public Set<String> getEntityNames() {
return entityNames;
}
}

View File

@ -70,6 +70,7 @@ import org.hibernate.annotations.common.reflection.XClass;
import org.hibernate.annotations.common.reflection.XProperty;
import org.hibernate.boot.model.IdentifierGeneratorDefinition;
import org.hibernate.boot.model.TypeDefinition;
import org.hibernate.boot.spi.InFlightMetadataCollector;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.cfg.AccessType;
import org.hibernate.cfg.AnnotatedClassType;
@ -461,8 +462,9 @@ public abstract class CollectionBinder {
}
// set explicit type information
final InFlightMetadataCollector metadataCollector = buildingContext.getMetadataCollector();
if ( explicitType != null ) {
final TypeDefinition typeDef = buildingContext.getMetadataCollector().getTypeDefinition( explicitType );
final TypeDefinition typeDef = metadataCollector.getTypeDefinition( explicitType );
if ( typeDef == null ) {
collection.setTypeName( explicitType );
collection.setTypeParameters( explicitTypeParameters );
@ -554,7 +556,7 @@ public abstract class CollectionBinder {
//many to many may need some second pass informations
if ( !oneToMany && isMappedBy ) {
buildingContext.getMetadataCollector().addMappedBy( getCollectionType().getName(), mappedBy, propertyName );
metadataCollector.addMappedBy( getCollectionType().getName(), mappedBy, propertyName );
}
//TODO reducce tableBinder != null and oneToMany
XClass collectionType = getCollectionType();
@ -580,13 +582,13 @@ public abstract class CollectionBinder {
// do it right away, otherwise @ManyToOne on composite element call addSecondPass
// and raise a ConcurrentModificationException
//sp.doSecondPass( CollectionHelper.EMPTY_MAP );
buildingContext.getMetadataCollector().addSecondPass( sp, !isMappedBy );
metadataCollector.addSecondPass( sp, !isMappedBy );
}
else {
buildingContext.getMetadataCollector().addSecondPass( sp, !isMappedBy );
metadataCollector.addSecondPass( sp, !isMappedBy );
}
buildingContext.getMetadataCollector().addCollectionBinding( collection );
metadataCollector.addCollectionBinding( collection );
//property building
PropertyBinder binder = new PropertyBinder();

View File

@ -461,10 +461,11 @@ public abstract class AbstractPersistentCollection implements Serializable, Pers
@SuppressWarnings({"JavaDoc"})
protected boolean isInverseCollectionNoOrphanDelete() {
final CollectionEntry ce = session.getPersistenceContextInternal().getCollectionEntry( this );
return ce != null
&&
ce.getLoadedPersister().isInverse() &&
!ce.getLoadedPersister().hasOrphanDelete();
if ( ce == null ) {
return false;
}
final CollectionPersister loadedPersister = ce.getLoadedPersister();
return loadedPersister.isInverse() && !loadedPersister.hasOrphanDelete();
}
/**
@ -474,9 +475,11 @@ public abstract class AbstractPersistentCollection implements Serializable, Pers
@SuppressWarnings({"JavaDoc"})
protected boolean isInverseOneToManyOrNoOrphanDelete() {
final CollectionEntry ce = session.getPersistenceContextInternal().getCollectionEntry( this );
return ce != null
&& ce.getLoadedPersister().isInverse()
&& ( ce.getLoadedPersister().isOneToMany() || !ce.getLoadedPersister().hasOrphanDelete() );
if ( ce == null ) {
return false;
}
final CollectionPersister loadedPersister = ce.getLoadedPersister();
return loadedPersister.isInverse() && ( loadedPersister.isOneToMany() || !loadedPersister.hasOrphanDelete() );
}
/**

View File

@ -15,11 +15,13 @@ import org.hibernate.JDBCException;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.StaleObjectStateException;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.sql.SimpleSelect;
import org.hibernate.stat.spi.StatisticsImplementor;
/**
* A pessimistic locking strategy where the locks are obtained through select statements.
@ -55,42 +57,45 @@ public class PessimisticReadSelectLockingStrategy extends AbstractSelectLockingS
final String sql = determineSql( timeout );
final SessionFactoryImplementor factory = session.getFactory();
try {
final Lockable lockable = getLockable();
try {
final PreparedStatement st = session.getJdbcCoordinator().getStatementPreparer().prepareStatement( sql );
final JdbcCoordinator jdbcCoordinator = session.getJdbcCoordinator();
final PreparedStatement st = jdbcCoordinator.getStatementPreparer().prepareStatement( sql );
try {
getLockable().getIdentifierType().nullSafeSet( st, id, 1, session );
if ( getLockable().isVersioned() ) {
getLockable().getVersionType().nullSafeSet(
lockable.getIdentifierType().nullSafeSet( st, id, 1, session );
if ( lockable.isVersioned() ) {
lockable.getVersionType().nullSafeSet(
st,
version,
getLockable().getIdentifierType().getColumnSpan( factory ) + 1,
lockable.getIdentifierType().getColumnSpan( factory ) + 1,
session
);
}
final ResultSet rs = session.getJdbcCoordinator().getResultSetReturn().extract( st );
final ResultSet rs = jdbcCoordinator.getResultSetReturn().extract( st );
try {
if ( !rs.next() ) {
if ( factory.getStatistics().isStatisticsEnabled() ) {
factory.getStatistics().optimisticFailure( getLockable().getEntityName() );
final StatisticsImplementor statistics = factory.getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.optimisticFailure( lockable.getEntityName() );
}
throw new StaleObjectStateException( getLockable().getEntityName(), id );
throw new StaleObjectStateException( lockable.getEntityName(), id );
}
}
finally {
session.getJdbcCoordinator().getLogicalConnection().getResourceRegistry().release( rs, st );
jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( rs, st );
}
}
finally {
session.getJdbcCoordinator().getLogicalConnection().getResourceRegistry().release( st );
session.getJdbcCoordinator().afterStatementExecution();
jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( st );
jdbcCoordinator.afterStatementExecution();
}
}
catch ( SQLException e ) {
throw session.getJdbcServices().getSqlExceptionHelper().convert(
e,
"could not lock: " + MessageHelper.infoString( getLockable(), id, session.getFactory() ),
"could not lock: " + MessageHelper.infoString( lockable, id, session.getFactory() ),
sql
);
}

View File

@ -14,12 +14,14 @@ import org.hibernate.HibernateException;
import org.hibernate.JDBCException;
import org.hibernate.LockMode;
import org.hibernate.StaleObjectStateException;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.sql.Update;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.jboss.logging.Logger;
@ -75,7 +77,8 @@ public class PessimisticReadUpdateLockingStrategy implements LockingStrategy {
final SessionFactoryImplementor factory = session.getFactory();
try {
try {
final PreparedStatement st = session.getJdbcCoordinator().getStatementPreparer().prepareStatement( sql );
final JdbcCoordinator jdbcCoordinator = session.getJdbcCoordinator();
final PreparedStatement st = jdbcCoordinator.getStatementPreparer().prepareStatement( sql );
try {
lockable.getVersionType().nullSafeSet( st, version, 1, session );
int offset = 2;
@ -87,19 +90,20 @@ public class PessimisticReadUpdateLockingStrategy implements LockingStrategy {
lockable.getVersionType().nullSafeSet( st, version, offset, session );
}
final int affected = session.getJdbcCoordinator().getResultSetReturn().executeUpdate( st );
final int affected = jdbcCoordinator.getResultSetReturn().executeUpdate( st );
// todo: should this instead check for exactly one row modified?
if ( affected < 0 ) {
if (factory.getStatistics().isStatisticsEnabled()) {
factory.getStatistics().optimisticFailure( lockable.getEntityName() );
final StatisticsImplementor statistics = factory.getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.optimisticFailure( lockable.getEntityName() );
}
throw new StaleObjectStateException( lockable.getEntityName(), id );
}
}
finally {
session.getJdbcCoordinator().getLogicalConnection().getResourceRegistry().release( st );
session.getJdbcCoordinator().afterStatementExecution();
jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( st );
jdbcCoordinator.afterStatementExecution();
}
}

View File

@ -15,11 +15,13 @@ import org.hibernate.JDBCException;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.StaleObjectStateException;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.sql.SimpleSelect;
import org.hibernate.stat.spi.StatisticsImplementor;
/**
* A pessimistic locking strategy where the locks are obtained through select statements.
@ -54,41 +56,44 @@ public class PessimisticWriteSelectLockingStrategy extends AbstractSelectLocking
final String sql = determineSql( timeout );
final SessionFactoryImplementor factory = session.getFactory();
try {
final Lockable lockable = getLockable();
try {
final PreparedStatement st = session.getJdbcCoordinator().getStatementPreparer().prepareStatement( sql );
final JdbcCoordinator jdbcCoordinator = session.getJdbcCoordinator();
final PreparedStatement st = jdbcCoordinator.getStatementPreparer().prepareStatement( sql );
try {
getLockable().getIdentifierType().nullSafeSet( st, id, 1, session );
if ( getLockable().isVersioned() ) {
getLockable().getVersionType().nullSafeSet(
lockable.getIdentifierType().nullSafeSet( st, id, 1, session );
if ( lockable.isVersioned() ) {
lockable.getVersionType().nullSafeSet(
st,
version,
getLockable().getIdentifierType().getColumnSpan( factory ) + 1,
lockable.getIdentifierType().getColumnSpan( factory ) + 1,
session
);
}
final ResultSet rs = session.getJdbcCoordinator().getResultSetReturn().extract( st );
final ResultSet rs = jdbcCoordinator.getResultSetReturn().extract( st );
try {
if ( !rs.next() ) {
if ( factory.getStatistics().isStatisticsEnabled() ) {
factory.getStatistics().optimisticFailure( getLockable().getEntityName() );
final StatisticsImplementor statistics = factory.getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.optimisticFailure( lockable.getEntityName() );
}
throw new StaleObjectStateException( getLockable().getEntityName(), id );
throw new StaleObjectStateException( lockable.getEntityName(), id );
}
}
finally {
session.getJdbcCoordinator().getLogicalConnection().getResourceRegistry().release( rs, st );
jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( rs, st );
}
}
finally {
session.getJdbcCoordinator().getLogicalConnection().getResourceRegistry().release( st );
session.getJdbcCoordinator().afterStatementExecution();
jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( st );
jdbcCoordinator.afterStatementExecution();
}
}
catch ( SQLException e ) {
throw session.getJdbcServices().getSqlExceptionHelper().convert(
e,
"could not lock: " + MessageHelper.infoString( getLockable(), id, session.getFactory() ),
"could not lock: " + MessageHelper.infoString( lockable, id, session.getFactory() ),
sql
);
}

View File

@ -14,12 +14,14 @@ import org.hibernate.HibernateException;
import org.hibernate.JDBCException;
import org.hibernate.LockMode;
import org.hibernate.StaleObjectStateException;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.sql.Update;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.jboss.logging.Logger;
@ -74,7 +76,8 @@ public class PessimisticWriteUpdateLockingStrategy implements LockingStrategy {
final SessionFactoryImplementor factory = session.getFactory();
try {
try {
final PreparedStatement st = session.getJdbcCoordinator().getStatementPreparer().prepareStatement( sql );
final JdbcCoordinator jdbcCoordinator = session.getJdbcCoordinator();
final PreparedStatement st = jdbcCoordinator.getStatementPreparer().prepareStatement( sql );
try {
lockable.getVersionType().nullSafeSet( st, version, 1, session );
int offset = 2;
@ -86,19 +89,20 @@ public class PessimisticWriteUpdateLockingStrategy implements LockingStrategy {
lockable.getVersionType().nullSafeSet( st, version, offset, session );
}
final int affected = session.getJdbcCoordinator().getResultSetReturn().executeUpdate( st );
final int affected = jdbcCoordinator.getResultSetReturn().executeUpdate( st );
// todo: should this instead check for exactly one row modified?
if ( affected < 0 ) {
if (factory.getStatistics().isStatisticsEnabled()) {
factory.getStatistics().optimisticFailure( lockable.getEntityName() );
final StatisticsImplementor statistics = factory.getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.optimisticFailure( lockable.getEntityName() );
}
throw new StaleObjectStateException( lockable.getEntityName(), id );
}
}
finally {
session.getJdbcCoordinator().getLogicalConnection().getResourceRegistry().release( st );
session.getJdbcCoordinator().afterStatementExecution();
jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( st );
jdbcCoordinator.afterStatementExecution();
}
}
catch ( SQLException e ) {

View File

@ -15,11 +15,13 @@ import org.hibernate.JDBCException;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.StaleObjectStateException;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.sql.SimpleSelect;
import org.hibernate.stat.spi.StatisticsImplementor;
/**
* A locking strategy where the locks are obtained through select statements.
@ -53,42 +55,45 @@ public class SelectLockingStrategy extends AbstractSelectLockingStrategy {
SharedSessionContractImplementor session) throws StaleObjectStateException, JDBCException {
final String sql = determineSql( timeout );
final SessionFactoryImplementor factory = session.getFactory();
final Lockable lockable = getLockable();
try {
final PreparedStatement st = session.getJdbcCoordinator().getStatementPreparer().prepareStatement( sql );
final JdbcCoordinator jdbcCoordinator = session.getJdbcCoordinator();
final PreparedStatement st = jdbcCoordinator.getStatementPreparer().prepareStatement( sql );
try {
getLockable().getIdentifierType().nullSafeSet( st, id, 1, session );
if ( getLockable().isVersioned() ) {
getLockable().getVersionType().nullSafeSet(
lockable.getIdentifierType().nullSafeSet( st, id, 1, session );
if ( lockable.isVersioned() ) {
lockable.getVersionType().nullSafeSet(
st,
version,
getLockable().getIdentifierType().getColumnSpan( factory ) + 1,
lockable.getIdentifierType().getColumnSpan( factory ) + 1,
session
);
}
final ResultSet rs = session.getJdbcCoordinator().getResultSetReturn().extract( st );
final ResultSet rs = jdbcCoordinator.getResultSetReturn().extract( st );
try {
if ( !rs.next() ) {
if ( factory.getStatistics().isStatisticsEnabled() ) {
factory.getStatistics().optimisticFailure( getLockable().getEntityName() );
final StatisticsImplementor statistics = factory.getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.optimisticFailure( lockable.getEntityName() );
}
throw new StaleObjectStateException( getLockable().getEntityName(), id );
throw new StaleObjectStateException( lockable.getEntityName(), id );
}
}
finally {
session.getJdbcCoordinator().getLogicalConnection().getResourceRegistry().release( rs, st );
jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( rs, st );
}
}
finally {
session.getJdbcCoordinator().getLogicalConnection().getResourceRegistry().release( st );
session.getJdbcCoordinator().afterStatementExecution();
jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( st );
jdbcCoordinator.afterStatementExecution();
}
}
catch ( SQLException sqle ) {
throw session.getJdbcServices().getSqlExceptionHelper().convert(
sqle,
"could not lock: " + MessageHelper.infoString( getLockable(), id, session.getFactory() ),
"could not lock: " + MessageHelper.infoString( lockable, id, session.getFactory() ),
sql
);
}

View File

@ -14,12 +14,16 @@ import org.hibernate.HibernateException;
import org.hibernate.JDBCException;
import org.hibernate.LockMode;
import org.hibernate.StaleObjectStateException;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.sql.Update;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.type.Type;
import org.hibernate.type.VersionType;
import org.jboss.logging.Logger;
@ -70,37 +74,42 @@ public class UpdateLockingStrategy implements LockingStrategy {
Object object,
int timeout,
SharedSessionContractImplementor session) throws StaleObjectStateException, JDBCException {
final String lockableEntityName = lockable.getEntityName();
if ( !lockable.isVersioned() ) {
throw new HibernateException( "write locks via update not supported for non-versioned entities [" + lockable.getEntityName() + "]" );
throw new HibernateException( "write locks via update not supported for non-versioned entities [" + lockableEntityName + "]" );
}
// todo : should we additionally check the current isolation mode explicitly?
final SessionFactoryImplementor factory = session.getFactory();
try {
final PreparedStatement st = session.getJdbcCoordinator().getStatementPreparer().prepareStatement( sql );
final JdbcCoordinator jdbcCoordinator = session.getJdbcCoordinator();
final PreparedStatement st = jdbcCoordinator.getStatementPreparer().prepareStatement( sql );
try {
lockable.getVersionType().nullSafeSet( st, version, 1, session );
final VersionType lockableVersionType = lockable.getVersionType();
lockableVersionType.nullSafeSet( st, version, 1, session );
int offset = 2;
lockable.getIdentifierType().nullSafeSet( st, id, offset, session );
offset += lockable.getIdentifierType().getColumnSpan( factory );
final Type lockableIdentifierType = lockable.getIdentifierType();
lockableIdentifierType.nullSafeSet( st, id, offset, session );
offset += lockableIdentifierType.getColumnSpan( factory );
if ( lockable.isVersioned() ) {
lockable.getVersionType().nullSafeSet( st, version, offset, session );
lockableVersionType.nullSafeSet( st, version, offset, session );
}
final int affected = session.getJdbcCoordinator().getResultSetReturn().executeUpdate( st );
final int affected = jdbcCoordinator.getResultSetReturn().executeUpdate( st );
if ( affected < 0 ) {
if (factory.getStatistics().isStatisticsEnabled()) {
factory.getStatistics().optimisticFailure( lockable.getEntityName() );
final StatisticsImplementor statistics = factory.getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.optimisticFailure( lockableEntityName );
}
throw new StaleObjectStateException( lockable.getEntityName(), id );
throw new StaleObjectStateException( lockableEntityName, id );
}
}
finally {
session.getJdbcCoordinator().getLogicalConnection().getResourceRegistry().release( st );
session.getJdbcCoordinator().afterStatementExecution();
jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( st );
jdbcCoordinator.afterStatementExecution();
}
}

View File

@ -21,6 +21,7 @@ import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.stat.internal.StatsHelper;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.type.Type;
import org.jboss.logging.Logger;
@ -240,15 +241,20 @@ public class NaturalIdXrefDelegate {
// Try resolution from second-level cache
final NaturalIdDataAccess naturalIdCacheAccessStrategy = persister.getNaturalIdCacheAccessStrategy();
final Object naturalIdCacheKey = naturalIdCacheAccessStrategy.generateCacheKey( naturalIdValues, persister, session() );
final SharedSessionContractImplementor session = session();
final Object naturalIdCacheKey = naturalIdCacheAccessStrategy.generateCacheKey( naturalIdValues, persister,
session
);
pk = CacheHelper.fromSharedCache( session(), naturalIdCacheKey, naturalIdCacheAccessStrategy );
pk = CacheHelper.fromSharedCache( session, naturalIdCacheKey, naturalIdCacheAccessStrategy );
// Found in second-level cache, store in session cache
final SessionFactoryImplementor factory = session().getFactory();
final SessionFactoryImplementor factory = session.getFactory();
final StatisticsImplementor statistics = factory.getStatistics();
final boolean statisticsEnabled = statistics.isStatisticsEnabled();
if ( pk != null ) {
if ( factory.getStatistics().isStatisticsEnabled() ) {
factory.getStatistics().naturalIdCacheHit(
if ( statisticsEnabled ) {
statistics.naturalIdCacheHit(
StatsHelper.INSTANCE.getRootEntityRole( persister ),
naturalIdCacheAccessStrategy.getRegion().getName()
);
@ -275,8 +281,8 @@ public class NaturalIdXrefDelegate {
entityNaturalIdResolutionCache.pkToNaturalIdMap.put( pk, cachedNaturalId );
entityNaturalIdResolutionCache.naturalIdToPkMap.put( cachedNaturalId, pk );
}
else if ( factory.getStatistics().isStatisticsEnabled() ) {
factory.getStatistics().naturalIdCacheMiss(
else if ( statisticsEnabled ) {
statistics.naturalIdCacheMiss(
StatsHelper.INSTANCE.getRootEntityRole( persister ),
naturalIdCacheAccessStrategy.getRegion().getName()
);

View File

@ -64,6 +64,7 @@ import org.hibernate.pretty.MessageHelper;
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.proxy.LazyInitializer;
import org.hibernate.stat.internal.StatsHelper;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.type.CollectionType;
import org.jboss.logging.Logger;
@ -1810,6 +1811,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
final Object naturalIdCacheKey = naturalIdCacheAccessStrategy.generateCacheKey( naturalIdValues, persister, session );
final SessionFactoryImplementor factory = session.getFactory();
final StatisticsImplementor statistics = factory.getStatistics();
switch ( source ) {
case LOAD: {
@ -1824,8 +1826,8 @@ public class StatefulPersistenceContext implements PersistenceContext {
null
);
if ( put && factory.getStatistics().isStatisticsEnabled() ) {
factory.getStatistics().naturalIdCachePut(
if ( put && statistics.isStatisticsEnabled() ) {
statistics.naturalIdCachePut(
StatsHelper.INSTANCE.getRootEntityRole( persister ),
naturalIdCacheAccessStrategy.getRegion().getName()
);
@ -1835,8 +1837,8 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
case INSERT: {
final boolean put = naturalIdCacheAccessStrategy.insert( session, naturalIdCacheKey, id );
if ( put && factory.getStatistics().isStatisticsEnabled() ) {
factory.getStatistics().naturalIdCachePut(
if ( put && statistics.isStatisticsEnabled() ) {
statistics.naturalIdCachePut(
StatsHelper.INSTANCE.getRootEntityRole( persister ),
naturalIdCacheAccessStrategy.getRegion().getName()
);
@ -1848,8 +1850,8 @@ public class StatefulPersistenceContext implements PersistenceContext {
public void doAfterTransactionCompletion(boolean success, SharedSessionContractImplementor session) {
if (success) {
final boolean put = naturalIdCacheAccessStrategy.afterInsert( session, naturalIdCacheKey, id );
if ( put && factory.getStatistics().isStatisticsEnabled() ) {
factory.getStatistics().naturalIdCachePut(
if ( put && statistics.isStatisticsEnabled() ) {
statistics.naturalIdCachePut(
StatsHelper.INSTANCE.getRootEntityRole( persister ),
naturalIdCacheAccessStrategy.getRegion().getName()
);
@ -1875,8 +1877,8 @@ public class StatefulPersistenceContext implements PersistenceContext {
final SoftLock lock = naturalIdCacheAccessStrategy.lockItem( session, naturalIdCacheKey, null );
final boolean put = naturalIdCacheAccessStrategy.update( session, naturalIdCacheKey, id );
if ( put && factory.getStatistics().isStatisticsEnabled() ) {
factory.getStatistics().naturalIdCachePut(
if ( put && statistics.isStatisticsEnabled() ) {
statistics.naturalIdCachePut(
StatsHelper.INSTANCE.getRootEntityRole( persister ),
naturalIdCacheAccessStrategy.getRegion().getName()
);
@ -1895,8 +1897,8 @@ public class StatefulPersistenceContext implements PersistenceContext {
lock
);
if ( put && factory.getStatistics().isStatisticsEnabled() ) {
factory.getStatistics().naturalIdCachePut(
if ( put && statistics.isStatisticsEnabled() ) {
statistics.naturalIdCachePut(
StatsHelper.INSTANCE.getRootEntityRole( persister ),
naturalIdCacheAccessStrategy.getRegion().getName()
);

View File

@ -477,8 +477,9 @@ public final class TwoPhaseLoad {
return session.getCacheMode() != CacheMode.REFRESH;
}
else {
return entityEntry.getPersister().hasLazyProperties()
&& entityEntry.getPersister().isLazyPropertiesCacheable();
final EntityPersister persister = entityEntry.getPersister();
return persister.hasLazyProperties()
&& persister.isLazyPropertiesCacheable();
}
}

View File

@ -24,6 +24,7 @@ import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.spi.CollectionEntry;
import org.hibernate.engine.spi.CollectionKey;
import org.hibernate.engine.spi.PersistenceContext;
import org.hibernate.engine.spi.SessionEventListenerManager;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.engine.spi.Status;
@ -34,6 +35,7 @@ import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.collection.QueryableCollection;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.stat.spi.StatisticsImplementor;
/**
* Represents state associated with the processing of a given {@link ResultSet}
@ -229,21 +231,23 @@ public class CollectionLoadContext {
private void endLoadingCollection(LoadingCollectionEntry lce, CollectionPersister persister) {
LOG.tracev( "Ending loading collection [{0}]", lce );
final SharedSessionContractImplementor session = getLoadContext().getPersistenceContext().getSession();
final PersistenceContext persistenceContext = getLoadContext().getPersistenceContext();
final SharedSessionContractImplementor session = persistenceContext.getSession();
// warning: can cause a recursive calls! (proxy initialization)
final boolean hasNoQueuedAdds = lce.getCollection().endRead();
final PersistentCollection loadingCollection = lce.getCollection();
final boolean hasNoQueuedAdds = loadingCollection.endRead();
if ( persister.getCollectionType().hasHolder() ) {
getLoadContext().getPersistenceContext().addCollectionHolder( lce.getCollection() );
persistenceContext.addCollectionHolder( loadingCollection );
}
CollectionEntry ce = getLoadContext().getPersistenceContext().getCollectionEntry( lce.getCollection() );
CollectionEntry ce = persistenceContext.getCollectionEntry( loadingCollection );
if ( ce == null ) {
ce = getLoadContext().getPersistenceContext().addInitializedCollection( persister, lce.getCollection(), lce.getKey() );
ce = persistenceContext.addInitializedCollection( persister, loadingCollection, lce.getKey() );
}
else {
ce.postInitialize( lce.getCollection() );
ce.postInitialize( loadingCollection );
// if (ce.getLoadedPersister().getBatchSize() > 1) { // not the best place for doing this, moved into ce.postInitialize
// getLoadContext().getPersistenceContext().getBatchFetchQueue().removeBatchLoadableCollection(ce);
// }
@ -251,31 +255,32 @@ public class CollectionLoadContext {
// The collection has been completely initialized and added to the PersistenceContext.
if ( lce.getCollection().getOwner() != null ) {
if ( loadingCollection.getOwner() != null ) {
// If the owner is bytecode-enhanced and the owner's collection value is uninitialized,
// then go ahead and set it to the newly initialized collection.
final EntityPersister ownerEntityPersister = persister.getOwnerEntityPersister();
final BytecodeEnhancementMetadata bytecodeEnhancementMetadata =
persister.getOwnerEntityPersister().getBytecodeEnhancementMetadata();
ownerEntityPersister.getBytecodeEnhancementMetadata();
if ( bytecodeEnhancementMetadata.isEnhancedForLazyLoading() ) {
// Lazy properties in embeddables/composites are not currently supported for embeddables (HHH-10480),
// so check to make sure the collection is not in an embeddable before checking to see if
// the collection is lazy.
// TODO: More will probably need to be done here when HHH-10480 is fixed..
if ( StringHelper.qualifier( persister.getRole() ).length() ==
persister.getOwnerEntityPersister().getEntityName().length() ) {
ownerEntityPersister.getEntityName().length() ) {
// Assume the collection is not in an embeddable.
// Strip off <entityName><dot> to get the collection property name.
final String propertyName = persister.getRole().substring(
persister.getOwnerEntityPersister().getEntityName().length() + 1
ownerEntityPersister.getEntityName().length() + 1
);
if ( !bytecodeEnhancementMetadata.isAttributeLoaded( lce.getCollection().getOwner(), propertyName ) ) {
int propertyIndex = persister.getOwnerEntityPersister().getEntityMetamodel().getPropertyIndex(
if ( !bytecodeEnhancementMetadata.isAttributeLoaded( loadingCollection.getOwner(), propertyName ) ) {
int propertyIndex = ownerEntityPersister.getEntityMetamodel().getPropertyIndex(
propertyName
);
persister.getOwnerEntityPersister().setPropertyValue(
lce.getCollection().getOwner(),
ownerEntityPersister.setPropertyValue(
loadingCollection.getOwner(),
propertyIndex,
lce.getCollection()
loadingCollection
);
}
}
@ -297,11 +302,12 @@ public class CollectionLoadContext {
if ( LOG.isDebugEnabled() ) {
LOG.debugf(
"Collection fully initialized: %s",
MessageHelper.collectionInfoString( persister, lce.getCollection(), lce.getKey(), session )
MessageHelper.collectionInfoString( persister, loadingCollection, lce.getKey(), session )
);
}
if ( session.getFactory().getStatistics().isStatisticsEnabled() ) {
session.getFactory().getStatistics().loadCollection( persister.getRole() );
final StatisticsImplementor statistics = session.getFactory().getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.loadCollection( persister.getRole() );
}
}
@ -384,8 +390,9 @@ public class CollectionLoadContext {
// CollectionRegionAccessStrategy has no update, so avoid putting uncommitted data via putFromLoad
if (isPutFromLoad) {
final SessionEventListenerManager eventListenerManager = session.getEventListenerManager();
try {
session.getEventListenerManager().cachePutStart();
eventListenerManager.cachePutStart();
final boolean put = cacheAccess.putFromLoad(
session,
cacheKey,
@ -394,15 +401,16 @@ public class CollectionLoadContext {
factory.getSessionFactoryOptions().isMinimalPutsEnabled() && session.getCacheMode()!= CacheMode.REFRESH
);
if ( put && factory.getStatistics().isStatisticsEnabled() ) {
factory.getStatistics().collectionCachePut(
final StatisticsImplementor statistics = factory.getStatistics();
if ( put && statistics.isStatisticsEnabled() ) {
statistics.collectionCachePut(
persister.getNavigableRole(),
persister.getCacheAccessStrategy().getRegion().getName()
);
}
}
finally {
session.getEventListenerManager().cachePutEnd();
eventListenerManager.cachePutEnd();
}
}
}

View File

@ -29,6 +29,7 @@ import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.query.ParameterMetadata;
import org.hibernate.query.internal.ParameterMetadataImpl;
import org.hibernate.stat.spi.StatisticsImplementor;
/**
* Acts as a cache for compiled query plans, as well as query-parameter metadata.
@ -149,7 +150,8 @@ public class QueryPlanCache implements Serializable {
throws QueryException, MappingException {
final HQLQueryPlanKey key = new HQLQueryPlanKey( queryString, shallow, enabledFilters );
HQLQueryPlan value = (HQLQueryPlan) queryPlanCache.get( key );
boolean stats = factory.getStatistics().isStatisticsEnabled();
final StatisticsImplementor statistics = factory.getStatistics();
boolean stats = statistics.isStatisticsEnabled();
if ( value == null ) {
final long startTime = ( stats ) ? System.nanoTime() : 0L;
@ -160,7 +162,7 @@ public class QueryPlanCache implements Serializable {
if ( stats ) {
final long endTime = System.nanoTime();
final long microseconds = TimeUnit.MICROSECONDS.convert( endTime - startTime, TimeUnit.NANOSECONDS );
factory.getStatistics().queryCompiled( queryString, microseconds );
statistics.queryCompiled( queryString, microseconds );
}
queryPlanCache.putIfAbsent( key, value );
@ -169,7 +171,7 @@ public class QueryPlanCache implements Serializable {
LOG.tracev( "Located HQL query plan in cache ({0})", queryString );
if ( stats ) {
factory.getStatistics().queryPlanCacheHit( queryString );
statistics.queryPlanCacheHit( queryString );
}
}
return value;
@ -348,15 +350,16 @@ public class QueryPlanCache implements Serializable {
private DynamicFilterKey(FilterImpl filter) {
this.filterName = filter.getName();
if ( filter.getParameters().isEmpty() ) {
final Map<String, ?> parameters = filter.getParameters();
if ( parameters.isEmpty() ) {
parameterMetadata = Collections.emptyMap();
}
else {
parameterMetadata = new HashMap<String,Integer>(
CollectionHelper.determineProperSizing( filter.getParameters() ),
CollectionHelper.determineProperSizing( parameters ),
CollectionHelper.LOAD_FACTOR
);
for ( Object o : filter.getParameters().entrySet() ) {
for ( Object o : parameters.entrySet() ) {
final Map.Entry entry = (Map.Entry) o;
final String key = (String) entry.getKey();
final Integer valueCount;

View File

@ -8,11 +8,14 @@ package org.hibernate.event.internal;
import org.hibernate.FlushMode;
import org.hibernate.HibernateException;
import org.hibernate.engine.spi.ActionQueue;
import org.hibernate.engine.spi.PersistenceContext;
import org.hibernate.engine.spi.SessionEventListenerManager;
import org.hibernate.event.spi.AutoFlushEvent;
import org.hibernate.event.spi.AutoFlushEventListener;
import org.hibernate.event.spi.EventSource;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.jboss.logging.Logger;
@ -35,38 +38,41 @@ public class DefaultAutoFlushEventListener extends AbstractFlushingEventListener
*/
public void onAutoFlush(AutoFlushEvent event) throws HibernateException {
final EventSource source = event.getSession();
final SessionEventListenerManager eventListenerManager = source.getEventListenerManager();
try {
source.getEventListenerManager().partialFlushStart();
eventListenerManager.partialFlushStart();
if ( flushMightBeNeeded(source) ) {
if ( flushMightBeNeeded( source ) ) {
// Need to get the number of collection removals before flushing to executions
// (because flushing to executions can add collection removal actions to the action queue).
final int oldSize = source.getActionQueue().numberOfCollectionRemovals();
flushEverythingToExecutions(event);
if ( flushIsReallyNeeded(event, source) ) {
final ActionQueue actionQueue = source.getActionQueue();
final int oldSize = actionQueue.numberOfCollectionRemovals();
flushEverythingToExecutions( event );
if ( flushIsReallyNeeded( event, source ) ) {
LOG.trace( "Need to execute flush" );
// note: performExecutions() clears all collectionXxxxtion
// collections (the collection actions) in the session
performExecutions(source);
postFlush(source);
performExecutions( source );
postFlush( source );
postPostFlush( source );
if ( source.getFactory().getStatistics().isStatisticsEnabled() ) {
source.getFactory().getStatistics().flush();
final StatisticsImplementor statistics = source.getFactory().getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.flush();
}
}
else {
LOG.trace( "Don't need to execute flush" );
source.getActionQueue().clearFromFlushNeededCheck( oldSize );
actionQueue.clearFromFlushNeededCheck( oldSize );
}
event.setFlushRequired( flushIsReallyNeeded( event, source ) );
}
}
finally {
source.getEventListenerManager().partialFlushEnd(
eventListenerManager.partialFlushEnd(
event.getNumberOfEntitiesProcessed(),
event.getNumberOfEntitiesProcessed()
);

View File

@ -43,11 +43,12 @@ public class DefaultInitializeCollectionEventListener implements InitializeColle
throw new HibernateException( "collection was evicted" );
}
if ( !collection.wasInitialized() ) {
final CollectionPersister ceLoadedPersister = ce.getLoadedPersister();
if ( LOG.isTraceEnabled() ) {
LOG.tracev(
"Initializing collection {0}",
MessageHelper.collectionInfoString(
ce.getLoadedPersister(),
ceLoadedPersister,
collection,
ce.getLoadedKey(),
source
@ -58,7 +59,7 @@ public class DefaultInitializeCollectionEventListener implements InitializeColle
final boolean foundInCache = initializeCollectionFromCache(
ce.getLoadedKey(),
ce.getLoadedPersister(),
ceLoadedPersister,
collection,
source
);
@ -72,7 +73,7 @@ public class DefaultInitializeCollectionEventListener implements InitializeColle
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Collection not cached" );
}
ce.getLoadedPersister().initialize( ce.getLoadedKey(), source );
ceLoadedPersister.initialize( ce.getLoadedKey(), source );
if ( LOG.isTraceEnabled() ) {
LOG.trace( "Collection initialized" );
}
@ -80,7 +81,7 @@ public class DefaultInitializeCollectionEventListener implements InitializeColle
final StatisticsImplementor statistics = source.getFactory().getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.fetchCollection(
ce.getLoadedPersister().getRole()
ceLoadedPersister.getRole()
);
}
}
@ -119,7 +120,7 @@ public class DefaultInitializeCollectionEventListener implements InitializeColle
final SessionFactoryImplementor factory = source.getFactory();
final CollectionDataAccess cacheAccessStrategy = persister.getCacheAccessStrategy();
final Object ck = cacheAccessStrategy.generateCacheKey( id, persister, factory, source.getTenantIdentifier() );
final Object ce = CacheHelper.fromSharedCache( source, ck, persister.getCacheAccessStrategy() );
final Object ce = CacheHelper.fromSharedCache( source, ck, cacheAccessStrategy );
final StatisticsImplementor statistics = factory.getStatistics();
if ( statistics.isStatisticsEnabled() ) {

View File

@ -38,6 +38,7 @@ import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.proxy.LazyInitializer;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.type.ForeignKeyDirection;
import org.hibernate.type.TypeHelper;
@ -333,8 +334,9 @@ public class DefaultMergeEventListener extends AbstractSaveEventListener impleme
);
}
else if ( isVersionChanged( entity, source, persister, target ) ) {
if ( source.getFactory().getStatistics().isStatisticsEnabled() ) {
source.getFactory().getStatistics().optimisticFailure( entityName );
final StatisticsImplementor statistics = source.getFactory().getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.optimisticFailure( entityName );
}
throw new StaleObjectStateException( entityName, id );
}

View File

@ -20,6 +20,7 @@ import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.stat.spi.StatisticsImplementor;
/**
* Defines the default load event listeners used by hibernate for loading entities
@ -112,7 +113,8 @@ public class DefaultResolveNaturalIdEventListener
protected Serializable loadFromDatasource(final ResolveNaturalIdEvent event) {
final EventSource session = event.getSession();
final SessionFactoryImplementor factory = session.getFactory();
final boolean stats = factory.getStatistics().isStatisticsEnabled();
final StatisticsImplementor statistics = factory.getStatistics();
final boolean stats = statistics.isStatisticsEnabled();
long startTime = 0;
if ( stats ) {
startTime = System.nanoTime();
@ -127,7 +129,7 @@ public class DefaultResolveNaturalIdEventListener
if ( stats ) {
final long endTime = System.nanoTime();
final long milliseconds = TimeUnit.MILLISECONDS.convert( endTime - startTime, TimeUnit.NANOSECONDS );
factory.getStatistics().naturalIdQueryExecuted(
statistics.naturalIdQueryExecuted(
event.getEntityPersister().getRootEntityName(),
milliseconds
);

View File

@ -94,8 +94,9 @@ public class SQLExceptionConverterFactory {
// First, try to find a matching constructor accepting a ViolatedConstraintNameExtracter param...
final Constructor[] ctors = converterClass.getDeclaredConstructors();
for ( Constructor ctor : ctors ) {
if ( ctor.getParameterTypes() != null && ctor.getParameterCount() == 1 ) {
if ( ViolatedConstraintNameExtracter.class.isAssignableFrom( ctor.getParameterTypes()[0] ) ) {
final Class[] parameterTypes = ctor.getParameterTypes();
if ( parameterTypes != null && ctor.getParameterCount() == 1 ) {
if ( ViolatedConstraintNameExtracter.class.isAssignableFrom( parameterTypes[0] ) ) {
try {
return (SQLExceptionConverter) ctor.newInstance( violatedConstraintNameExtracter );
}

View File

@ -62,6 +62,7 @@ import org.hibernate.query.spi.ScrollableResultsImplementor;
import org.hibernate.sql.JoinFragment;
import org.hibernate.sql.JoinType;
import org.hibernate.sql.QuerySelect;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.transform.ResultTransformer;
import org.hibernate.type.AssociationType;
import org.hibernate.type.EntityType;
@ -1045,7 +1046,8 @@ public class QueryTranslatorImpl extends BasicLoader implements FilterTranslator
public Iterator iterate(QueryParameters queryParameters, EventSource session)
throws HibernateException {
boolean stats = session.getFactory().getStatistics().isStatisticsEnabled();
final StatisticsImplementor statistics = session.getFactory().getStatistics();
boolean stats = statistics.isStatisticsEnabled();
long startTime = 0;
if ( stats ) {
startTime = System.nanoTime();
@ -1078,7 +1080,7 @@ public class QueryTranslatorImpl extends BasicLoader implements FilterTranslator
if ( stats ) {
final long endTime = System.nanoTime();
final long milliseconds = TimeUnit.MILLISECONDS.convert( endTime - startTime, TimeUnit.NANOSECONDS );
session.getFactory().getStatistics().queryExecuted(
statistics.queryExecuted(
"HQL: " + queryString,
0,
milliseconds

View File

@ -14,6 +14,7 @@ import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.SessionFactoryImplementor;
@ -22,6 +23,7 @@ import org.hibernate.hql.internal.ast.HqlSqlWalker;
import org.hibernate.hql.internal.ast.tree.AssignmentSpecification;
import org.hibernate.hql.spi.id.MultiTableBulkIdStrategy;
import org.hibernate.param.ParameterSpecification;
import org.hibernate.persister.entity.Queryable;
import org.hibernate.sql.Update;
/**
@ -57,8 +59,9 @@ public abstract class AbstractInlineIdsUpdateHandlerImpl
if ( !values.getIds().isEmpty() ) {
String[] tableNames = getTargetedQueryable().getConstraintOrderedTableNameClosure();
String[][] columnNames = getTargetedQueryable().getContraintOrderedTableKeyColumnClosure();
final Queryable targetedQueryable = getTargetedQueryable();
String[] tableNames = targetedQueryable.getConstraintOrderedTableNameClosure();
String[][] columnNames = targetedQueryable.getContraintOrderedTableKeyColumnClosure();
String idSubselect = values.toStatement();
@ -85,6 +88,8 @@ public abstract class AbstractInlineIdsUpdateHandlerImpl
}
}
final JdbcCoordinator jdbcCoordinator = session
.getJdbcCoordinator();
// Start performing the updates
for ( Map.Entry<Integer, String> updateEntry: updates.entrySet()) {
int i = updateEntry.getKey();
@ -94,8 +99,7 @@ public abstract class AbstractInlineIdsUpdateHandlerImpl
continue;
}
try {
try (PreparedStatement ps = session
.getJdbcCoordinator().getStatementPreparer()
try (PreparedStatement ps = jdbcCoordinator.getStatementPreparer()
.prepareStatement( update, false )) {
int position = 1; // jdbc params are 1-based
if ( assignmentParameterSpecifications[i] != null ) {
@ -104,8 +108,7 @@ public abstract class AbstractInlineIdsUpdateHandlerImpl
.bind( ps, queryParameters, session, position );
}
}
session
.getJdbcCoordinator().getResultSetReturn()
jdbcCoordinator.getResultSetReturn()
.executeUpdate( ps );
}
}

View File

@ -10,6 +10,7 @@ import java.io.Serializable;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.id.PostInsertIdentityPersister;
import org.hibernate.pretty.MessageHelper;
@ -63,8 +64,9 @@ public abstract class AbstractReturningDelegate implements InsertGeneratedIdenti
protected abstract Serializable executeAndExtract(PreparedStatement insert, SharedSessionContractImplementor session)
throws SQLException;
protected void releaseStatement(PreparedStatement insert, SharedSessionContractImplementor session) throws SQLException {
session.getJdbcCoordinator().getLogicalConnection().getResourceRegistry().release( insert );
session.getJdbcCoordinator().afterStatementExecution();
protected void releaseStatement(PreparedStatement insert, SharedSessionContractImplementor session) {
final JdbcCoordinator jdbcCoordinator = session.getJdbcCoordinator();
jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( insert );
jdbcCoordinator.afterStatementExecution();
}
}

View File

@ -11,6 +11,7 @@ import java.sql.Connection;
import org.hibernate.engine.jdbc.spi.ConnectionObserver;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.stat.spi.StatisticsImplementor;
/**
* @author Steve Ebersole
@ -24,8 +25,9 @@ public class ConnectionObserverStatsBridge implements ConnectionObserver, Serial
@Override
public void physicalConnectionObtained(Connection connection) {
if ( sessionFactory.getStatistics().isStatisticsEnabled() ) {
sessionFactory.getStatistics().connect();
final StatisticsImplementor statistics = sessionFactory.getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.connect();
}
}
@ -39,8 +41,9 @@ public class ConnectionObserverStatsBridge implements ConnectionObserver, Serial
@Override
public void statementPrepared() {
if ( sessionFactory.getStatistics().isStatisticsEnabled() ) {
sessionFactory.getStatistics().prepareStatement();
final StatisticsImplementor statistics = sessionFactory.getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.prepareStatement();
}
}
}
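
Both hunks above show the idiom this commit applies throughout: look the StatisticsImplementor up once and reuse the same local for the isStatisticsEnabled() guard and the subsequent recording call. As a minimal standalone sketch of that idiom (the StatsBridgeExample class and recordPhysicalConnection method are illustrative, not Hibernate code):

import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.stat.spi.StatisticsImplementor;

final class StatsBridgeExample {

    private final SessionFactoryImplementor sessionFactory;

    StatsBridgeExample(SessionFactoryImplementor sessionFactory) {
        this.sessionFactory = sessionFactory;
    }

    // Fetch the statistics service once, then both check and use the same reference.
    void recordPhysicalConnection() {
        final StatisticsImplementor statistics = sessionFactory.getStatistics();
        if ( statistics.isStatisticsEnabled() ) {
            statistics.connect();
        }
    }
}

Besides reading more cleanly, this keeps the guard and the call operating on the same service reference instead of resolving it through the factory twice.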

View File

@ -19,6 +19,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.function.Supplier;
import javax.naming.Reference;
import javax.naming.StringRefAddr;
import javax.persistence.EntityGraph;
@ -79,6 +80,7 @@ import org.hibernate.engine.spi.NamedQueryDefinitionBuilder;
import org.hibernate.engine.spi.NamedSQLQueryDefinition;
import org.hibernate.engine.spi.NamedSQLQueryDefinitionBuilder;
import org.hibernate.engine.spi.SessionBuilderImplementor;
import org.hibernate.engine.spi.SessionEventListenerManager;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionOwner;
import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform;
@ -1110,30 +1112,33 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {
}
// prefer the SF-scoped interceptor, prefer that to any Session-scoped interceptor prototype
if ( options.getInterceptor() != null && options.getInterceptor() != EmptyInterceptor.INSTANCE ) {
return options.getInterceptor();
final Interceptor optionsInterceptor = options.getInterceptor();
if ( optionsInterceptor != null && optionsInterceptor != EmptyInterceptor.INSTANCE ) {
return optionsInterceptor;
}
// then check the Session-scoped interceptor prototype
if ( options.getStatelessInterceptorImplementor() != null && options.getStatelessInterceptorImplementorSupplier() != null ) {
final Class<? extends Interceptor> statelessInterceptorImplementor = options.getStatelessInterceptorImplementor();
final Supplier<? extends Interceptor> statelessInterceptorImplementorSupplier = options.getStatelessInterceptorImplementorSupplier();
if ( statelessInterceptorImplementor != null && statelessInterceptorImplementorSupplier != null ) {
throw new HibernateException(
"A session scoped interceptor class or supplier are allowed, but not both!" );
}
else if ( options.getStatelessInterceptorImplementor() != null ) {
else if ( statelessInterceptorImplementor != null ) {
try {
/**
* We could remove the getStatelessInterceptorImplementor method and use just the getStatelessInterceptorImplementorSupplier
* since it can cover both cases when the user has given a Supplier<? extends Interceptor> or just the
* Class<? extends Interceptor>, in which case, we simply instantiate the Interceptor when calling the Supplier.
*/
return options.getStatelessInterceptorImplementor().newInstance();
return statelessInterceptorImplementor.newInstance();
}
catch (InstantiationException | IllegalAccessException e) {
throw new HibernateException( "Could not supply session-scoped SessionFactory Interceptor", e );
}
}
else if ( options.getStatelessInterceptorImplementorSupplier() != null ) {
return options.getStatelessInterceptorImplementorSupplier().get();
else if ( statelessInterceptorImplementorSupplier != null ) {
return statelessInterceptorImplementorSupplier.get();
}
return null;
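
The inline comment a few lines up suggests the Class-based setting could be folded into the Supplier-based one. Purely as an illustration of that idea — the SessionInterceptorOption class below is hypothetical, not Hibernate API — a Class<? extends Interceptor> can be adapted into a Supplier once, so the code that opens a Session only ever consults a single shape:

import java.util.function.Supplier;

import org.hibernate.HibernateException;
import org.hibernate.Interceptor;

final class SessionInterceptorOption {

    private final Supplier<? extends Interceptor> supplier;

    // Class-based configuration: wrap the instantiation in a Supplier up front.
    SessionInterceptorOption(Class<? extends Interceptor> interceptorClass) {
        this.supplier = () -> {
            try {
                return interceptorClass.newInstance();
            }
            catch (InstantiationException | IllegalAccessException e) {
                throw new HibernateException( "Could not supply session-scoped Interceptor", e );
            }
        };
    }

    // Supplier-based configuration: stored as-is.
    SessionInterceptorOption(Supplier<? extends Interceptor> supplier) {
        this.supplier = supplier;
    }

    Interceptor configuredInterceptor() {
        return supplier == null ? null : supplier.get();
    }
}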
@ -1167,20 +1172,22 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {
this.sessionOwner = null;
// set up default builder values...
this.statementInspector = sessionFactory.getSessionFactoryOptions().getStatementInspector();
this.connectionHandlingMode = sessionFactory.getSessionFactoryOptions().getPhysicalConnectionHandlingMode();
this.autoClose = sessionFactory.getSessionFactoryOptions().isAutoCloseSessionEnabled();
this.flushMode = sessionFactory.getSessionFactoryOptions().isFlushBeforeCompletionEnabled()
final SessionFactoryOptions sessionFactoryOptions = sessionFactory.getSessionFactoryOptions();
this.statementInspector = sessionFactoryOptions.getStatementInspector();
this.connectionHandlingMode = sessionFactoryOptions.getPhysicalConnectionHandlingMode();
this.autoClose = sessionFactoryOptions.isAutoCloseSessionEnabled();
this.flushMode = sessionFactoryOptions.isFlushBeforeCompletionEnabled()
? FlushMode.AUTO
: FlushMode.MANUAL;
if ( sessionFactory.getCurrentTenantIdentifierResolver() != null ) {
tenantIdentifier = sessionFactory.getCurrentTenantIdentifierResolver().resolveCurrentTenantIdentifier();
final CurrentTenantIdentifierResolver currentTenantIdentifierResolver = sessionFactory.getCurrentTenantIdentifierResolver();
if ( currentTenantIdentifierResolver != null ) {
tenantIdentifier = currentTenantIdentifierResolver.resolveCurrentTenantIdentifier();
}
this.jdbcTimeZone = sessionFactory.getSessionFactoryOptions().getJdbcTimeZone();
this.jdbcTimeZone = sessionFactoryOptions.getJdbcTimeZone();
listeners = sessionFactory.getSessionFactoryOptions().getBaselineSessionEventsListenerBuilder().buildBaselineList();
queryParametersValidationEnabled = sessionFactory.getSessionFactoryOptions().isQueryParametersValidationEnabled();
listeners = sessionFactoryOptions.getBaselineSessionEventsListenerBuilder().buildBaselineList();
queryParametersValidationEnabled = sessionFactoryOptions.isQueryParametersValidationEnabled();
}
@ -1287,8 +1294,9 @@ public final class SessionFactoryImpl implements SessionFactoryImplementor {
log.tracef( "Opening Hibernate Session. tenant=%s, owner=%s", tenantIdentifier, sessionOwner );
final SessionImpl session = new SessionImpl( sessionFactory, this );
final SessionEventListenerManager eventListenerManager = session.getEventListenerManager();
for ( SessionEventListener listener : listeners ) {
session.getEventListenerManager().addListener( listener );
eventListenerManager.addListener( listener );
}
return session;

View File

@ -178,6 +178,7 @@ import org.hibernate.resource.transaction.spi.TransactionCoordinator;
import org.hibernate.resource.transaction.spi.TransactionStatus;
import org.hibernate.stat.SessionStatistics;
import org.hibernate.stat.internal.SessionStatisticsImpl;
import org.hibernate.stat.spi.StatisticsImplementor;
import static org.hibernate.cfg.AvailableSettings.JPA_LOCK_SCOPE;
import static org.hibernate.cfg.AvailableSettings.JPA_LOCK_TIMEOUT;
@ -257,19 +258,24 @@ public final class SessionImpl
this.autoClose = options.shouldAutoClose();
this.queryParametersValidationEnabled = options.isQueryParametersValidationEnabled();
this.discardOnClose = getFactory().getSessionFactoryOptions().isReleaseResourcesOnCloseEnabled();
this.discardOnClose = factory.getSessionFactoryOptions().isReleaseResourcesOnCloseEnabled();
if ( options instanceof SharedSessionCreationOptions && ( (SharedSessionCreationOptions) options ).isTransactionCoordinatorShared() ) {
if ( options instanceof SharedSessionCreationOptions ) {
final SharedSessionCreationOptions sharedOptions = (SharedSessionCreationOptions) options;
if ( sharedOptions.getTransactionCompletionProcesses() != null ) {
actionQueue.setTransactionCompletionProcesses( sharedOptions.getTransactionCompletionProcesses(), true );
final ActionQueue.TransactionCompletionProcesses transactionCompletionProcesses = sharedOptions.getTransactionCompletionProcesses();
if ( sharedOptions.isTransactionCoordinatorShared() && transactionCompletionProcesses != null ) {
actionQueue.setTransactionCompletionProcesses(
transactionCompletionProcesses,
true
);
}
}
loadQueryInfluencers = new LoadQueryInfluencers( factory );
if ( getFactory().getStatistics().isStatisticsEnabled() ) {
getFactory().getStatistics().openSession();
final StatisticsImplementor statistics = factory.getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.openSession();
}
// NOTE : pulse() already handles auto-join-ability correctly
@ -418,7 +424,8 @@ public final class SessionImpl
}
// todo : we want this check if usage is JPA, but not native Hibernate usage
if ( getSessionFactory().getSessionFactoryOptions().isJpaBootstrap() ) {
final SessionFactoryImplementor sessionFactory = getSessionFactory();
if ( sessionFactory.getSessionFactoryOptions().isJpaBootstrap() ) {
// Original hibernate-entitymanager EM#close behavior
checkSessionFactoryOpen();
checkOpenOrWaitingForAutoClose();
@ -435,8 +442,9 @@ public final class SessionImpl
super.close();
}
if ( getFactory().getStatistics().isStatisticsEnabled() ) {
getFactory().getStatistics().closeSession();
final StatisticsImplementor statistics = sessionFactory.getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.closeSession();
}
}
@ -2572,8 +2580,9 @@ public final class SessionImpl
getEventListenerManager().transactionCompletion( successful );
if ( getFactory().getStatistics().isStatisticsEnabled() ) {
getFactory().getStatistics().endTransaction( successful );
final StatisticsImplementor statistics = getFactory().getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.endTransaction( successful );
}
try {

View File

@ -312,13 +312,14 @@ public class StatelessSessionImpl extends AbstractSharedSessionContract implemen
// we could not use bytecode proxy, check to see if we can use HibernateProxy
if ( persister.hasProxy() ) {
final Object existingProxy = getPersistenceContext().getProxy( entityKey );
final PersistenceContext persistenceContext = getPersistenceContext();
final Object existingProxy = persistenceContext.getProxy( entityKey );
if ( existingProxy != null ) {
return getPersistenceContext().narrowProxy( existingProxy, persister, entityKey, null );
return persistenceContext.narrowProxy( existingProxy, persister, entityKey, null );
}
else {
final Object proxy = persister.createProxy( id, this );
getPersistenceContext().addProxy( entityKey, proxy );
persistenceContext.addProxy( entityKey, proxy );
return proxy;
}
}

View File

@ -1,40 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.internal.util.xml;
import java.io.Serializable;
import org.dom4j.Document;
/**
* Basic implementation of {@link XmlDocument}
*
* @author Steve Ebersole
*/
public class XmlDocumentImpl implements XmlDocument, Serializable {
private final Document documentTree;
private final Origin origin;
public XmlDocumentImpl(Document documentTree, String originType, String originName) {
this( documentTree, new OriginImpl( originType, originName ) );
}
public XmlDocumentImpl(Document documentTree, Origin origin) {
this.documentTree = documentTree;
this.origin = origin;
}
@Override
public Document getDocumentTree() {
return documentTree;
}
@Override
public Origin getOrigin() {
return origin;
}
}

View File

@ -46,6 +46,7 @@ import org.hibernate.dialect.pagination.NoopLimitHandler;
import org.hibernate.engine.internal.CacheHelper;
import org.hibernate.engine.internal.TwoPhaseLoad;
import org.hibernate.engine.jdbc.ColumnNameCache;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.spi.EntityEntry;
import org.hibernate.engine.spi.EntityKey;
@ -83,6 +84,7 @@ import org.hibernate.persister.entity.UniqueKeyLoadable;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.query.spi.ScrollableResultsImplementor;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.transform.CacheableResultTransformer;
import org.hibernate.transform.ResultTransformer;
import org.hibernate.type.AssociationType;
@ -968,8 +970,9 @@ public abstract class Loader {
);
}
finally {
session.getJdbcCoordinator().getLogicalConnection().getResourceRegistry().release( st );
session.getJdbcCoordinator().afterStatementExecution();
final JdbcCoordinator jdbcCoordinator = session.getJdbcCoordinator();
jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( st );
jdbcCoordinator.afterStatementExecution();
}
}
@ -1545,8 +1548,9 @@ public abstract class Loader {
null
);
if ( !versionType.isEqual( version, currentVersion ) ) {
if ( session.getFactory().getStatistics().isStatisticsEnabled() ) {
session.getFactory().getStatistics().optimisticFailure( persister.getEntityName() );
final StatisticsImplementor statistics = session.getFactory().getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.optimisticFailure( persister.getEntityName() );
}
throw new StaleObjectStateException( persister.getEntityName(), id );
}
@ -2730,12 +2734,13 @@ public abstract class Loader {
persistenceContext.setDefaultReadOnly( defaultReadOnlyOrig );
}
if ( factory.getStatistics().isStatisticsEnabled() ) {
final StatisticsImplementor statistics = factory.getStatistics();
if ( statistics.isStatisticsEnabled() ) {
if ( result == null ) {
factory.getStatistics().queryCacheMiss( getQueryIdentifier(), queryCache.getRegion().getName() );
statistics.queryCacheMiss( getQueryIdentifier(), queryCache.getRegion().getName() );
}
else {
factory.getStatistics().queryCacheHit( getQueryIdentifier(), queryCache.getRegion().getName() );
statistics.queryCacheHit( getQueryIdentifier(), queryCache.getRegion().getName() );
}
}
}
@ -2761,8 +2766,9 @@ public abstract class Loader {
key.getResultTransformer().getCachedResultTypes( resultTypes ),
session
);
if ( put && factory.getStatistics().isStatisticsEnabled() ) {
factory.getStatistics().queryCachePut( getQueryIdentifier(), queryCache.getRegion().getName() );
final StatisticsImplementor statistics = factory.getStatistics();
if ( put && statistics.isStatisticsEnabled() ) {
statistics.queryCachePut( getQueryIdentifier(), queryCache.getRegion().getName() );
}
}
}
@ -2782,7 +2788,8 @@ public abstract class Loader {
final ResultTransformer forcedResultTransformer)
throws HibernateException {
final boolean stats = getFactory().getStatistics().isStatisticsEnabled();
final StatisticsImplementor statistics = getFactory().getStatistics();
final boolean stats = statistics.isStatisticsEnabled();
long startTime = 0;
if ( stats ) {
startTime = System.nanoTime();
@ -2803,7 +2810,7 @@ public abstract class Loader {
if ( stats ) {
final long endTime = System.nanoTime();
final long milliseconds = TimeUnit.MILLISECONDS.convert( endTime - startTime, TimeUnit.NANOSECONDS );
getFactory().getStatistics().queryExecuted(
statistics.queryExecuted(
getQueryIdentifier(),
result.size(),
milliseconds
@ -2856,8 +2863,9 @@ public abstract class Loader {
final SharedSessionContractImplementor session) throws HibernateException {
checkScrollability();
final StatisticsImplementor statistics = getFactory().getStatistics();
final boolean stats = getQueryIdentifier() != null &&
getFactory().getStatistics().isStatisticsEnabled();
statistics.isStatisticsEnabled();
long startTime = 0;
if ( stats ) {
startTime = System.nanoTime();
@ -2878,7 +2886,7 @@ public abstract class Loader {
if ( stats ) {
final long endTime = System.nanoTime();
final long milliseconds = TimeUnit.MILLISECONDS.convert( endTime - startTime, TimeUnit.NANOSECONDS );
getFactory().getStatistics().queryExecuted(
statistics.queryExecuted(
getQueryIdentifier(),
0,
milliseconds

View File

@ -40,6 +40,7 @@ import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.stat.internal.StatsHelper;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.type.Type;
import org.hibernate.type.TypeHelper;
@ -192,23 +193,25 @@ public class CacheEntityLoaderHelper extends AbstractLockUpgradeEventListener {
final EntityPersister persister,
SessionImplementor source) {
final EntityDataAccess cache = persister.getCacheAccessStrategy();
final SessionFactoryImplementor factory = source.getFactory();
final Object ck = cache.generateCacheKey(
event.getEntityId(),
persister,
source.getFactory(),
factory,
source.getTenantIdentifier()
);
final Object ce = CacheHelper.fromSharedCache( source, ck, persister.getCacheAccessStrategy() );
if ( source.getFactory().getStatistics().isStatisticsEnabled() ) {
final StatisticsImplementor statistics = factory.getStatistics();
if ( statistics.isStatisticsEnabled() ) {
if ( ce == null ) {
source.getFactory().getStatistics().entityCacheMiss(
statistics.entityCacheMiss(
StatsHelper.INSTANCE.getRootEntityRole( persister ),
cache.getRegion().getName()
);
}
else {
source.getFactory().getStatistics().entityCacheHit(
statistics.entityCacheHit(
StatsHelper.INSTANCE.getRootEntityRole( persister ),
cache.getRegion().getName()
);

View File

@ -45,6 +45,7 @@ import org.hibernate.persister.entity.Loadable;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.persister.entity.Queryable;
import org.hibernate.query.spi.ScrollableResultsImplementor;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.transform.ResultTransformer;
import org.hibernate.type.EntityType;
import org.hibernate.type.Type;
@ -514,7 +515,8 @@ public class QueryLoader extends BasicLoader {
QueryParameters queryParameters,
EventSource session) throws HibernateException {
checkQuery( queryParameters );
final boolean stats = session.getFactory().getStatistics().isStatisticsEnabled();
final StatisticsImplementor statistics = session.getFactory().getStatistics();
final boolean stats = statistics.isStatisticsEnabled();
long startTime = 0;
if ( stats ) {
startTime = System.nanoTime();
@ -545,7 +547,7 @@ public class QueryLoader extends BasicLoader {
if ( stats ) {
final long endTime = System.nanoTime();
final long milliseconds = TimeUnit.MILLISECONDS.convert( endTime - startTime, TimeUnit.NANOSECONDS );
session.getFactory().getStatistics().queryExecuted(
statistics.queryExecuted(
// "HQL: " + queryTranslator.getQueryString(),
getQueryIdentifier(),
0,

View File

@ -33,6 +33,7 @@ import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.batch.internal.BasicBatchKey;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.ExecuteUpdateResultCheckStyle;
@ -1301,6 +1302,7 @@ public abstract class AbstractCollectionPersister
try {
// create all the new entries
Iterator entries = collection.entries( this );
final JdbcCoordinator jdbcCoordinator = session.getJdbcCoordinator();
if ( entries.hasNext() ) {
Expectation expectation = Expectations.appropriateExpectation( getInsertCheckStyle() );
collection.preInsert( this );
@ -1323,14 +1325,12 @@ public abstract class AbstractCollectionPersister
expectation
);
}
st = session
.getJdbcCoordinator()
st = jdbcCoordinator
.getBatch( recreateBatchKey )
.getBatchStatement( sql, callable );
}
else {
st = session
.getJdbcCoordinator()
st = jdbcCoordinator
.getStatementPreparer()
.prepareStatement( sql, callable );
}
@ -1349,13 +1349,13 @@ public abstract class AbstractCollectionPersister
loc = writeElement( st, collection.getElement( entry ), loc, session );
if ( useBatch ) {
session
.getJdbcCoordinator()
jdbcCoordinator
.getBatch( recreateBatchKey )
.addToBatch();
}
else {
expectation.verifyOutcome( session.getJdbcCoordinator().getResultSetReturn().executeUpdate( st ), st, -1 );
expectation.verifyOutcome( jdbcCoordinator
.getResultSetReturn().executeUpdate( st ), st, -1 );
}
collection.afterRowInsert( this, entry, i );
@ -1363,14 +1363,14 @@ public abstract class AbstractCollectionPersister
}
catch ( SQLException sqle ) {
if ( useBatch ) {
session.getJdbcCoordinator().abortBatch();
jdbcCoordinator.abortBatch();
}
throw sqle;
}
finally {
if ( !useBatch ) {
session.getJdbcCoordinator().getResourceRegistry().release( st );
session.getJdbcCoordinator().afterStatementExecution();
jdbcCoordinator.getResourceRegistry().release( st );
jdbcCoordinator.afterStatementExecution();
}
}
@ -1957,23 +1957,23 @@ public abstract class AbstractCollectionPersister
@Override
public int getSize(Serializable key, SharedSessionContractImplementor session) {
try {
PreparedStatement st = session
.getJdbcCoordinator()
final JdbcCoordinator jdbcCoordinator = session.getJdbcCoordinator();
PreparedStatement st = jdbcCoordinator
.getStatementPreparer()
.prepareStatement( sqlSelectSizeString );
try {
getKeyType().nullSafeSet( st, key, 1, session );
ResultSet rs = session.getJdbcCoordinator().getResultSetReturn().extract( st );
ResultSet rs = jdbcCoordinator.getResultSetReturn().extract( st );
try {
return rs.next() ? rs.getInt( 1 ) - baseIndex : 0;
}
finally {
session.getJdbcCoordinator().getResourceRegistry().release( rs, st );
jdbcCoordinator.getResourceRegistry().release( rs, st );
}
}
finally {
session.getJdbcCoordinator().getResourceRegistry().release( st );
session.getJdbcCoordinator().afterStatementExecution();
jdbcCoordinator.getResourceRegistry().release( st );
jdbcCoordinator.afterStatementExecution();
}
}
catch ( SQLException sqle ) {
@ -1998,27 +1998,27 @@ public abstract class AbstractCollectionPersister
private boolean exists(Serializable key, Object indexOrElement, Type indexOrElementType, String sql, SharedSessionContractImplementor session) {
try {
PreparedStatement st = session
.getJdbcCoordinator()
final JdbcCoordinator jdbcCoordinator = session.getJdbcCoordinator();
PreparedStatement st = jdbcCoordinator
.getStatementPreparer()
.prepareStatement( sql );
try {
getKeyType().nullSafeSet( st, key, 1, session );
indexOrElementType.nullSafeSet( st, indexOrElement, keyColumnNames.length + 1, session );
ResultSet rs = session.getJdbcCoordinator().getResultSetReturn().extract( st );
ResultSet rs = jdbcCoordinator.getResultSetReturn().extract( st );
try {
return rs.next();
}
finally {
session.getJdbcCoordinator().getResourceRegistry().release( rs, st );
jdbcCoordinator.getResourceRegistry().release( rs, st );
}
}
catch ( TransientObjectException e ) {
return false;
}
finally {
session.getJdbcCoordinator().getResourceRegistry().release( st );
session.getJdbcCoordinator().afterStatementExecution();
jdbcCoordinator.getResourceRegistry().release( st );
jdbcCoordinator.afterStatementExecution();
}
}
catch ( SQLException sqle ) {
@ -2034,14 +2034,14 @@ public abstract class AbstractCollectionPersister
@Override
public Object getElementByIndex(Serializable key, Object index, SharedSessionContractImplementor session, Object owner) {
try {
PreparedStatement st = session
.getJdbcCoordinator()
final JdbcCoordinator jdbcCoordinator = session.getJdbcCoordinator();
PreparedStatement st = jdbcCoordinator
.getStatementPreparer()
.prepareStatement( sqlSelectRowByIndexString );
try {
getKeyType().nullSafeSet( st, key, 1, session );
getIndexType().nullSafeSet( st, incrementIndexByBase( index ), keyColumnNames.length + 1, session );
ResultSet rs = session.getJdbcCoordinator().getResultSetReturn().extract( st );
ResultSet rs = jdbcCoordinator.getResultSetReturn().extract( st );
try {
if ( rs.next() ) {
return getElementType().nullSafeGet( rs, elementColumnAliases, session, owner );
@ -2051,12 +2051,12 @@ public abstract class AbstractCollectionPersister
}
}
finally {
session.getJdbcCoordinator().getResourceRegistry().release( rs, st );
jdbcCoordinator.getResourceRegistry().release( rs, st );
}
}
finally {
session.getJdbcCoordinator().getResourceRegistry().release( st );
session.getJdbcCoordinator().afterStatementExecution();
jdbcCoordinator.getResourceRegistry().release( st );
jdbcCoordinator.afterStatementExecution();
}
}
catch ( SQLException sqle ) {

View File

@ -129,6 +129,7 @@ import org.hibernate.sql.SelectFragment;
import org.hibernate.sql.SimpleSelect;
import org.hibernate.sql.Template;
import org.hibernate.sql.Update;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.tuple.GenerationTiming;
import org.hibernate.tuple.InDatabaseValueGenerationStrategy;
import org.hibernate.tuple.InMemoryValueGenerationStrategy;
@ -2554,8 +2555,9 @@ public abstract class AbstractEntityPersister
}
catch (StaleStateException e) {
if ( !isNullableTable( tableNumber ) ) {
if ( getFactory().getStatistics().isStatisticsEnabled() ) {
getFactory().getStatistics().optimisticFailure( getEntityName() );
final StatisticsImplementor statistics = getFactory().getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.optimisticFailure( getEntityName() );
}
throw new StaleObjectStateException( getEntityName(), id );
}

View File

@ -946,21 +946,25 @@ public abstract class AbstractProducedQuery<R> implements QueryImplementor<R> {
}
protected void collectHints(Map<String, Object> hints) {
if ( getQueryOptions().getTimeout() != null ) {
hints.put( HINT_TIMEOUT, getQueryOptions().getTimeout() );
hints.put( SPEC_HINT_TIMEOUT, getQueryOptions().getTimeout() * 1000 );
final RowSelection queryOptions = getQueryOptions();
final Integer queryTimeout = queryOptions.getTimeout();
if ( queryTimeout != null ) {
hints.put( HINT_TIMEOUT, queryTimeout );
hints.put( SPEC_HINT_TIMEOUT, queryTimeout * 1000 );
}
if ( getLockOptions().getTimeOut() != WAIT_FOREVER ) {
hints.put( JPA_LOCK_TIMEOUT, getLockOptions().getTimeOut() );
final LockOptions lockOptions = getLockOptions();
final int lockOptionsTimeOut = lockOptions.getTimeOut();
if ( lockOptionsTimeOut != WAIT_FOREVER ) {
hints.put( JPA_LOCK_TIMEOUT, lockOptionsTimeOut );
}
if ( getLockOptions().getScope() ) {
hints.put( JPA_LOCK_SCOPE, getLockOptions().getScope() );
if ( lockOptions.getScope() ) {
hints.put( JPA_LOCK_SCOPE, lockOptions.getScope() );
}
if ( getLockOptions().hasAliasSpecificLockModes() && canApplyAliasSpecificLockModeHints() ) {
for ( Map.Entry<String, LockMode> entry : getLockOptions().getAliasSpecificLocks() ) {
if ( lockOptions.hasAliasSpecificLockModes() && canApplyAliasSpecificLockModeHints() ) {
for ( Map.Entry<String, LockMode> entry : lockOptions.getAliasSpecificLocks() ) {
hints.put(
ALIAS_SPECIFIC_LOCK_MODE + '.' + entry.getKey(),
entry.getValue().name()
@ -969,7 +973,7 @@ public abstract class AbstractProducedQuery<R> implements QueryImplementor<R> {
}
putIfNotNull( hints, HINT_COMMENT, getComment() );
putIfNotNull( hints, HINT_FETCH_SIZE, getQueryOptions().getFetchSize() );
putIfNotNull( hints, HINT_FETCH_SIZE, queryOptions.getFetchSize() );
putIfNotNull( hints, HINT_FLUSH_MODE, getHibernateFlushMode() );
if ( cacheStoreMode != null || cacheRetrieveMode != null ) {
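
The multiplication by 1000 above reflects that the two timeout hints use different units: Hibernate's native hint is expressed in seconds, while the JPA specification hint is expressed in milliseconds. A small illustrative usage (the TimeoutHints class and applyTimeouts method are hypothetical; the hint names are the string values behind HINT_TIMEOUT and SPEC_HINT_TIMEOUT):

import javax.persistence.Query;

final class TimeoutHints {

    // Set an equivalent five-second timeout through both hint flavours.
    static void applyTimeouts(Query query) {
        query.setHint( "org.hibernate.timeout", 5 );               // Hibernate-native hint, in seconds
        query.setHint( "javax.persistence.query.timeout", 5000 );  // JPA spec hint, in milliseconds
    }
}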

View File

@ -305,7 +305,8 @@ public abstract class AbstractServiceRegistryImpl
@SuppressWarnings({ "unchecked" })
private <T extends Service> void processInjection(T service, Method injectionMethod, InjectService injectService) {
if ( injectionMethod.getParameterTypes() == null || injectionMethod.getParameterCount() != 1 ) {
final Class<?>[] parameterTypes = injectionMethod.getParameterTypes();
if ( parameterTypes == null || injectionMethod.getParameterCount() != 1 ) {
throw new ServiceDependencyException(
"Encountered @InjectService on method with unexpected number of parameters"
);
@ -313,7 +314,7 @@ public abstract class AbstractServiceRegistryImpl
Class dependentServiceRole = injectService.serviceRole();
if ( dependentServiceRole == null || dependentServiceRole.equals( Void.class ) ) {
dependentServiceRole = injectionMethod.getParameterTypes()[0];
dependentServiceRole = parameterTypes[0];
}
// todo : because of the use of proxies, this is no longer returning null here...