HHH-7908 Logging level checking

Conflicts:
	hibernate-core/src/main/java/org/hibernate/internal/SessionFactoryImpl.java
Nikolay Shestakov 2013-04-12 09:56:41 -04:00 committed by Brett Meyer
parent 03daecfe49
commit cbbadea538
26 changed files with 139 additions and 84 deletions
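
The whole commit applies one pattern: read the logger's level once into a final local (traceEnabled / debugEnabled) and reuse that flag, instead of calling LOG.isTraceEnabled() or LOG.isDebugEnabled() again before every guarded statement and inside loops. A minimal sketch of the pattern outside Hibernate (hypothetical class and method names, plain org.jboss.logging.Logger rather than Hibernate's own logger interface):

import java.util.List;
import org.jboss.logging.Logger;

public class LogLevelHoisting {

	private static final Logger LOG = Logger.getLogger( LogLevelHoisting.class );

	public void process(List<String> items) {
		// Hoist the level check: one call per method invocation
		// instead of one call per item.
		final boolean traceEnabled = LOG.isTraceEnabled();
		for ( String item : items ) {
			if ( traceEnabled ) {
				// tracev uses MessageFormat-style {0} placeholders
				LOG.tracev( "Processing item {0}", item );
			}
			// ... real work on the item goes here ...
		}
	}
}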


@@ -204,11 +204,13 @@ public class UnresolvedEntityInsertActions {
 if ( entityEntry.getStatus() != Status.MANAGED && entityEntry.getStatus() != Status.READ_ONLY ) {
 throw new IllegalArgumentException( "EntityEntry did not have status MANAGED or READ_ONLY: " + entityEntry );
 }
+final boolean traceEnabled = LOG.isTraceEnabled();
 // Find out if there are any unresolved insertions that are waiting for the
 // specified entity to be resolved.
 Set<AbstractEntityInsertAction> dependentActions = dependentActionsByTransientEntity.remove( managedEntity );
 if ( dependentActions == null ) {
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev(
 "No unresolved entity inserts that depended on [{0}]",
 MessageHelper.infoString( entityEntry.getEntityName(), entityEntry.getId() )
@@ -217,7 +219,7 @@ public class UnresolvedEntityInsertActions {
 return Collections.emptySet(); //NOTE EARLY EXIT!
 }
 Set<AbstractEntityInsertAction> resolvedActions = new IdentitySet( );
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev(
 "Unresolved inserts before resolving [{0}]: [{1}]",
 MessageHelper.infoString( entityEntry.getEntityName(), entityEntry.getId() ),
@@ -225,7 +227,7 @@ public class UnresolvedEntityInsertActions {
 );
 }
 for ( AbstractEntityInsertAction dependentAction : dependentActions ) {
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev(
 "Resolving insert [{0}] dependency on [{1}]",
 MessageHelper.infoString( dependentAction.getEntityName(), dependentAction.getId() ),
@@ -235,7 +237,7 @@ public class UnresolvedEntityInsertActions {
 NonNullableTransientDependencies dependencies = dependenciesByAction.get( dependentAction );
 dependencies.resolveNonNullableTransientEntity( managedEntity );
 if ( dependencies.isEmpty() ) {
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev(
 "Resolving insert [{0}] (only depended on [{1}])",
 dependentAction,
@@ -247,7 +249,7 @@ public class UnresolvedEntityInsertActions {
 resolvedActions.add( dependentAction );
 }
 }
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev(
 "Unresolved inserts after resolving [{0}]: [{1}]",
 MessageHelper.infoString( entityEntry.getEntityName(), entityEntry.getId() ),


@@ -1476,7 +1476,8 @@ public final class AnnotationBinder {
 * ordering does not matter
 */
-if ( LOG.isTraceEnabled() ) {
+final boolean traceEnabled = LOG.isTraceEnabled();
+if ( traceEnabled ) {
 LOG.tracev( "Processing annotations of {0}.{1}" , propertyHolder.getEntityName(), inferredData.getPropertyName() );
 }
@@ -1542,7 +1543,7 @@
 + propertyHolder.getEntityName()
 );
 }
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "{0} is a version property", inferredData.getPropertyName() );
 }
 RootClass rootClass = ( RootClass ) propertyHolder.getPersistentClass();
@@ -1568,7 +1569,7 @@
 SimpleValue simpleValue = ( SimpleValue ) prop.getValue();
 simpleValue.setNullValue( "undefined" );
 rootClass.setOptimisticLockMode( Versioning.OPTIMISTIC_LOCK_VERSION );
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Version name: {0}, unsavedValue: {1}", rootClass.getVersion().getName(),
 ( (SimpleValue) rootClass.getVersion().getValue() ).getNullValue() );
 }


@@ -61,12 +61,15 @@ public abstract class CollectionSecondPass implements SecondPass {
 public void doSecondPass(java.util.Map persistentClasses)
 throws MappingException {
+final boolean debugEnabled = LOG.isDebugEnabled();
+if ( debugEnabled ) {
 LOG.debugf( "Second pass for collection: %s", collection.getRole() );
+}
 secondPass( persistentClasses, localInheritedMetas ); // using local since the inheritedMetas at this point is not the correct map since it is always the empty map
 collection.createAllKeys();
-if ( LOG.isDebugEnabled() ) {
+if ( debugEnabled ) {
 String msg = "Mapped collection key: " + columns( collection.getKey() );
 if ( collection.isIndexed() )
 msg += ", index: " + columns( ( (IndexedCollection) collection ).getIndex() );


@@ -2657,6 +2657,7 @@ public final class HbmBinder {
 "not valid within collection using join fetching [" + collection.getRole() + "]"
 );
 }
+final boolean debugEnabled = LOG.isDebugEnabled();
 while ( filters.hasNext() ) {
 final Element filterElement = ( Element ) filters.next();
 final String name = filterElement.attributeValue( "name" );
@@ -2674,7 +2675,7 @@
 Element alias = (Element) aliasesIterator.next();
 aliasTables.put(alias.attributeValue("alias"), alias.attributeValue("table"));
 }
-if ( LOG.isDebugEnabled() ) {
+if ( debugEnabled ) {
 LOG.debugf( "Applying many-to-many filter [%s] as [%s] to role [%s]", name, condition, collection.getRole() );
 }
 String autoAliasInjectionText = filterElement.attributeValue("autoAliasInjection");


@@ -814,7 +814,9 @@ public abstract class CollectionBinder {
 boolean ignoreNotFound,
 Mappings mappings,
 Map<XClass, InheritanceState> inheritanceStatePerClass) {
-if ( LOG.isDebugEnabled() ) {
+final boolean debugEnabled = LOG.isDebugEnabled();
+if ( debugEnabled ) {
 LOG.debugf( "Binding a OneToMany: %s.%s through a foreign key", propertyHolder.getEntityName(), propertyName );
 }
 org.hibernate.mapping.OneToMany oneToMany = new org.hibernate.mapping.OneToMany( mappings, collection.getOwner() );
@@ -855,7 +857,7 @@
 column.setJoins( joins );
 collection.setCollectionTable( column.getTable() );
 }
-if ( LOG.isDebugEnabled() ) {
+if ( debugEnabled ) {
 LOG.debugf( "Mapping collection: %s -> %s", collection.getRole(), collection.getCollectionTable().getName() );
 }
 bindFilters( false );


@@ -146,7 +146,8 @@ public final class Cascade {
 throws HibernateException {
 if ( persister.hasCascades() || action.requiresNoCascadeChecking() ) { // performance opt
-if ( LOG.isTraceEnabled() ) {
+final boolean traceEnabled = LOG.isTraceEnabled();
+if ( traceEnabled ) {
 LOG.tracev( "Processing cascade {0} for: {1}", action, persister.getEntityName() );
 }
@@ -183,7 +184,7 @@
 }
 }
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Done processing cascade {0} for: {1}", action, persister.getEntityName() );
 }
 }
@@ -407,7 +408,8 @@
 boolean reallyDoCascade = style.reallyDoCascade(action) && child!=CollectionType.UNFETCHED_COLLECTION;
 if ( reallyDoCascade ) {
-if ( LOG.isTraceEnabled() ) {
+final boolean traceEnabled = LOG.isTraceEnabled();
+if ( traceEnabled ) {
 LOG.tracev( "Cascade {0} for collection: {1}", action, collectionType.getRole() );
 }
@@ -424,7 +426,7 @@
 );
 }
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Done cascade {0} for collection: {1}", action, collectionType.getRole() );
 }
 }
@@ -435,7 +437,8 @@
 child instanceof PersistentCollection; //a newly instantiated collection can't have orphans
 if ( deleteOrphans ) { // handle orphaned entities!!
-if ( LOG.isTraceEnabled() ) {
+final boolean traceEnabled = LOG.isTraceEnabled();
+if ( traceEnabled ) {
 LOG.tracev( "Deleting orphans for collection: {0}", collectionType.getRole() );
 }
 // we can do the cast since orphan-delete does not apply to:
@@ -444,7 +447,7 @@
 final String entityName = collectionType.getAssociatedEntityName( eventSource.getFactory() );
 deleteOrphans( entityName, (PersistentCollection) child );
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Done deleting orphans for collection: {0}", collectionType.getRole() );
 }
 }


@@ -72,7 +72,7 @@ public final class Collections {
 CollectionEntry entry = persistenceContext.getCollectionEntry(coll);
 final CollectionPersister loadedPersister = entry.getLoadedPersister();
-if ( LOG.isDebugEnabled() && loadedPersister != null ) {
+if ( loadedPersister != null && LOG.isDebugEnabled() ) {
 LOG.debugf(
 "Collection dereferenced: %s",
 MessageHelper.collectionInfoString( loadedPersister,


@@ -112,6 +112,7 @@ public class ParameterBinder {
 final NamedParameterSource source,
 final SessionImplementor session) throws SQLException, HibernateException {
 if ( namedParams != null ) {
+final boolean debugEnabled = LOG.isDebugEnabled();
 // assumes that types are all of span 1
 Iterator iter = namedParams.entrySet().iterator();
 int result = 0;
@@ -121,7 +122,7 @@
 TypedValue typedval = (TypedValue) e.getValue();
 int[] locations = source.getNamedParameterLocations( name );
 for ( int i = 0; i < locations.length; i++ ) {
-if ( LOG.isDebugEnabled() ) {
+if ( debugEnabled ) {
 LOG.debugf("bindNamedParameters() %s -> %s [%s]", typedval.getValue(), name, locations[i] + start);
 }
 typedval.getType().nullSafeSet( ps, typedval.getValue(), locations[i] + start, session );


@@ -101,7 +101,7 @@ public final class TwoPhaseLoad {
 lazyPropertiesAreUnfetched
 );
-if ( LOG.isTraceEnabled() && version != null ) {
+if ( version != null && LOG.isTraceEnabled() ) {
 String versionStr = persister.isVersioned()
 ? persister.getVersionType().toLoggableString( version, session.getFactory() )
 : "null";
@@ -153,7 +153,8 @@
 Serializable id = entityEntry.getId();
 Object[] hydratedState = entityEntry.getLoadedState();
-if ( LOG.isDebugEnabled() ) {
+final boolean debugEnabled = LOG.isDebugEnabled();
+if ( debugEnabled ) {
 LOG.debugf(
 "Resolving associations for %s",
 MessageHelper.infoString( persister, id, session.getFactory() )
@@ -187,7 +188,7 @@
 final SessionFactoryImplementor factory = session.getFactory();
 if ( persister.hasCache() && session.getCacheMode().isPutEnabled() ) {
-if ( LOG.isDebugEnabled() ) {
+if ( debugEnabled ) {
 LOG.debugf(
 "Adding entity to second-level cache: %s",
 MessageHelper.infoString( persister, id, session.getFactory() )
@@ -272,7 +273,7 @@
 session
 );
-if ( LOG.isDebugEnabled() ) {
+if ( debugEnabled ) {
 LOG.debugf(
 "Done materializing entity %s",
 MessageHelper.infoString( persister, id, session.getFactory() )


@@ -138,9 +138,12 @@ public class SqlExceptionHelper {
 message = StringHelper.isNotEmpty(message) ? message : DEFAULT_EXCEPTION_MSG;
 LOG.debug( message, sqlException );
 }
+final boolean warnEnabled = LOG.isEnabled( Level.WARN );
 while (sqlException != null) {
+if ( warnEnabled ) {
 StringBuilder buf = new StringBuilder(30).append("SQL Error: ").append(sqlException.getErrorCode()).append(", SQLState: ").append(sqlException.getSQLState());
 LOG.warn(buf.toString());
+}
 LOG.error(sqlException.getMessage());
 sqlException = sqlException.getNextException();
 }
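
The SqlExceptionHelper hunk above hoists the WARN check through the generic Logger.isEnabled( Level.WARN ) call while walking the SQLException chain. A self-contained sketch of the same idea, using a hypothetical helper class rather than the Hibernate one:

import java.sql.SQLException;
import org.jboss.logging.Logger;
import org.jboss.logging.Logger.Level;

public final class SqlExceptionChainLogger {

	private static final Logger LOG = Logger.getLogger( SqlExceptionChainLogger.class );

	public static void logChain(SQLException sqlException) {
		// One WARN-level check for the whole chain, not one per link.
		final boolean warnEnabled = LOG.isEnabled( Level.WARN );
		while ( sqlException != null ) {
			if ( warnEnabled ) {
				LOG.warn( "SQL Error: " + sqlException.getErrorCode()
						+ ", SQLState: " + sqlException.getSQLState() );
			}
			LOG.error( sqlException.getMessage() );
			sqlException = sqlException.getNextException();
		}
	}
}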


@@ -219,20 +219,21 @@ public class CollectionLoadContext {
 }
 private void endLoadingCollections(CollectionPersister persister, List matchedCollectionEntries) {
+final boolean debugEnabled = LOG.isDebugEnabled();
 if ( matchedCollectionEntries == null ) {
-if ( LOG.isDebugEnabled()) LOG.debugf( "No collections were found in result set for role: %s", persister.getRole() );
+if ( debugEnabled ) LOG.debugf( "No collections were found in result set for role: %s", persister.getRole() );
 return;
 }
 final int count = matchedCollectionEntries.size();
-if ( LOG.isDebugEnabled()) LOG.debugf("%s collections were found in result set for role: %s", count, persister.getRole());
+if ( debugEnabled ) LOG.debugf("%s collections were found in result set for role: %s", count, persister.getRole());
 for ( int i = 0; i < count; i++ ) {
 LoadingCollectionEntry lce = ( LoadingCollectionEntry ) matchedCollectionEntries.get( i );
 endLoadingCollection( lce, persister );
 }
-if ( LOG.isDebugEnabled() ) LOG.debugf( "%s collections initialized for role: %s", count, persister.getRole() );
+if ( debugEnabled ) LOG.debugf( "%s collections initialized for role: %s", count, persister.getRole() );
 }
 private void endLoadingCollection(LoadingCollectionEntry lce, CollectionPersister persister) {
@@ -287,13 +288,16 @@
 final SessionImplementor session = getLoadContext().getPersistenceContext().getSession();
 final SessionFactoryImplementor factory = session.getFactory();
-if ( LOG.isDebugEnabled() ) {
+final boolean debugEnabled = LOG.isDebugEnabled();
+if ( debugEnabled ) {
 LOG.debugf( "Caching collection: %s", MessageHelper.collectionInfoString( persister, lce.getCollection(), lce.getKey(), session ) );
 }
 if ( !session.getEnabledFilters().isEmpty() && persister.isAffectedByEnabledFilters( session ) ) {
 // some filters affecting the collection are enabled on the session, so do not do the put into the cache.
+if ( debugEnabled ) {
 LOG.debug( "Refusing to add to cache due to enabled filters" );
+}
 // todo : add the notion of enabled filters to the CacheKey to differentiate filtered collections from non-filtered;
 // but CacheKey is currently used for both collections and entities; would ideally need to define two seperate ones;
 // currently this works in conjuction with the check on


@@ -472,18 +472,19 @@ public abstract class AbstractSaveEventListener extends AbstractReassociateEvent
 EntityEntry entry, //pass this as an argument only to avoid double looking
 SessionImplementor source) {
+final boolean traceEnabled = LOG.isTraceEnabled();
 if ( entry != null ) { // the object is persistent
 //the entity is associated with the session, so check its status
 if ( entry.getStatus() != Status.DELETED ) {
 // do nothing for persistent instances
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Persistent instance of: {0}", getLoggableName( entityName, entity ) );
 }
 return EntityState.PERSISTENT;
 }
 // ie. e.status==DELETED
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Deleted instance of: {0}", getLoggableName( entityName, entity ) );
 }
 return EntityState.DELETED;
@@ -494,12 +495,12 @@
 // try interceptor and unsaved-value
 if ( ForeignKeys.isTransient( entityName, entity, getAssumedUnsaved(), source )) {
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Transient instance of: {0}", getLoggableName( entityName, entity ) );
 }
 return EntityState.TRANSIENT;
 }
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Detached instance of: {0}", getLoggableName( entityName, entity ) );
 }
 return EntityState.DETACHED;


@@ -633,7 +633,7 @@ public class DefaultFlushEntityEventListener implements FlushEntityEventListener
 }
 private void logDirtyProperties(Serializable id, int[] dirtyProperties, EntityPersister persister) {
-if ( LOG.isTraceEnabled() && dirtyProperties != null && dirtyProperties.length > 0 ) {
+if ( dirtyProperties != null && dirtyProperties.length > 0 && LOG.isTraceEnabled() ) {
 final String[] allPropertyNames = persister.getPropertyNames();
 final String[] dirtyPropertyNames = new String[ dirtyProperties.length ];
 for ( int i = 0; i < dirtyProperties.length; i++ ) {


@@ -59,12 +59,13 @@ public class DefaultInitializeCollectionEventListener implements InitializeColle
 CollectionEntry ce = source.getPersistenceContext().getCollectionEntry(collection);
 if (ce==null) throw new HibernateException("collection was evicted");
 if ( !collection.wasInitialized() ) {
-if ( LOG.isTraceEnabled() ) {
+final boolean traceEnabled = LOG.isTraceEnabled();
+if ( traceEnabled ) {
 LOG.tracev( "Initializing collection {0}",
 MessageHelper.collectionInfoString( ce.getLoadedPersister(), collection, ce.getLoadedKey(), source ) );
+LOG.trace( "Checking second-level cache" );
 }
-LOG.trace( "Checking second-level cache" );
 final boolean foundInCache = initializeCollectionFromCache(
 ce.getLoadedKey(),
 ce.getLoadedPersister(),
@@ -72,13 +73,17 @@
 source
 );
-if ( foundInCache ) {
+if ( foundInCache && traceEnabled ) {
 LOG.trace( "Collection initialized from cache" );
 }
 else {
+if ( traceEnabled ) {
 LOG.trace( "Collection not cached" );
+}
 ce.getLoadedPersister().initialize( ce.getLoadedKey(), source );
+if ( traceEnabled ) {
 LOG.trace( "Collection initialized" );
+}
 if ( source.getFactory().getStatistics().isStatisticsEnabled() ) {
 source.getFactory().getStatisticsImplementor().fetchCollection(


@@ -101,8 +101,9 @@ public class DefaultReplicateEventListener extends AbstractSaveEventListener imp
 oldVersion = persister.getCurrentVersion( id, source );
 }
+final boolean traceEnabled = LOG.isTraceEnabled();
 if ( oldVersion != null ) {
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Found existing row for {0}", MessageHelper.infoString( persister, id, source.getFactory() ) );
 }
@@ -120,14 +121,14 @@
 // else do nothing (don't even reassociate object!)
 if ( canReplicate )
 performReplication( entity, id, realOldVersion, persister, replicationMode, source );
-else
+else if ( traceEnabled )
 LOG.trace( "No need to replicate" );
 //TODO: would it be better to do a refresh from db?
 }
 else {
 // no existing row - do an insert
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "No existing row, replicating new instance {0}",
 MessageHelper.infoString( persister, id, source.getFactory() ) );
 }


@@ -117,8 +117,10 @@ public class DefaultSaveOrUpdateEventListener extends AbstractSaveEventListener
 }
 protected Serializable entityIsPersistent(SaveOrUpdateEvent event) throws HibernateException {
+final boolean traceEnabled = LOG.isTraceEnabled();
+if ( traceEnabled ) {
 LOG.trace( "Ignoring persistent instance" );
+}
 EntityEntry entityEntry = event.getEntry();
 if ( entityEntry == null ) {
 throw new AssertionFailure( "entity was transient or detached" );
@@ -153,7 +155,7 @@
 }
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Object already associated with session: {0}", MessageHelper.infoString( entityEntry.getPersister(), savedId, factory ) );
 }
@@ -279,11 +281,12 @@
 Object entity,
 EntityPersister persister) throws HibernateException {
-if ( !persister.isMutable() ) {
+final boolean traceEnabled = LOG.isTraceEnabled();
+if ( traceEnabled && !persister.isMutable() ) {
 LOG.trace( "Immutable instance passed to performUpdate()" );
 }
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Updating {0}",
 MessageHelper.infoString( persister, event.getRequestedId(), event.getSession().getFactory() ) );
 }
@@ -329,7 +332,7 @@
 persister.afterReassociate(entity, source);
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Updating {0}", MessageHelper.infoString( persister, event.getRequestedId(), source.getFactory() ) );
 }


@@ -311,7 +311,7 @@ public class FromElement extends HqlSqlWalkerNode implements DisplayableNode, Pa
 }
 public void setIncludeSubclasses(boolean includeSubclasses) {
-if ( LOG.isTraceEnabled() && isDereferencedBySuperclassOrSubclassProperty() && !includeSubclasses )
+if ( !includeSubclasses && isDereferencedBySuperclassOrSubclassProperty() && LOG.isTraceEnabled() )
 LOG.trace( "Attempt to disable subclass-inclusions : ", new Exception( "Stack-trace source" ) );
 this.includeSubclasses = includeSubclasses;
 }

View File

@@ -122,7 +122,10 @@ public class IncrementGenerator implements IdentifierGenerator, Configurable {
 private void initializePreviousValueHolder(SessionImplementor session) {
 previousValueHolder = IdentifierGeneratorHelper.getIntegralDataTypeHolder( returnClass );
+final boolean debugEnabled = LOG.isDebugEnabled();
+if ( debugEnabled ) {
 LOG.debugf( "Fetching initial value: %s", sql );
+}
 try {
 PreparedStatement st = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( sql );
 try {
@@ -131,7 +134,7 @@
 if (rs.next()) previousValueHolder.initialize(rs, 0L).increment();
 else previousValueHolder.initialize(1L);
 sql = null;
-if ( LOG.isDebugEnabled() ) {
+if ( debugEnabled ) {
 LOG.debugf( "First free id: %s", previousValueHolder.makeValue() );
 }
 }


@@ -54,7 +54,6 @@ import org.hibernate.Interceptor;
 import org.hibernate.MappingException;
 import org.hibernate.MultiTenancyStrategy;
 import org.hibernate.ObjectNotFoundException;
-import org.hibernate.QueryException;
 import org.hibernate.Session;
 import org.hibernate.SessionBuilder;
 import org.hibernate.SessionFactory;
@@ -103,7 +102,6 @@ import org.hibernate.engine.profile.Association;
 import org.hibernate.engine.profile.Fetch;
 import org.hibernate.engine.profile.FetchProfile;
 import org.hibernate.engine.query.spi.QueryPlanCache;
-import org.hibernate.engine.query.spi.sql.NativeSQLQuerySpecification;
 import org.hibernate.engine.spi.CacheImplementor;
 import org.hibernate.engine.spi.FilterDefinition;
 import org.hibernate.engine.spi.Mapping;
@@ -657,7 +655,12 @@ public final class SessionFactoryImpl
 MetadataImplementor metadata,
 SessionFactoryOptions sessionFactoryOptions,
 SessionFactoryObserver observer) throws HibernateException {
+final boolean traceEnabled = LOG.isTraceEnabled();
+final boolean debugEnabled = traceEnabled || LOG.isDebugEnabled();
+if ( debugEnabled ) {
 LOG.debug( "Building session factory" );
+}
 this.sessionFactoryOptions = sessionFactoryOptions;
@@ -699,9 +702,10 @@
 filters.put( filterDefinition.getFilterName(), filterDefinition );
 }
+if ( debugEnabled ) {
 LOG.debugf( "Session factory constructed with filter configurations : %s", filters );
 LOG.debugf( "Instantiating session factory with properties: %s", properties );
+}
 this.queryPlanCache = new QueryPlanCache( this );
 class IntegratorObserver implements SessionFactoryObserver {
@@ -768,7 +772,7 @@
 accessStrategy = EntityRegionAccessStrategy.class.cast( entityAccessStrategies.get( cacheRegionName ) );
 if ( accessStrategy == null ) {
 final AccessType accessType = model.getHierarchyDetails().getCaching().getAccessType();
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Building cache for entity data [{0}]", model.getEntity().getName() );
 }
 EntityRegion entityRegion = settings.getRegionFactory().buildEntityRegion(
@@ -803,7 +807,7 @@
 final AccessType accessType = model.getCaching().getAccessType();
 CollectionRegionAccessStrategy accessStrategy = null;
 if ( accessType != null && settings.isSecondLevelCacheEnabled() ) {
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Building cache for collection data [{0}]", model.getAttribute().getRole() );
 }
 CollectionRegion collectionRegion = settings.getRegionFactory().buildCollectionRegion(
@@ -889,7 +893,9 @@
 serviceRegistry.getService( JndiService.class )
 );
+if ( debugEnabled ) {
 LOG.debug("Instantiated session factory");
+}
 if ( settings.isAutoCreateSchema() ) {
 new SchemaExport( metadata )


@@ -2574,13 +2574,14 @@ public final class SessionImpl extends AbstractSessionImpl implements EventSourc
 return;
 }
+final boolean debugEnabled = LOG.isDebugEnabled();
 for ( Serializable pk : getPersistenceContext().getNaturalIdHelper().getCachedPkResolutions( entityPersister ) ) {
 final EntityKey entityKey = generateEntityKey( pk, entityPersister );
 final Object entity = getPersistenceContext().getEntity( entityKey );
 final EntityEntry entry = getPersistenceContext().getEntry( entity );
 if ( entry == null ) {
-if ( LOG.isDebugEnabled() ) {
+if ( debugEnabled ) {
 LOG.debug(
 "Cached natural-id/pk resolution linked to null EntityEntry in persistence context : "
 + MessageHelper.infoString( entityPersister, pk, getFactory() )


@@ -1346,16 +1346,16 @@ public abstract class Loader {
 final SessionImplementor session) {
 if ( keys != null ) {
+final boolean debugEnabled = LOG.isDebugEnabled();
 // this is a collection initializer, so we must create a collection
 // for each of the passed-in keys, to account for the possibility
 // that the collection is empty and has no rows in the result set
 CollectionPersister[] collectionPersisters = getCollectionPersisters();
 for ( int j=0; j<collectionPersisters.length; j++ ) {
 for ( int i = 0; i < keys.length; i++ ) {
 //handle empty collections
-if ( LOG.isDebugEnabled() ) {
+if ( debugEnabled ) {
 LOG.debugf( "Result set contains (possibly empty) collection: %s",
 MessageHelper.collectionInfoString( collectionPersisters[j], keys[i], getFactory() ) );
 }


@@ -3294,14 +3294,15 @@ public abstract class AbstractEntityPersister
 );
 }
-if ( LOG.isTraceEnabled() ) {
+final boolean traceEnabled = LOG.isTraceEnabled();
+if ( traceEnabled ) {
 LOG.tracev( "Deleting entity: {0}", MessageHelper.infoString( this, id, getFactory() ) );
 if ( useVersion )
 LOG.tracev( "Version: {0}", version );
 }
 if ( isTableCascadeDeleteEnabled( j ) ) {
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Delete handled by foreign key constraint: {0}", getTableName( j ) );
 }
 return; //EARLY EXIT!


@@ -340,15 +340,16 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType,Logg
 public void setValue(PreparedStatement st, Enum value, int index) throws SQLException {
 final Object jdbcValue = value == null ? null : extractJdbcValue( value );
+final boolean traceEnabled = LOG.isTraceEnabled();
 if ( jdbcValue == null ) {
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.trace(String.format("Binding null to parameter: [%s]", index));
 }
 st.setNull( index, getSqlType() );
 return;
 }
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.trace(String.format("Binding [%s] to parameter: [%s]", jdbcValue, index));
 }
 st.setObject( index, jdbcValue, EnumType.this.sqlType );
@@ -366,15 +367,16 @@
 @Override
 public Enum getValue(ResultSet rs, String[] names) throws SQLException {
 final int ordinal = rs.getInt( names[0] );
+final boolean traceEnabled = LOG.isTraceEnabled();
 if ( rs.wasNull() ) {
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.trace(String.format("Returning null as column [%s]", names[0]));
 }
 return null;
 }
 final Enum enumValue = fromOrdinal( ordinal );
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.trace(String.format("Returning [%s] as column [%s]", enumValue, names[0]));
 }
 return enumValue;
@@ -436,15 +438,16 @@
 public Enum getValue(ResultSet rs, String[] names) throws SQLException {
 final String value = rs.getString( names[0] );
+final boolean traceEnabled = LOG.isTraceEnabled();
 if ( rs.wasNull() ) {
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.trace(String.format("Returning null as column [%s]", names[0]));
 }
 return null;
 }
 final Enum enumValue = fromName( value );
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.trace(String.format("Returning [%s] as column [%s]", enumValue, names[0]));
 }
 return enumValue;


@@ -66,8 +66,9 @@ public abstract class BasicBinder<J> implements ValueBinder<J> {
 * {@inheritDoc}
 */
 public final void bind(PreparedStatement st, J value, int index, WrapperOptions options) throws SQLException {
+final boolean traceEnabled = LOG.isTraceEnabled();
 if ( value == null ) {
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.trace(
 String.format(
 NULL_BIND_MSG_TEMPLATE,
@@ -79,7 +80,7 @@
 st.setNull( index, sqlDescriptor.getSqlType() );
 }
 else {
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.trace(
 String.format(
 BIND_MSG_TEMPLATE,


@@ -62,12 +62,15 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J> {
 @Override
 public J extract(ResultSet rs, String name, WrapperOptions options) throws SQLException {
 final J value = doExtract( rs, name, options );
+final boolean traceEnabled = LOG.isTraceEnabled();
 if ( value == null || rs.wasNull() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Found [null] as column [{0}]", name );
+}
 return null;
 }
 else {
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Found [{0}] as column [{1}]", getJavaDescriptor().extractLoggableRepresentation( value ), name );
 }
 return value;
@@ -93,12 +96,15 @@
 @Override
 public J extract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
 final J value = doExtract( statement, index, options );
+final boolean traceEnabled = LOG.isTraceEnabled();
 if ( value == null || statement.wasNull() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Found [null] as procedure output parameter [{0}]", index );
+}
 return null;
 }
 else {
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Found [{0}] as procedure output parameter [{1}]", getJavaDescriptor().extractLoggableRepresentation( value ), index );
 }
 return value;
@@ -128,12 +134,15 @@
 }
 final String paramName = paramNames[0];
 final J value = doExtract( statement, paramName, options );
+final boolean traceEnabled = LOG.isTraceEnabled();
 if ( value == null || statement.wasNull() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Found [null] as procedure output parameter [{0}]", paramName );
+}
 return null;
 }
 else {
-if ( LOG.isTraceEnabled() ) {
+if ( traceEnabled ) {
 LOG.tracev( "Found [{0}] as procedure output parameter [{1}]", getJavaDescriptor().extractLoggableRepresentation( value ), paramName );
 }
 return value;


@@ -469,7 +469,7 @@ sessionFactory().getStatistics().clear();
 contactExists();
 thinkRandomTime();
 ++completedIterations;
-if ( log.isTraceEnabled() ) {
+if ( trace ) {
 log.tracef( "Iteration completed {0}", completedIterations );
 }
 }