HHH-7908 Logging level checking

Conflicts:
	hibernate-core/src/main/java/org/hibernate/internal/SessionFactoryImpl.java
Nikolay Shestakov authored on 2013-04-12 09:56:41 -04:00, committed by Brett Meyer
parent 03daecfe49
commit cbbadea538
26 changed files with 139 additions and 84 deletions
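
Every hunk below applies the same micro-optimization: the logger level is read once into a local final boolean (traceEnabled / debugEnabled) and reused for each guarded statement, instead of calling LOG.isTraceEnabled() or LOG.isDebugEnabled() repeatedly, and compound guards are reordered so the level check is evaluated last. A minimal sketch of the before/after shape, using org.jboss.logging as Hibernate does; the ExampleService class and its process method are illustrative only and are not part of this commit:

import org.jboss.logging.Logger;

public class ExampleService {
	private static final Logger LOG = Logger.getLogger( ExampleService.class );

	public void process(Iterable<String> items) {
		// Before the change, each iteration would call LOG.isTraceEnabled() inside the loop.
		// After the change, the check is hoisted once and the local flag is reused.
		final boolean traceEnabled = LOG.isTraceEnabled();
		for ( String item : items ) {
			if ( traceEnabled ) {
				LOG.tracev( "Processing item {0}", item );
			}
			// ... actual work on the item goes here ...
		}
	}
}

The trade-off is that a level change made after the flag is captured is not seen until the next call; for loop- or method-scoped work that is normally acceptable.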

@ -204,11 +204,13 @@ public class UnresolvedEntityInsertActions {
if ( entityEntry.getStatus() != Status.MANAGED && entityEntry.getStatus() != Status.READ_ONLY ) {
throw new IllegalArgumentException( "EntityEntry did not have status MANAGED or READ_ONLY: " + entityEntry );
}
final boolean traceEnabled = LOG.isTraceEnabled();
// Find out if there are any unresolved insertions that are waiting for the
// specified entity to be resolved.
Set<AbstractEntityInsertAction> dependentActions = dependentActionsByTransientEntity.remove( managedEntity );
if ( dependentActions == null ) {
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev(
"No unresolved entity inserts that depended on [{0}]",
MessageHelper.infoString( entityEntry.getEntityName(), entityEntry.getId() )
@ -217,7 +219,7 @@ public class UnresolvedEntityInsertActions {
return Collections.emptySet(); //NOTE EARLY EXIT!
}
Set<AbstractEntityInsertAction> resolvedActions = new IdentitySet( );
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev(
"Unresolved inserts before resolving [{0}]: [{1}]",
MessageHelper.infoString( entityEntry.getEntityName(), entityEntry.getId() ),
@ -225,7 +227,7 @@ public class UnresolvedEntityInsertActions {
);
}
for ( AbstractEntityInsertAction dependentAction : dependentActions ) {
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev(
"Resolving insert [{0}] dependency on [{1}]",
MessageHelper.infoString( dependentAction.getEntityName(), dependentAction.getId() ),
@ -235,7 +237,7 @@ public class UnresolvedEntityInsertActions {
NonNullableTransientDependencies dependencies = dependenciesByAction.get( dependentAction );
dependencies.resolveNonNullableTransientEntity( managedEntity );
if ( dependencies.isEmpty() ) {
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev(
"Resolving insert [{0}] (only depended on [{1}])",
dependentAction,
@ -247,7 +249,7 @@ public class UnresolvedEntityInsertActions {
resolvedActions.add( dependentAction );
}
}
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev(
"Unresolved inserts after resolving [{0}]: [{1}]",
MessageHelper.infoString( entityEntry.getEntityName(), entityEntry.getId() ),

@ -1476,7 +1476,8 @@ public final class AnnotationBinder {
* ordering does not matter
*/
if ( LOG.isTraceEnabled() ) {
final boolean traceEnabled = LOG.isTraceEnabled();
if ( traceEnabled ) {
LOG.tracev( "Processing annotations of {0}.{1}" , propertyHolder.getEntityName(), inferredData.getPropertyName() );
}
@ -1542,7 +1543,7 @@ public final class AnnotationBinder {
+ propertyHolder.getEntityName()
);
}
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "{0} is a version property", inferredData.getPropertyName() );
}
RootClass rootClass = ( RootClass ) propertyHolder.getPersistentClass();
@ -1568,7 +1569,7 @@ public final class AnnotationBinder {
SimpleValue simpleValue = ( SimpleValue ) prop.getValue();
simpleValue.setNullValue( "undefined" );
rootClass.setOptimisticLockMode( Versioning.OPTIMISTIC_LOCK_VERSION );
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Version name: {0}, unsavedValue: {1}", rootClass.getVersion().getName(),
( (SimpleValue) rootClass.getVersion().getValue() ).getNullValue() );
}

@ -61,12 +61,15 @@ public abstract class CollectionSecondPass implements SecondPass {
public void doSecondPass(java.util.Map persistentClasses)
throws MappingException {
LOG.debugf( "Second pass for collection: %s", collection.getRole() );
final boolean debugEnabled = LOG.isDebugEnabled();
if ( debugEnabled ) {
LOG.debugf( "Second pass for collection: %s", collection.getRole() );
}
secondPass( persistentClasses, localInheritedMetas ); // using local since the inheritedMetas at this point is not the correct map since it is always the empty map
collection.createAllKeys();
if ( LOG.isDebugEnabled() ) {
if ( debugEnabled ) {
String msg = "Mapped collection key: " + columns( collection.getKey() );
if ( collection.isIndexed() )
msg += ", index: " + columns( ( (IndexedCollection) collection ).getIndex() );

@ -2657,6 +2657,7 @@ public final class HbmBinder {
"not valid within collection using join fetching [" + collection.getRole() + "]"
);
}
final boolean debugEnabled = LOG.isDebugEnabled();
while ( filters.hasNext() ) {
final Element filterElement = ( Element ) filters.next();
final String name = filterElement.attributeValue( "name" );
@ -2674,7 +2675,7 @@ public final class HbmBinder {
Element alias = (Element) aliasesIterator.next();
aliasTables.put(alias.attributeValue("alias"), alias.attributeValue("table"));
}
if ( LOG.isDebugEnabled() ) {
if ( debugEnabled ) {
LOG.debugf( "Applying many-to-many filter [%s] as [%s] to role [%s]", name, condition, collection.getRole() );
}
String autoAliasInjectionText = filterElement.attributeValue("autoAliasInjection");

@ -814,7 +814,9 @@ public abstract class CollectionBinder {
boolean ignoreNotFound,
Mappings mappings,
Map<XClass, InheritanceState> inheritanceStatePerClass) {
if ( LOG.isDebugEnabled() ) {
final boolean debugEnabled = LOG.isDebugEnabled();
if ( debugEnabled ) {
LOG.debugf( "Binding a OneToMany: %s.%s through a foreign key", propertyHolder.getEntityName(), propertyName );
}
org.hibernate.mapping.OneToMany oneToMany = new org.hibernate.mapping.OneToMany( mappings, collection.getOwner() );
@ -855,7 +857,7 @@ public abstract class CollectionBinder {
column.setJoins( joins );
collection.setCollectionTable( column.getTable() );
}
if ( LOG.isDebugEnabled() ) {
if ( debugEnabled ) {
LOG.debugf( "Mapping collection: %s -> %s", collection.getRole(), collection.getCollectionTable().getName() );
}
bindFilters( false );

@ -146,7 +146,8 @@ public final class Cascade {
throws HibernateException {
if ( persister.hasCascades() || action.requiresNoCascadeChecking() ) { // performance opt
if ( LOG.isTraceEnabled() ) {
final boolean traceEnabled = LOG.isTraceEnabled();
if ( traceEnabled ) {
LOG.tracev( "Processing cascade {0} for: {1}", action, persister.getEntityName() );
}
@ -183,7 +184,7 @@ public final class Cascade {
}
}
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Done processing cascade {0} for: {1}", action, persister.getEntityName() );
}
}
@ -407,7 +408,8 @@ public final class Cascade {
boolean reallyDoCascade = style.reallyDoCascade(action) && child!=CollectionType.UNFETCHED_COLLECTION;
if ( reallyDoCascade ) {
if ( LOG.isTraceEnabled() ) {
final boolean traceEnabled = LOG.isTraceEnabled();
if ( traceEnabled ) {
LOG.tracev( "Cascade {0} for collection: {1}", action, collectionType.getRole() );
}
@ -424,7 +426,7 @@ public final class Cascade {
);
}
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Done cascade {0} for collection: {1}", action, collectionType.getRole() );
}
}
@ -435,7 +437,8 @@ public final class Cascade {
child instanceof PersistentCollection; //a newly instantiated collection can't have orphans
if ( deleteOrphans ) { // handle orphaned entities!!
if ( LOG.isTraceEnabled() ) {
final boolean traceEnabled = LOG.isTraceEnabled();
if ( traceEnabled ) {
LOG.tracev( "Deleting orphans for collection: {0}", collectionType.getRole() );
}
// we can do the cast since orphan-delete does not apply to:
@ -444,7 +447,7 @@ public final class Cascade {
final String entityName = collectionType.getAssociatedEntityName( eventSource.getFactory() );
deleteOrphans( entityName, (PersistentCollection) child );
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Done deleting orphans for collection: {0}", collectionType.getRole() );
}
}

@ -72,7 +72,7 @@ public final class Collections {
CollectionEntry entry = persistenceContext.getCollectionEntry(coll);
final CollectionPersister loadedPersister = entry.getLoadedPersister();
if ( LOG.isDebugEnabled() && loadedPersister != null ) {
if ( loadedPersister != null && LOG.isDebugEnabled() ) {
LOG.debugf(
"Collection dereferenced: %s",
MessageHelper.collectionInfoString( loadedPersister,

@ -112,6 +112,7 @@ public class ParameterBinder {
final NamedParameterSource source,
final SessionImplementor session) throws SQLException, HibernateException {
if ( namedParams != null ) {
final boolean debugEnabled = LOG.isDebugEnabled();
// assumes that types are all of span 1
Iterator iter = namedParams.entrySet().iterator();
int result = 0;
@ -121,7 +122,7 @@ public class ParameterBinder {
TypedValue typedval = (TypedValue) e.getValue();
int[] locations = source.getNamedParameterLocations( name );
for ( int i = 0; i < locations.length; i++ ) {
if ( LOG.isDebugEnabled() ) {
if ( debugEnabled ) {
LOG.debugf("bindNamedParameters() %s -> %s [%s]", typedval.getValue(), name, locations[i] + start);
}
typedval.getType().nullSafeSet( ps, typedval.getValue(), locations[i] + start, session );

@ -101,7 +101,7 @@ public final class TwoPhaseLoad {
lazyPropertiesAreUnfetched
);
if ( LOG.isTraceEnabled() && version != null ) {
if ( version != null && LOG.isTraceEnabled() ) {
String versionStr = persister.isVersioned()
? persister.getVersionType().toLoggableString( version, session.getFactory() )
: "null";
@ -153,7 +153,8 @@ public final class TwoPhaseLoad {
Serializable id = entityEntry.getId();
Object[] hydratedState = entityEntry.getLoadedState();
if ( LOG.isDebugEnabled() ) {
final boolean debugEnabled = LOG.isDebugEnabled();
if ( debugEnabled ) {
LOG.debugf(
"Resolving associations for %s",
MessageHelper.infoString( persister, id, session.getFactory() )
@ -187,7 +188,7 @@ public final class TwoPhaseLoad {
final SessionFactoryImplementor factory = session.getFactory();
if ( persister.hasCache() && session.getCacheMode().isPutEnabled() ) {
if ( LOG.isDebugEnabled() ) {
if ( debugEnabled ) {
LOG.debugf(
"Adding entity to second-level cache: %s",
MessageHelper.infoString( persister, id, session.getFactory() )
@ -272,7 +273,7 @@ public final class TwoPhaseLoad {
session
);
if ( LOG.isDebugEnabled() ) {
if ( debugEnabled ) {
LOG.debugf(
"Done materializing entity %s",
MessageHelper.infoString( persister, id, session.getFactory() )

@ -136,11 +136,14 @@ public class SqlExceptionHelper {
if (LOG.isEnabled(Level.ERROR)) {
if (LOG.isDebugEnabled()) {
message = StringHelper.isNotEmpty(message) ? message : DEFAULT_EXCEPTION_MSG;
LOG.debug( message, sqlException );
LOG.debug( message, sqlException );
}
final boolean warnEnabled = LOG.isEnabled( Level.WARN );
while (sqlException != null) {
StringBuilder buf = new StringBuilder(30).append("SQL Error: ").append(sqlException.getErrorCode()).append(", SQLState: ").append(sqlException.getSQLState());
LOG.warn(buf.toString());
if ( warnEnabled ) {
StringBuilder buf = new StringBuilder(30).append("SQL Error: ").append(sqlException.getErrorCode()).append(", SQLState: ").append(sqlException.getSQLState());
LOG.warn(buf.toString());
}
LOG.error(sqlException.getMessage());
sqlException = sqlException.getNextException();
}

@ -219,20 +219,21 @@ public class CollectionLoadContext {
}
private void endLoadingCollections(CollectionPersister persister, List matchedCollectionEntries) {
final boolean debugEnabled = LOG.isDebugEnabled();
if ( matchedCollectionEntries == null ) {
if ( LOG.isDebugEnabled()) LOG.debugf( "No collections were found in result set for role: %s", persister.getRole() );
if ( debugEnabled ) LOG.debugf( "No collections were found in result set for role: %s", persister.getRole() );
return;
}
final int count = matchedCollectionEntries.size();
if ( LOG.isDebugEnabled()) LOG.debugf("%s collections were found in result set for role: %s", count, persister.getRole());
if ( debugEnabled ) LOG.debugf("%s collections were found in result set for role: %s", count, persister.getRole());
for ( int i = 0; i < count; i++ ) {
LoadingCollectionEntry lce = ( LoadingCollectionEntry ) matchedCollectionEntries.get( i );
endLoadingCollection( lce, persister );
}
if ( LOG.isDebugEnabled() ) LOG.debugf( "%s collections initialized for role: %s", count, persister.getRole() );
if ( debugEnabled ) LOG.debugf( "%s collections initialized for role: %s", count, persister.getRole() );
}
private void endLoadingCollection(LoadingCollectionEntry lce, CollectionPersister persister) {
@ -287,13 +288,16 @@ public class CollectionLoadContext {
final SessionImplementor session = getLoadContext().getPersistenceContext().getSession();
final SessionFactoryImplementor factory = session.getFactory();
if ( LOG.isDebugEnabled() ) {
final boolean debugEnabled = LOG.isDebugEnabled();
if ( debugEnabled ) {
LOG.debugf( "Caching collection: %s", MessageHelper.collectionInfoString( persister, lce.getCollection(), lce.getKey(), session ) );
}
if ( !session.getEnabledFilters().isEmpty() && persister.isAffectedByEnabledFilters( session ) ) {
// some filters affecting the collection are enabled on the session, so do not do the put into the cache.
LOG.debug( "Refusing to add to cache due to enabled filters" );
if ( debugEnabled ) {
LOG.debug( "Refusing to add to cache due to enabled filters" );
}
// todo : add the notion of enabled filters to the CacheKey to differentiate filtered collections from non-filtered;
// but CacheKey is currently used for both collections and entities; would ideally need to define two seperate ones;
// currently this works in conjuction with the check on

@ -472,18 +472,19 @@ public abstract class AbstractSaveEventListener extends AbstractReassociateEvent
EntityEntry entry, //pass this as an argument only to avoid double looking
SessionImplementor source) {
final boolean traceEnabled = LOG.isTraceEnabled();
if ( entry != null ) { // the object is persistent
//the entity is associated with the session, so check its status
if ( entry.getStatus() != Status.DELETED ) {
// do nothing for persistent instances
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Persistent instance of: {0}", getLoggableName( entityName, entity ) );
}
return EntityState.PERSISTENT;
}
// ie. e.status==DELETED
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Deleted instance of: {0}", getLoggableName( entityName, entity ) );
}
return EntityState.DELETED;
@ -494,12 +495,12 @@ public abstract class AbstractSaveEventListener extends AbstractReassociateEvent
// try interceptor and unsaved-value
if ( ForeignKeys.isTransient( entityName, entity, getAssumedUnsaved(), source )) {
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Transient instance of: {0}", getLoggableName( entityName, entity ) );
}
return EntityState.TRANSIENT;
}
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Detached instance of: {0}", getLoggableName( entityName, entity ) );
}
return EntityState.DETACHED;

@ -633,7 +633,7 @@ public class DefaultFlushEntityEventListener implements FlushEntityEventListener
}
private void logDirtyProperties(Serializable id, int[] dirtyProperties, EntityPersister persister) {
if ( LOG.isTraceEnabled() && dirtyProperties != null && dirtyProperties.length > 0 ) {
if ( dirtyProperties != null && dirtyProperties.length > 0 && LOG.isTraceEnabled() ) {
final String[] allPropertyNames = persister.getPropertyNames();
final String[] dirtyPropertyNames = new String[ dirtyProperties.length ];
for ( int i = 0; i < dirtyProperties.length; i++ ) {

@ -59,12 +59,13 @@ public class DefaultInitializeCollectionEventListener implements InitializeColle
CollectionEntry ce = source.getPersistenceContext().getCollectionEntry(collection);
if (ce==null) throw new HibernateException("collection was evicted");
if ( !collection.wasInitialized() ) {
if ( LOG.isTraceEnabled() ) {
final boolean traceEnabled = LOG.isTraceEnabled();
if ( traceEnabled ) {
LOG.tracev( "Initializing collection {0}",
MessageHelper.collectionInfoString( ce.getLoadedPersister(), collection, ce.getLoadedKey(), source ) );
LOG.trace( "Checking second-level cache" );
}
LOG.trace( "Checking second-level cache" );
final boolean foundInCache = initializeCollectionFromCache(
ce.getLoadedKey(),
ce.getLoadedPersister(),
@ -72,13 +73,17 @@ public class DefaultInitializeCollectionEventListener implements InitializeColle
source
);
if ( foundInCache ) {
if ( foundInCache && traceEnabled ) {
LOG.trace( "Collection initialized from cache" );
}
else {
LOG.trace( "Collection not cached" );
if ( traceEnabled ) {
LOG.trace( "Collection not cached" );
}
ce.getLoadedPersister().initialize( ce.getLoadedKey(), source );
LOG.trace( "Collection initialized" );
if ( traceEnabled ) {
LOG.trace( "Collection initialized" );
}
if ( source.getFactory().getStatistics().isStatisticsEnabled() ) {
source.getFactory().getStatisticsImplementor().fetchCollection(

@ -101,8 +101,9 @@ public class DefaultReplicateEventListener extends AbstractSaveEventListener imp
oldVersion = persister.getCurrentVersion( id, source );
}
final boolean traceEnabled = LOG.isTraceEnabled();
if ( oldVersion != null ) {
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Found existing row for {0}", MessageHelper.infoString( persister, id, source.getFactory() ) );
}
@ -120,14 +121,14 @@ public class DefaultReplicateEventListener extends AbstractSaveEventListener imp
// else do nothing (don't even reassociate object!)
if ( canReplicate )
performReplication( entity, id, realOldVersion, persister, replicationMode, source );
else
else if ( traceEnabled )
LOG.trace( "No need to replicate" );
//TODO: would it be better to do a refresh from db?
}
else {
// no existing row - do an insert
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "No existing row, replicating new instance {0}",
MessageHelper.infoString( persister, id, source.getFactory() ) );
}

@ -117,8 +117,10 @@ public class DefaultSaveOrUpdateEventListener extends AbstractSaveEventListener
}
protected Serializable entityIsPersistent(SaveOrUpdateEvent event) throws HibernateException {
LOG.trace( "Ignoring persistent instance" );
final boolean traceEnabled = LOG.isTraceEnabled();
if ( traceEnabled ) {
LOG.trace( "Ignoring persistent instance" );
}
EntityEntry entityEntry = event.getEntry();
if ( entityEntry == null ) {
throw new AssertionFailure( "entity was transient or detached" );
@ -153,7 +155,7 @@ public class DefaultSaveOrUpdateEventListener extends AbstractSaveEventListener
}
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Object already associated with session: {0}", MessageHelper.infoString( entityEntry.getPersister(), savedId, factory ) );
}
@ -279,11 +281,12 @@ public class DefaultSaveOrUpdateEventListener extends AbstractSaveEventListener
Object entity,
EntityPersister persister) throws HibernateException {
if ( !persister.isMutable() ) {
final boolean traceEnabled = LOG.isTraceEnabled();
if ( traceEnabled && !persister.isMutable() ) {
LOG.trace( "Immutable instance passed to performUpdate()" );
}
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Updating {0}",
MessageHelper.infoString( persister, event.getRequestedId(), event.getSession().getFactory() ) );
}
@ -329,7 +332,7 @@ public class DefaultSaveOrUpdateEventListener extends AbstractSaveEventListener
persister.afterReassociate(entity, source);
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Updating {0}", MessageHelper.infoString( persister, event.getRequestedId(), source.getFactory() ) );
}

@ -311,7 +311,7 @@ public class FromElement extends HqlSqlWalkerNode implements DisplayableNode, Pa
}
public void setIncludeSubclasses(boolean includeSubclasses) {
if ( LOG.isTraceEnabled() && isDereferencedBySuperclassOrSubclassProperty() && !includeSubclasses )
if ( !includeSubclasses && isDereferencedBySuperclassOrSubclassProperty() && LOG.isTraceEnabled() )
LOG.trace( "Attempt to disable subclass-inclusions : ", new Exception( "Stack-trace source" ) );
this.includeSubclasses = includeSubclasses;
}

@ -122,7 +122,10 @@ public class IncrementGenerator implements IdentifierGenerator, Configurable {
private void initializePreviousValueHolder(SessionImplementor session) {
previousValueHolder = IdentifierGeneratorHelper.getIntegralDataTypeHolder( returnClass );
LOG.debugf( "Fetching initial value: %s", sql );
final boolean debugEnabled = LOG.isDebugEnabled();
if ( debugEnabled ) {
LOG.debugf( "Fetching initial value: %s", sql );
}
try {
PreparedStatement st = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( sql );
try {
@ -131,7 +134,7 @@ public class IncrementGenerator implements IdentifierGenerator, Configurable {
if (rs.next()) previousValueHolder.initialize(rs, 0L).increment();
else previousValueHolder.initialize(1L);
sql = null;
if ( LOG.isDebugEnabled() ) {
if ( debugEnabled ) {
LOG.debugf( "First free id: %s", previousValueHolder.makeValue() );
}
}

@ -54,7 +54,6 @@ import org.hibernate.Interceptor;
import org.hibernate.MappingException;
import org.hibernate.MultiTenancyStrategy;
import org.hibernate.ObjectNotFoundException;
import org.hibernate.QueryException;
import org.hibernate.Session;
import org.hibernate.SessionBuilder;
import org.hibernate.SessionFactory;
@ -103,7 +102,6 @@ import org.hibernate.engine.profile.Association;
import org.hibernate.engine.profile.Fetch;
import org.hibernate.engine.profile.FetchProfile;
import org.hibernate.engine.query.spi.QueryPlanCache;
import org.hibernate.engine.query.spi.sql.NativeSQLQuerySpecification;
import org.hibernate.engine.spi.CacheImplementor;
import org.hibernate.engine.spi.FilterDefinition;
import org.hibernate.engine.spi.Mapping;
@ -657,7 +655,12 @@ public final class SessionFactoryImpl
MetadataImplementor metadata,
SessionFactoryOptions sessionFactoryOptions,
SessionFactoryObserver observer) throws HibernateException {
LOG.debug( "Building session factory" );
final boolean traceEnabled = LOG.isTraceEnabled();
final boolean debugEnabled = traceEnabled || LOG.isDebugEnabled();
if ( debugEnabled ) {
LOG.debug( "Building session factory" );
}
this.sessionFactoryOptions = sessionFactoryOptions;
@ -699,9 +702,10 @@ public final class SessionFactoryImpl
filters.put( filterDefinition.getFilterName(), filterDefinition );
}
LOG.debugf( "Session factory constructed with filter configurations : %s", filters );
LOG.debugf( "Instantiating session factory with properties: %s", properties );
if ( debugEnabled ) {
LOG.debugf( "Session factory constructed with filter configurations : %s", filters );
LOG.debugf( "Instantiating session factory with properties: %s", properties );
}
this.queryPlanCache = new QueryPlanCache( this );
class IntegratorObserver implements SessionFactoryObserver {
@ -768,7 +772,7 @@ public final class SessionFactoryImpl
accessStrategy = EntityRegionAccessStrategy.class.cast( entityAccessStrategies.get( cacheRegionName ) );
if ( accessStrategy == null ) {
final AccessType accessType = model.getHierarchyDetails().getCaching().getAccessType();
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Building cache for entity data [{0}]", model.getEntity().getName() );
}
EntityRegion entityRegion = settings.getRegionFactory().buildEntityRegion(
@ -803,7 +807,7 @@ public final class SessionFactoryImpl
final AccessType accessType = model.getCaching().getAccessType();
CollectionRegionAccessStrategy accessStrategy = null;
if ( accessType != null && settings.isSecondLevelCacheEnabled() ) {
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Building cache for collection data [{0}]", model.getAttribute().getRole() );
}
CollectionRegion collectionRegion = settings.getRegionFactory().buildCollectionRegion(
@ -889,7 +893,9 @@ public final class SessionFactoryImpl
serviceRegistry.getService( JndiService.class )
);
LOG.debug("Instantiated session factory");
if ( debugEnabled ) {
LOG.debug("Instantiated session factory");
}
if ( settings.isAutoCreateSchema() ) {
new SchemaExport( metadata )

@ -2574,13 +2574,14 @@ public final class SessionImpl extends AbstractSessionImpl implements EventSourc
return;
}
final boolean debugEnabled = LOG.isDebugEnabled();
for ( Serializable pk : getPersistenceContext().getNaturalIdHelper().getCachedPkResolutions( entityPersister ) ) {
final EntityKey entityKey = generateEntityKey( pk, entityPersister );
final Object entity = getPersistenceContext().getEntity( entityKey );
final EntityEntry entry = getPersistenceContext().getEntry( entity );
if ( entry == null ) {
if ( LOG.isDebugEnabled() ) {
if ( debugEnabled ) {
LOG.debug(
"Cached natural-id/pk resolution linked to null EntityEntry in persistence context : "
+ MessageHelper.infoString( entityPersister, pk, getFactory() )

@ -1346,16 +1346,16 @@ public abstract class Loader {
final SessionImplementor session) {
if ( keys != null ) {
final boolean debugEnabled = LOG.isDebugEnabled();
// this is a collection initializer, so we must create a collection
// for each of the passed-in keys, to account for the possibility
// that the collection is empty and has no rows in the result set
CollectionPersister[] collectionPersisters = getCollectionPersisters();
for ( int j=0; j<collectionPersisters.length; j++ ) {
for ( int i = 0; i < keys.length; i++ ) {
//handle empty collections
if ( LOG.isDebugEnabled() ) {
if ( debugEnabled ) {
LOG.debugf( "Result set contains (possibly empty) collection: %s",
MessageHelper.collectionInfoString( collectionPersisters[j], keys[i], getFactory() ) );
}

@ -3294,14 +3294,15 @@ public abstract class AbstractEntityPersister
);
}
if ( LOG.isTraceEnabled() ) {
final boolean traceEnabled = LOG.isTraceEnabled();
if ( traceEnabled ) {
LOG.tracev( "Deleting entity: {0}", MessageHelper.infoString( this, id, getFactory() ) );
if ( useVersion )
LOG.tracev( "Version: {0}", version );
}
if ( isTableCascadeDeleteEnabled( j ) ) {
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Delete handled by foreign key constraint: {0}", getTableName( j ) );
}
return; //EARLY EXIT!

@ -340,15 +340,16 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType,Logg
public void setValue(PreparedStatement st, Enum value, int index) throws SQLException {
final Object jdbcValue = value == null ? null : extractJdbcValue( value );
final boolean traceEnabled = LOG.isTraceEnabled();
if ( jdbcValue == null ) {
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.trace(String.format("Binding null to parameter: [%s]", index));
}
st.setNull( index, getSqlType() );
return;
}
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.trace(String.format("Binding [%s] to parameter: [%s]", jdbcValue, index));
}
st.setObject( index, jdbcValue, EnumType.this.sqlType );
@ -366,15 +367,16 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType,Logg
@Override
public Enum getValue(ResultSet rs, String[] names) throws SQLException {
final int ordinal = rs.getInt( names[0] );
final boolean traceEnabled = LOG.isTraceEnabled();
if ( rs.wasNull() ) {
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.trace(String.format("Returning null as column [%s]", names[0]));
}
return null;
}
final Enum enumValue = fromOrdinal( ordinal );
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.trace(String.format("Returning [%s] as column [%s]", enumValue, names[0]));
}
return enumValue;
@ -436,15 +438,16 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType,Logg
public Enum getValue(ResultSet rs, String[] names) throws SQLException {
final String value = rs.getString( names[0] );
final boolean traceEnabled = LOG.isTraceEnabled();
if ( rs.wasNull() ) {
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.trace(String.format("Returning null as column [%s]", names[0]));
}
return null;
}
final Enum enumValue = fromName( value );
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.trace(String.format("Returning [%s] as column [%s]", enumValue, names[0]));
}
return enumValue;

@ -66,8 +66,9 @@ public abstract class BasicBinder<J> implements ValueBinder<J> {
* {@inheritDoc}
*/
public final void bind(PreparedStatement st, J value, int index, WrapperOptions options) throws SQLException {
final boolean traceEnabled = LOG.isTraceEnabled();
if ( value == null ) {
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.trace(
String.format(
NULL_BIND_MSG_TEMPLATE,
@ -79,7 +80,7 @@ public abstract class BasicBinder<J> implements ValueBinder<J> {
st.setNull( index, sqlDescriptor.getSqlType() );
}
else {
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.trace(
String.format(
BIND_MSG_TEMPLATE,

@ -62,12 +62,15 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J> {
@Override
public J extract(ResultSet rs, String name, WrapperOptions options) throws SQLException {
final J value = doExtract( rs, name, options );
final boolean traceEnabled = LOG.isTraceEnabled();
if ( value == null || rs.wasNull() ) {
LOG.tracev( "Found [null] as column [{0}]", name );
if ( traceEnabled ) {
LOG.tracev( "Found [null] as column [{0}]", name );
}
return null;
}
else {
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Found [{0}] as column [{1}]", getJavaDescriptor().extractLoggableRepresentation( value ), name );
}
return value;
@ -93,12 +96,15 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J> {
@Override
public J extract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
final J value = doExtract( statement, index, options );
final boolean traceEnabled = LOG.isTraceEnabled();
if ( value == null || statement.wasNull() ) {
LOG.tracev( "Found [null] as procedure output parameter [{0}]", index );
if ( traceEnabled ) {
LOG.tracev( "Found [null] as procedure output parameter [{0}]", index );
}
return null;
}
else {
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Found [{0}] as procedure output parameter [{1}]", getJavaDescriptor().extractLoggableRepresentation( value ), index );
}
return value;
@ -128,12 +134,15 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J> {
}
final String paramName = paramNames[0];
final J value = doExtract( statement, paramName, options );
final boolean traceEnabled = LOG.isTraceEnabled();
if ( value == null || statement.wasNull() ) {
LOG.tracev( "Found [null] as procedure output parameter [{0}]", paramName );
if ( traceEnabled ) {
LOG.tracev( "Found [null] as procedure output parameter [{0}]", paramName );
}
return null;
}
else {
if ( LOG.isTraceEnabled() ) {
if ( traceEnabled ) {
LOG.tracev( "Found [{0}] as procedure output parameter [{1}]", getJavaDescriptor().extractLoggableRepresentation( value ), paramName );
}
return value;

@ -469,7 +469,7 @@ sessionFactory().getStatistics().clear();
contactExists();
thinkRandomTime();
++completedIterations;
if ( log.isTraceEnabled() ) {
if ( trace ) {
log.tracef( "Iteration completed {0}", completedIterations );
}
}