HHH-7908 Logging level checking
Conflicts:
    hibernate-core/src/main/java/org/hibernate/type/descriptor/sql/BasicExtractor.java

commit 06e6c04e39
parent b5d2f455f0

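The pattern applied throughout this commit: query the logger's level once, store the result in a local final boolean, and guard each trace/debug statement with that flag so hot code paths do not re-check the level (or build message arguments) on every call. Below is a minimal illustrative sketch of the idiom, assuming the org.jboss.logging.Logger API that Hibernate's CoreMessageLogger extends; the class, method, and loop are made up for illustration and are not part of the commit:

import org.jboss.logging.Logger;

public class LoggingLevelCheckSketch {
    private static final Logger LOG = Logger.getLogger( LoggingLevelCheckSketch.class );

    public void process(Object[] items) {
        // Hoist the level check out of the loop: one isTraceEnabled() call
        // instead of one call per iteration.
        final boolean traceEnabled = LOG.isTraceEnabled();
        for ( Object item : items ) {
            if ( traceEnabled ) {
                LOG.tracev( "Processing item: {0}", item );
            }
            // ... actual work on the item would go here ...
        }
    }
}

Since tracev defers formatting until the message is actually logged, the guard mainly saves the repeated level lookups and argument boxing in tight loops, which is what the traceEnabled/debugEnabled locals in the hunks below do.
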
@@ -204,11 +204,13 @@ public class UnresolvedEntityInsertActions {
         if ( entityEntry.getStatus() != Status.MANAGED && entityEntry.getStatus() != Status.READ_ONLY ) {
             throw new IllegalArgumentException( "EntityEntry did not have status MANAGED or READ_ONLY: " + entityEntry );
         }
+
+        final boolean traceEnabled = LOG.isTraceEnabled();
         // Find out if there are any unresolved insertions that are waiting for the
         // specified entity to be resolved.
         Set<AbstractEntityInsertAction> dependentActions = dependentActionsByTransientEntity.remove( managedEntity );
         if ( dependentActions == null ) {
-            if ( LOG.isTraceEnabled() ) {
+            if ( traceEnabled ) {
                 LOG.tracev(
                         "No unresolved entity inserts that depended on [{0}]",
                         MessageHelper.infoString( entityEntry.getEntityName(), entityEntry.getId() )
@@ -217,7 +219,7 @@ public class UnresolvedEntityInsertActions {
             return Collections.emptySet(); //NOTE EARLY EXIT!
         }
         Set<AbstractEntityInsertAction> resolvedActions = new IdentitySet( );
-        if ( LOG.isTraceEnabled() ) {
+        if ( traceEnabled ) {
             LOG.tracev(
                     "Unresolved inserts before resolving [{0}]: [{1}]",
                     MessageHelper.infoString( entityEntry.getEntityName(), entityEntry.getId() ),
@@ -225,7 +227,7 @@ public class UnresolvedEntityInsertActions {
             );
         }
         for ( AbstractEntityInsertAction dependentAction : dependentActions ) {
-            if ( LOG.isTraceEnabled() ) {
+            if ( traceEnabled ) {
                 LOG.tracev(
                         "Resolving insert [{0}] dependency on [{1}]",
                         MessageHelper.infoString( dependentAction.getEntityName(), dependentAction.getId() ),
@@ -235,7 +237,7 @@ public class UnresolvedEntityInsertActions {
             NonNullableTransientDependencies dependencies = dependenciesByAction.get( dependentAction );
             dependencies.resolveNonNullableTransientEntity( managedEntity );
             if ( dependencies.isEmpty() ) {
-                if ( LOG.isTraceEnabled() ) {
+                if ( traceEnabled ) {
                     LOG.tracev(
                             "Resolving insert [{0}] (only depended on [{1}])",
                             dependentAction,
@@ -247,7 +249,7 @@ public class UnresolvedEntityInsertActions {
                 resolvedActions.add( dependentAction );
             }
         }
-        if ( LOG.isTraceEnabled() ) {
+        if ( traceEnabled ) {
             LOG.tracev(
                     "Unresolved inserts after resolving [{0}]: [{1}]",
                     MessageHelper.infoString( entityEntry.getEntityName(), entityEntry.getId() ),

@@ -1471,7 +1471,8 @@ public final class AnnotationBinder {
      * ordering does not matter
      */

-        if ( LOG.isTraceEnabled() ) {
+        final boolean traceEnabled = LOG.isTraceEnabled();
+        if ( traceEnabled ) {
             LOG.tracev( "Processing annotations of {0}.{1}" , propertyHolder.getEntityName(), inferredData.getPropertyName() );
         }

@@ -1537,7 +1538,7 @@ public final class AnnotationBinder {
                         + propertyHolder.getEntityName()
             );
         }
-        if ( LOG.isTraceEnabled() ) {
+        if ( traceEnabled ) {
             LOG.tracev( "{0} is a version property", inferredData.getPropertyName() );
         }
         RootClass rootClass = ( RootClass ) propertyHolder.getPersistentClass();
@@ -1563,7 +1564,7 @@ public final class AnnotationBinder {
             SimpleValue simpleValue = ( SimpleValue ) prop.getValue();
             simpleValue.setNullValue( "undefined" );
             rootClass.setOptimisticLockMode( Versioning.OPTIMISTIC_LOCK_VERSION );
-            if ( LOG.isTraceEnabled() ) {
+            if ( traceEnabled ) {
                 LOG.tracev( "Version name: {0}, unsavedValue: {1}", rootClass.getVersion().getName(),
                         ( (SimpleValue) rootClass.getVersion().getValue() ).getNullValue() );
             }

@@ -61,12 +61,15 @@ public abstract class CollectionSecondPass implements SecondPass {

     public void doSecondPass(java.util.Map persistentClasses)
             throws MappingException {
-        LOG.debugf( "Second pass for collection: %s", collection.getRole() );
+        final boolean debugEnabled = LOG.isDebugEnabled();
+        if ( debugEnabled ) {
+            LOG.debugf( "Second pass for collection: %s", collection.getRole() );
+        }

         secondPass( persistentClasses, localInheritedMetas ); // using local since the inheritedMetas at this point is not the correct map since it is always the empty map
         collection.createAllKeys();

-        if ( LOG.isDebugEnabled() ) {
+        if ( debugEnabled ) {
             String msg = "Mapped collection key: " + columns( collection.getKey() );
             if ( collection.isIndexed() )
                 msg += ", index: " + columns( ( (IndexedCollection) collection ).getIndex() );

@@ -2656,6 +2656,7 @@ public final class HbmBinder {
                     "not valid within collection using join fetching [" + collection.getRole() + "]"
             );
         }
+        final boolean debugEnabled = LOG.isDebugEnabled();
         while ( filters.hasNext() ) {
             final Element filterElement = ( Element ) filters.next();
             final String name = filterElement.attributeValue( "name" );
@@ -2673,7 +2674,7 @@ public final class HbmBinder {
                 Element alias = (Element) aliasesIterator.next();
                 aliasTables.put(alias.attributeValue("alias"), alias.attributeValue("table"));
             }
-            if ( LOG.isDebugEnabled() ) {
+            if ( debugEnabled ) {
                 LOG.debugf( "Applying many-to-many filter [%s] as [%s] to role [%s]", name, condition, collection.getRole() );
             }
             String autoAliasInjectionText = filterElement.attributeValue("autoAliasInjection");

@@ -755,7 +755,9 @@ public abstract class CollectionBinder {
             String hqlOrderBy,
             Mappings mappings,
             Map<XClass, InheritanceState> inheritanceStatePerClass) {
-        if ( LOG.isDebugEnabled() ) {
+
+        final boolean debugEnabled = LOG.isDebugEnabled();
+        if ( debugEnabled ) {
             LOG.debugf( "Binding a OneToMany: %s.%s through a foreign key", propertyHolder.getEntityName(), propertyName );
         }
         org.hibernate.mapping.OneToMany oneToMany = new org.hibernate.mapping.OneToMany( mappings, collection.getOwner() );
@@ -784,7 +786,7 @@ public abstract class CollectionBinder {
             column.setJoins( joins );
             collection.setCollectionTable( column.getTable() );
         }
-        if ( LOG.isDebugEnabled() ) {
+        if ( debugEnabled ) {
             LOG.debugf( "Mapping collection: %s -> %s", collection.getRole(), collection.getCollectionTable().getName() );
         }
         bindFilters( false );

@@ -146,7 +146,8 @@ public final class Cascade {
             throws HibernateException {

         if ( persister.hasCascades() || action.requiresNoCascadeChecking() ) { // performance opt
-            if ( LOG.isTraceEnabled() ) {
+            final boolean traceEnabled = LOG.isTraceEnabled();
+            if ( traceEnabled ) {
                 LOG.tracev( "Processing cascade {0} for: {1}", action, persister.getEntityName() );
             }

@@ -183,7 +184,7 @@ public final class Cascade {
                 }
             }

-            if ( LOG.isTraceEnabled() ) {
+            if ( traceEnabled ) {
                 LOG.tracev( "Done processing cascade {0} for: {1}", action, persister.getEntityName() );
             }
         }
@@ -407,7 +408,8 @@ public final class Cascade {
         boolean reallyDoCascade = style.reallyDoCascade(action) && child!=CollectionType.UNFETCHED_COLLECTION;

         if ( reallyDoCascade ) {
-            if ( LOG.isTraceEnabled() ) {
+            final boolean traceEnabled = LOG.isTraceEnabled();
+            if ( traceEnabled ) {
                 LOG.tracev( "Cascade {0} for collection: {1}", action, collectionType.getRole() );
             }

@@ -424,7 +426,7 @@ public final class Cascade {
             );
             }

-            if ( LOG.isTraceEnabled() ) {
+            if ( traceEnabled ) {
                 LOG.tracev( "Done cascade {0} for collection: {1}", action, collectionType.getRole() );
             }
         }
@@ -435,7 +437,8 @@ public final class Cascade {
                 child instanceof PersistentCollection; //a newly instantiated collection can't have orphans

         if ( deleteOrphans ) { // handle orphaned entities!!
-            if ( LOG.isTraceEnabled() ) {
+            final boolean traceEnabled = LOG.isTraceEnabled();
+            if ( traceEnabled ) {
                 LOG.tracev( "Deleting orphans for collection: {0}", collectionType.getRole() );
             }
             // we can do the cast since orphan-delete does not apply to:
@@ -444,7 +447,7 @@ public final class Cascade {
             final String entityName = collectionType.getAssociatedEntityName( eventSource.getFactory() );
             deleteOrphans( entityName, (PersistentCollection) child );

-            if ( LOG.isTraceEnabled() ) {
+            if ( traceEnabled ) {
                 LOG.tracev( "Done deleting orphans for collection: {0}", collectionType.getRole() );
             }
         }

@@ -72,7 +72,7 @@ public final class Collections {
         CollectionEntry entry = persistenceContext.getCollectionEntry(coll);
         final CollectionPersister loadedPersister = entry.getLoadedPersister();

-        if ( LOG.isDebugEnabled() && loadedPersister != null ) {
+        if ( loadedPersister != null && LOG.isDebugEnabled() ) {
             LOG.debugf(
                     "Collection dereferenced: %s",
                     MessageHelper.collectionInfoString( loadedPersister,

@@ -112,6 +112,7 @@ public class ParameterBinder {
             final NamedParameterSource source,
             final SessionImplementor session) throws SQLException, HibernateException {
         if ( namedParams != null ) {
+            final boolean debugEnabled = LOG.isDebugEnabled();
             // assumes that types are all of span 1
             Iterator iter = namedParams.entrySet().iterator();
             int result = 0;
@@ -121,7 +122,7 @@ public class ParameterBinder {
                 TypedValue typedval = (TypedValue) e.getValue();
                 int[] locations = source.getNamedParameterLocations( name );
                 for ( int i = 0; i < locations.length; i++ ) {
-                    if ( LOG.isDebugEnabled() ) {
+                    if ( debugEnabled ) {
                         LOG.debugf("bindNamedParameters() %s -> %s [%s]", typedval.getValue(), name, locations[i] + start);
                     }
                     typedval.getType().nullSafeSet( ps, typedval.getValue(), locations[i] + start, session );

@@ -101,7 +101,7 @@ public final class TwoPhaseLoad {
                 lazyPropertiesAreUnfetched
         );

-        if ( LOG.isTraceEnabled() && version != null ) {
+        if ( version != null && LOG.isTraceEnabled() ) {
             String versionStr = persister.isVersioned()
                     ? persister.getVersionType().toLoggableString( version, session.getFactory() )
                     : "null";
@@ -153,7 +153,8 @@ public final class TwoPhaseLoad {
         Serializable id = entityEntry.getId();
         Object[] hydratedState = entityEntry.getLoadedState();

-        if ( LOG.isDebugEnabled() ) {
+        final boolean debugEnabled = LOG.isDebugEnabled();
+        if ( debugEnabled ) {
             LOG.debugf(
                     "Resolving associations for %s",
                     MessageHelper.infoString( persister, id, session.getFactory() )
@@ -187,7 +188,7 @@ public final class TwoPhaseLoad {
         final SessionFactoryImplementor factory = session.getFactory();
         if ( persister.hasCache() && session.getCacheMode().isPutEnabled() ) {

-            if ( LOG.isDebugEnabled() ) {
+            if ( debugEnabled ) {
                 LOG.debugf(
                         "Adding entity to second-level cache: %s",
                         MessageHelper.infoString( persister, id, session.getFactory() )
@@ -272,7 +273,7 @@ public final class TwoPhaseLoad {
                 session
         );

-        if ( LOG.isDebugEnabled() ) {
+        if ( debugEnabled ) {
             LOG.debugf(
                     "Done materializing entity %s",
                     MessageHelper.infoString( persister, id, session.getFactory() )

@@ -136,11 +136,14 @@ public class SqlExceptionHelper {
        if (LOG.isEnabled(Level.ERROR)) {
            if (LOG.isDebugEnabled()) {
                message = StringHelper.isNotEmpty(message) ? message : DEFAULT_EXCEPTION_MSG;
-               LOG.debug( message, sqlException );
+               LOG.debug( message, sqlException );
            }
+           final boolean warnEnabled = LOG.isEnabled( Level.WARN );
            while (sqlException != null) {
-               StringBuilder buf = new StringBuilder(30).append("SQL Error: ").append(sqlException.getErrorCode()).append(", SQLState: ").append(sqlException.getSQLState());
-               LOG.warn(buf.toString());
+               if ( warnEnabled ) {
+                   StringBuilder buf = new StringBuilder(30).append("SQL Error: ").append(sqlException.getErrorCode()).append(", SQLState: ").append(sqlException.getSQLState());
+                   LOG.warn(buf.toString());
+               }
                LOG.error(sqlException.getMessage());
                sqlException = sqlException.getNextException();
            }

@@ -219,20 +219,21 @@ public class CollectionLoadContext {
     }

     private void endLoadingCollections(CollectionPersister persister, List matchedCollectionEntries) {
+        final boolean debugEnabled = LOG.isDebugEnabled();
         if ( matchedCollectionEntries == null ) {
-            if ( LOG.isDebugEnabled()) LOG.debugf( "No collections were found in result set for role: %s", persister.getRole() );
+            if ( debugEnabled ) LOG.debugf( "No collections were found in result set for role: %s", persister.getRole() );
             return;
         }

         final int count = matchedCollectionEntries.size();
-        if ( LOG.isDebugEnabled()) LOG.debugf("%s collections were found in result set for role: %s", count, persister.getRole());
+        if ( debugEnabled ) LOG.debugf("%s collections were found in result set for role: %s", count, persister.getRole());

         for ( int i = 0; i < count; i++ ) {
             LoadingCollectionEntry lce = ( LoadingCollectionEntry ) matchedCollectionEntries.get( i );
             endLoadingCollection( lce, persister );
         }

-        if ( LOG.isDebugEnabled() ) LOG.debugf( "%s collections initialized for role: %s", count, persister.getRole() );
+        if ( debugEnabled ) LOG.debugf( "%s collections initialized for role: %s", count, persister.getRole() );
     }

     private void endLoadingCollection(LoadingCollectionEntry lce, CollectionPersister persister) {
@@ -282,13 +283,16 @@ public class CollectionLoadContext {
         final SessionImplementor session = getLoadContext().getPersistenceContext().getSession();
         final SessionFactoryImplementor factory = session.getFactory();

-        if ( LOG.isDebugEnabled() ) {
+        final boolean debugEnabled = LOG.isDebugEnabled();
+        if ( debugEnabled ) {
             LOG.debugf( "Caching collection: %s", MessageHelper.collectionInfoString( persister, lce.getCollection(), lce.getKey(), session ) );
         }

         if ( !session.getEnabledFilters().isEmpty() && persister.isAffectedByEnabledFilters( session ) ) {
             // some filters affecting the collection are enabled on the session, so do not do the put into the cache.
-            LOG.debug( "Refusing to add to cache due to enabled filters" );
+            if ( debugEnabled ) {
+                LOG.debug( "Refusing to add to cache due to enabled filters" );
+            }
             // todo : add the notion of enabled filters to the CacheKey to differentiate filtered collections from non-filtered;
             // but CacheKey is currently used for both collections and entities; would ideally need to define two seperate ones;
             // currently this works in conjuction with the check on

@@ -472,18 +472,19 @@ public abstract class AbstractSaveEventListener extends AbstractReassociateEvent
             EntityEntry entry, //pass this as an argument only to avoid double looking
             SessionImplementor source) {

+        final boolean traceEnabled = LOG.isTraceEnabled();
         if ( entry != null ) { // the object is persistent

             //the entity is associated with the session, so check its status
             if ( entry.getStatus() != Status.DELETED ) {
                 // do nothing for persistent instances
-                if ( LOG.isTraceEnabled() ) {
+                if ( traceEnabled ) {
                     LOG.tracev( "Persistent instance of: {0}", getLoggableName( entityName, entity ) );
                 }
                 return EntityState.PERSISTENT;
             }
             // ie. e.status==DELETED
-            if ( LOG.isTraceEnabled() ) {
+            if ( traceEnabled ) {
                 LOG.tracev( "Deleted instance of: {0}", getLoggableName( entityName, entity ) );
             }
             return EntityState.DELETED;
@@ -494,12 +495,12 @@ public abstract class AbstractSaveEventListener extends AbstractReassociateEvent
             // try interceptor and unsaved-value

             if ( ForeignKeys.isTransient( entityName, entity, getAssumedUnsaved(), source )) {
-                if ( LOG.isTraceEnabled() ) {
+                if ( traceEnabled ) {
                     LOG.tracev( "Transient instance of: {0}", getLoggableName( entityName, entity ) );
                 }
                 return EntityState.TRANSIENT;
             }
-            if ( LOG.isTraceEnabled() ) {
+            if ( traceEnabled ) {
                 LOG.tracev( "Detached instance of: {0}", getLoggableName( entityName, entity ) );
             }
             return EntityState.DETACHED;

@@ -633,7 +633,7 @@ public class DefaultFlushEntityEventListener implements FlushEntityEventListener
     }

     private void logDirtyProperties(Serializable id, int[] dirtyProperties, EntityPersister persister) {
-        if ( LOG.isTraceEnabled() && dirtyProperties != null && dirtyProperties.length > 0 ) {
+        if ( dirtyProperties != null && dirtyProperties.length > 0 && LOG.isTraceEnabled() ) {
             final String[] allPropertyNames = persister.getPropertyNames();
             final String[] dirtyPropertyNames = new String[ dirtyProperties.length ];
             for ( int i = 0; i < dirtyProperties.length; i++ ) {

@@ -59,12 +59,13 @@ public class DefaultInitializeCollectionEventListener implements InitializeColle
         CollectionEntry ce = source.getPersistenceContext().getCollectionEntry(collection);
         if (ce==null) throw new HibernateException("collection was evicted");
         if ( !collection.wasInitialized() ) {
-            if ( LOG.isTraceEnabled() ) {
+            final boolean traceEnabled = LOG.isTraceEnabled();
+            if ( traceEnabled ) {
                 LOG.tracev( "Initializing collection {0}",
                         MessageHelper.collectionInfoString( ce.getLoadedPersister(), collection, ce.getLoadedKey(), source ) );
+                LOG.trace( "Checking second-level cache" );
             }

-            LOG.trace( "Checking second-level cache" );
             final boolean foundInCache = initializeCollectionFromCache(
                     ce.getLoadedKey(),
                     ce.getLoadedPersister(),
@@ -72,13 +73,17 @@ public class DefaultInitializeCollectionEventListener implements InitializeColle
                     source
             );

-            if ( foundInCache ) {
+            if ( foundInCache && traceEnabled ) {
                 LOG.trace( "Collection initialized from cache" );
             }
             else {
-                LOG.trace( "Collection not cached" );
+                if ( traceEnabled ) {
+                    LOG.trace( "Collection not cached" );
+                }
                 ce.getLoadedPersister().initialize( ce.getLoadedKey(), source );
-                LOG.trace( "Collection initialized" );
+                if ( traceEnabled ) {
+                    LOG.trace( "Collection initialized" );
+                }

                 if ( source.getFactory().getStatistics().isStatisticsEnabled() ) {
                     source.getFactory().getStatisticsImplementor().fetchCollection(

@@ -100,8 +100,9 @@ public class DefaultReplicateEventListener extends AbstractSaveEventListener imp
             oldVersion = persister.getCurrentVersion( id, source );
         }

+        final boolean traceEnabled = LOG.isTraceEnabled();
         if ( oldVersion != null ) {
-            if ( LOG.isTraceEnabled() ) {
+            if ( traceEnabled ) {
                 LOG.tracev( "Found existing row for {0}", MessageHelper.infoString( persister, id, source.getFactory() ) );
             }

@@ -119,14 +120,14 @@ public class DefaultReplicateEventListener extends AbstractSaveEventListener imp
             // else do nothing (don't even reassociate object!)
             if ( canReplicate )
                 performReplication( entity, id, realOldVersion, persister, replicationMode, source );
-            else
+            else if ( traceEnabled )
                 LOG.trace( "No need to replicate" );

             //TODO: would it be better to do a refresh from db?
         }
         else {
             // no existing row - do an insert
-            if ( LOG.isTraceEnabled() ) {
+            if ( traceEnabled ) {
                 LOG.tracev( "No existing row, replicating new instance {0}",
                         MessageHelper.infoString( persister, id, source.getFactory() ) );
             }

@@ -116,8 +116,10 @@ public class DefaultSaveOrUpdateEventListener extends AbstractSaveEventListener
     }

     protected Serializable entityIsPersistent(SaveOrUpdateEvent event) throws HibernateException {
-        LOG.trace( "Ignoring persistent instance" );
-
+        final boolean traceEnabled = LOG.isTraceEnabled();
+        if ( traceEnabled ) {
+            LOG.trace( "Ignoring persistent instance" );
+        }
         EntityEntry entityEntry = event.getEntry();
         if ( entityEntry == null ) {
             throw new AssertionFailure( "entity was transient or detached" );
@@ -152,7 +154,7 @@ public class DefaultSaveOrUpdateEventListener extends AbstractSaveEventListener

         }

-        if ( LOG.isTraceEnabled() ) {
+        if ( traceEnabled ) {
             LOG.tracev( "Object already associated with session: {0}", MessageHelper.infoString( entityEntry.getPersister(), savedId, factory ) );
         }

@@ -278,11 +280,12 @@ public class DefaultSaveOrUpdateEventListener extends AbstractSaveEventListener
             Object entity,
             EntityPersister persister) throws HibernateException {

-        if ( !persister.isMutable() ) {
+        final boolean traceEnabled = LOG.isTraceEnabled();
+        if ( traceEnabled && !persister.isMutable() ) {
             LOG.trace( "Immutable instance passed to performUpdate()" );
         }

-        if ( LOG.isTraceEnabled() ) {
+        if ( traceEnabled ) {
             LOG.tracev( "Updating {0}",
                     MessageHelper.infoString( persister, event.getRequestedId(), event.getSession().getFactory() ) );
         }
@@ -328,7 +331,7 @@ public class DefaultSaveOrUpdateEventListener extends AbstractSaveEventListener

         persister.afterReassociate(entity, source);

-        if ( LOG.isTraceEnabled() ) {
+        if ( traceEnabled ) {
             LOG.tracev( "Updating {0}", MessageHelper.infoString( persister, event.getRequestedId(), source.getFactory() ) );
         }

@@ -311,7 +311,7 @@ public class FromElement extends HqlSqlWalkerNode implements DisplayableNode, Pa
     }

     public void setIncludeSubclasses(boolean includeSubclasses) {
-        if ( LOG.isTraceEnabled() && isDereferencedBySuperclassOrSubclassProperty() && !includeSubclasses )
+        if ( !includeSubclasses && isDereferencedBySuperclassOrSubclassProperty() && LOG.isTraceEnabled() )
             LOG.trace( "Attempt to disable subclass-inclusions : ", new Exception( "Stack-trace source" ) );
         this.includeSubclasses = includeSubclasses;
     }

@@ -122,7 +122,10 @@ public class IncrementGenerator implements IdentifierGenerator, Configurable {
     private void initializePreviousValueHolder(SessionImplementor session) {
         previousValueHolder = IdentifierGeneratorHelper.getIntegralDataTypeHolder( returnClass );

-        LOG.debugf( "Fetching initial value: %s", sql );
+        final boolean debugEnabled = LOG.isDebugEnabled();
+        if ( debugEnabled ) {
+            LOG.debugf( "Fetching initial value: %s", sql );
+        }
         try {
             PreparedStatement st = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareStatement( sql );
             try {
@@ -131,7 +134,7 @@ public class IncrementGenerator implements IdentifierGenerator, Configurable {
                 if (rs.next()) previousValueHolder.initialize(rs, 0L).increment();
                 else previousValueHolder.initialize(1L);
                 sql = null;
-                if ( LOG.isDebugEnabled() ) {
+                if ( debugEnabled ) {
                     LOG.debugf( "First free id: %s", previousValueHolder.makeValue() );
                 }
             }

@@ -650,7 +650,12 @@ public final class SessionFactoryImpl
             MetadataImplementor metadata,
             SessionFactoryOptions sessionFactoryOptions,
             SessionFactoryObserver observer) throws HibernateException {
-        LOG.debug( "Building session factory" );
+
+        final boolean traceEnabled = LOG.isTraceEnabled();
+        final boolean debugEnabled = traceEnabled || LOG.isDebugEnabled();
+        if ( debugEnabled ) {
+            LOG.debug( "Building session factory" );
+        }

         this.sessionFactoryOptions = sessionFactoryOptions;

@@ -692,9 +697,10 @@ public final class SessionFactoryImpl
             filters.put( filterDefinition.getFilterName(), filterDefinition );
         }

-        LOG.debugf( "Session factory constructed with filter configurations : %s", filters );
-        LOG.debugf( "Instantiating session factory with properties: %s", properties );
-
+        if ( debugEnabled ) {
+            LOG.debugf( "Session factory constructed with filter configurations : %s", filters );
+            LOG.debugf( "Instantiating session factory with properties: %s", properties );
+        }
         this.queryPlanCache = new QueryPlanCache( this );

         class IntegratorObserver implements SessionFactoryObserver {
@@ -761,7 +767,7 @@ public final class SessionFactoryImpl
             accessStrategy = EntityRegionAccessStrategy.class.cast( entityAccessStrategies.get( cacheRegionName ) );
             if ( accessStrategy == null ) {
                 final AccessType accessType = model.getHierarchyDetails().getCaching().getAccessType();
-                if ( LOG.isTraceEnabled() ) {
+                if ( traceEnabled ) {
                     LOG.tracev( "Building cache for entity data [{0}]", model.getEntity().getName() );
                 }
                 EntityRegion entityRegion = settings.getRegionFactory().buildEntityRegion(
@@ -796,7 +802,7 @@ public final class SessionFactoryImpl
             final AccessType accessType = model.getCaching().getAccessType();
             CollectionRegionAccessStrategy accessStrategy = null;
             if ( accessType != null && settings.isSecondLevelCacheEnabled() ) {
-                if ( LOG.isTraceEnabled() ) {
+                if ( traceEnabled ) {
                     LOG.tracev( "Building cache for collection data [{0}]", model.getAttribute().getRole() );
                 }
                 CollectionRegion collectionRegion = settings.getRegionFactory().buildCollectionRegion(
@@ -887,7 +893,9 @@ public final class SessionFactoryImpl
                 serviceRegistry.getService( JndiService.class )
         );

-        LOG.debug("Instantiated session factory");
+        if ( debugEnabled ) {
+            LOG.debug("Instantiated session factory");
+        }

         if ( settings.isAutoCreateSchema() ) {
             new SchemaExport( metadata )
@@ -1054,9 +1062,9 @@ public final class SessionFactoryImpl
     @SuppressWarnings( {"ThrowableResultOfMethodCallIgnored"})
     private Map<String,HibernateException> checkNamedQueries() throws HibernateException {
         Map<String,HibernateException> errors = new HashMap<String,HibernateException>();
-
+        final boolean debugEnabled = LOG.isDebugEnabled();
         // Check named HQL queries
-        if ( LOG.isDebugEnabled() ) {
+        if ( debugEnabled ) {
             LOG.debugf( "Checking %s named HQL queries", namedQueries.size() );
         }
         Iterator itr = namedQueries.entrySet().iterator();
@@ -1066,7 +1074,9 @@ public final class SessionFactoryImpl
             final NamedQueryDefinition qd = ( NamedQueryDefinition ) entry.getValue();
             // this will throw an error if there's something wrong.
             try {
-                LOG.debugf( "Checking named query: %s", queryName );
+                if ( debugEnabled ) {
+                    LOG.debugf( "Checking named query: %s", queryName );
+                }
                 //TODO: BUG! this currently fails for named queries for non-POJO entities
                 queryPlanCache.getHQLQueryPlan( qd.getQueryString(), false, Collections.EMPTY_MAP );
             }
@@ -1076,10 +1086,8 @@ public final class SessionFactoryImpl
             catch ( MappingException e ) {
                 errors.put( queryName, e );
             }
-
-
         }
-        if ( LOG.isDebugEnabled() ) {
+        if ( debugEnabled ) {
             LOG.debugf( "Checking %s named SQL queries", namedSqlQueries.size() );
         }
         itr = namedSqlQueries.entrySet().iterator();
@@ -1089,7 +1097,9 @@ public final class SessionFactoryImpl
             final NamedSQLQueryDefinition qd = ( NamedSQLQueryDefinition ) entry.getValue();
             // this will throw an error if there's something wrong.
             try {
-                LOG.debugf( "Checking named SQL query: %s", queryName );
+                if ( debugEnabled ) {
+                    LOG.debugf( "Checking named SQL query: %s", queryName );
+                }
                 // TODO : would be really nice to cache the spec on the query-def so as to not have to re-calc the hash;
                 // currently not doable though because of the resultset-ref stuff...
                 NativeSQLQuerySpecification spec;

@@ -2552,13 +2552,14 @@ public final class SessionImpl extends AbstractSessionImpl implements EventSourc
             return;
         }

+        final boolean debugEnabled = LOG.isDebugEnabled();
         for ( Serializable pk : getPersistenceContext().getNaturalIdHelper().getCachedPkResolutions( entityPersister ) ) {
             final EntityKey entityKey = generateEntityKey( pk, entityPersister );
             final Object entity = getPersistenceContext().getEntity( entityKey );
             final EntityEntry entry = getPersistenceContext().getEntry( entity );

             if ( entry == null ) {
-                if ( LOG.isDebugEnabled() ) {
+                if ( debugEnabled ) {
                     LOG.debug(
                             "Cached natural-id/pk resolution linked to null EntityEntry in persistence context : "
                                     + MessageHelper.infoString( entityPersister, pk, getFactory() )

@@ -1348,16 +1348,16 @@ public abstract class Loader {
             final SessionImplementor session) {

         if ( keys != null ) {
+            final boolean debugEnabled = LOG.isDebugEnabled();
             // this is a collection initializer, so we must create a collection
             // for each of the passed-in keys, to account for the possibility
             // that the collection is empty and has no rows in the result set

             CollectionPersister[] collectionPersisters = getCollectionPersisters();
             for ( int j=0; j<collectionPersisters.length; j++ ) {
                 for ( int i = 0; i < keys.length; i++ ) {
                     //handle empty collections
-
-                    if ( LOG.isDebugEnabled() ) {
+                    if ( debugEnabled ) {
                         LOG.debugf( "Result set contains (possibly empty) collection: %s",
                                 MessageHelper.collectionInfoString( collectionPersisters[j], keys[i], getFactory() ) );
                     }

@@ -3280,14 +3280,15 @@ public abstract class AbstractEntityPersister
             );
         }

-        if ( LOG.isTraceEnabled() ) {
+        final boolean traceEnabled = LOG.isTraceEnabled();
+        if ( traceEnabled ) {
             LOG.tracev( "Deleting entity: {0}", MessageHelper.infoString( this, id, getFactory() ) );
             if ( useVersion )
                 LOG.tracev( "Version: {0}", version );
         }

         if ( isTableCascadeDeleteEnabled( j ) ) {
-            if ( LOG.isTraceEnabled() ) {
+            if ( traceEnabled ) {
                 LOG.tracev( "Delete handled by foreign key constraint: {0}", getTableName( j ) );
             }
             return; //EARLY EXIT!

@@ -340,15 +340,16 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType,Logg
         public void setValue(PreparedStatement st, Enum value, int index) throws SQLException {
             final Object jdbcValue = value == null ? null : extractJdbcValue( value );

+            final boolean traceEnabled = LOG.isTraceEnabled();
             if ( jdbcValue == null ) {
-                if ( LOG.isTraceEnabled() ) {
+                if ( traceEnabled ) {
                     LOG.trace(String.format("Binding null to parameter: [%s]", index));
                 }
                 st.setNull( index, getSqlType() );
                 return;
             }

-            if ( LOG.isTraceEnabled() ) {
+            if ( traceEnabled ) {
                 LOG.trace(String.format("Binding [%s] to parameter: [%s]", jdbcValue, index));
             }
             st.setObject( index, jdbcValue, EnumType.this.sqlType );
@@ -366,15 +367,16 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType,Logg
         @Override
         public Enum getValue(ResultSet rs, String[] names) throws SQLException {
             final int ordinal = rs.getInt( names[0] );
+            final boolean traceEnabled = LOG.isTraceEnabled();
             if ( rs.wasNull() ) {
-                if ( LOG.isTraceEnabled() ) {
+                if ( traceEnabled ) {
                     LOG.trace(String.format("Returning null as column [%s]", names[0]));
                 }
                 return null;
             }

             final Enum enumValue = fromOrdinal( ordinal );
-            if ( LOG.isTraceEnabled() ) {
+            if ( traceEnabled ) {
                 LOG.trace(String.format("Returning [%s] as column [%s]", enumValue, names[0]));
             }
             return enumValue;
@@ -436,15 +438,16 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType,Logg
         public Enum getValue(ResultSet rs, String[] names) throws SQLException {
             final String value = rs.getString( names[0] );

+            final boolean traceEnabled = LOG.isTraceEnabled();
             if ( rs.wasNull() ) {
-                if ( LOG.isTraceEnabled() ) {
+                if ( traceEnabled ) {
                     LOG.trace(String.format("Returning null as column [%s]", names[0]));
                 }
                 return null;
             }

             final Enum enumValue = fromName( value );
-            if ( LOG.isTraceEnabled() ) {
+            if ( traceEnabled ) {
                 LOG.trace(String.format("Returning [%s] as column [%s]", enumValue, names[0]));
             }
             return enumValue;

@@ -66,8 +66,9 @@ public abstract class BasicBinder<J> implements ValueBinder<J> {
     * {@inheritDoc}
     */
    public final void bind(PreparedStatement st, J value, int index, WrapperOptions options) throws SQLException {
+       final boolean traceEnabled = LOG.isTraceEnabled();
        if ( value == null ) {
-           if ( LOG.isTraceEnabled() ) {
+           if ( traceEnabled ) {
                LOG.trace(
                        String.format(
                                NULL_BIND_MSG_TEMPLATE,
@@ -79,7 +80,7 @@ public abstract class BasicBinder<J> implements ValueBinder<J> {
            st.setNull( index, sqlDescriptor.getSqlType() );
        }
        else {
-           if ( LOG.isTraceEnabled() ) {
+           if ( traceEnabled ) {
                LOG.trace(
                        String.format(
                                BIND_MSG_TEMPLATE,

@@ -26,12 +26,11 @@ package org.hibernate.type.descriptor.sql;
 import java.sql.ResultSet;
 import java.sql.SQLException;

-import org.jboss.logging.Logger;
-
 import org.hibernate.internal.CoreMessageLogger;
 import org.hibernate.type.descriptor.ValueExtractor;
 import org.hibernate.type.descriptor.WrapperOptions;
 import org.hibernate.type.descriptor.java.JavaTypeDescriptor;
+import org.jboss.logging.Logger;

 /**
  * Convenience base implementation of {@link org.hibernate.type.descriptor.ValueExtractor}
@@ -63,12 +62,15 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J> {
     */
    public J extract(ResultSet rs, String name, WrapperOptions options) throws SQLException {
        final J value = doExtract( rs, name, options );
+       final boolean traceEnabled = LOG.isTraceEnabled();
        if ( value == null || rs.wasNull() ) {
-           LOG.tracev( "Found [null] as column [{0}]", name );
+           if ( traceEnabled ) {
+               LOG.tracev( "Found [null] as column [{0}]", name );
+           }
            return null;
        }
        else {
-           if ( LOG.isTraceEnabled() ) {
+           if ( traceEnabled ) {
                LOG.tracev( "Found [{0}] as column [{1}]", getJavaDescriptor().extractLoggableRepresentation( value ), name );
            }
            return value;

@@ -476,7 +476,7 @@ sessionFactory().getStatistics().clear();
                 contactExists();
                 thinkRandomTime();
                 ++completedIterations;
-                if ( log.isTraceEnabled() ) {
+                if ( trace ) {
                     log.tracef( "Iteration completed {0}", completedIterations );
                 }
             }