HHH-6817 Logging of strings containing the percent character broken

Sanne Grinovero 2011-11-22 23:52:36 +00:00 committed by Hardy Ferentschik
parent 0fc95e54b3
commit 6c7379c38f
39 changed files with 97 additions and 97 deletions
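
Why the change: in JBoss Logging, the f-suffixed methods (debugf and friends) treat the message argument as a String.format pattern, so a message containing a literal percent character, for example generated SQL with a LIKE clause, is read as a format specifier and the log call garbles or fails. The plain debug()/trace() methods log the message verbatim. The commit therefore moves every format-style call that passes no arguments over to the plain variant, keeping debugf only where real format arguments follow. Below is a minimal sketch of the failure mode; the class and message are hypothetical, and LOG stands in for Hibernate's CoreMessageLogger, which extends org.jboss.logging.BasicLogger:

    import org.jboss.logging.Logger;

    public class PercentLoggingSketch {
        private static final Logger LOG = Logger.getLogger( PercentLoggingSketch.class );

        public static void main(String[] args) {
            // Hypothetical message containing a literal '%', e.g. a SQL LIKE fragment.
            String sql = "select * from users where name like 'a%'";

            // Broken: debugf() runs the message through a format step, so the
            // bare '%' is parsed as the start of a format specifier.
            LOG.debugf( sql );

            // Fixed: debug() logs the message verbatim; '%' is harmless.
            LOG.debug( sql );

            // debugf() remains appropriate when real format arguments are supplied:
            LOG.debugf( "Executing batch size: %s", 42 );
        }
    }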

@@ -130,17 +130,17 @@ public class StandardQueryCache implements QueryCache {
logCachedResultDetails( key, spaces, returnTypes, cacheable );
if ( cacheable == null ) {
-LOG.debugf( "Query results were not found in cache" );
+LOG.debug( "Query results were not found in cache" );
return null;
}
Long timestamp = (Long) cacheable.get( 0 );
if ( !isNaturalKeyLookup && !isUpToDate( spaces, timestamp ) ) {
-LOG.debugf( "Cached query results were not up-to-date" );
+LOG.debug( "Cached query results were not up-to-date" );
return null;
}
-LOG.debugf( "Returning cached query results" );
+LOG.debug( "Returning cached query results" );
for ( int i = 1; i < cacheable.size(); i++ ) {
if ( returnTypes.length == 1 ) {
returnTypes[0].beforeAssemble( (Serializable) cacheable.get( i ), session );
@@ -170,7 +170,7 @@ public class StandardQueryCache implements QueryCache {
// the uoe could occur while resolving
// associations, leaving the PC in an
// inconsistent state
-LOG.debugf( "Unable to reassemble cached result set" );
+LOG.debug( "Unable to reassemble cached result set" );
cacheRegion.evict( key );
return null;
}

@@ -985,7 +985,7 @@ public final class AnnotationBinder {
SharedCacheMode mode;
final Object value = mappings.getConfigurationProperties().get( "javax.persistence.sharedCache.mode" );
if ( value == null ) {
-LOG.debugf( "No value specified for 'javax.persistence.sharedCache.mode'; using UNSPECIFIED" );
+LOG.debug( "No value specified for 'javax.persistence.sharedCache.mode'; using UNSPECIFIED" );
mode = SharedCacheMode.UNSPECIFIED;
}
else {

@@ -77,7 +77,7 @@ public abstract class CollectionSecondPass implements SecondPass {
else {
msg += ", element: " + columns( collection.getElement() );
}
-LOG.debugf( msg );
+LOG.debug( msg );
}
}

@@ -1386,7 +1386,7 @@ public class Configuration implements Serializable {
* an entity having a PK made of a ManyToOne ...).
*/
private void processFkSecondPassInOrder() {
-LOG.debugf("Processing fk mappings (*ToOne and JoinedSubclass)");
+LOG.debug("Processing fk mappings (*ToOne and JoinedSubclass)");
List<FkSecondPass> fkSecondPasses = getFKSecondPassesOnly();
if ( fkSecondPasses.size() == 0 ) {
@@ -1564,10 +1564,10 @@ public class Configuration implements Serializable {
}
private void originalSecondPassCompile() throws MappingException {
-LOG.debugf( "Processing extends queue" );
+LOG.debug( "Processing extends queue" );
processExtendsQueue();
-LOG.debugf( "Processing collection mappings" );
+LOG.debug( "Processing collection mappings" );
Iterator itr = secondPasses.iterator();
while ( itr.hasNext() ) {
SecondPass sp = (SecondPass) itr.next();
@@ -1577,7 +1577,7 @@ public class Configuration implements Serializable {
}
}
-LOG.debugf( "Processing native query and ResultSetMapping mappings" );
+LOG.debug( "Processing native query and ResultSetMapping mappings" );
itr = secondPasses.iterator();
while ( itr.hasNext() ) {
SecondPass sp = (SecondPass) itr.next();
@@ -1585,7 +1585,7 @@ public class Configuration implements Serializable {
itr.remove();
}
-LOG.debugf( "Processing association property references" );
+LOG.debug( "Processing association property references" );
itr = propertyReferences.iterator();
while ( itr.hasNext() ) {
@@ -1607,7 +1607,7 @@ public class Configuration implements Serializable {
//TODO: Somehow add the newly created foreign keys to the internal collection
-LOG.debugf( "Processing foreign key constraints" );
+LOG.debug( "Processing foreign key constraints" );
itr = getTableMappings();
Set done = new HashSet();
@@ -1618,7 +1618,7 @@ public class Configuration implements Serializable {
}
private int processExtendsQueue() {
-LOG.debugf( "Processing extends queue" );
+LOG.debug( "Processing extends queue" );
int added = 0;
ExtendsQueueEntry extendsQueueEntry = findPossibleExtends();
while ( extendsQueueEntry != null ) {
@@ -3352,7 +3352,7 @@ public class Configuration implements Serializable {
}
private void processHbmXmlQueue() {
-LOG.debugf( "Processing hbm.xml files" );
+LOG.debug( "Processing hbm.xml files" );
for ( Map.Entry<XmlDocument, Set<String>> entry : hbmMetadataToEntityNamesMap.entrySet() ) {
// Unfortunately we have to create a Mappings instance for each iteration here
processHbmXml( entry.getKey(), entry.getValue() );
@@ -3382,7 +3382,7 @@ public class Configuration implements Serializable {
}
private void processAnnotatedClassesQueue() {
-LOG.debugf( "Process annotated classes" );
+LOG.debug( "Process annotated classes" );
//bind classes in the correct order calculating some inheritance state
List<XClass> orderedClasses = orderAndFillHierarchy( annotatedClasses );
Mappings mappings = createMappings();

@@ -1320,7 +1320,7 @@ public final class HbmBinder {
if ( columns.length() > 0 ) msg += " -> " + columns;
// TODO: this fails if we run with debug on!
// if ( model.getType()!=null ) msg += ", type: " + model.getType().getName();
-LOG.debugf( msg );
+LOG.debug( msg );
}
property.setMetaAttributes( getMetas( node, inheritedMetas ) );

@@ -344,7 +344,7 @@ public class SettingsFactory implements Serializable {
// return new org.hibernate.bytecode.internal.javassist.BytecodeProviderImpl();
// }
// else {
-// LOG.debugf("Using javassist as bytecode provider by default");
+// LOG.debug("Using javassist as bytecode provider by default");
// return new org.hibernate.bytecode.internal.javassist.BytecodeProviderImpl();
// }
// }

@@ -449,7 +449,7 @@ public class TableBinder {
org.hibernate.mapping.Column col;
Table table = referencedEntity.getTable(); //works cause the pk has to be on the primary table
if ( !idColItr.hasNext() ) {
-LOG.debugf( "No column in the identifier!" );
+LOG.debug( "No column in the identifier!" );
}
while ( idColItr.hasNext() ) {
boolean match = false;

@@ -894,7 +894,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
*/
public void initializeNonLazyCollections() throws HibernateException {
if ( loadCounter == 0 ) {
-LOG.debugf( "Initializing non-lazy collections" );
+LOG.debug( "Initializing non-lazy collections" );
//do this work only at the very highest level of the load
loadCounter++; //don't let this method be called recursively
try {

@@ -128,7 +128,7 @@ public abstract class AbstractBatchImpl implements Batch {
statements.put( sql, statement );
}
else {
-LOG.debugf( "Reusing batch statement" );
+LOG.debug( "Reusing batch statement" );
sqlStatementLogger().logStatement( sql );
}
return statement;

@@ -94,7 +94,7 @@ public class BatchingBatch extends AbstractBatchImpl {
@Override
protected void doExecuteBatch() {
if ( batchPosition == 0 ) {
-LOG.debugf( "No batched statements to execute" );
+LOG.debug( "No batched statements to execute" );
}
else {
LOG.debugf( "Executing batch size: %s", batchPosition );
@@ -110,7 +110,7 @@ public class BatchingBatch extends AbstractBatchImpl {
checkRowCounts( statement.executeBatch(), statement );
}
catch ( SQLException e ) {
-LOG.debugf( "SQLException escaped proxy", e );
+LOG.debug( "SQLException escaped proxy", e );
throw sqlExceptionHelper().convert( e, "could not perform addBatch", entry.getKey() );
}
}

@@ -118,7 +118,7 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
}
else if ( connectionReleaseMode == ConnectionReleaseMode.AFTER_STATEMENT &&
! jdbcServices.getConnectionProvider().supportsAggressiveRelease() ) {
-LOG.debugf( "Connection provider reports to not support aggressive release; overriding" );
+LOG.debug( "Connection provider reports to not support aggressive release; overriding" );
return ConnectionReleaseMode.AFTER_TRANSACTION;
}
else {
@@ -236,11 +236,11 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
LOG.tracev( "Starting after statement execution processing [{0}]", connectionReleaseMode );
if ( connectionReleaseMode == ConnectionReleaseMode.AFTER_STATEMENT ) {
if ( ! releasesEnabled ) {
-LOG.debugf( "Skipping aggressive release due to manual disabling" );
+LOG.debug( "Skipping aggressive release due to manual disabling" );
return;
}
if ( jdbcResourceRegistry.hasRegisteredResources() ) {
-LOG.debugf( "Skipping aggressive release due to registered resources" );
+LOG.debug( "Skipping aggressive release due to registered resources" );
return;
}
releaseConnection();
@@ -277,10 +277,10 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
*/
public void aggressiveRelease() {
if ( isUserSuppliedConnection ) {
-LOG.debugf( "Cannot aggressively release user-supplied connection; skipping" );
+LOG.debug( "Cannot aggressively release user-supplied connection; skipping" );
}
else {
-LOG.debugf( "Aggressively releasing JDBC connection" );
+LOG.debug( "Aggressively releasing JDBC connection" );
if ( physicalConnection != null ) {
releaseConnection();
}
@@ -294,13 +294,13 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
* @throws org.hibernate.JDBCException Indicates problem opening a connection
*/
private void obtainConnection() throws JDBCException {
-LOG.debugf( "Obtaining JDBC connection" );
+LOG.debug( "Obtaining JDBC connection" );
try {
physicalConnection = jdbcConnectionAccess.obtainConnection();
for ( ConnectionObserver observer : observers ) {
observer.physicalConnectionObtained( physicalConnection );
}
-LOG.debugf( "Obtained JDBC connection" );
+LOG.debug( "Obtained JDBC connection" );
}
catch ( SQLException sqle) {
throw getJdbcServices().getSqlExceptionHelper().convert( sqle, "Could not open connection" );
@@ -313,7 +313,7 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
* @throws JDBCException Indicates problem closing a connection
*/
private void releaseConnection() throws JDBCException {
-LOG.debugf( "Releasing JDBC connection" );
+LOG.debug( "Releasing JDBC connection" );
if ( physicalConnection == null ) {
return;
}
@@ -331,7 +331,7 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
finally {
physicalConnection = null;
}
-LOG.debugf( "Released JDBC connection" );
+LOG.debug( "Released JDBC connection" );
for ( ConnectionObserver observer : observers ) {
observer.physicalConnectionReleased();
}
@@ -380,7 +380,7 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
);
}
physicalConnection = suppliedConnection;
-LOG.debugf( "Reconnected JDBC connection" );
+LOG.debug( "Reconnected JDBC connection" );
}
}

@@ -289,7 +289,7 @@ public class CollectionLoadContext {
if ( !session.getEnabledFilters().isEmpty() && persister.isAffectedByEnabledFilters( session ) ) {
// some filters affecting the collection are enabled on the session, so do not do the put into the cache.
-LOG.debugf( "Refusing to add to cache due to enabled filters" );
+LOG.debug( "Refusing to add to cache due to enabled filters" );
// todo : add the notion of enabled filters to the CacheKey to differentiate filtered collections from non-filtered;
// but CacheKey is currently used for both collections and entities; would ideally need to define two seperate ones;
// currently this works in conjuction with the check on

@@ -476,7 +476,7 @@ public class ActionQueue {
public static ActionQueue deserialize(
ObjectInputStream ois,
SessionImplementor session) throws IOException, ClassNotFoundException {
-LOG.tracev( "Dedeserializing action-queue" );
+LOG.trace( "Dedeserializing action-queue" );
ActionQueue rtn = new ActionQueue( session );
int queueSize = ois.readInt();

@@ -61,7 +61,7 @@ import org.hibernate.persister.entity.EntityPersister;
/**
* A convenience base class for listeners whose functionality results in flushing.
*
-* @author Steve Eberole
+* @author Steve Ebersole
*/
public abstract class AbstractFlushingEventListener implements Serializable {
@@ -143,7 +143,7 @@ public abstract class AbstractFlushingEventListener implements Serializable {
*/
private void prepareEntityFlushes(EventSource session) throws HibernateException {
-LOG.debugf( "Processing flush-time cascades" );
+LOG.debug( "Processing flush-time cascades" );
final Map.Entry[] list = IdentityMap.concurrentEntries( session.getPersistenceContext().getEntityEntries() );
//safe from concurrent modification because of how entryList() is implemented on IdentityMap
@@ -186,7 +186,7 @@ public abstract class AbstractFlushingEventListener implements Serializable {
// Initialize dirty flags for arrays + collections with composite elements
// and reset reached, doupdate, etc.
-LOG.debugf( "Dirty checking collections" );
+LOG.debug( "Dirty checking collections" );
final List list = IdentityMap.entries( session.getPersistenceContext().getCollectionEntries() );
final int size = list.size();

@@ -207,9 +207,9 @@ public abstract class AbstractSaveEventListener extends AbstractReassociateEvent
// Sub-insertions should occur before containing insertion so
// Try to do the callback now
if ( persister.implementsLifecycle() ) {
-LOG.debugf( "Calling onSave()" );
+LOG.debug( "Calling onSave()" );
if ( ( ( Lifecycle ) entity ).onSave( source ) ) {
-LOG.debugf( "Insertion vetoed by onSave()" );
+LOG.debug( "Insertion vetoed by onSave()" );
return true;
}
}
@@ -300,14 +300,14 @@ public abstract class AbstractSaveEventListener extends AbstractReassociateEvent
values, entity, persister, source, shouldDelayIdentityInserts
);
if ( !shouldDelayIdentityInserts ) {
-LOG.debugf( "Executing identity-insert immediately" );
+LOG.debug( "Executing identity-insert immediately" );
source.getActionQueue().execute( insert );
id = insert.getGeneratedId();
key = source.generateEntityKey( id, persister );
source.getPersistenceContext().checkUniqueness( key, entity );
}
else {
-LOG.debugf( "Delaying identity-insert due to no transaction in progress" );
+LOG.debug( "Delaying identity-insert due to no transaction in progress" );
source.getActionQueue().addAction( insert );
key = insert.getDelayedEntityKey();
}

@@ -300,9 +300,9 @@ public class DefaultDeleteEventListener implements DeleteEventListener {
protected boolean invokeDeleteLifecycle(EventSource session, Object entity, EntityPersister persister) {
if ( persister.implementsLifecycle() ) {
-LOG.debugf( "Calling onDelete()" );
+LOG.debug( "Calling onDelete()" );
if ( ( ( Lifecycle ) entity ).onDelete( session ) ) {
-LOG.debugf( "Deletion vetoed by onDelete()" );
+LOG.debug( "Deletion vetoed by onDelete()" );
return true;
}
}

@@ -55,9 +55,9 @@ public class DefaultDirtyCheckEventListener extends AbstractFlushingEventListene
flushEverythingToExecutions(event);
boolean wasNeeded = event.getSession().getActionQueue().hasAnyQueuedActions();
if ( wasNeeded )
-LOG.debugf( "Session dirty" );
+LOG.debug( "Session dirty" );
else
-LOG.debugf( "Session not dirty" );
+LOG.debug( "Session not dirty" );
event.setDirty( wasNeeded );
}
finally {

@@ -394,11 +394,11 @@ public class DefaultLoadEventListener extends AbstractLockUpgradeEventListener i
Object entity = loadFromSessionCache( event, keyToLoad, options );
if ( entity == REMOVED_ENTITY_MARKER ) {
-LOG.debugf( "Load request found matching entity in context, but it is scheduled for removal; returning null" );
+LOG.debug( "Load request found matching entity in context, but it is scheduled for removal; returning null" );
return null;
}
if ( entity == INCONSISTENT_RTN_CLASS_MARKER ) {
-LOG.debugf( "Load request found matching entity in context, but the matched entity was of an inconsistent return type; returning null" );
+LOG.debug( "Load request found matching entity in context, but the matched entity was of an inconsistent return type; returning null" );
return null;
}
if ( entity != null ) {

@@ -116,7 +116,7 @@ public class DefaultPersistEventListener extends AbstractSaveEventListener imple
EntityPersister persister = source.getFactory().getEntityPersister( entityName );
if ( ForeignGenerator.class.isInstance( persister.getIdentifierGenerator() ) ) {
if ( LOG.isDebugEnabled() && persister.getIdentifier( entity, source ) != null ) {
-LOG.debugf( "Resetting entity id attribute to null for foreign generator" );
+LOG.debug( "Resetting entity id attribute to null for foreign generator" );
}
persister.setIdentifier( entity, null, source );
entityState = getEntityState( entity, entityName, entityEntry, source );

@@ -337,9 +337,9 @@ public class DefaultSaveOrUpdateEventListener extends AbstractSaveEventListener
protected boolean invokeUpdateLifecycle(Object entity, EntityPersister persister, EventSource source) {
if ( persister.implementsLifecycle() ) {
-LOG.debugf( "Calling onUpdate()" );
+LOG.debug( "Calling onUpdate()" );
if ( ( (Lifecycle) entity ).onUpdate( source ) ) {
-LOG.debugf( "Update vetoed by onUpdate()" );
+LOG.debug( "Update vetoed by onUpdate()" );
return true;
}
}

@@ -60,7 +60,7 @@ public class ErrorCounter implements ParseErrorHandler {
}
public void reportWarning(String message) {
-LOG.debugf(message);
+LOG.debug(message);
warningList.add( message );
}
@@ -79,6 +79,6 @@ public class ErrorCounter implements ParseErrorHandler {
if (recognitionExceptions.size() > 0) throw QuerySyntaxException.convert((RecognitionException)recognitionExceptions.get(0));
throw new QueryException(getErrorString());
}
-LOG.debugf("throwQueryException() : no errors");
+LOG.debug("throwQueryException() : no errors");
}
}

@@ -252,7 +252,7 @@ public class HqlSqlWalker extends HqlSqlBaseWalker implements ErrorReporter, Par
ASTUtil.createSibling( inputAstFactory, HqlTokenTypes.ALIAS, "this", fromElement );
fromClauseInput.addChild( fromElement );
// Show the modified AST.
-LOG.debugf("prepareFromClauseInputTree() : Filter - Added 'this' as a from element...");
+LOG.debug("prepareFromClauseInputTree() : Filter - Added 'this' as a from element...");
queryTranslatorImpl.showHqlAst( hqlParser.getAST() );
// Create a parameter specification for the collection filter...
@@ -349,7 +349,7 @@ public class HqlSqlWalker extends HqlSqlBaseWalker implements ErrorReporter, Par
join.addCondition( fkTableAlias, keyColumnNames, " = ?" );
fromElement.setJoinSequence( join );
fromElement.setFilter( true );
-LOG.debugf("createFromFilterElement() : processed filter FROM element.");
+LOG.debug("createFromFilterElement() : processed filter FROM element.");
return fromElement;
}
@@ -872,7 +872,7 @@ public class HqlSqlWalker extends HqlSqlBaseWalker implements ErrorReporter, Par
select.setNextSibling( sibling );
selectClause = ( SelectClause ) select;
selectClause.initializeDerivedSelectClause( currentFromClause );
-LOG.debugf( "Derived SELECT clause created." );
+LOG.debug( "Derived SELECT clause created." );
}
@Override

@@ -164,7 +164,7 @@ public class QueryTranslatorImpl implements FilterTranslator {
private synchronized void doCompile(Map replacements, boolean shallow, String collectionRole) {
// If the query is already compiled, skip the compilation.
if ( compiled ) {
-LOG.debugf( "compile() : The query is already compiled, skipping..." );
+LOG.debug( "compile() : The query is already compiled, skipping..." );
return;
}

@@ -206,8 +206,8 @@ public abstract class AbstractStatementExecutor implements StatementExecutor {
@Override
protected void logWarning(String description, String message) {
-LOG.debugf( description );
-LOG.debugf( message );
+LOG.debug( description );
+LOG.debug( message );
}
};

@@ -145,7 +145,7 @@ public class FromElementFactory implements SqlTokenTypes {
// If the from element isn't in the same clause, create a new from element.
if ( fromElement.getFromClause() != fromClause ) {
-LOG.debugf( "createFromElementInSubselect() : creating a new FROM element..." );
+LOG.debug( "createFromElementInSubselect() : creating a new FROM element..." );
fromElement = createFromElement( entityPersister );
initializeAndAddFromElement( fromElement,
path,
@@ -254,7 +254,7 @@ public class FromElementFactory implements SqlTokenTypes {
EntityPersister entityPersister = elem.getEntityPersister();
int numberOfTables = entityPersister.getQuerySpaces().length;
if ( numberOfTables > 1 && implied && !elem.useFromFragment() ) {
-LOG.debugf( "createEntityJoin() : Implied multi-table entity join" );
+LOG.debug( "createEntityJoin() : Implied multi-table entity join" );
elem.setUseFromFragment( true );
}

@@ -98,7 +98,7 @@ public class QueryNode extends AbstractRestrictableStatement implements SelectEx
// if there is no order by, make one
if ( orderByClause == null ) {
-LOG.debugf( "getOrderByClause() : Creating a new ORDER BY clause" );
+LOG.debug( "getOrderByClause() : Creating a new ORDER BY clause" );
orderByClause = ( OrderByClause ) ASTUtil.create( getWalker().getASTFactory(), SqlTokenTypes.ORDER, "ORDER" );
// Find the WHERE; if there is no WHERE, find the FROM...

@@ -84,7 +84,7 @@ public final class IteratorImpl implements HibernateIterator {
public void close() throws JDBCException {
if (ps!=null) {
try {
-LOG.debugf("Closing iterator");
+LOG.debug("Closing iterator");
ps.close();
ps = null;
rs = null;
@@ -110,12 +110,12 @@ public final class IteratorImpl implements HibernateIterator {
}
private void postNext() throws SQLException {
-LOG.debugf("Attempting to retrieve next results");
+LOG.debug("Attempting to retrieve next results");
this.hasNext = rs.next();
if (!hasNext) {
-LOG.debugf("Exhausted results");
+LOG.debug("Exhausted results");
close();
-} else LOG.debugf("Retrieved next results");
+} else LOG.debug("Retrieved next results");
}
public boolean hasNext() {

@@ -439,7 +439,7 @@ public final class SessionFactoryImpl
}
SessionFactoryRegistry.INSTANCE.addSessionFactory( uuid, name, this, serviceRegistry.getService( JndiService.class ) );
-LOG.debugf( "Instantiated session factory" );
+LOG.debug( "Instantiated session factory" );
if ( settings.isAutoCreateSchema() ) {
new SchemaExport( serviceRegistry, cfg )
@@ -767,7 +767,7 @@ public final class SessionFactoryImpl
}
SessionFactoryRegistry.INSTANCE.addSessionFactory( uuid, name, this, serviceRegistry.getService( JndiService.class ) );
-LOG.debugf("Instantiated session factory");
+LOG.debug("Instantiated session factory");
if ( settings.isAutoCreateSchema() ) {
new SchemaExport( metadata )
@@ -1097,10 +1097,10 @@ public final class SessionFactoryImpl
if ( result == null ) {
throw new InvalidObjectException( "Could not find a SessionFactory [uuid=" + uuid + ",name=" + name + "]" );
}
-LOG.debugf( "Resolved SessionFactory by name" );
+LOG.debug( "Resolved SessionFactory by name" );
}
else {
-LOG.debugf( "Resolved SessionFactory by UUID" );
+LOG.debug( "Resolved SessionFactory by UUID" );
}
return result;
}

@@ -465,14 +465,14 @@ public final class SessionImpl extends AbstractSessionImpl implements EventSourc
@Override
public Connection disconnect() throws HibernateException {
errorIfClosed();
-LOG.debugf( "Disconnecting session" );
+LOG.debug( "Disconnecting session" );
return transactionCoordinator.getJdbcCoordinator().getLogicalConnection().manualDisconnect();
}
@Override
public void reconnect(Connection conn) throws HibernateException {
errorIfClosed();
-LOG.debugf( "Reconnecting session" );
+LOG.debug( "Reconnecting session" );
checkTransactionSynchStatus();
transactionCoordinator.getJdbcCoordinator().getLogicalConnection().manualReconnect( conn );
}
@@ -1060,9 +1060,9 @@ public final class SessionImpl extends AbstractSessionImpl implements EventSourc
public boolean isDirty() throws HibernateException {
errorIfClosed();
checkTransactionSynchStatus();
-LOG.debugf( "Checking session dirtiness" );
+LOG.debug( "Checking session dirtiness" );
if ( actionQueue.areInsertionsOrDeletionsQueued() ) {
-LOG.debugf( "Session dirty (scheduled updates and insertions)" );
+LOG.debug( "Session dirty (scheduled updates and insertions)" );
return true;
}
DirtyCheckEvent event = new DirtyCheckEvent( this );

@@ -111,14 +111,14 @@ public final class EntityPrinter {
// Cannot use Map as an argument because it clashes with the previous method (due to type erasure)
public void toString(Iterable<Map.Entry<EntityKey,Object>> entitiesByEntityKey) throws HibernateException {
if ( ! LOG.isDebugEnabled() || ! entitiesByEntityKey.iterator().hasNext() ) return;
-LOG.debugf( "Listing entities:" );
+LOG.debug( "Listing entities:" );
int i=0;
for ( Map.Entry<EntityKey,Object> entityKeyAndEntity : entitiesByEntityKey ) {
if (i++>20) {
-LOG.debugf("More......");
+LOG.debug("More......");
break;
}
-LOG.debugf( toString( entityKeyAndEntity.getKey().getEntityName(), entityKeyAndEntity.getValue() ) );
+LOG.debug( toString( entityKeyAndEntity.getKey().getEntityName(), entityKeyAndEntity.getValue() ) );
}
}

@@ -67,16 +67,16 @@ public class DTDEntityResolver implements EntityResolver, Serializable {
if ( systemId != null ) {
LOG.debugf( "Trying to resolve system-id [%s]", systemId );
if ( systemId.startsWith( HIBERNATE_NAMESPACE ) ) {
-LOG.debugf( "Recognized hibernate namespace; attempting to resolve on classpath under org/hibernate/" );
+LOG.debug( "Recognized hibernate namespace; attempting to resolve on classpath under org/hibernate/" );
source = resolveOnClassPath( publicId, systemId, HIBERNATE_NAMESPACE );
}
else if ( systemId.startsWith( OLD_HIBERNATE_NAMESPACE ) ) {
LOG.recognizedObsoleteHibernateNamespace( OLD_HIBERNATE_NAMESPACE, HIBERNATE_NAMESPACE );
-LOG.debugf( "Attempting to resolve on classpath under org/hibernate/" );
+LOG.debug( "Attempting to resolve on classpath under org/hibernate/" );
source = resolveOnClassPath( publicId, systemId, OLD_HIBERNATE_NAMESPACE );
}
else if ( systemId.startsWith( USER_NAMESPACE ) ) {
-LOG.debugf( "Recognized local namespace; attempting to resolve on classpath" );
+LOG.debug( "Recognized local namespace; attempting to resolve on classpath" );
String path = systemId.substring( USER_NAMESPACE.length() );
InputStream stream = resolveInLocalNamespace( path );
if ( stream == null ) {

@@ -122,10 +122,10 @@ public class SessionFactoryStub implements SessionFactory {
if ( result == null ) {
throw new InvalidObjectException( "Could not find a SessionFactory [uuid=" + uuid + ",name=" + name + "]" );
}
-LOG.debugf("Resolved stub SessionFactory by name");
+LOG.debug("Resolved stub SessionFactory by name");
}
else {
-LOG.debugf("Resolved stub SessionFactory by UUID");
+LOG.debug("Resolved stub SessionFactory by UUID");
}
return result;
}

@@ -2067,7 +2067,7 @@ public abstract class Loader {
);
}
-LOG.debugf( "Done entity load" );
+LOG.debug( "Done entity load" );
return result;
@@ -2085,7 +2085,7 @@ public abstract class Loader {
final Type indexType,
final EntityPersister persister) throws HibernateException {
-LOG.debugf( "Loading collection element by index" );
+LOG.debug( "Loading collection element by index" );
List result;
try {
@@ -2106,7 +2106,7 @@ public abstract class Loader {
);
}
-LOG.debugf( "Done entity load" );
+LOG.debug( "Done entity load" );
return result;
@@ -2152,7 +2152,7 @@ public abstract class Loader {
);
}
-LOG.debugf( "Done entity batch load" );
+LOG.debug( "Done entity batch load" );
return result;
@@ -2187,7 +2187,7 @@ public abstract class Loader {
);
}
-LOG.debugf( "Done loading collection" );
+LOG.debug( "Done loading collection" );
}
@@ -2221,7 +2221,7 @@ public abstract class Loader {
);
}
-LOG.debugf( "Done batch load" );
+LOG.debug( "Done batch load" );
}

@@ -1123,7 +1123,7 @@ public abstract class AbstractCollectionPersister
}
}
-LOG.debugf( "Done deleting collection" );
+LOG.debug( "Done deleting collection" );
}
catch ( SQLException sqle ) {
throw sqlExceptionHelper.convert(
@@ -1233,7 +1233,7 @@ public abstract class AbstractCollectionPersister
}
else {
-LOG.debugf( "Collection was empty" );
+LOG.debug( "Collection was empty" );
}
}
catch ( SQLException sqle ) {
@@ -1341,7 +1341,7 @@ public abstract class AbstractCollectionPersister
}
}
else {
-LOG.debugf( "No rows to delete" );
+LOG.debug( "No rows to delete" );
}
}
catch ( SQLException sqle ) {

@@ -54,7 +54,7 @@ public final class NamedQueryLoader implements UniqueEntityLoader {
}
public Object load(Serializable id, Object optionalObject, SessionImplementor session, LockOptions lockOptions) {
-if (lockOptions != null) LOG.debugf("Ignoring lock-options passed to named query loader");
+if (lockOptions != null) LOG.debug("Ignoring lock-options passed to named query loader");
return load( id, optionalObject, session );
}

@@ -169,7 +169,7 @@ public class DriverManagerConnectionProviderImpl implements ConnectionProvider,
// otherwise we open a new connection...
-LOG.debugf( "Opening new JDBC connection" );
+LOG.debug( "Opening new JDBC connection" );
Connection conn = DriverManager.getConnection( url, connectionProps );
if ( isolation != null ) {
conn.setTransactionIsolation( isolation.intValue() );
@@ -199,7 +199,7 @@ public class DriverManagerConnectionProviderImpl implements ConnectionProvider,
}
}
-LOG.debugf( "Closing JDBC connection" );
+LOG.debug( "Closing JDBC connection" );
conn.close();
}

@@ -482,7 +482,7 @@ public class SchemaExport {
String formatted = formatter.format( sql );
if (delimiter != null) formatted += delimiter;
if (script) System.out.println(formatted);
-LOG.debugf(formatted);
+LOG.debug(formatted);
if ( outputFile != null ) {
fileOutput.write( formatted + "\n" );
}

@@ -223,7 +223,7 @@ public class SchemaUpdate {
outputFileWriter.write( formatted + "\n" );
}
if ( target.doExport() ) {
-LOG.debugf( sql );
+LOG.debug( sql );
stmt.executeUpdate( formatted );
}
}

@@ -69,7 +69,7 @@ public class DbTimestampType extends TimestampType {
return super.seed( session );
}
else if ( !session.getFactory().getDialect().supportsCurrentTimestampSelection() ) {
-LOG.debugf( "Falling back to vm-based timestamp, as dialect does not support current timestamp selection" );
+LOG.debug( "Falling back to vm-based timestamp, as dialect does not support current timestamp selection" );
return super.seed( session );
}
else {