HHH-17000 Do not keep static references to log levels

Sanne Grinovero 2023-07-26 21:12:51 +01:00 committed by Sanne Grinovero
parent a098e08a75
commit 8e2a093c48
68 changed files with 184 additions and 294 deletions
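The motivation, in short: jboss-logging levels can be changed at runtime, but a constant on an interface captures the result of isDebugEnabled()/isTraceEnabled() once, at class-initialization time, so later level changes are never observed. A minimal sketch of the pattern being removed versus the pattern adopted here (illustrative only; ExampleLoggingBefore, ExampleLoggingAfter and ExampleCaller are hypothetical names, not files in this commit):

import org.jboss.logging.Logger;

// Before: the level check is frozen when the interface is initialized.
interface ExampleLoggingBefore {
	Logger LOGGER = Logger.getLogger( "org.hibernate.example" );
	// Evaluated exactly once; ignores any later change to the log level.
	boolean DEBUG_ENABLED = LOGGER.isDebugEnabled();
}

// After: callers ask the logger directly, so runtime level changes are honored.
interface ExampleLoggingAfter {
	Logger LOGGER = Logger.getLogger( "org.hibernate.example" );
}

class ExampleCaller {
	void doWork() {
		if ( ExampleLoggingAfter.LOGGER.isDebugEnabled() ) {
			ExampleLoggingAfter.LOGGER.debugf( "Doing work at %s", System.currentTimeMillis() );
		}
	}
}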

View File

@@ -21,7 +21,4 @@ import org.jboss.logging.Logger;
 public interface BootLogging {
     String NAME = SubSystemLogging.BASE + ".boot";
     Logger BOOT_LOGGER = Logger.getLogger( NAME );
-    boolean DEBUG_ENABLED = BOOT_LOGGER.isDebugEnabled();
-    boolean TRACE_ENABLED = BOOT_LOGGER.isTraceEnabled();
 }

View File

@@ -25,9 +25,5 @@ import org.jboss.logging.annotations.ValidIdRange;
 )
 public interface JaxbLogger extends BasicLogger {
     String LOGGER_NAME = BootLogging.NAME + ".jaxb";
     JaxbLogger JAXB_LOGGER = Logger.getMessageLogger( JaxbLogger.class, LOGGER_NAME );
-    boolean TRACE_ENABLED = JAXB_LOGGER.isTraceEnabled();
-    boolean DEBUG_ENABLED = JAXB_LOGGER.isDebugEnabled();
 }

View File

@@ -21,7 +21,4 @@ import org.jboss.logging.Logger;
 public interface BootQueryLogging {
     String NAME = BootLogging.NAME + ".query";
     Logger BOOT_QUERY_LOGGER = Logger.getLogger( NAME );
-    boolean DEBUG_ENABLED = BOOT_QUERY_LOGGER.isDebugEnabled();
-    boolean TRACE_ENABLED = BOOT_QUERY_LOGGER.isTraceEnabled();
 }

View File

@@ -19,9 +19,5 @@ import org.jboss.logging.Logger;
 )
 public interface BytecodeLogging {
     String LOGGER_NAME = SubSystemLogging.BASE + "bytecode";
     Logger LOGGER = Logger.getLogger( LOGGER_NAME );
-    boolean TRACE_ENABLED = LOGGER.isTraceEnabled();
-    boolean DEBUG_ENABLED = LOGGER.isDebugEnabled();
 }

View File

@@ -34,9 +34,6 @@ public interface BytecodeInterceptorLogging extends BasicLogger {
     Logger LOGGER = Logger.getLogger( LOGGER_NAME );
     BytecodeInterceptorLogging MESSAGE_LOGGER = Logger.getMessageLogger(BytecodeInterceptorLogging.class, LOGGER_NAME );
-    boolean TRACE_ENABLED = LOGGER.isTraceEnabled();
-    boolean DEBUG_ENABLED = LOGGER.isDebugEnabled();
     @LogMessage(level = WARN)
     @Message(
             id = 90005901,

View File

@@ -18,7 +18,6 @@ import org.hibernate.cache.spi.QueryResultsRegion;
 import org.hibernate.cache.spi.TimestampsCache;
 import org.hibernate.engine.spi.SharedSessionContractImplementor;
-import static org.hibernate.cache.spi.SecondLevelCacheLogger.DEBUG_ENABLED;
 import static org.hibernate.cache.spi.SecondLevelCacheLogger.L2CACHE_LOGGER;
 /**
@@ -51,7 +50,7 @@ public class QueryResultsCacheImpl implements QueryResultsCache {
             final QueryKey key,
             final List<?> results,
             final SharedSessionContractImplementor session) throws HibernateException {
-        if ( DEBUG_ENABLED ) {
+        if ( L2CACHE_LOGGER.isDebugEnabled() ) {
             L2CACHE_LOGGER.debugf( "Caching query results in region: %s; timestamp=%s", cacheRegion.getName(), session.getCacheTransactionSynchronization().getCachingTimestamp() );
         }
@@ -80,26 +79,27 @@ public class QueryResultsCacheImpl implements QueryResultsCache {
             final QueryKey key,
             final Set<String> spaces,
             final SharedSessionContractImplementor session) throws HibernateException {
-        if ( DEBUG_ENABLED ) {
+        final boolean loggerDebugEnabled = L2CACHE_LOGGER.isDebugEnabled();
+        if ( loggerDebugEnabled ) {
             L2CACHE_LOGGER.debugf( "Checking cached query results in region: %s", cacheRegion.getName() );
         }
         final CacheItem cacheItem = getCachedData( key, session );
         if ( cacheItem == null ) {
-            if ( DEBUG_ENABLED ) {
+            if ( loggerDebugEnabled ) {
                 L2CACHE_LOGGER.debug( "Query results were not found in cache" );
             }
             return null;
         }
         if ( !timestampsCache.isUpToDate( spaces, cacheItem.timestamp, session ) ) {
-            if ( DEBUG_ENABLED ) {
+            if ( loggerDebugEnabled ) {
                 L2CACHE_LOGGER.debug( "Cached query results were not up-to-date" );
             }
             return null;
         }
-        if ( DEBUG_ENABLED ) {
+        if ( loggerDebugEnabled ) {
             L2CACHE_LOGGER.debug( "Returning cached query results" );
         }
@@ -111,26 +111,27 @@ public class QueryResultsCacheImpl implements QueryResultsCache {
             final QueryKey key,
             final String[] spaces,
             final SharedSessionContractImplementor session) throws HibernateException {
-        if ( DEBUG_ENABLED ) {
+        final boolean loggerDebugEnabled = L2CACHE_LOGGER.isDebugEnabled();
+        if ( loggerDebugEnabled ) {
             L2CACHE_LOGGER.debugf( "Checking cached query results in region: %s", cacheRegion.getName() );
         }
         final CacheItem cacheItem = getCachedData( key, session );
         if ( cacheItem == null ) {
-            if ( DEBUG_ENABLED ) {
+            if ( loggerDebugEnabled ) {
                 L2CACHE_LOGGER.debug( "Query results were not found in cache" );
             }
             return null;
         }
         if ( !timestampsCache.isUpToDate( spaces, cacheItem.timestamp, session ) ) {
-            if ( DEBUG_ENABLED ) {
+            if ( loggerDebugEnabled ) {
                 L2CACHE_LOGGER.debug( "Cached query results were not up-to-date" );
             }
             return null;
         }
-        if ( DEBUG_ENABLED ) {
+        if ( loggerDebugEnabled ) {
             L2CACHE_LOGGER.debug( "Returning cached query results" );
         }
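Where a method performs the same level check several times, as in the two lookup hunks above, the check is hoisted into a local so the logger is queried once per invocation instead of once per guarded statement. A rough standalone sketch of that shape (illustrative only; CachedLookupSketch and its in-memory map are hypothetical stand-ins, not the real QueryResultsCacheImpl):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.jboss.logging.Logger;

// Illustrative only: mirrors the shape of the lookup hunks above.
class CachedLookupSketch {
	private static final Logger LOGGER = Logger.getLogger( "org.hibernate.example.cache" );
	private final Map<Object, Object> region = new ConcurrentHashMap<>();

	Object get(Object key) {
		// Read the level once per call; reuse it for every guarded statement in the method.
		final boolean debugEnabled = LOGGER.isDebugEnabled();
		if ( debugEnabled ) {
			LOGGER.debugf( "Checking cached results for key: %s", key );
		}
		final Object cached = region.get( key );
		if ( cached == null ) {
			if ( debugEnabled ) {
				LOGGER.debug( "Results were not found in cache" );
			}
			return null;
		}
		if ( debugEnabled ) {
			LOGGER.debug( "Returning cached results" );
		}
		return cached;
	}
}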

View File

@@ -33,9 +33,6 @@ public interface SecondLevelCacheLogger extends BasicLogger {
     SecondLevelCacheLogger L2CACHE_LOGGER = Logger.getMessageLogger( SecondLevelCacheLogger.class, LOGGER_NAME );
-    boolean DEBUG_ENABLED = L2CACHE_LOGGER.isDebugEnabled();
-    boolean TRACE_ENABLED = L2CACHE_LOGGER.isTraceEnabled();
     int NAMESPACE = 90001000;
     @LogMessage(level = WARN)

View File

@@ -30,9 +30,6 @@ public interface DialectLogging {
     Logger DIALECT_LOGGER = Logger.getLogger(LOGGER_NAME);
     DialectLogging DIALECT_MESSAGE_LOGGER = Logger.getMessageLogger(DialectLogging.class, LOGGER_NAME);
-    boolean DEBUG_ENABLED = DIALECT_LOGGER.isDebugEnabled();
-    boolean TRACE_ENABLED = DIALECT_LOGGER.isTraceEnabled();
     @LogMessage(level = DEBUG)
     @Message(value = "Using dialect: %s", id = 35001)
     void usingDialect(Dialect dialect);

View File

@@ -34,9 +34,6 @@ public interface JdbcLogging extends BasicLogger {
     Logger JDBC_LOGGER = Logger.getLogger( NAME );
     JdbcLogging JDBC_MESSAGE_LOGGER = Logger.getMessageLogger( JdbcLogging.class, NAME );
-    boolean JDBC_TRACE_ENABLED = JDBC_LOGGER.isTraceEnabled();
-    boolean JDBC_DEBUG_ENABLED = JDBC_LOGGER.isDebugEnabled();
     @LogMessage(level = WARN)
     @Message(
             id=100001,

View File

@@ -35,9 +35,6 @@ public interface JdbcBatchLogging extends BasicLogger {
     Logger BATCH_LOGGER = Logger.getLogger( NAME );
     JdbcBatchLogging BATCH_MESSAGE_LOGGER = Logger.getMessageLogger( JdbcBatchLogging.class, NAME );
-    boolean BATCH_TRACE_ENABLED = BATCH_LOGGER.isTraceEnabled();
-    boolean BATCH_DEBUG_ENABLED = BATCH_LOGGER.isDebugEnabled();
     @LogMessage(level = ERROR)
     @Message(id = 100501, value = "Exception executing batch [%s], SQL: %s")
     void unableToExecuteBatch(Exception e, String sql );

View File

@@ -21,7 +21,6 @@ import org.hibernate.sql.model.TableMapping;
 import org.hibernate.sql.model.jdbc.JdbcInsertMutation;
 import static org.hibernate.engine.jdbc.batch.JdbcBatchLogging.BATCH_LOGGER;
-import static org.hibernate.engine.jdbc.batch.JdbcBatchLogging.BATCH_TRACE_ENABLED;
 import static org.hibernate.jdbc.Expectations.NONE;
 /**
@@ -39,7 +38,7 @@ public class BatchBuilderImpl implements BatchBuilder {
      * on {@link #buildBatch}
      */
     public BatchBuilderImpl(int globalBatchSize) {
-        if ( BATCH_TRACE_ENABLED ) {
+        if ( BATCH_LOGGER.isTraceEnabled() ) {
             BATCH_LOGGER.tracef(
                     "Using standard BatchBuilder (%s)",
                     globalBatchSize

View File

@@ -25,12 +25,9 @@ import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
 import org.hibernate.resource.jdbc.spi.JdbcObserver;
 import static org.hibernate.engine.jdbc.JdbcLogging.JDBC_MESSAGE_LOGGER;
-import static org.hibernate.engine.jdbc.batch.JdbcBatchLogging.BATCH_DEBUG_ENABLED;
 import static org.hibernate.engine.jdbc.batch.JdbcBatchLogging.BATCH_LOGGER;
 import static org.hibernate.engine.jdbc.batch.JdbcBatchLogging.BATCH_MESSAGE_LOGGER;
-import static org.hibernate.engine.jdbc.batch.JdbcBatchLogging.BATCH_TRACE_ENABLED;
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_TRACE_ENABLED;
 /**
  * Standard implementation of Batch
@@ -73,7 +70,7 @@ public class BatchImpl implements Batch {
         this.batchSizeToUse = batchSizeToUse;
-        if ( BATCH_TRACE_ENABLED ) {
+        if ( BATCH_LOGGER.isTraceEnabled() ) {
             BATCH_LOGGER.tracef(
                     "Created Batch (%s) - `%s`",
                     batchSizeToUse,
@@ -99,7 +96,8 @@ public class BatchImpl implements Batch {
     @Override
     public void addToBatch(JdbcValueBindings jdbcValueBindings, TableInclusionChecker inclusionChecker) {
-        if ( BATCH_TRACE_ENABLED ) {
+        final boolean loggerTraceEnabled = BATCH_LOGGER.isTraceEnabled();
+        if ( loggerTraceEnabled ) {
             BATCH_LOGGER.tracef(
                     "Adding to JDBC batch (%s) - `%s`",
                     batchPosition + 1,
@@ -110,7 +108,7 @@ public class BatchImpl implements Batch {
         try {
             getStatementGroup().forEachStatement( (tableName, statementDetails) -> {
                 if ( inclusionChecker != null && !inclusionChecker.include( statementDetails.getMutatingTableDetails() ) ) {
-                    if ( MODEL_MUTATION_LOGGER_TRACE_ENABLED ) {
+                    if ( loggerTraceEnabled ) {
                         MODEL_MUTATION_LOGGER.tracef(
                                 "Skipping addBatch for table : %s (batch-position=%s)",
                                 statementDetails.getMutatingTableDetails().getTableName(),
@@ -229,7 +227,7 @@ public class BatchImpl implements Batch {
         try {
             if ( batchPosition == 0 ) {
                 if( !batchExecuted) {
-                    if ( BATCH_DEBUG_ENABLED ) {
+                    if ( BATCH_LOGGER.isDebugEnabled() ) {
                         BATCH_LOGGER.debugf(
                                 "No batched statements to execute - %s",
                                 getKey().toLoggableString()
@@ -247,7 +245,7 @@ public class BatchImpl implements Batch {
     }
     protected void performExecution() {
-        if ( BATCH_TRACE_ENABLED ) {
+        if ( BATCH_LOGGER.isTraceEnabled() ) {
             BATCH_LOGGER.tracef(
                     "Executing JDBC batch (%s / %s) - `%s`",
                     batchPosition,

View File

@@ -34,9 +34,6 @@ public interface LobCreationLogging extends BasicLogger {
     Logger LOB_LOGGER = Logger.getLogger( NAME );
     LobCreationLogging LOB_MESSAGE_LOGGER = Logger.getMessageLogger( LobCreationLogging.class, NAME );
-    boolean LOB_TRACE_ENABLED = LOB_LOGGER.isTraceEnabled();
-    boolean LOB_DEBUG_ENABLED = LOB_LOGGER.isDebugEnabled();
     @LogMessage(level = DEBUG)
     @Message(value = "Disabling contextual LOB creation as %s is true", id = 10010001)
     void disablingContextualLOBCreation(String settingName);

View File

@@ -19,7 +19,6 @@ import org.hibernate.sql.model.TableMapping;
 import org.hibernate.sql.model.ValuesAnalysis;
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_TRACE_ENABLED;
 /**
  * Base support for MutationExecutor implementations
@@ -89,7 +88,7 @@ public abstract class AbstractMutationExecutor implements MutationExecutor {
         final TableMapping tableDetails = statementDetails.getMutatingTableDetails();
         if ( inclusionChecker != null && !inclusionChecker.include( tableDetails ) ) {
-            if ( MODEL_MUTATION_LOGGER_TRACE_ENABLED ) {
+            if ( MODEL_MUTATION_LOGGER.isTraceEnabled() ) {
                 MODEL_MUTATION_LOGGER.tracef(
                         "Skipping execution of secondary insert : %s",
                         tableDetails.getTableName()

View File

@@ -31,7 +31,6 @@ import org.hibernate.sql.model.ValuesAnalysis;
 import org.hibernate.sql.model.jdbc.JdbcValueDescriptor;
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_TRACE_ENABLED;
 /**
  * Specialized executor for the case of more than one table operation, with the
@@ -141,7 +140,7 @@ public class MutationExecutorPostInsert implements MutationExecutor, JdbcValueBi
         final InsertGeneratedIdentifierDelegate identityHandler = mutationTarget.getIdentityInsertDelegate();
         final Object id = identityHandler.performInsert( identityInsertStatementDetails, valueBindings, modelReference, session );
-        if ( MODEL_MUTATION_LOGGER_TRACE_ENABLED ) {
+        if ( MODEL_MUTATION_LOGGER.isTraceEnabled() ) {
             MODEL_MUTATION_LOGGER.tracef(
                     "Post-insert generated value : `%s` (%s)",
                     id,
@@ -178,7 +177,7 @@ public class MutationExecutorPostInsert implements MutationExecutor, JdbcValueBi
         assert !tableDetails.isIdentifierTable();
         if ( inclusionChecker != null && !inclusionChecker.include( tableDetails ) ) {
-            if ( MODEL_MUTATION_LOGGER_TRACE_ENABLED ) {
+            if ( MODEL_MUTATION_LOGGER.isTraceEnabled() ) {
                 MODEL_MUTATION_LOGGER.tracef(
                         "Skipping execution of secondary insert : %s",
                         tableDetails.getTableName()

View File

@@ -25,7 +25,6 @@ import org.hibernate.sql.model.jdbc.JdbcValueDescriptor;
 import static org.hibernate.engine.jdbc.mutation.internal.ModelMutationHelper.identityPreparation;
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_TRACE_ENABLED;
 /**
  * Specialized form of {@link MutationExecutorPostInsert} for cases where there
@@ -100,7 +99,7 @@ public class MutationExecutorPostInsertSingleTable implements MutationExecutor,
         final InsertGeneratedIdentifierDelegate identityHandler = mutationTarget.getIdentityInsertDelegate();
         final Object id = identityHandler.performInsert( identityInsertStatementDetails, valueBindings, modelReference, session );
-        if ( MODEL_MUTATION_LOGGER_TRACE_ENABLED ) {
+        if ( MODEL_MUTATION_LOGGER.isTraceEnabled() ) {
             MODEL_MUTATION_LOGGER.tracef(
                     "Post-insert generated value : `%s` (%s)",
                     id,

View File

@@ -21,7 +21,4 @@ public interface GraphParserLogging {
     String LOGGER_NAME = SubSystemLogging.BASE + ".graph.parsing";
     Logger PARSING_LOGGER = Logger.getLogger( LOGGER_NAME );
-    boolean DEBUG_ENABLED = PARSING_LOGGER.isDebugEnabled();
-    boolean TRACE_ENABLED = PARSING_LOGGER.isTraceEnabled();
 }

View File

@@ -20,7 +20,4 @@ import org.jboss.logging.Logger;
 public interface IdGenFactoryLogging {
     String LOGGER_NAME = SubSystemLogging.BASE + ".idgen.factory";
     Logger ID_GEN_FAC_LOGGER = Logger.getLogger( LOGGER_NAME );
-    boolean IS_TRACE_ENABLE = ID_GEN_FAC_LOGGER.isTraceEnabled();
-    boolean IS_DEBUG_ENABLE = ID_GEN_FAC_LOGGER.isDebugEnabled();
 }

View File

@@ -37,9 +37,6 @@ public interface UrlMessageBundle {
     Logger URL_LOGGER = Logger.getLogger( LOGGER_NAME );
     UrlMessageBundle URL_MESSAGE_LOGGER = Logger.getMessageLogger( UrlMessageBundle.class, LOGGER_NAME );
-    boolean DEBUG_ENABLED = URL_LOGGER.isDebugEnabled();
-    boolean TRACE_ENABLED = URL_LOGGER.isTraceEnabled();
     /**
      * Logs a warning about a malformed URL, caused by a {@link URISyntaxException}
      *

View File

@@ -26,7 +26,4 @@ public interface LoaderLogging {
     String LOGGER_NAME = SubSystemLogging.BASE + ".loader";
     Logger LOADER_LOGGER = Logger.getLogger( LOGGER_NAME );
-    boolean DEBUG_ENABLED = LOADER_LOGGER.isDebugEnabled();
-    boolean TRACE_ENABLED = LOADER_LOGGER.isTraceEnabled();
 }

View File

@@ -17,7 +17,6 @@ import org.hibernate.loader.ast.spi.CollectionBatchLoader;
 import org.hibernate.metamodel.mapping.PluralAttributeMapping;
 import org.hibernate.sql.results.internal.ResultsHelper;
-import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_DEBUG_ENABLED;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER;
 /**
@@ -73,7 +72,7 @@ public abstract class AbstractCollectionBatchLoader implements CollectionBatchLo
             return;
         }
-        if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+        if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
             MULTI_KEY_LOAD_LOGGER.debugf( "Finishing initializing batch-fetched collection : %s.%s", attributeMapping.getNavigableRole().getFullPath(), key );
         }

View File

@@ -20,7 +20,6 @@ import org.hibernate.loader.ast.spi.SqlArrayMultiKeyLoader;
 import org.hibernate.metamodel.mapping.ForeignKeyDescriptor;
 import org.hibernate.metamodel.mapping.JdbcMapping;
 import org.hibernate.metamodel.mapping.PluralAttributeMapping;
-import org.hibernate.metamodel.mapping.internal.SimpleForeignKeyDescriptor;
 import org.hibernate.query.spi.QueryOptions;
 import org.hibernate.sql.ast.tree.expression.JdbcParameter;
 import org.hibernate.sql.ast.tree.select.SelectStatement;
@@ -34,7 +33,6 @@ import org.hibernate.sql.results.internal.RowTransformerStandardImpl;
 import org.hibernate.sql.results.spi.ListResultsConsumer;
 import org.hibernate.type.BasicType;
-import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_DEBUG_ENABLED;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER;
 /**
@@ -58,7 +56,7 @@ public class CollectionBatchLoaderArrayParam
             SessionFactoryImplementor sessionFactory) {
         super( domainBatchSize, loadQueryInfluencers, attributeMapping, sessionFactory );
-        if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+        if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
             MULTI_KEY_LOAD_LOGGER.debugf(
                     "Using ARRAY batch fetching strategy for collection `%s` : %s",
                     attributeMapping.getNavigableRole().getFullPath(),
@@ -100,7 +98,7 @@ public class CollectionBatchLoaderArrayParam
     @Override
     public PersistentCollection<?> load(Object key, SharedSessionContractImplementor session) {
-        if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+        if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
             MULTI_KEY_LOAD_LOGGER.debugf( "Batch loading entity `%s#%s`", getLoadable().getNavigableRole().getFullPath(), key );
         }
         final ForeignKeyDescriptor keyDescriptor = getLoadable().getKeyDescriptor();

View File

@@ -23,13 +23,11 @@ import org.hibernate.loader.ast.spi.CollectionBatchLoader;
 import org.hibernate.loader.ast.spi.SqlArrayMultiKeyLoader;
 import org.hibernate.metamodel.mapping.PluralAttributeMapping;
 import org.hibernate.query.spi.QueryOptions;
-import org.hibernate.sql.ast.tree.expression.JdbcParameter;
 import org.hibernate.sql.ast.tree.select.SelectStatement;
 import org.hibernate.sql.exec.spi.JdbcOperationQuerySelect;
 import org.hibernate.sql.exec.spi.JdbcParameterBindings;
 import org.hibernate.sql.exec.spi.JdbcParametersList;
-import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_DEBUG_ENABLED;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER;
 /**
@@ -60,7 +58,7 @@ public class CollectionBatchLoaderInPredicate
                 .getDialect()
                 .getBatchLoadSizingStrategy()
                 .determineOptimalBatchLoadSize( keyColumnCount, domainBatchSize, false );
-        if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+        if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
             MULTI_KEY_LOAD_LOGGER.debugf(
                     "Using IN-predicate batch fetching strategy for collection `%s` : %s (%s)",
                     attributeMapping.getNavigableRole().getFullPath(),
@@ -95,7 +93,7 @@ public class CollectionBatchLoaderInPredicate
     public PersistentCollection<?> load(
             Object key,
             SharedSessionContractImplementor session) {
-        if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+        if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
             MULTI_KEY_LOAD_LOGGER.debugf( "Loading collection `%s#%s` by batch-fetch", getLoadable().getNavigableRole().getFullPath(), key );
         }
@@ -139,7 +137,8 @@ public class CollectionBatchLoaderInPredicate
             T[] keysToInitialize,
             int nonNullKeysToInitializeCount,
             SharedSessionContractImplementor session) {
-        if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+        final boolean loggerDebugEnabled = MULTI_KEY_LOAD_LOGGER.isDebugEnabled();
+        if ( loggerDebugEnabled ) {
             MULTI_KEY_LOAD_LOGGER.debugf(
                     "Collection keys to batch-fetch initialize (`%s#%s`) %s",
                     getLoadable().getNavigableRole().getFullPath(),
@@ -175,7 +174,7 @@ public class CollectionBatchLoaderInPredicate
                 (key1, relativePosition, absolutePosition) -> {
                 },
                 (startIndex) -> {
-                    if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+                    if ( loggerDebugEnabled ) {
                         MULTI_KEY_LOAD_LOGGER.debugf(
                                 "Processing collection batch-fetch chunk (`%s#%s`) %s - %s",
                                 getLoadable().getNavigableRole().getFullPath(),
@@ -186,7 +185,7 @@ public class CollectionBatchLoaderInPredicate
                     }
                 },
                 (startIndex, nonNullElementCount) -> {
-                    if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+                    if ( loggerDebugEnabled ) {
                         MULTI_KEY_LOAD_LOGGER.debugf(
                                 "Finishing collection batch-fetch chunk (`%s#%s`) %s - %s (%s)",
                                 getLoadable().getNavigableRole().getFullPath(),

View File

@@ -30,7 +30,6 @@ import org.hibernate.sql.exec.spi.JdbcParameterBindings;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadHelper.hasSingleId;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadHelper.trimIdBatch;
-import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_DEBUG_ENABLED;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER;
 /**
@@ -70,7 +69,7 @@ public class EntityBatchLoaderArrayParam<T>
         super( entityDescriptor, sessionFactory );
         this.domainBatchSize = domainBatchSize;
-        if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+        if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
             MULTI_KEY_LOAD_LOGGER.debugf(
                     "Batch fetching enabled for `%s` (entity) using ARRAY strategy : %s",
                     entityDescriptor.getEntityName(),
@@ -91,7 +90,7 @@ public class EntityBatchLoaderArrayParam<T>
             LockOptions lockOptions,
             Boolean readOnly,
             SharedSessionContractImplementor session) {
-        if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+        if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
             MULTI_KEY_LOAD_LOGGER.debugf( "Batch fetching entity `%s#%s`", getLoadable().getEntityName(), pkValue );
         }

View File

@@ -32,7 +32,6 @@ import org.hibernate.sql.results.spi.ListResultsConsumer;
 import static org.hibernate.internal.util.collections.CollectionHelper.arrayList;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadHelper.hasSingleId;
-import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_DEBUG_ENABLED;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER;
 /**
@@ -68,7 +67,7 @@ public class EntityBatchLoaderInPredicate<T>
         this.domainBatchSize = domainBatchSize;
         this.sqlBatchSize = sqlBatchSize;
-        if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+        if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
             MULTI_KEY_LOAD_LOGGER.debugf(
                     "Batch fetching `%s` entity using padded IN-list : %s (%s)",
                     entityDescriptor.getEntityName(),
@@ -99,7 +98,8 @@ public class EntityBatchLoaderInPredicate<T>
             LockOptions lockOptions,
             Boolean readOnly,
             SharedSessionContractImplementor session) {
-        if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+        final boolean loggerDebugEnabled = MULTI_KEY_LOAD_LOGGER.isDebugEnabled();
+        if ( loggerDebugEnabled ) {
             MULTI_KEY_LOAD_LOGGER.debugf( "Batch loading entity `%s#%s`", getLoadable().getEntityName(), pkValue );
         }
@@ -107,7 +107,7 @@ public class EntityBatchLoaderInPredicate<T>
         if ( hasSingleId( idsToInitialize ) || lockOptions.getLockMode() != LockMode.NONE ) {
             return singleIdLoader.load( pkValue, entityInstance, lockOptions, readOnly, session );
         }
-        if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+        if ( loggerDebugEnabled ) {
             MULTI_KEY_LOAD_LOGGER.debugf( "Ids to batch-fetch initialize (`%s#%s`) %s", getLoadable().getEntityName(), pkValue, Arrays.toString(idsToInitialize) );
         }
@@ -172,7 +172,7 @@ public class EntityBatchLoaderInPredicate<T>
             }
         },
         (startIndex) -> {
-            if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+            if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
                 MULTI_KEY_LOAD_LOGGER.debugf(
                         "Processing entity batch-fetch chunk (`%s#%s`) %s - %s",
                         getLoadable().getEntityName(),

View File

@@ -7,7 +7,6 @@
 package org.hibernate.loader.ast.internal;
 import java.lang.reflect.Array;
-import java.util.Collections;
 import java.util.List;
 import org.hibernate.LockMode;
@@ -64,7 +63,7 @@ public class LoaderHelper {
         final EntityPersister persister = entry.getPersister();
-        if ( LoaderLogging.TRACE_ENABLED ) {
+        if ( LoaderLogging.LOADER_LOGGER.isTraceEnabled() ) {
             LoaderLogging.LOADER_LOGGER.tracef(
                     "Locking `%s( %s )` in `%s` lock-mode",
                     persister.getEntityName(),

View File

@@ -66,7 +66,7 @@ public class MultiIdEntityLoaderArrayParam<E> extends AbstractMultiIdEntityLoade
     @Override
     protected <K> List<E> performOrderedMultiLoad(K[] ids, MultiIdLoadOptions loadOptions, EventSource session) {
-        if ( MultiKeyLoadLogging.MULTI_KEY_LOAD_TRACE_ENABLED ) {
+        if ( MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER.isTraceEnabled() ) {
             MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER.tracef(
                     "MultiIdEntityLoaderArrayParam#performOrderedMultiLoad - %s",
                     getLoadable().getEntityName()
@@ -222,7 +222,7 @@ public class MultiIdEntityLoaderArrayParam<E> extends AbstractMultiIdEntityLoade
             K[] ids,
             MultiIdLoadOptions loadOptions,
             EventSource session) {
-        if ( MultiKeyLoadLogging.MULTI_KEY_LOAD_TRACE_ENABLED ) {
+        if ( MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER.isTraceEnabled() ) {
             MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER.tracef(
                     "MultiIdEntityLoaderArrayParam#performUnorderedMultiLoad - %s",
                     getLoadable().getEntityName()

View File

@@ -28,7 +28,4 @@ public interface MultiKeyLoadLogging {
     String LOGGER_NAME = SubSystemLogging.BASE + ".loader.multi";
     Logger MULTI_KEY_LOAD_LOGGER = Logger.getLogger( LOGGER_NAME );
-    boolean MULTI_KEY_LOAD_TRACE_ENABLED = MULTI_KEY_LOAD_LOGGER.isTraceEnabled();
-    boolean MULTI_KEY_LOAD_DEBUG_ENABLED = MULTI_KEY_LOAD_LOGGER.isDebugEnabled();
 }

View File

@@ -68,7 +68,7 @@ public class MultiNaturalIdLoaderArrayParam<E> implements MultiNaturalIdLoader<E
             return Collections.emptyList();
         }
-        if ( MultiKeyLoadLogging.MULTI_KEY_LOAD_TRACE_ENABLED ) {
+        if ( MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER.isTraceEnabled() ) {
             MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER.tracef( "MultiNaturalIdLoaderArrayParam#multiLoadStarting - `%s`", entityDescriptor.getEntityName() );
         }

View File

@@ -101,13 +101,14 @@ public abstract class BaseNaturalIdLoadAccessImpl<T> implements NaturalIdLoadOpt
         final PersistenceContext persistenceContext = context.getSession().getPersistenceContextInternal();
         final Collection<?> cachedPkResolutions = persistenceContext.getNaturalIdResolutions().getCachedPkResolutions( entityPersister() );
+        final boolean loggerDebugEnabled = LoaderLogging.LOADER_LOGGER.isDebugEnabled();
         for ( Object pk : cachedPkResolutions ) {
             final EntityKey entityKey = context.getSession().generateEntityKey( pk, entityPersister() );
             final Object entity = persistenceContext.getEntity( entityKey );
             final EntityEntry entry = persistenceContext.getEntry( entity );
             if ( entry == null ) {
-                if ( LoaderLogging.DEBUG_ENABLED ) {
+                if ( loggerDebugEnabled ) {
                     LoaderLogging.LOADER_LOGGER.debugf(
                             "Cached natural-id/pk resolution linked to null EntityEntry in persistence context : %s#%s",
                             entityDescriptor.getEntityName(),

View File

@@ -29,7 +29,4 @@ public interface MappingModelCreationLogging extends BasicLogger {
     Logger MAPPING_MODEL_CREATION_LOGGER = Logger.getLogger( LOGGER_NAME );
     MappingModelCreationLogging MAPPING_MODEL_CREATION_MESSAGE_LOGGER = Logger.getMessageLogger( MappingModelCreationLogging.class, LOGGER_NAME );
-    boolean MAPPING_MODEL_CREATION_TRACE_ENABLED = MAPPING_MODEL_CREATION_LOGGER.isTraceEnabled();
-    boolean MAPPING_MODEL_CREATION_DEBUG_ENABLED = MAPPING_MODEL_CREATION_LOGGER.isDebugEnabled();
 }

View File

@@ -22,7 +22,4 @@ import org.jboss.logging.Logger;
 public interface NaturalIdLogging {
     String LOGGER_NAME = SubSystemLogging.BASE + ".mapping.natural_id";
     Logger NATURAL_ID_LOGGER = Logger.getLogger( LOGGER_NAME );
-    boolean DEBUG_ENABLED = NATURAL_ID_LOGGER.isDebugEnabled();
-    boolean TRACE_ENABLED = NATURAL_ID_LOGGER.isTraceEnabled();
 }

View File

@@ -253,7 +253,7 @@ public class BasicValuedCollectionPart
             boolean selected,
             String resultVariable,
             DomainResultCreationState creationState) {
-        if ( ResultsLogger.DEBUG_ENABLED ) {
+        if ( ResultsLogger.RESULTS_LOGGER.isDebugEnabled() ) {
             ResultsLogger.RESULTS_LOGGER.debugf(
                     "Generating Fetch for collection-part : `%s` -> `%s`",
                     collectionDescriptor.getRole(),

View File

@@ -101,7 +101,6 @@ import org.hibernate.type.descriptor.java.MutabilityPlan;
 import org.hibernate.type.descriptor.java.spi.JavaTypeRegistry;
 import org.hibernate.type.spi.TypeConfiguration;
-import static org.hibernate.metamodel.mapping.MappingModelCreationLogging.MAPPING_MODEL_CREATION_DEBUG_ENABLED;
 import static org.hibernate.metamodel.mapping.MappingModelCreationLogging.MAPPING_MODEL_CREATION_MESSAGE_LOGGER;
 import static org.hibernate.sql.ast.spi.SqlExpressionResolver.createColumnReferenceKey;
@@ -1707,7 +1706,7 @@ public class MappingModelCreationHelper {
                 || value instanceof ManyToOne && value.isNullable() && ( (ManyToOne) value ).isIgnoreNotFound() ) {
             fetchTiming = FetchTiming.IMMEDIATE;
             if ( lazy ) {
-                if ( MAPPING_MODEL_CREATION_DEBUG_ENABLED ) {
+                if ( MAPPING_MODEL_CREATION_MESSAGE_LOGGER.isDebugEnabled() ) {
                     MAPPING_MODEL_CREATION_MESSAGE_LOGGER.debugf(
                             "Forcing FetchTiming.IMMEDIATE for to-one association : %s.%s",
                             declaringType.getNavigableRole(),

View File

@@ -22,7 +22,6 @@ import org.hibernate.persister.entity.EntityPersister;
 import org.hibernate.query.sqm.function.SqmFunctionRegistry;
 import static org.hibernate.metamodel.mapping.MappingModelCreationLogging.MAPPING_MODEL_CREATION_MESSAGE_LOGGER;
-import static org.hibernate.metamodel.mapping.MappingModelCreationLogging.MAPPING_MODEL_CREATION_TRACE_ENABLED;
 /**
  * @author Steve Ebersole
@@ -122,7 +121,7 @@ public class MappingModelCreationProcess {
         exceptions.put( callbackEntry, e );
         final String format = "Mapping-model creation encountered (possibly) transient error : %s";
-        if ( MAPPING_MODEL_CREATION_TRACE_ENABLED ) {
+        if ( MAPPING_MODEL_CREATION_MESSAGE_LOGGER.isTraceEnabled() ) {
             MAPPING_MODEL_CREATION_MESSAGE_LOGGER.tracef( e, format, e );
         }
         else {

View File

@@ -642,7 +642,7 @@ public abstract class AbstractCollectionPersister
     }
     protected void logStaticSQL() {
-        if ( !ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+        if ( !ModelMutationLogging.MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
             return;
         }

View File

@@ -57,7 +57,6 @@ import org.hibernate.sql.model.jdbc.JdbcMutationOperation;
 import org.hibernate.sql.model.jdbc.JdbcUpdateMutation;
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
 /**
  * A {@link CollectionPersister} for {@linkplain jakarta.persistence.ElementCollection
@@ -155,7 +154,7 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
                 && !isInverse();
         if ( !performUpdates ) {
-            if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+            if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
                 MODEL_MUTATION_LOGGER.debugf(
                         "Skipping collection row updates - %s",
                         getRolePath()
@@ -169,7 +168,7 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
     private InsertRowsCoordinator buildInsertRowCoordinator() {
         if ( isInverse() || !isRowInsertEnabled() ) {
-            if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+            if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
                 MODEL_MUTATION_LOGGER.debugf(
                         "Skipping collection inserts - %s",
                         getRolePath()
@@ -183,7 +182,7 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
     private DeleteRowsCoordinator buildDeleteRowCoordinator() {
         if ( ! needsRemove() ) {
-            if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+            if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
                 MODEL_MUTATION_LOGGER.debugf(
                         "Skipping collection row deletions - %s",
                         getRolePath()
@@ -197,7 +196,7 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
     private RemoveCoordinator buildDeleteAllCoordinator() {
         if ( ! needsRemove() ) {
-            if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+            if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
                 MODEL_MUTATION_LOGGER.debugf(
                         "Skipping collection removals - %s",
                         getRolePath()

View File

@@ -82,7 +82,6 @@ import org.hibernate.sql.model.jdbc.JdbcUpdateMutation;
 import static org.hibernate.internal.util.collections.CollectionHelper.arrayList;
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
 import static org.hibernate.sql.model.ast.builder.TableUpdateBuilder.NULL;
 /**
@@ -441,7 +440,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
     private InsertRowsCoordinator buildInsertCoordinator() {
         if ( isInverse() || !isRowInsertEnabled() ) {
-            if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+            if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
                 MODEL_MUTATION_LOGGER.debugf(
                         "Skipping collection (re)creation - %s",
                         getRolePath()
@@ -459,7 +458,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
     private UpdateRowsCoordinator buildUpdateCoordinator() {
         if ( !isRowDeleteEnabled() && !isRowInsertEnabled() ) {
-            if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+            if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
                 MODEL_MUTATION_LOGGER.debugf(
                         "Skipping collection row updates - %s",
                         getRolePath()
@@ -477,7 +476,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
     private DeleteRowsCoordinator buildDeleteCoordinator() {
         if ( !needsRemove() ) {
-            if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+            if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
                 MODEL_MUTATION_LOGGER.debugf(
                         "Skipping collection row deletions - %s",
                         getRolePath()
@@ -502,7 +501,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
     private RemoveCoordinator buildDeleteAllCoordinator() {
         if ( ! needsRemove() ) {
-            if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+            if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
                 MODEL_MUTATION_LOGGER.debugf(
                         "Skipping collection removals - %s",
                         getRolePath()

View File

@@ -22,7 +22,6 @@ import org.hibernate.sql.model.internal.MutationOperationGroupSingle;
 import org.hibernate.sql.model.jdbc.JdbcMutationOperation;
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
 /**
  * @author Steve Ebersole
@@ -61,7 +60,7 @@ public class DeleteRowsCoordinatorStandard implements DeleteRowsCoordinator {
             operationGroup = createOperationGroup();
         }
-        if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+        if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
            MODEL_MUTATION_LOGGER.debugf(
                     "Deleting removed collection rows - %s : %s",
                     mutationTarget.getRolePath(),

View File

@@ -27,7 +27,6 @@ import org.hibernate.sql.model.internal.MutationOperationGroupSingle;
 import org.hibernate.sql.model.jdbc.JdbcMutationOperation;
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
 /**
  * OneToMany delete coordinator if the element is a {@link org.hibernate.persister.entity.UnionSubclassEntityPersister}.
@@ -59,7 +58,7 @@ public class DeleteRowsCoordinatorTablePerSubclass implements DeleteRowsCoordina
     @Override
     public void deleteRows(PersistentCollection<?> collection, Object key, SharedSessionContractImplementor session) {
-        if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+        if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
             MODEL_MUTATION_LOGGER.debugf(
                     "Deleting removed collection rows - %s : %s",
                     mutationTarget.getRolePath(),

View File

@@ -22,7 +22,6 @@ import org.hibernate.sql.model.internal.MutationOperationGroupSingle;
 import org.hibernate.sql.model.jdbc.JdbcMutationOperation;
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
 /**
  * @author Steve Ebersole
@@ -67,7 +66,7 @@ public class InsertRowsCoordinatorStandard implements InsertRowsCoordinator {
             operationGroup = createOperationGroup();
         }
-        if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+        if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
             MODEL_MUTATION_LOGGER.debugf(
                     "Inserting collection rows - %s : %s",
                     mutationTarget.getRolePath(),

View File

@@ -25,7 +25,6 @@ import org.hibernate.sql.model.internal.MutationOperationGroupSingle;
 import org.hibernate.sql.model.jdbc.JdbcMutationOperation;
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
 /**
  * OneToMany insert coordinator if the element is a {@link org.hibernate.persister.entity.UnionSubclassEntityPersister}.
@@ -63,7 +62,8 @@ public class InsertRowsCoordinatorTablePerSubclass implements InsertRowsCoordina
             Object id,
             EntryFilter entryChecker,
             SharedSessionContractImplementor session) {
-        if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+        final boolean loggerDebugEnabled = MODEL_MUTATION_LOGGER.isDebugEnabled();
+        if ( loggerDebugEnabled ) {
             MODEL_MUTATION_LOGGER.debugf(
                     "Inserting collection rows - %s : %s",
                     mutationTarget.getRolePath(),
@@ -77,11 +77,13 @@ public class InsertRowsCoordinatorTablePerSubclass implements InsertRowsCoordina
         final Iterator<?> entries = collection.entries( collectionDescriptor );
         collection.preInsert( collectionDescriptor );
         if ( !entries.hasNext() ) {
-            MODEL_MUTATION_LOGGER.debugf(
-                    "No collection rows to insert - %s : %s",
-                    mutationTarget.getRolePath(),
-                    id
-            );
+            if ( loggerDebugEnabled ) {
+                MODEL_MUTATION_LOGGER.debugf(
+                        "No collection rows to insert - %s : %s",
+                        mutationTarget.getRolePath(),
+                        id
+                );
+            }
             return;
         }
         final MutationExecutor[] executors = new MutationExecutor[subclassEntries.length];
@@ -120,7 +122,13 @@ public class InsertRowsCoordinatorTablePerSubclass implements InsertRowsCoordina
             entryCount++;
         }
-        MODEL_MUTATION_LOGGER.debugf( "Done inserting `%s` collection rows : %s", entryCount, mutationTarget.getRolePath() );
+        if ( loggerDebugEnabled ) {
+            MODEL_MUTATION_LOGGER.debugf(
+                    "Done inserting `%s` collection rows : %s",
+                    entryCount,
+                    mutationTarget.getRolePath()
+            );
+        }
     }
     finally {

View File

@@ -19,8 +19,6 @@ import org.hibernate.sql.model.internal.MutationOperationGroupSingle;
 import org.hibernate.sql.model.jdbc.JdbcMutationOperation;
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_TRACE_ENABLED;
 /**
  * Handles complete removal of a collection by its key
@@ -75,7 +73,7 @@ public class RemoveCoordinatorStandard implements RemoveCoordinator {
     @Override
     public void deleteAllRows(Object key, SharedSessionContractImplementor session) {
-        if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+        if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
             MODEL_MUTATION_LOGGER.debugf(
                     "Deleting collection - %s : %s",
                     mutationTarget.getRolePath(),
@@ -123,7 +121,7 @@ public class RemoveCoordinatorStandard implements RemoveCoordinator {
         assert mutationTarget.getTargetPart() != null;
         assert mutationTarget.getTargetPart().getKeyDescriptor() != null;
-        if ( MODEL_MUTATION_LOGGER_TRACE_ENABLED ) {
+        if ( MODEL_MUTATION_LOGGER.isTraceEnabled() ) {
             MODEL_MUTATION_LOGGER.tracef( "Starting RemoveCoordinator#buildOperationGroup - %s", mutationTarget.getRolePath() );
         }

View File

@ -21,8 +21,6 @@ import org.hibernate.sql.model.ast.MutatingTableReference;
import org.hibernate.sql.model.internal.MutationOperationGroupSingle; import org.hibernate.sql.model.internal.MutationOperationGroupSingle;
import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER; import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_TRACE_ENABLED;
/** /**
* OneToMany remove coordinator if the element is a {@link org.hibernate.persister.entity.UnionSubclassEntityPersister}. * OneToMany remove coordinator if the element is a {@link org.hibernate.persister.entity.UnionSubclassEntityPersister}.
@ -66,7 +64,7 @@ public class RemoveCoordinatorTablePerSubclass implements RemoveCoordinator {
@Override @Override
public void deleteAllRows(Object key, SharedSessionContractImplementor session) { public void deleteAllRows(Object key, SharedSessionContractImplementor session) {
if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) { if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
MODEL_MUTATION_LOGGER.debugf( MODEL_MUTATION_LOGGER.debugf(
"Deleting collection - %s : %s", "Deleting collection - %s : %s",
mutationTarget.getRolePath(), mutationTarget.getRolePath(),
@ -129,7 +127,7 @@ public class RemoveCoordinatorTablePerSubclass implements RemoveCoordinator {
assert mutationTarget.getTargetPart() != null; assert mutationTarget.getTargetPart() != null;
assert mutationTarget.getTargetPart().getKeyDescriptor() != null; assert mutationTarget.getTargetPart().getKeyDescriptor() != null;
if ( MODEL_MUTATION_LOGGER_TRACE_ENABLED ) { if ( MODEL_MUTATION_LOGGER.isTraceEnabled() ) {
MODEL_MUTATION_LOGGER.tracef( "Starting RemoveCoordinator#buildOperationGroup - %s", mutationTarget.getRolePath() ); MODEL_MUTATION_LOGGER.tracef( "Starting RemoveCoordinator#buildOperationGroup - %s", mutationTarget.getRolePath() );
} }

View File

@ -36,9 +36,6 @@ public interface QueryLogging extends BasicLogger {
Logger QUERY_LOGGER = Logger.getLogger( LOGGER_NAME ); Logger QUERY_LOGGER = Logger.getLogger( LOGGER_NAME );
QueryLogging QUERY_MESSAGE_LOGGER = Logger.getMessageLogger( QueryLogging.class, LOGGER_NAME ); QueryLogging QUERY_MESSAGE_LOGGER = Logger.getMessageLogger( QueryLogging.class, LOGGER_NAME );
boolean TRACE_ENABLED = QUERY_LOGGER.isTraceEnabled();
boolean DEBUG_ENABLED = QUERY_LOGGER.isDebugEnabled();
static String subLoggerName(String subName) { static String subLoggerName(String subName) {
return LOGGER_NAME + '.' + subName; return LOGGER_NAME + '.' + subName;
} }

View File

@ -34,9 +34,6 @@ public interface HqlLogging extends BasicLogger {
HqlLogging QUERY_LOGGER = Logger.getMessageLogger( HqlLogging.class, LOGGER_NAME ); HqlLogging QUERY_LOGGER = Logger.getMessageLogger( HqlLogging.class, LOGGER_NAME );
boolean TRACE_ENABLED = QUERY_LOGGER.isTraceEnabled();
boolean DEBUG_ENABLED = QUERY_LOGGER.isDebugEnabled();
static String subLoggerName(String subName) { static String subLoggerName(String subName) {
return LOGGER_NAME + '.' + subName; return LOGGER_NAME + '.' + subName;
} }

View File

@ -124,9 +124,7 @@ public class DomainResultCreationStateImpl
} }
public void disallowPositionalSelections() { public void disallowPositionalSelections() {
-	if ( ResultsLogger.DEBUG_ENABLED ) {
-		ResultsLogger.RESULTS_LOGGER.debugf( "Disallowing positional selections : %s", stateIdentifier );
-	}
+	ResultsLogger.RESULTS_LOGGER.debugf( "Disallowing positional selections : %s", stateIdentifier );
this.allowPositionalSelections = false; this.allowPositionalSelections = false;
} }

View File

@ -21,7 +21,4 @@ public interface SqlTreeCreationLogger {
String LOGGER_NAME = SubSystemLogging.BASE + ".sql.ast.create"; String LOGGER_NAME = SubSystemLogging.BASE + ".sql.ast.create";
Logger LOGGER = Logger.getLogger( LOGGER_NAME ); Logger LOGGER = Logger.getLogger( LOGGER_NAME );
boolean DEBUG_ENABLED = LOGGER.isDebugEnabled();
boolean TRACE_ENABLED = LOGGER.isTraceEnabled();
} }

View File

@ -36,7 +36,7 @@ import org.hibernate.sql.ast.tree.update.UpdateStatement;
*/ */
public class SqlTreePrinter { public class SqlTreePrinter {
public static void logSqlAst(Statement sqlAstStatement) { public static void logSqlAst(Statement sqlAstStatement) {
if ( ! SqlAstTreeLogger.DEBUG_ENABLED ) { if ( ! SqlAstTreeLogger.INSTANCE.isDebugEnabled() ) {
return; return;
} }
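SqlTreePrinter keeps its early-return guard but now asks the logger directly instead of reading the removed SqlAstTreeLogger.DEBUG_ENABLED constant. A small sketch of the early-return form, with an invented TreePrinter class standing in for the real printer:

// Sketch, not Hibernate code: TreePrinter and printTree() are invented names; it
// shows the early-return form of the same call-time level check.
import org.jboss.logging.Logger;

public final class TreePrinter {

	private static final Logger LOGGER = Logger.getLogger( "org.example.ast.print" );

	private TreePrinter() {
	}

	public static void printTree(Object tree) {
		if ( !LOGGER.isDebugEnabled() ) {
			// Skip the potentially expensive rendering entirely while debug
			// logging is disabled at the moment of the call.
			return;
		}
		final String rendered = String.valueOf( tree );
		LOGGER.debug( rendered );
	}
}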

View File

@ -30,9 +30,7 @@ public class SqlAliasBaseImpl implements SqlAliasBase {
public String generateNewAlias() { public String generateNewAlias() {
synchronized (this) { synchronized (this) {
final String alias = stem + "_" + ( aliasCount++ ); final String alias = stem + "_" + ( aliasCount++ );
-	if ( SqlTreeCreationLogger.DEBUG_ENABLED ) {
-		SqlTreeCreationLogger.LOGGER.debugf( "Created new SQL alias : %s", alias );
-	}
+	SqlTreeCreationLogger.LOGGER.debugf( "Created new SQL alias : %s", alias );
return alias; return alias;
} }
} }

View File

@ -33,6 +33,4 @@ public interface SqlAstTreeLogger extends BasicLogger {
*/ */
SqlAstTreeLogger INSTANCE = Logger.getMessageLogger( SqlAstTreeLogger.class, LOGGER_NAME ); SqlAstTreeLogger INSTANCE = Logger.getMessageLogger( SqlAstTreeLogger.class, LOGGER_NAME );
boolean DEBUG_ENABLED = INSTANCE.isDebugEnabled();
boolean TRACE_ENABLED = INSTANCE.isTraceEnabled();
} }

View File

@ -24,7 +24,4 @@ public final class ModelMutationLogging {
public static final String NAME = SubSystemLogging.BASE + ".jdbc.mutation"; public static final String NAME = SubSystemLogging.BASE + ".jdbc.mutation";
public static final Logger MODEL_MUTATION_LOGGER = Logger.getLogger( NAME ); public static final Logger MODEL_MUTATION_LOGGER = Logger.getLogger( NAME );
public static final boolean MODEL_MUTATION_LOGGER_TRACE_ENABLED = MODEL_MUTATION_LOGGER.isTraceEnabled();
public static final boolean MODEL_MUTATION_LOGGER_DEBUG_ENABLED = MODEL_MUTATION_LOGGER.isDebugEnabled();
} }
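ModelMutationLogging above loses its MODEL_MUTATION_LOGGER_TRACE_ENABLED / MODEL_MUTATION_LOGGER_DEBUG_ENABLED constants because a static field captures the level exactly once, at class initialization, and never reflects later configuration changes. A minimal sketch of the before/after shape, using a hypothetical SubsystemLogging holder rather than any of the Hibernate interfaces in this commit:

// Minimal sketch of the pattern applied throughout this commit; SubsystemLogging
// and logSomething() are hypothetical, not one of the Hibernate interfaces above.
import org.jboss.logging.Logger;

public interface SubsystemLogging {
	String NAME = "org.example.subsystem";
	Logger LOGGER = Logger.getLogger( NAME );

	// The removed style of field, e.g.
	//     boolean DEBUG_ENABLED = LOGGER.isDebugEnabled();
	// captured the level once, at interface initialization, and ignored any
	// later runtime change to the logging configuration.

	static void logSomething(Object detail) {
		// Ask the logger at call time instead.
		if ( LOGGER.isDebugEnabled() ) {
			LOGGER.debugf( "Something happened: %s", detail );
		}
	}
}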

View File

@ -38,9 +38,6 @@ public interface ResultsLogger extends BasicLogger {
// todo (6.0) : make sure sql result processing classes use this logger // todo (6.0) : make sure sql result processing classes use this logger
boolean TRACE_ENABLED = RESULTS_LOGGER.isTraceEnabled();
boolean DEBUG_ENABLED = RESULTS_LOGGER.isDebugEnabled();
static String subLoggerName(String subName) { static String subLoggerName(String subName) {
return LOGGER_NAME + "." + subName; return LOGGER_NAME + "." + subName;
} }

View File

@ -17,7 +17,6 @@ import org.hibernate.sql.results.ResultsLogger;
import org.jboss.logging.Logger; import org.jboss.logging.Logger;
import static org.hibernate.sql.results.graph.DomainResultGraphPrinter.Logging.AST_LOGGER; import static org.hibernate.sql.results.graph.DomainResultGraphPrinter.Logging.AST_LOGGER;
import static org.hibernate.sql.results.graph.DomainResultGraphPrinter.Logging.TRACE_ENABLED;
/** /**
* Printer for DomainResult graphs * Printer for DomainResult graphs
@ -32,8 +31,6 @@ public class DomainResultGraphPrinter {
interface Logging { interface Logging {
String LOGGER_NAME = ResultsLogger.LOGGER_NAME + ".graph.AST"; String LOGGER_NAME = ResultsLogger.LOGGER_NAME + ".graph.AST";
Logger AST_LOGGER = Logger.getLogger( LOGGER_NAME ); Logger AST_LOGGER = Logger.getLogger( LOGGER_NAME );
boolean DEBUG_ENABLED = AST_LOGGER.isDebugEnabled();
boolean TRACE_ENABLED = AST_LOGGER.isTraceEnabled();
} }
public static void logDomainResultGraph(List<DomainResult<?>> domainResults) { public static void logDomainResultGraph(List<DomainResult<?>> domainResults) {
@ -41,7 +38,7 @@ public class DomainResultGraphPrinter {
} }
public static void logDomainResultGraph(String header, List<DomainResult<?>> domainResults) { public static void logDomainResultGraph(String header, List<DomainResult<?>> domainResults) {
if ( ! Logging.DEBUG_ENABLED ) { if ( !AST_LOGGER.isDebugEnabled() ) {
return; return;
} }
@ -69,7 +66,7 @@ public class DomainResultGraphPrinter {
AST_LOGGER.debug( buffer.toString() ); AST_LOGGER.debug( buffer.toString() );
if ( TRACE_ENABLED ) { if ( AST_LOGGER.isTraceEnabled() ) {
AST_LOGGER.tracef( new Exception(), "Stack trace calling DomainResultGraphPrinter" ); AST_LOGGER.tracef( new Exception(), "Stack trace calling DomainResultGraphPrinter" );
} }
} }

View File

@ -58,9 +58,7 @@ public class BasicResultAssembler<J> implements DomainResultAssembler<J> {
JdbcValuesSourceProcessingOptions options) { JdbcValuesSourceProcessingOptions options) {
final Object jdbcValue = extractRawValue( rowProcessingState ); final Object jdbcValue = extractRawValue( rowProcessingState );
-	if ( ResultsLogger.DEBUG_ENABLED ) {
-		ResultsLogger.RESULTS_LOGGER.debugf( "Extracted JDBC value [%d] - [%s]", valuesArrayPosition, jdbcValue );
-	}
+	ResultsLogger.RESULTS_LOGGER.debugf( "Extracted JDBC value [%d] - [%s]", valuesArrayPosition, jdbcValue );
if ( valueConverter != null ) { if ( valueConverter != null ) {
if ( jdbcValue != null ) { if ( jdbcValue != null ) {

View File

@ -26,7 +26,4 @@ public interface CollectionLoadingLogger extends BasicLogger {
* Static access to the logging instance * Static access to the logging instance
*/ */
Logger COLL_LOAD_LOGGER = LoadingLogger.subLogger( LOGGER_NAME ); Logger COLL_LOAD_LOGGER = LoadingLogger.subLogger( LOGGER_NAME );
boolean TRACE_ENABLED = COLL_LOAD_LOGGER.isTraceEnabled();
boolean DEBUG_ENABLED = COLL_LOAD_LOGGER.isDebugEnabled();
} }

View File

@ -80,7 +80,7 @@ public abstract class AbstractCollectionInitializer implements CollectionInitial
collectionKeyValue collectionKeyValue
); );
if ( CollectionLoadingLogger.DEBUG_ENABLED ) { if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isDebugEnabled() ) {
CollectionLoadingLogger.COLL_LOAD_LOGGER.debugf( CollectionLoadingLogger.COLL_LOAD_LOGGER.debugf(
"(%s) Current row collection key : %s", "(%s) Current row collection key : %s",
this.getClass().getSimpleName(), this.getClass().getSimpleName(),

View File

@ -70,7 +70,7 @@ public abstract class AbstractImmediateCollectionInitializer extends AbstractCol
return; return;
} }
if ( CollectionLoadingLogger.TRACE_ENABLED ) { if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isTraceEnabled() ) {
COLL_LOAD_LOGGER.tracef( COLL_LOAD_LOGGER.tracef(
"(%s) Beginning Initializer#resolveInstance for collection : %s", "(%s) Beginning Initializer#resolveInstance for collection : %s",
getSimpleConcreteImplName(), getSimpleConcreteImplName(),
@ -93,7 +93,7 @@ public abstract class AbstractImmediateCollectionInitializer extends AbstractCol
if ( existingLoadingEntry != null ) { if ( existingLoadingEntry != null ) {
collectionInstance = existingLoadingEntry.getCollectionInstance(); collectionInstance = existingLoadingEntry.getCollectionInstance();
if ( CollectionLoadingLogger.DEBUG_ENABLED ) { if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isDebugEnabled() ) {
COLL_LOAD_LOGGER.debugf( COLL_LOAD_LOGGER.debugf(
"(%s) Found existing loading collection entry [%s]; using loading collection instance - %s", "(%s) Found existing loading collection entry [%s]; using loading collection instance - %s",
getSimpleConcreteImplName(), getSimpleConcreteImplName(),
@ -108,7 +108,7 @@ public abstract class AbstractImmediateCollectionInitializer extends AbstractCol
} }
else { else {
// the entity is already being loaded elsewhere // the entity is already being loaded elsewhere
if ( CollectionLoadingLogger.DEBUG_ENABLED ) { if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isDebugEnabled() ) {
COLL_LOAD_LOGGER.debugf( COLL_LOAD_LOGGER.debugf(
"(%s) Collection [%s] being loaded by another initializer [%s] - skipping processing", "(%s) Collection [%s] being loaded by another initializer [%s] - skipping processing",
getSimpleConcreteImplName(), getSimpleConcreteImplName(),
@ -130,7 +130,7 @@ public abstract class AbstractImmediateCollectionInitializer extends AbstractCol
// it is already initialized we have nothing to do // it is already initialized we have nothing to do
if ( collectionInstance.wasInitialized() ) { if ( collectionInstance.wasInitialized() ) {
if ( CollectionLoadingLogger.DEBUG_ENABLED ) { if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isDebugEnabled() ) {
COLL_LOAD_LOGGER.debugf( COLL_LOAD_LOGGER.debugf(
"(%s) Found existing collection instance [%s] in Session; skipping processing - [%s]", "(%s) Found existing collection instance [%s] in Session; skipping processing - [%s]",
getSimpleConcreteImplName(), getSimpleConcreteImplName(),
@ -155,7 +155,7 @@ public abstract class AbstractImmediateCollectionInitializer extends AbstractCol
// it is already initialized we have nothing to do // it is already initialized we have nothing to do
if ( collectionInstance.wasInitialized() ) { if ( collectionInstance.wasInitialized() ) {
if ( CollectionLoadingLogger.DEBUG_ENABLED ) { if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isDebugEnabled() ) {
COLL_LOAD_LOGGER.debugf( COLL_LOAD_LOGGER.debugf(
"(%s) Found existing unowned collection instance [%s] in Session; skipping processing - [%s]", "(%s) Found existing unowned collection instance [%s] in Session; skipping processing - [%s]",
getSimpleConcreteImplName(), getSimpleConcreteImplName(),
@ -184,7 +184,7 @@ public abstract class AbstractImmediateCollectionInitializer extends AbstractCol
session session
); );
if ( CollectionLoadingLogger.DEBUG_ENABLED ) { if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isDebugEnabled() ) {
COLL_LOAD_LOGGER.debugf( COLL_LOAD_LOGGER.debugf(
"(%s) Created new collection wrapper [%s] : %s", "(%s) Created new collection wrapper [%s] : %s",
getSimpleConcreteImplName(), getSimpleConcreteImplName(),
@ -203,7 +203,7 @@ public abstract class AbstractImmediateCollectionInitializer extends AbstractCol
} }
if ( responsibility != null ) { if ( responsibility != null ) {
if ( CollectionLoadingLogger.DEBUG_ENABLED ) { if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isDebugEnabled() ) {
COLL_LOAD_LOGGER.debugf( COLL_LOAD_LOGGER.debugf(
"(%s) Responsible for loading collection [%s] : %s", "(%s) Responsible for loading collection [%s] : %s",
getSimpleConcreteImplName(), getSimpleConcreteImplName(),
@ -285,7 +285,7 @@ public abstract class AbstractImmediateCollectionInitializer extends AbstractCol
if ( collectionValueKey != null ) { if ( collectionValueKey != null ) {
// the row contains an element in the collection... // the row contains an element in the collection...
if ( CollectionLoadingLogger.DEBUG_ENABLED ) { if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isDebugEnabled() ) {
COLL_LOAD_LOGGER.debugf( COLL_LOAD_LOGGER.debugf(
"(%s) Reading element from row for collection [%s] -> %s", "(%s) Reading element from row for collection [%s] -> %s",
getSimpleConcreteImplName(), getSimpleConcreteImplName(),

View File

@ -31,7 +31,4 @@ public interface EmbeddableLoadingLogger extends BasicLogger {
*/ */
Logger EMBEDDED_LOAD_LOGGER = LoadingLogger.subLogger( LOGGER_NAME ); Logger EMBEDDED_LOAD_LOGGER = LoadingLogger.subLogger( LOGGER_NAME );
boolean TRACE_ENABLED = EMBEDDED_LOAD_LOGGER.isTraceEnabled();
boolean DEBUG_ENABLED = EMBEDDED_LOAD_LOGGER.isDebugEnabled();
} }

View File

@ -273,7 +273,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
// AbstractEntityPersister#getSequentialSelect in 5.2 // AbstractEntityPersister#getSequentialSelect in 5.2
if ( entityKey == null ) { if ( entityKey == null ) {
if ( EntityLoadingLogging.TRACE_ENABLED ) { if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isTraceEnabled() ) {
EntityLoadingLogging.ENTITY_LOADING_LOGGER.tracef( EntityLoadingLogging.ENTITY_LOADING_LOGGER.tracef(
"(%s) Beginning Initializer#resolveKey process for entity : %s", "(%s) Beginning Initializer#resolveKey process for entity : %s",
StringHelper.collapse( this.getClass().getName() ), StringHelper.collapse( this.getClass().getName() ),
@ -296,7 +296,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
assert missing; assert missing;
} }
else { else {
if ( EntityLoadingLogging.DEBUG_ENABLED ) { if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf( EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
"(%s) Hydrated EntityKey (%s): %s", "(%s) Hydrated EntityKey (%s): %s",
getSimpleConcreteImplName(), getSimpleConcreteImplName(),
@ -481,7 +481,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
LoadingEntityEntry existingLoadingEntry, LoadingEntityEntry existingLoadingEntry,
Object entityIdentifier) { Object entityIdentifier) {
if ( EntityLoadingLogging.TRACE_ENABLED ) { if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isTraceEnabled() ) {
EntityLoadingLogging.ENTITY_LOADING_LOGGER.tracef( EntityLoadingLogging.ENTITY_LOADING_LOGGER.tracef(
"(%s) Beginning Initializer#resolveInstance process for entity (%s) : %s", "(%s) Beginning Initializer#resolveInstance process for entity (%s) : %s",
StringHelper.collapse( this.getClass().getName() ), StringHelper.collapse( this.getClass().getName() ),
@ -604,7 +604,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
private void setIsOwningInitializer(Object entityIdentifier,LoadingEntityEntry existingLoadingEntry) { private void setIsOwningInitializer(Object entityIdentifier,LoadingEntityEntry existingLoadingEntry) {
if ( existingLoadingEntry != null ) { if ( existingLoadingEntry != null ) {
if ( EntityLoadingLogging.DEBUG_ENABLED ) { if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf( EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
"(%s) Found existing loading entry [%s] - using loading instance", "(%s) Found existing loading entry [%s] - using loading instance",
getSimpleConcreteImplName(), getSimpleConcreteImplName(),
@ -634,7 +634,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
} }
else { else {
// the entity is already being loaded elsewhere // the entity is already being loaded elsewhere
if ( EntityLoadingLogging.DEBUG_ENABLED ) { if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf( EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
"(%s) Entity [%s] being loaded by another initializer [%s] - skipping processing", "(%s) Entity [%s] being loaded by another initializer [%s] - skipping processing",
getSimpleConcreteImplName(), getSimpleConcreteImplName(),
@ -672,7 +672,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
protected Object instantiateEntity(Object entityIdentifier, SharedSessionContractImplementor session) { protected Object instantiateEntity(Object entityIdentifier, SharedSessionContractImplementor session) {
final Object instance = session.instantiate( concreteDescriptor, entityKey.getIdentifier() ); final Object instance = session.instantiate( concreteDescriptor, entityKey.getIdentifier() );
if ( EntityLoadingLogging.DEBUG_ENABLED ) { if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf( EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
"(%s) Created new entity instance [%s] : %s", "(%s) Created new entity instance [%s] : %s",
getSimpleConcreteImplName(), getSimpleConcreteImplName(),
@ -816,7 +816,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
final SharedSessionContractImplementor session = rowProcessingState.getSession(); final SharedSessionContractImplementor session = rowProcessingState.getSession();
final PersistenceContext persistenceContext = session.getPersistenceContextInternal(); final PersistenceContext persistenceContext = session.getPersistenceContextInternal();
if ( EntityLoadingLogging.TRACE_ENABLED ) { if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isTraceEnabled() ) {
EntityLoadingLogging.ENTITY_LOADING_LOGGER.tracef( EntityLoadingLogging.ENTITY_LOADING_LOGGER.tracef(
"(%s) Beginning Initializer#initializeInstance process for entity %s", "(%s) Beginning Initializer#initializeInstance process for entity %s",
getSimpleConcreteImplName(), getSimpleConcreteImplName(),
@ -872,7 +872,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
concreteDescriptor.afterInitialize( toInitialize, session ); concreteDescriptor.afterInitialize( toInitialize, session );
if ( EntityLoadingLogging.DEBUG_ENABLED ) { if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf( EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
"(%s) Done materializing entityInstance : %s", "(%s) Done materializing entityInstance : %s",
getSimpleConcreteImplName(), getSimpleConcreteImplName(),
@ -958,7 +958,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
EntityDataAccess cacheAccess) { EntityDataAccess cacheAccess) {
final SessionFactoryImplementor factory = session.getFactory(); final SessionFactoryImplementor factory = session.getFactory();
if ( EntityLoadingLogging.DEBUG_ENABLED ) { if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf( EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
"(%S) Adding entityInstance to second-level cache: %s", "(%S) Adding entityInstance to second-level cache: %s",
getSimpleConcreteImplName(), getSimpleConcreteImplName(),

View File

@ -22,7 +22,4 @@ import org.jboss.logging.Logger;
public interface EntityLoadingLogging { public interface EntityLoadingLogging {
String LOGGER_NAME = LoadingLogger.LOGGER_NAME + ".entity"; String LOGGER_NAME = LoadingLogger.LOGGER_NAME + ".entity";
Logger ENTITY_LOADING_LOGGER = Logger.getLogger( LOGGER_NAME ); Logger ENTITY_LOADING_LOGGER = Logger.getLogger( LOGGER_NAME );
boolean TRACE_ENABLED = ENTITY_LOADING_LOGGER.isTraceEnabled();
boolean DEBUG_ENABLED = ENTITY_LOADING_LOGGER.isDebugEnabled();
} }

View File

@ -120,7 +120,7 @@ public class EntitySelectFetchInitializer extends AbstractFetchParentAccess impl
return; return;
} }
if ( EntityLoadingLogging.TRACE_ENABLED ) { if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isTraceEnabled() ) {
EntityLoadingLogging.ENTITY_LOADING_LOGGER.tracef( EntityLoadingLogging.ENTITY_LOADING_LOGGER.tracef(
"(%s) Beginning Initializer#resolveInstance process for entity (%s) : %s", "(%s) Beginning Initializer#resolveInstance process for entity (%s) : %s",
StringHelper.collapse( this.getClass().getName() ), StringHelper.collapse( this.getClass().getName() ),
@ -144,7 +144,7 @@ public class EntitySelectFetchInitializer extends AbstractFetchParentAccess impl
final LoadingEntityEntry existingLoadingEntry = loadContexts.findLoadingEntityEntry( entityKey ); final LoadingEntityEntry existingLoadingEntry = loadContexts.findLoadingEntityEntry( entityKey );
if ( existingLoadingEntry != null ) { if ( existingLoadingEntry != null ) {
if ( EntityLoadingLogging.DEBUG_ENABLED ) { if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf( EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
"(%s) Found existing loading entry [%s] - using loading instance", "(%s) Found existing loading entry [%s] - using loading instance",
CONCRETE_NAME, CONCRETE_NAME,
@ -159,7 +159,7 @@ public class EntitySelectFetchInitializer extends AbstractFetchParentAccess impl
final EntityInitializer entityInitializer = existingLoadingEntry.getEntityInitializer(); final EntityInitializer entityInitializer = existingLoadingEntry.getEntityInitializer();
if ( entityInitializer != this ) { if ( entityInitializer != this ) {
// the entity is already being loaded elsewhere // the entity is already being loaded elsewhere
if ( EntityLoadingLogging.DEBUG_ENABLED ) { if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf( EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
"(%s) Entity [%s] being loaded by another initializer [%s] - skipping processing", "(%s) Entity [%s] being loaded by another initializer [%s] - skipping processing",
CONCRETE_NAME, CONCRETE_NAME,
@ -180,7 +180,7 @@ public class EntitySelectFetchInitializer extends AbstractFetchParentAccess impl
} }
} }
if ( EntityLoadingLogging.DEBUG_ENABLED ) { if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf( EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
"(%s) Invoking session#internalLoad for entity (%s) : %s", "(%s) Invoking session#internalLoad for entity (%s) : %s",
CONCRETE_NAME, CONCRETE_NAME,
@ -206,7 +206,7 @@ public class EntitySelectFetchInitializer extends AbstractFetchParentAccess impl
); );
} }
if ( EntityLoadingLogging.DEBUG_ENABLED ) { if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf( EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
"(%s) Entity [%s] : %s has being loaded by session.internalLoad.", "(%s) Entity [%s] : %s has being loaded by session.internalLoad.",
CONCRETE_NAME, CONCRETE_NAME,

View File

@ -141,7 +141,7 @@ public class ResultsHelper {
} }
private static void logInitializers(Map<NavigablePath, Initializer> initializerMap) { private static void logInitializers(Map<NavigablePath, Initializer> initializerMap) {
if ( ! ResultsLogger.DEBUG_ENABLED ) { if ( ! ResultsLogger.RESULTS_MESSAGE_LOGGER.isDebugEnabled() ) {
return; return;
} }
@ -241,9 +241,7 @@ public class ResultsHelper {
if ( session.getLoadQueryInfluencers().hasEnabledFilters() && collectionDescriptor.isAffectedByEnabledFilters( session ) ) { if ( session.getLoadQueryInfluencers().hasEnabledFilters() && collectionDescriptor.isAffectedByEnabledFilters( session ) ) {
// some filters affecting the collection are enabled on the session, so do not do the put into the cache. // some filters affecting the collection are enabled on the session, so do not do the put into the cache.
-	if ( LOG.isDebugEnabled() ) {
-		LOG.debug( "Refusing to add to cache due to enabled filters" );
-	}
+	LOG.debug( "Refusing to add to cache due to enabled filters" );
// todo : add the notion of enabled filters to the cache key to differentiate filtered collections from non-filtered; // todo : add the notion of enabled filters to the cache key to differentiate filtered collections from non-filtered;
// DefaultInitializeCollectionEventHandler.initializeCollectionFromCache() (which makes sure to not read from // DefaultInitializeCollectionEventHandler.initializeCollectionFromCache() (which makes sure to not read from
// cache with enabled filters). // cache with enabled filters).
@ -268,9 +266,7 @@ public class ResultsHelper {
} }
} }
if ( collectionOwner == null ) { if ( collectionOwner == null ) {
-	if ( LOG.isDebugEnabled() ) {
-		LOG.debugf( "Unable to resolve owner of loading collection for second level caching. Refusing to add to cache.");
-	}
+	LOG.debugf( "Unable to resolve owner of loading collection for second level caching. Refusing to add to cache.");
return; return;
} }
} }

View File

@ -19,49 +19,47 @@ public interface JdbcBindingLogging {
Logger LOGGER = Logger.getLogger( NAME ); Logger LOGGER = Logger.getLogger( NAME );
boolean TRACE_ENABLED = LOGGER.isTraceEnabled();
boolean DEBUG_ENABLED = LOGGER.isDebugEnabled();
static void logBinding(int jdbcPosition, int typeCode, Object value) {
-	assert TRACE_ENABLED;
-	LOGGER.tracef(
-			"binding parameter [%s] as [%s] - [%s]",
-			jdbcPosition,
-			JdbcTypeNameMapper.getTypeName( typeCode ),
-			value
-	);
+	if ( LOGGER.isTraceEnabled() ) {
+		LOGGER.tracef(
+				"binding parameter [%s] as [%s] - [%s]",
+				jdbcPosition,
+				JdbcTypeNameMapper.getTypeName( typeCode ),
+				value
+		);
+	}
}

static void logNullBinding(int jdbcPosition, int typeCode) {
-	assert TRACE_ENABLED;
-	LOGGER.tracef(
-			"binding parameter [%s] as [%s] - [null]",
-			jdbcPosition,
-			JdbcTypeNameMapper.getTypeName( typeCode )
-	);
+	if ( LOGGER.isTraceEnabled() ) {
+		LOGGER.tracef(
+				"binding parameter [%s] as [%s] - [null]",
+				jdbcPosition,
+				JdbcTypeNameMapper.getTypeName( typeCode )
+		);
+	}
}

static void logBinding(String callableParameterName, int typeCode, Object value) {
-	assert TRACE_ENABLED;
-	LOGGER.tracef(
-			"binding parameter [%s] as [%s] - [%s]",
-			callableParameterName,
-			JdbcTypeNameMapper.getTypeName( typeCode ),
-			value
-	);
+	if ( LOGGER.isTraceEnabled() ) {
+		LOGGER.tracef(
+				"binding parameter [%s] as [%s] - [%s]",
+				callableParameterName,
+				JdbcTypeNameMapper.getTypeName( typeCode ),
+				value
+		);
+	}
}

static void logNullBinding(String callableParameterName, int typeCode) {
-	assert TRACE_ENABLED;
-	LOGGER.tracef(
-			"binding parameter [%s] as [%s] - [null]",
-			callableParameterName,
-			JdbcTypeNameMapper.getTypeName( typeCode )
-	);
+	if ( LOGGER.isTraceEnabled() ) {
+		LOGGER.tracef(
+				"binding parameter [%s] as [%s] - [null]",
+				callableParameterName,
+				JdbcTypeNameMapper.getTypeName( typeCode )
+		);
+	}
}
}
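The JdbcBindingLogging helpers above previously relied on assert TRACE_ENABLED, i.e. on callers having checked a static flag first; after the change each helper guards itself with a call-time isTraceEnabled() check, and callers such as BasicBinder keep their own guard. A rough sketch of that arrangement, with invented BindingLogHelper and bindValue names (not Hibernate API):

// Rough sketch only: BindingLogHelper and bindValue() are invented names; the guard
// placement mirrors the changed JdbcBindingLogging helpers and their callers.
import org.jboss.logging.Logger;

public final class BindingLogHelper {

	private static final Logger LOGGER = Logger.getLogger( "org.example.jdbc.bind" );

	private BindingLogHelper() {
	}

	// The helper now guards itself, so it stays cheap and safe even if a caller
	// forgets to check the level first.
	public static void logBinding(int position, String typeName, Object value) {
		if ( LOGGER.isTraceEnabled() ) {
			LOGGER.tracef( "binding parameter [%s] as [%s] - [%s]", position, typeName, value );
		}
	}

	public static void bindValue(int position, String typeName, Object value) {
		// Callers may still check the level up front to skip preparing log arguments at all.
		if ( LOGGER.isTraceEnabled() ) {
			logBinding( position, typeName, value );
		}
		// ... the actual JDBC binding would happen here ...
	}
}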

View File

@ -19,49 +19,46 @@ public interface JdbcExtractingLogging {
Logger LOGGER = Logger.getLogger( NAME ); Logger LOGGER = Logger.getLogger( NAME );
boolean TRACE_ENABLED = LOGGER.isTraceEnabled();
boolean DEBUG_ENABLED = LOGGER.isDebugEnabled();
static void logExtracted(int jdbcPosition, int typeCode, Object value) {
-	assert TRACE_ENABLED;
-	JdbcExtractingLogging.LOGGER.tracef(
-			"extracted value ([%s] : [%s]) - [%s]",
-			jdbcPosition,
-			JdbcTypeNameMapper.getTypeName( typeCode ),
-			value
-	);
+	if ( LOGGER.isTraceEnabled() ) {
+		JdbcExtractingLogging.LOGGER.tracef(
+				"extracted value ([%s] : [%s]) - [%s]",
+				jdbcPosition,
+				JdbcTypeNameMapper.getTypeName( typeCode ),
+				value
+		);
+	}
}

static void logNullExtracted(int jdbcPosition, int typeCode) {
-	assert TRACE_ENABLED;
-	JdbcExtractingLogging.LOGGER.tracef(
-			"extracted value ([%s] : [%s]) - [null]",
-			jdbcPosition,
-			JdbcTypeNameMapper.getTypeName( typeCode )
-	);
+	if ( LOGGER.isTraceEnabled() ) {
+		JdbcExtractingLogging.LOGGER.tracef(
+				"extracted value ([%s] : [%s]) - [null]",
+				jdbcPosition,
+				JdbcTypeNameMapper.getTypeName( typeCode )
+		);
+	}
}

static void logExtracted(String callableParamName, int typeCode, Object value) {
-	assert TRACE_ENABLED;
-	JdbcExtractingLogging.LOGGER.tracef(
-			"extracted value ([%s] : [%s]) - [%s]",
-			callableParamName,
-			JdbcTypeNameMapper.getTypeName( typeCode ),
-			value
-	);
+	if ( LOGGER.isTraceEnabled() ) {
+		JdbcExtractingLogging.LOGGER.tracef(
+				"extracted value ([%s] : [%s]) - [%s]",
+				callableParamName,
+				JdbcTypeNameMapper.getTypeName( typeCode ),
+				value
+		);
+	}
}

static void logNullExtracted(String callableParamName, int typeCode) {
-	assert TRACE_ENABLED;
-	JdbcExtractingLogging.LOGGER.tracef(
-			"extracted value ([%s] : [%s]) - [null]",
-			callableParamName,
-			JdbcTypeNameMapper.getTypeName( typeCode )
-	);
+	if ( LOGGER.isTraceEnabled() ) {
+		JdbcExtractingLogging.LOGGER.tracef(
+				"extracted value ([%s] : [%s]) - [null]",
+				callableParamName,
+				JdbcTypeNameMapper.getTypeName( typeCode )
+		);
+	}
}
}

View File

@ -42,7 +42,7 @@ public abstract class BasicBinder<J> implements ValueBinder<J>, Serializable {
@Override @Override
public final void bind(PreparedStatement st, J value, int index, WrapperOptions options) throws SQLException { public final void bind(PreparedStatement st, J value, int index, WrapperOptions options) throws SQLException {
if ( value == null ) { if ( value == null ) {
if ( JdbcBindingLogging.TRACE_ENABLED ) { if ( JdbcBindingLogging.LOGGER.isTraceEnabled() ) {
JdbcBindingLogging.logNullBinding( JdbcBindingLogging.logNullBinding(
index, index,
jdbcType.getDefaultSqlTypeCode() jdbcType.getDefaultSqlTypeCode()
@ -51,7 +51,7 @@ public abstract class BasicBinder<J> implements ValueBinder<J>, Serializable {
doBindNull( st, index, options ); doBindNull( st, index, options );
} }
else { else {
if ( JdbcBindingLogging.TRACE_ENABLED ) { if ( JdbcBindingLogging.LOGGER.isTraceEnabled() ) {
JdbcBindingLogging.logBinding( JdbcBindingLogging.logBinding(
index, index,
jdbcType.getDefaultSqlTypeCode(), jdbcType.getDefaultSqlTypeCode(),
@ -65,7 +65,7 @@ public abstract class BasicBinder<J> implements ValueBinder<J>, Serializable {
@Override @Override
public final void bind(CallableStatement st, J value, String name, WrapperOptions options) throws SQLException { public final void bind(CallableStatement st, J value, String name, WrapperOptions options) throws SQLException {
if ( value == null ) { if ( value == null ) {
if ( JdbcBindingLogging.TRACE_ENABLED ) { if ( JdbcBindingLogging.LOGGER.isTraceEnabled() ) {
JdbcBindingLogging.logNullBinding( JdbcBindingLogging.logNullBinding(
name, name,
jdbcType.getDefaultSqlTypeCode() jdbcType.getDefaultSqlTypeCode()
@ -74,7 +74,7 @@ public abstract class BasicBinder<J> implements ValueBinder<J>, Serializable {
doBindNull( st, name, options ); doBindNull( st, name, options );
} }
else { else {
if ( JdbcBindingLogging.TRACE_ENABLED ) { if ( JdbcBindingLogging.LOGGER.isTraceEnabled() ) {
JdbcBindingLogging.logBinding( JdbcBindingLogging.logBinding(
name, name,
jdbcType.getDefaultSqlTypeCode(), jdbcType.getDefaultSqlTypeCode(),

View File

@ -43,7 +43,7 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J>, Serializab
public J extract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException { public J extract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
final J value = doExtract( rs, paramIndex, options ); final J value = doExtract( rs, paramIndex, options );
if ( value == null || rs.wasNull() ) { if ( value == null || rs.wasNull() ) {
if ( JdbcExtractingLogging.TRACE_ENABLED ) { if ( JdbcExtractingLogging.LOGGER.isTraceEnabled() ) {
JdbcExtractingLogging.logNullExtracted( JdbcExtractingLogging.logNullExtracted(
paramIndex, paramIndex,
getJdbcType().getDefaultSqlTypeCode() getJdbcType().getDefaultSqlTypeCode()
@ -52,7 +52,7 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J>, Serializab
return null; return null;
} }
else { else {
if ( JdbcExtractingLogging.TRACE_ENABLED ) { if ( JdbcExtractingLogging.LOGGER.isTraceEnabled() ) {
JdbcExtractingLogging.logExtracted( JdbcExtractingLogging.logExtracted(
paramIndex, paramIndex,
getJdbcType().getDefaultSqlTypeCode(), getJdbcType().getDefaultSqlTypeCode(),
@ -79,7 +79,7 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J>, Serializab
public J extract(CallableStatement statement, int paramIndex, WrapperOptions options) throws SQLException { public J extract(CallableStatement statement, int paramIndex, WrapperOptions options) throws SQLException {
final J value = doExtract( statement, paramIndex, options ); final J value = doExtract( statement, paramIndex, options );
if ( value == null || statement.wasNull() ) { if ( value == null || statement.wasNull() ) {
if ( JdbcExtractingLogging.TRACE_ENABLED ) { if ( JdbcExtractingLogging.LOGGER.isTraceEnabled() ) {
JdbcExtractingLogging.LOGGER.tracef( JdbcExtractingLogging.LOGGER.tracef(
"extracted procedure output parameter ([%s] : [%s]) - [null]", "extracted procedure output parameter ([%s] : [%s]) - [null]",
paramIndex, paramIndex,
@ -89,7 +89,7 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J>, Serializab
return null; return null;
} }
else { else {
if ( JdbcExtractingLogging.TRACE_ENABLED ) { if ( JdbcExtractingLogging.LOGGER.isTraceEnabled() ) {
JdbcExtractingLogging.LOGGER.tracef( JdbcExtractingLogging.LOGGER.tracef(
"extracted procedure output parameter ([%s] : [%s]) - [%s]", "extracted procedure output parameter ([%s] : [%s]) - [%s]",
paramIndex, paramIndex,
@ -117,7 +117,7 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J>, Serializab
public J extract(CallableStatement statement, String paramName, WrapperOptions options) throws SQLException { public J extract(CallableStatement statement, String paramName, WrapperOptions options) throws SQLException {
final J value = doExtract( statement, paramName, options ); final J value = doExtract( statement, paramName, options );
if ( value == null || statement.wasNull() ) { if ( value == null || statement.wasNull() ) {
if ( JdbcExtractingLogging.TRACE_ENABLED ) { if ( JdbcExtractingLogging.LOGGER.isTraceEnabled() ) {
JdbcExtractingLogging.LOGGER.tracef( JdbcExtractingLogging.LOGGER.tracef(
"extracted named procedure output parameter ([%s] : [%s]) - [null]", "extracted named procedure output parameter ([%s] : [%s]) - [null]",
paramName, paramName,
@ -127,7 +127,7 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J>, Serializab
return null; return null;
} }
else { else {
if ( JdbcExtractingLogging.TRACE_ENABLED ) { if ( JdbcExtractingLogging.LOGGER.isTraceEnabled() ) {
JdbcExtractingLogging.LOGGER.tracef( JdbcExtractingLogging.LOGGER.tracef(
"extracted named procedure output parameter ([%s] : [%s]) - [%s]", "extracted named procedure output parameter ([%s] : [%s]) - [%s]",
paramName, paramName,

View File

@ -137,7 +137,7 @@ public class UserTypeSqlTypeAdapter<J> implements JdbcType {
} }
private void logExtracted(int paramIndex, J extracted) { private void logExtracted(int paramIndex, J extracted) {
if ( ! JdbcExtractingLogging.TRACE_ENABLED ) { if ( ! JdbcExtractingLogging.LOGGER.isTraceEnabled() ) {
return; return;
} }
@ -150,7 +150,7 @@ public class UserTypeSqlTypeAdapter<J> implements JdbcType {
} }
private void logExtracted(String paramName, J extracted) { private void logExtracted(String paramName, J extracted) {
if ( ! JdbcExtractingLogging.TRACE_ENABLED ) { if ( ! JdbcExtractingLogging.LOGGER.isTraceEnabled() ) {
return; return;
} }
@ -172,7 +172,7 @@ public class UserTypeSqlTypeAdapter<J> implements JdbcType {
@Override @Override
public void bind(PreparedStatement st, J value, int index, WrapperOptions options) throws SQLException { public void bind(PreparedStatement st, J value, int index, WrapperOptions options) throws SQLException {
if ( JdbcBindingLogging.TRACE_ENABLED ) { if ( JdbcBindingLogging.LOGGER.isTraceEnabled() ) {
if ( value == null ) { if ( value == null ) {
JdbcBindingLogging.logNullBinding( index, userType.getSqlType() ); JdbcBindingLogging.logNullBinding( index, userType.getSqlType() );
} }