HHH-17000 Do not keep static references to log levels, additional cases

parent 3f1ab5fab1
commit e6023d260e

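The change applied throughout this commit follows a single pattern: the logging interfaces no longer expose static `boolean *_ENABLED` constants, and call sites ask the logger directly via `isDebugEnabled()` / `isTraceEnabled()`. A static constant is evaluated once, when the interface is initialized, so it freezes whatever level the logging backend happened to have at bootstrap and never reflects later level changes. The sketch below is illustrative only (the `ExampleLogging` / `ExampleCaller` names are invented and are not part of this commit); it contrasts the removed guard style with the one introduced here, using the jboss-logging API that Hibernate uses.

import org.jboss.logging.Logger;

// Hypothetical logging holder, mirroring the interfaces touched by this commit.
interface ExampleLogging {
	Logger LOGGER = Logger.getLogger( "org.hibernate.example" );

	// Removed pattern: evaluated once at class-initialization time, so it goes
	// stale if the log level is changed at runtime.
	boolean DEBUG_ENABLED = LOGGER.isDebugEnabled();
}

class ExampleCaller {
	void doWork() {
		// Before: guarded by the frozen constant.
		if ( ExampleLogging.DEBUG_ENABLED ) {
			ExampleLogging.LOGGER.debugf( "Doing work (%s)", this );
		}

		// After: query the logger at the call site; runtime level changes
		// made through the logging backend are honored.
		if ( ExampleLogging.LOGGER.isDebugEnabled() ) {
			ExampleLogging.LOGGER.debugf( "Doing work (%s)", this );
		}
	}
}

Where the same guard is consulted several times in one method, the commit hoists a single local (for example `final boolean loggerDebugEnabled = LOGGER.isDebugEnabled();`) instead of repeating the call, which keeps the hot path cheap while still sampling the level on every invocation.
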
@@ -21,7 +21,4 @@ import org.jboss.logging.Logger;
 public interface BootLogging {
 	String NAME = SubSystemLogging.BASE + ".boot";
 	Logger BOOT_LOGGER = Logger.getLogger( NAME );
-
-	boolean DEBUG_ENABLED = BOOT_LOGGER.isDebugEnabled();
-	boolean TRACE_ENABLED = BOOT_LOGGER.isTraceEnabled();
 }

@@ -25,9 +25,5 @@ import org.jboss.logging.annotations.ValidIdRange;
 )
 public interface JaxbLogger extends BasicLogger {
	String LOGGER_NAME = BootLogging.NAME + ".jaxb";
-
 	JaxbLogger JAXB_LOGGER = Logger.getMessageLogger( JaxbLogger.class, LOGGER_NAME );
-
-	boolean TRACE_ENABLED = JAXB_LOGGER.isTraceEnabled();
-	boolean DEBUG_ENABLED = JAXB_LOGGER.isDebugEnabled();
 }

@@ -21,7 +21,4 @@ import org.jboss.logging.Logger;
 public interface BootQueryLogging {
 	String NAME = BootLogging.NAME + ".query";
 	Logger BOOT_QUERY_LOGGER = Logger.getLogger( NAME );
-
-	boolean DEBUG_ENABLED = BOOT_QUERY_LOGGER.isDebugEnabled();
-	boolean TRACE_ENABLED = BOOT_QUERY_LOGGER.isTraceEnabled();
 }

@@ -19,9 +19,5 @@ import org.jboss.logging.Logger;
 )
 public interface BytecodeLogging {
 	String LOGGER_NAME = SubSystemLogging.BASE + "bytecode";
-
 	Logger LOGGER = Logger.getLogger( LOGGER_NAME );
-
-	boolean TRACE_ENABLED = LOGGER.isTraceEnabled();
-	boolean DEBUG_ENABLED = LOGGER.isDebugEnabled();
 }

@@ -34,9 +34,6 @@ public interface BytecodeInterceptorLogging extends BasicLogger {
 	Logger LOGGER = Logger.getLogger( LOGGER_NAME );
 	BytecodeInterceptorLogging MESSAGE_LOGGER = Logger.getMessageLogger(BytecodeInterceptorLogging.class, LOGGER_NAME );
 
-	boolean TRACE_ENABLED = LOGGER.isTraceEnabled();
-	boolean DEBUG_ENABLED = LOGGER.isDebugEnabled();
-
 	@LogMessage(level = WARN)
 	@Message(
 			id = 90005901,

@@ -18,7 +18,6 @@ import org.hibernate.cache.spi.QueryResultsRegion;
 import org.hibernate.cache.spi.TimestampsCache;
 import org.hibernate.engine.spi.SharedSessionContractImplementor;
 
-import static org.hibernate.cache.spi.SecondLevelCacheLogger.DEBUG_ENABLED;
 import static org.hibernate.cache.spi.SecondLevelCacheLogger.L2CACHE_LOGGER;
 
 /**
@@ -51,7 +50,7 @@ public class QueryResultsCacheImpl implements QueryResultsCache {
 			final QueryKey key,
 			final List<?> results,
 			final SharedSessionContractImplementor session) throws HibernateException {
-		if ( DEBUG_ENABLED ) {
+		if ( L2CACHE_LOGGER.isDebugEnabled() ) {
 			L2CACHE_LOGGER.debugf( "Caching query results in region: %s; timestamp=%s",
 					cacheRegion.getName(),
 					session.getCacheTransactionSynchronization().getCachingTimestamp() );
@@ -82,26 +81,27 @@ public class QueryResultsCacheImpl implements QueryResultsCache {
 			final QueryKey key,
 			final Set<String> spaces,
 			final SharedSessionContractImplementor session) throws HibernateException {
-		if ( DEBUG_ENABLED ) {
+		final boolean loggerDebugEnabled = L2CACHE_LOGGER.isDebugEnabled();
+		if ( loggerDebugEnabled ) {
 			L2CACHE_LOGGER.debugf( "Checking cached query results in region: %s", cacheRegion.getName() );
 		}
 
 		final CacheItem cacheItem = getCachedData( key, session );
 		if ( cacheItem == null ) {
-			if ( DEBUG_ENABLED ) {
+			if ( loggerDebugEnabled ) {
 				L2CACHE_LOGGER.debug( "Query results were not found in cache" );
 			}
 			return null;
 		}
 
 		if ( !timestampsCache.isUpToDate( spaces, cacheItem.timestamp, session ) ) {
-			if ( DEBUG_ENABLED ) {
+			if ( loggerDebugEnabled ) {
 				L2CACHE_LOGGER.debug( "Cached query results were not up-to-date" );
 			}
 			return null;
 		}
 
-		if ( DEBUG_ENABLED ) {
+		if ( loggerDebugEnabled ) {
 			L2CACHE_LOGGER.debug( "Returning cached query results" );
 		}
 
@@ -113,26 +113,27 @@ public class QueryResultsCacheImpl implements QueryResultsCache {
 			final QueryKey key,
 			final String[] spaces,
 			final SharedSessionContractImplementor session) throws HibernateException {
-		if ( DEBUG_ENABLED ) {
+		final boolean loggerDebugEnabled = L2CACHE_LOGGER.isDebugEnabled();
+		if ( loggerDebugEnabled ) {
 			L2CACHE_LOGGER.debugf( "Checking cached query results in region: %s", cacheRegion.getName() );
 		}
 
 		final CacheItem cacheItem = getCachedData( key, session );
 		if ( cacheItem == null ) {
-			if ( DEBUG_ENABLED ) {
+			if ( loggerDebugEnabled ) {
 				L2CACHE_LOGGER.debug( "Query results were not found in cache" );
 			}
 			return null;
 		}
 
 		if ( !timestampsCache.isUpToDate( spaces, cacheItem.timestamp, session ) ) {
-			if ( DEBUG_ENABLED ) {
+			if ( loggerDebugEnabled ) {
 				L2CACHE_LOGGER.debug( "Cached query results were not up-to-date" );
 			}
 			return null;
 		}
 
-		if ( DEBUG_ENABLED ) {
+		if ( loggerDebugEnabled ) {
 			L2CACHE_LOGGER.debug( "Returning cached query results" );
 		}
 

@@ -33,9 +33,6 @@ public interface SecondLevelCacheLogger extends BasicLogger {
 
 	SecondLevelCacheLogger L2CACHE_LOGGER = Logger.getMessageLogger( SecondLevelCacheLogger.class, LOGGER_NAME );
 
-	boolean DEBUG_ENABLED = L2CACHE_LOGGER.isDebugEnabled();
-	boolean TRACE_ENABLED = L2CACHE_LOGGER.isTraceEnabled();
-
 	int NAMESPACE = 90001000;
 
 	@LogMessage(level = WARN)

@@ -30,9 +30,6 @@ public interface DialectLogging {
 	Logger DIALECT_LOGGER = Logger.getLogger(LOGGER_NAME);
 	DialectLogging DIALECT_MESSAGE_LOGGER = Logger.getMessageLogger(DialectLogging.class, LOGGER_NAME);
 
-	boolean DEBUG_ENABLED = DIALECT_LOGGER.isDebugEnabled();
-	boolean TRACE_ENABLED = DIALECT_LOGGER.isTraceEnabled();
-
 	@LogMessage(level = DEBUG)
 	@Message(value = "Using dialect: %s", id = 35001)
 	void usingDialect(Dialect dialect);

@@ -34,9 +34,6 @@ public interface JdbcLogging extends BasicLogger {
 	Logger JDBC_LOGGER = Logger.getLogger( NAME );
 	JdbcLogging JDBC_MESSAGE_LOGGER = Logger.getMessageLogger( JdbcLogging.class, NAME );
 
-	boolean JDBC_TRACE_ENABLED = JDBC_LOGGER.isTraceEnabled();
-	boolean JDBC_DEBUG_ENABLED = JDBC_LOGGER.isDebugEnabled();
-
 	@LogMessage(level = WARN)
 	@Message(
 			id=100001,

@@ -35,9 +35,6 @@ public interface JdbcBatchLogging extends BasicLogger {
 	Logger BATCH_LOGGER = Logger.getLogger( NAME );
 	JdbcBatchLogging BATCH_MESSAGE_LOGGER = Logger.getMessageLogger( JdbcBatchLogging.class, NAME );
 
-	boolean BATCH_TRACE_ENABLED = BATCH_LOGGER.isTraceEnabled();
-	boolean BATCH_DEBUG_ENABLED = BATCH_LOGGER.isDebugEnabled();
-
 	@LogMessage(level = ERROR)
 	@Message(id = 100501, value = "Exception executing batch [%s], SQL: %s")
 	void unableToExecuteBatch(Exception e, String sql );

@@ -21,7 +21,6 @@ import org.hibernate.sql.model.TableMapping;
 import org.hibernate.sql.model.jdbc.JdbcInsertMutation;
 
 import static org.hibernate.engine.jdbc.batch.JdbcBatchLogging.BATCH_LOGGER;
-import static org.hibernate.engine.jdbc.batch.JdbcBatchLogging.BATCH_TRACE_ENABLED;
 import static org.hibernate.jdbc.Expectations.NONE;
 
 /**
@@ -39,7 +38,7 @@ public class BatchBuilderImpl implements BatchBuilder {
 	 * on {@link #buildBatch}
 	 */
 	public BatchBuilderImpl(int globalBatchSize) {
-		if ( BATCH_TRACE_ENABLED ) {
+		if ( BATCH_LOGGER.isTraceEnabled() ) {
 			BATCH_LOGGER.tracef(
 					"Using standard BatchBuilder (%s)",
 					globalBatchSize

@@ -25,12 +25,9 @@ import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
 import org.hibernate.resource.jdbc.spi.JdbcObserver;
 
 import static org.hibernate.engine.jdbc.JdbcLogging.JDBC_MESSAGE_LOGGER;
-import static org.hibernate.engine.jdbc.batch.JdbcBatchLogging.BATCH_DEBUG_ENABLED;
 import static org.hibernate.engine.jdbc.batch.JdbcBatchLogging.BATCH_LOGGER;
 import static org.hibernate.engine.jdbc.batch.JdbcBatchLogging.BATCH_MESSAGE_LOGGER;
-import static org.hibernate.engine.jdbc.batch.JdbcBatchLogging.BATCH_TRACE_ENABLED;
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_TRACE_ENABLED;
 
 /**
  * Standard implementation of Batch
@@ -73,7 +70,7 @@ public class BatchImpl implements Batch {
 
 		this.batchSizeToUse = batchSizeToUse;
 
-		if ( BATCH_TRACE_ENABLED ) {
+		if ( BATCH_LOGGER.isTraceEnabled() ) {
 			BATCH_LOGGER.tracef(
 					"Created Batch (%s) - `%s`",
 					batchSizeToUse,
@@ -99,7 +96,8 @@ public class BatchImpl implements Batch {
 
 	@Override
 	public void addToBatch(JdbcValueBindings jdbcValueBindings, TableInclusionChecker inclusionChecker) {
-		if ( BATCH_TRACE_ENABLED ) {
+		final boolean loggerTraceEnabled = BATCH_LOGGER.isTraceEnabled();
+		if ( loggerTraceEnabled ) {
 			BATCH_LOGGER.tracef(
 					"Adding to JDBC batch (%s) - `%s`",
 					batchPosition + 1,
@@ -110,7 +108,7 @@ public class BatchImpl implements Batch {
 		try {
 			getStatementGroup().forEachStatement( (tableName, statementDetails) -> {
 				if ( inclusionChecker != null && !inclusionChecker.include( statementDetails.getMutatingTableDetails() ) ) {
-					if ( MODEL_MUTATION_LOGGER_TRACE_ENABLED ) {
+					if ( loggerTraceEnabled ) {
 						MODEL_MUTATION_LOGGER.tracef(
 								"Skipping addBatch for table : %s (batch-position=%s)",
 								statementDetails.getMutatingTableDetails().getTableName(),
@@ -229,7 +227,7 @@ public class BatchImpl implements Batch {
 		try {
 			if ( batchPosition == 0 ) {
 				if( !batchExecuted) {
-					if ( BATCH_DEBUG_ENABLED ) {
+					if ( BATCH_LOGGER.isDebugEnabled() ) {
 						BATCH_LOGGER.debugf(
 								"No batched statements to execute - %s",
 								getKey().toLoggableString()
@@ -247,7 +245,7 @@ public class BatchImpl implements Batch {
 	}
 
 	protected void performExecution() {
-		if ( BATCH_TRACE_ENABLED ) {
+		if ( BATCH_LOGGER.isTraceEnabled() ) {
 			BATCH_LOGGER.tracef(
 					"Executing JDBC batch (%s / %s) - `%s`",
 					batchPosition,

@@ -34,9 +34,6 @@ public interface LobCreationLogging extends BasicLogger {
 	Logger LOB_LOGGER = Logger.getLogger( NAME );
 	LobCreationLogging LOB_MESSAGE_LOGGER = Logger.getMessageLogger( LobCreationLogging.class, NAME );
 
-	boolean LOB_TRACE_ENABLED = LOB_LOGGER.isTraceEnabled();
-	boolean LOB_DEBUG_ENABLED = LOB_LOGGER.isDebugEnabled();
-
 	@LogMessage(level = DEBUG)
 	@Message(value = "Disabling contextual LOB creation as %s is true", id = 10010001)
 	void disablingContextualLOBCreation(String settingName);

@@ -19,7 +19,6 @@ import org.hibernate.sql.model.TableMapping;
 import org.hibernate.sql.model.ValuesAnalysis;
 
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_TRACE_ENABLED;
 
 /**
  * Base support for MutationExecutor implementations
@@ -89,7 +88,7 @@ public abstract class AbstractMutationExecutor implements MutationExecutor {
 
 		final TableMapping tableDetails = statementDetails.getMutatingTableDetails();
 		if ( inclusionChecker != null && !inclusionChecker.include( tableDetails ) ) {
-			if ( MODEL_MUTATION_LOGGER_TRACE_ENABLED ) {
+			if ( MODEL_MUTATION_LOGGER.isTraceEnabled() ) {
 				MODEL_MUTATION_LOGGER.tracef(
 						"Skipping execution of secondary insert : %s",
 						tableDetails.getTableName()

@@ -32,7 +32,6 @@ import org.hibernate.sql.model.ValuesAnalysis;
 import org.hibernate.sql.model.jdbc.JdbcValueDescriptor;
 
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_TRACE_ENABLED;
 
 /**
  * Specialized executor for the case of more than one table operation, with the
@@ -141,7 +140,7 @@ public class MutationExecutorPostInsert implements MutationExecutor, JdbcValueBi
 		final InsertGeneratedIdentifierDelegate identityHandler = mutationTarget.getIdentityInsertDelegate();
 		final Object id = identityHandler.performInsert( identityInsertStatementDetails, valueBindings, modelReference, session );
 
-		if ( MODEL_MUTATION_LOGGER_TRACE_ENABLED ) {
+		if ( MODEL_MUTATION_LOGGER.isTraceEnabled() ) {
 			MODEL_MUTATION_LOGGER.tracef(
 					"Post-insert generated value : `%s` (%s)",
 					id,
@@ -178,7 +177,7 @@ public class MutationExecutorPostInsert implements MutationExecutor, JdbcValueBi
 		assert !tableDetails.isIdentifierTable();
 
 		if ( inclusionChecker != null && !inclusionChecker.include( tableDetails ) ) {
-			if ( MODEL_MUTATION_LOGGER_TRACE_ENABLED ) {
+			if ( MODEL_MUTATION_LOGGER.isTraceEnabled() ) {
 				MODEL_MUTATION_LOGGER.tracef(
 						"Skipping execution of secondary insert : %s",
 						tableDetails.getTableName()

@@ -25,7 +25,6 @@ import org.hibernate.sql.model.jdbc.JdbcValueDescriptor;
 
 import static org.hibernate.engine.jdbc.mutation.internal.ModelMutationHelper.identityPreparation;
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_TRACE_ENABLED;
 
 /**
  * Specialized form of {@link MutationExecutorPostInsert} for cases where there
@@ -100,7 +99,7 @@ public class MutationExecutorPostInsertSingleTable implements MutationExecutor,
 		final InsertGeneratedIdentifierDelegate identityHandler = mutationTarget.getIdentityInsertDelegate();
 		final Object id = identityHandler.performInsert( identityInsertStatementDetails, valueBindings, modelReference, session );
 
-		if ( MODEL_MUTATION_LOGGER_TRACE_ENABLED ) {
+		if ( MODEL_MUTATION_LOGGER.isTraceEnabled() ) {
 			MODEL_MUTATION_LOGGER.tracef(
 					"Post-insert generated value : `%s` (%s)",
 					id,

@@ -21,7 +21,4 @@ public interface GraphParserLogging {
 	String LOGGER_NAME = SubSystemLogging.BASE + ".graph.parsing";
 
 	Logger PARSING_LOGGER = Logger.getLogger( LOGGER_NAME );
-
-	boolean DEBUG_ENABLED = PARSING_LOGGER.isDebugEnabled();
-	boolean TRACE_ENABLED = PARSING_LOGGER.isTraceEnabled();
 }

@@ -20,7 +20,4 @@ import org.jboss.logging.Logger;
 public interface IdGenFactoryLogging {
 	String LOGGER_NAME = SubSystemLogging.BASE + ".idgen.factory";
 	Logger ID_GEN_FAC_LOGGER = Logger.getLogger( LOGGER_NAME );
-
-	boolean IS_TRACE_ENABLE = ID_GEN_FAC_LOGGER.isTraceEnabled();
-	boolean IS_DEBUG_ENABLE = ID_GEN_FAC_LOGGER.isDebugEnabled();
 }

@@ -37,9 +37,6 @@ public interface UrlMessageBundle {
 	Logger URL_LOGGER = Logger.getLogger( LOGGER_NAME );
 	UrlMessageBundle URL_MESSAGE_LOGGER = Logger.getMessageLogger( UrlMessageBundle.class, LOGGER_NAME );
 
-	boolean DEBUG_ENABLED = URL_LOGGER.isDebugEnabled();
-	boolean TRACE_ENABLED = URL_LOGGER.isTraceEnabled();
-
 	/**
 	 * Logs a warning about a malformed URL, caused by a {@link URISyntaxException}
 	 *

@@ -26,7 +26,4 @@ public interface LoaderLogging {
 	String LOGGER_NAME = SubSystemLogging.BASE + ".loader";
 
 	Logger LOADER_LOGGER = Logger.getLogger( LOGGER_NAME );
-
-	boolean DEBUG_ENABLED = LOADER_LOGGER.isDebugEnabled();
-	boolean TRACE_ENABLED = LOADER_LOGGER.isTraceEnabled();
 }

@@ -21,7 +21,6 @@ import java.lang.reflect.Array;
 
 import static org.hibernate.loader.ast.internal.MultiKeyLoadHelper.hasSingleId;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadHelper.trimIdBatch;
-import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_DEBUG_ENABLED;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER;
 
 /**
@@ -78,7 +77,7 @@ public abstract class AbstractCollectionBatchLoader implements CollectionBatchLo
 
 	@Override
 	public PersistentCollection<?> load(Object key, SharedSessionContractImplementor session) {
-		if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+		if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
 			MULTI_KEY_LOAD_LOGGER.debugf( "Batch fetching collection: %s.%s",
 					getLoadable().getNavigableRole().getFullPath(), key );
 		}
@@ -104,7 +103,7 @@ public abstract class AbstractCollectionBatchLoader implements CollectionBatchLo
 			return;
 		}
 
-		if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+		if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
 			MULTI_KEY_LOAD_LOGGER.debugf( "Finishing initializing batch-fetched collection: %s.%s",
 					attributeMapping.getNavigableRole().getFullPath(), key );
 		}

@@ -16,7 +16,6 @@ import org.hibernate.loader.ast.spi.EntityBatchLoader;
 import org.hibernate.metamodel.mapping.EntityMappingType;
 
 import static org.hibernate.loader.ast.internal.MultiKeyLoadHelper.hasSingleId;
-import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_DEBUG_ENABLED;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER;
 
 public abstract class AbstractEntityBatchLoader<T>
@@ -47,7 +46,7 @@ public abstract class AbstractEntityBatchLoader<T>
 			LockOptions lockOptions,
 			Boolean readOnly,
 			SharedSessionContractImplementor session) {
-		if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+		if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
 			MULTI_KEY_LOAD_LOGGER.debugf( "Batch fetching entity `%s#%s`", getLoadable().getEntityName(), id );
 		}
 
@@ -62,14 +61,14 @@ public abstract class AbstractEntityBatchLoader<T>
 			Object entityInstance,
 			LockOptions lockOptions,
 			SharedSessionContractImplementor session) {
-		if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+		if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
 			MULTI_KEY_LOAD_LOGGER.debugf( "Batch fetching entity `%s#%s`", getLoadable().getEntityName(), id );
 		}
 
 		final Object[] ids = resolveIdsToInitialize( id, session );
 		final boolean hasSingleId = hasSingleId( ids );
 
-		final T entity = load( id, ids, hasSingleId, entityInstance, lockOptions, null, session );;
+		final T entity = load( id, ids, hasSingleId, entityInstance, lockOptions, null, session );
 
 		if ( hasSingleId ) {
 			return entity;

@@ -35,7 +35,6 @@ import org.hibernate.type.BasicType;
 
 import static org.hibernate.loader.ast.internal.MultiKeyLoadHelper.hasSingleId;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadHelper.trimIdBatch;
-import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_DEBUG_ENABLED;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER;
 
 /**
@@ -58,7 +57,7 @@ public class CollectionBatchLoaderArrayParam
 			SessionFactoryImplementor sessionFactory) {
 		super( domainBatchSize, loadQueryInfluencers, attributeMapping, sessionFactory );
 
-		if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+		if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
 			MULTI_KEY_LOAD_LOGGER.debugf(
 					"Using ARRAY batch fetching strategy for collection `%s` : %s",
 					attributeMapping.getNavigableRole().getFullPath(),
@@ -114,7 +113,7 @@ public class CollectionBatchLoaderArrayParam
 			Object keyBeingLoaded,
 			SharedSessionContractImplementor session,
 			ForeignKeyDescriptor keyDescriptor) {
-		if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+		if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
 			MULTI_KEY_LOAD_LOGGER.debugf(
 					"Batch fetching collection: %s.%s",
 					getLoadable().getNavigableRole().getFullPath(), keyBeingLoaded
@@ -165,7 +164,7 @@ public class CollectionBatchLoaderArrayParam
 
 	@Override
 	void initializeKeys(Object key, Object[] keysToInitialize, SharedSessionContractImplementor session) {
-		if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+		if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
 			MULTI_KEY_LOAD_LOGGER.debugf(
 					"Collection keys to batch-fetch initialize (`%s#%s`) %s",
 					getLoadable().getNavigableRole().getFullPath(),

@@ -22,7 +22,6 @@ import org.hibernate.sql.exec.spi.JdbcParameterBindings;
 import org.hibernate.sql.exec.spi.JdbcParametersList;
 
 import static org.hibernate.loader.ast.internal.MultiKeyLoadHelper.countIds;
-import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_DEBUG_ENABLED;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER;
 
 /**
@@ -51,7 +50,7 @@ public class CollectionBatchLoaderInPredicate
 				.getDialect()
 				.getBatchLoadSizingStrategy()
 				.determineOptimalBatchLoadSize( keyColumnCount, domainBatchSize, false );
-		if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+		if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
 			MULTI_KEY_LOAD_LOGGER.debugf(
 					"Using IN-predicate batch fetching strategy for collection `%s` : %s (%s)",
 					attributeMapping.getNavigableRole().getFullPath(),
@@ -84,7 +83,8 @@ public class CollectionBatchLoaderInPredicate
 
 	@Override
 	void initializeKeys(Object key, Object[] keysToInitialize, SharedSessionContractImplementor session) {
-		if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+		final boolean loggerDebugEnabled = MULTI_KEY_LOAD_LOGGER.isDebugEnabled();
+		if ( loggerDebugEnabled ) {
 			MULTI_KEY_LOAD_LOGGER.debugf(
 					"Collection keys to batch-fetch initialize (`%s#%s`) %s",
 					getLoadable().getNavigableRole().getFullPath(),
@@ -120,7 +120,7 @@ public class CollectionBatchLoaderInPredicate
 				(key1, relativePosition, absolutePosition) -> {
 				},
 				(startIndex) -> {
-					if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+					if ( loggerDebugEnabled ) {
 						MULTI_KEY_LOAD_LOGGER.debugf(
 								"Processing collection batch-fetch chunk (`%s#%s`) %s - %s",
 								getLoadable().getNavigableRole().getFullPath(),
@@ -131,7 +131,7 @@ public class CollectionBatchLoaderInPredicate
 					}
 				},
 				(startIndex, nonNullElementCount) -> {
-					if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+					if ( loggerDebugEnabled ) {
 						MULTI_KEY_LOAD_LOGGER.debugf(
 								"Finishing collection batch-fetch chunk (`%s#%s`) %s - %s (%s)",
 								getLoadable().getNavigableRole().getFullPath(),

@@ -28,7 +28,6 @@ import org.hibernate.sql.exec.spi.JdbcParameterBindings;
 
 import static org.hibernate.engine.internal.BatchFetchQueueHelper.removeBatchLoadableEntityKey;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadHelper.trimIdBatch;
-import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_DEBUG_ENABLED;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER;
 
 /**
@@ -68,7 +67,7 @@ public class EntityBatchLoaderArrayParam<T>
 		super( entityDescriptor, sessionFactory );
 		this.domainBatchSize = domainBatchSize;
 
-		if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+		if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
 			MULTI_KEY_LOAD_LOGGER.debugf(
 					"Batch fetching enabled for `%s` (entity) using ARRAY strategy : %s",
 					entityDescriptor.getEntityName(),
@@ -130,7 +129,7 @@ public class EntityBatchLoaderArrayParam<T>
 			LockOptions lockOptions,
 			Boolean readOnly,
 			SharedSessionContractImplementor session) {
-		if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+		if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
 			MULTI_KEY_LOAD_LOGGER.debugf( "Ids to batch-fetch initialize (`%s#%s`) %s",
 					getLoadable().getEntityName(), id, Arrays.toString(idsToInitialize) );
 		}

@@ -28,7 +28,6 @@ import org.hibernate.sql.exec.spi.JdbcParameterBindings;
 import org.hibernate.sql.exec.spi.JdbcParametersList;
 
 import static org.hibernate.internal.util.collections.CollectionHelper.arrayList;
-import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_DEBUG_ENABLED;
 import static org.hibernate.loader.ast.internal.MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER;
 
 /**
@@ -52,7 +51,7 @@ public class EntityBatchLoaderInPredicate<T>
 	private final JdbcOperationQuerySelect jdbcSelectOperation;
 
 	/**
-	 * @param domainBatchSize The maximum number of entities we will initialize for each {@link #load load}
+	 * @param domainBatchSize The maximum number of entities we will initialize for each load
 	 */
 	public EntityBatchLoaderInPredicate(
 			int domainBatchSize,
@@ -66,7 +65,7 @@ public class EntityBatchLoaderInPredicate<T>
 				.getBatchLoadSizingStrategy()
 				.determineOptimalBatchLoadSize( idColumnCount, domainBatchSize, false );
 
-		if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+		if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
 			MULTI_KEY_LOAD_LOGGER.debugf(
 					"Batch fetching `%s` entity using padded IN-list : %s (%s)",
 					entityDescriptor.getEntityName(),
@@ -129,7 +128,7 @@ public class EntityBatchLoaderInPredicate<T>
 			LockOptions lockOptions,
 			Boolean readOnly,
 			SharedSessionContractImplementor session) {
-		if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+		if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
 			MULTI_KEY_LOAD_LOGGER.debugf( "Ids to batch-fetch initialize (`%s#%s`) %s",
 					getLoadable().getEntityName(), pkValue, Arrays.toString(idsToInitialize) );
 		}
@@ -172,7 +171,7 @@ public class EntityBatchLoaderInPredicate<T>
 					}
 				},
 				(startIndex) -> {
-					if ( MULTI_KEY_LOAD_DEBUG_ENABLED ) {
+					if ( MULTI_KEY_LOAD_LOGGER.isDebugEnabled() ) {
 						MULTI_KEY_LOAD_LOGGER.debugf(
 								"Processing entity batch-fetch chunk (`%s#%s`) %s - %s",
 								getLoadable().getEntityName(),

@@ -7,7 +7,6 @@
 package org.hibernate.loader.ast.internal;
 
 import java.lang.reflect.Array;
 import java.util.Collections;
 import java.util.List;
 
 import org.hibernate.LockMode;
@@ -64,7 +63,7 @@ public class LoaderHelper {
 
 		final EntityPersister persister = entry.getPersister();
 
-		if ( LoaderLogging.TRACE_ENABLED ) {
+		if ( LoaderLogging.LOADER_LOGGER.isTraceEnabled() ) {
 			LoaderLogging.LOADER_LOGGER.tracef(
 					"Locking `%s( %s )` in `%s` lock-mode",
 					persister.getEntityName(),

@@ -71,7 +71,7 @@ public class MultiIdEntityLoaderArrayParam<E> extends AbstractMultiIdEntityLoade
 
 	@Override
 	protected <K> List<E> performOrderedMultiLoad(K[] ids, MultiIdLoadOptions loadOptions, EventSource session) {
-		if ( MultiKeyLoadLogging.MULTI_KEY_LOAD_TRACE_ENABLED ) {
+		if ( MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER.isTraceEnabled() ) {
 			MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER.tracef(
 					"MultiIdEntityLoaderArrayParam#performOrderedMultiLoad - %s",
 					getLoadable().getEntityName()
@@ -227,7 +227,7 @@ public class MultiIdEntityLoaderArrayParam<E> extends AbstractMultiIdEntityLoade
 			K[] ids,
 			MultiIdLoadOptions loadOptions,
 			EventSource session) {
-		if ( MultiKeyLoadLogging.MULTI_KEY_LOAD_TRACE_ENABLED ) {
+		if ( MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER.isTraceEnabled() ) {
 			MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER.tracef(
 					"MultiIdEntityLoaderArrayParam#performUnorderedMultiLoad - %s",
 					getLoadable().getEntityName()

@@ -28,7 +28,4 @@ public interface MultiKeyLoadLogging {
 	String LOGGER_NAME = SubSystemLogging.BASE + ".loader.multi";
 
 	Logger MULTI_KEY_LOAD_LOGGER = Logger.getLogger( LOGGER_NAME );
-
-	boolean MULTI_KEY_LOAD_TRACE_ENABLED = MULTI_KEY_LOAD_LOGGER.isTraceEnabled();
-	boolean MULTI_KEY_LOAD_DEBUG_ENABLED = MULTI_KEY_LOAD_LOGGER.isDebugEnabled();
 }

@@ -68,7 +68,7 @@ public class MultiNaturalIdLoaderArrayParam<E> implements MultiNaturalIdLoader<E
 			return Collections.emptyList();
 		}
 
-		if ( MultiKeyLoadLogging.MULTI_KEY_LOAD_TRACE_ENABLED ) {
+		if ( MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER.isTraceEnabled() ) {
 			MultiKeyLoadLogging.MULTI_KEY_LOAD_LOGGER.tracef( "MultiNaturalIdLoaderArrayParam#multiLoadStarting - `%s`", entityDescriptor.getEntityName() );
 		}
 

@@ -14,7 +14,13 @@ import org.hibernate.HibernateException;
 import org.hibernate.IdentifierLoadAccess;
 import org.hibernate.LockOptions;
 import org.hibernate.UnknownProfileException;
-import org.hibernate.engine.spi.*;
+import org.hibernate.engine.spi.EffectiveEntityGraph;
+import org.hibernate.engine.spi.EntityEntry;
+import org.hibernate.engine.spi.EntityKey;
+import org.hibernate.engine.spi.LoadQueryInfluencers;
+import org.hibernate.engine.spi.PersistenceContext;
+import org.hibernate.engine.spi.SessionImplementor;
+import org.hibernate.engine.spi.Status;
 import org.hibernate.graph.GraphSemantic;
 import org.hibernate.graph.RootGraph;
 import org.hibernate.graph.spi.RootGraphImplementor;
@@ -141,13 +147,14 @@ public abstract class BaseNaturalIdLoadAccessImpl<T> implements NaturalIdLoadOpt
 		final Collection<?> cachedPkResolutions =
 				persistenceContext.getNaturalIdResolutions()
 						.getCachedPkResolutions( entityPersister() );
+		final boolean loggerDebugEnabled = LoaderLogging.LOADER_LOGGER.isDebugEnabled();
 		for ( Object pk : cachedPkResolutions ) {
 			final EntityKey entityKey = context.getSession().generateEntityKey( pk, entityPersister() );
 			final Object entity = persistenceContext.getEntity( entityKey );
 			final EntityEntry entry = persistenceContext.getEntry( entity );
 
 			if ( entry == null ) {
-				if ( LoaderLogging.DEBUG_ENABLED ) {
+				if ( loggerDebugEnabled ) {
 					LoaderLogging.LOADER_LOGGER.debugf(
 							"Cached natural-id/pk resolution linked to null EntityEntry in persistence context : %s#%s",
 							entityDescriptor.getEntityName(),

@@ -29,7 +29,4 @@ public interface MappingModelCreationLogging extends BasicLogger {
 
 	Logger MAPPING_MODEL_CREATION_LOGGER = Logger.getLogger( LOGGER_NAME );
 	MappingModelCreationLogging MAPPING_MODEL_CREATION_MESSAGE_LOGGER = Logger.getMessageLogger( MappingModelCreationLogging.class, LOGGER_NAME );
-
-	boolean MAPPING_MODEL_CREATION_TRACE_ENABLED = MAPPING_MODEL_CREATION_LOGGER.isTraceEnabled();
-	boolean MAPPING_MODEL_CREATION_DEBUG_ENABLED = MAPPING_MODEL_CREATION_LOGGER.isDebugEnabled();
 }

@@ -22,7 +22,4 @@ import org.jboss.logging.Logger;
 public interface NaturalIdLogging {
 	String LOGGER_NAME = SubSystemLogging.BASE + ".mapping.natural_id";
 	Logger NATURAL_ID_LOGGER = Logger.getLogger( LOGGER_NAME );
-
-	boolean DEBUG_ENABLED = NATURAL_ID_LOGGER.isDebugEnabled();
-	boolean TRACE_ENABLED = NATURAL_ID_LOGGER.isTraceEnabled();
 }

@@ -251,7 +251,7 @@ public class BasicValuedCollectionPart
 			boolean selected,
 			String resultVariable,
 			DomainResultCreationState creationState) {
-		if ( ResultsLogger.DEBUG_ENABLED ) {
+		if ( ResultsLogger.RESULTS_LOGGER.isDebugEnabled() ) {
 			ResultsLogger.RESULTS_LOGGER.debugf(
 					"Generating Fetch for collection-part : `%s` -> `%s`",
 					collectionDescriptor.getRole(),

@@ -101,7 +101,6 @@ import org.hibernate.type.descriptor.java.MutabilityPlan;
 import org.hibernate.type.descriptor.java.spi.JavaTypeRegistry;
 import org.hibernate.type.spi.TypeConfiguration;
 
-import static org.hibernate.metamodel.mapping.MappingModelCreationLogging.MAPPING_MODEL_CREATION_DEBUG_ENABLED;
 import static org.hibernate.metamodel.mapping.MappingModelCreationLogging.MAPPING_MODEL_CREATION_MESSAGE_LOGGER;
 import static org.hibernate.sql.ast.spi.SqlExpressionResolver.createColumnReferenceKey;
 
@@ -1710,7 +1709,7 @@ public class MappingModelCreationHelper {
 				|| value instanceof ManyToOne && value.isNullable() && ( (ManyToOne) value ).isIgnoreNotFound() ) {
 			fetchTiming = FetchTiming.IMMEDIATE;
 			if ( lazy ) {
-				if ( MAPPING_MODEL_CREATION_DEBUG_ENABLED ) {
+				if ( MAPPING_MODEL_CREATION_MESSAGE_LOGGER.isDebugEnabled() ) {
 					MAPPING_MODEL_CREATION_MESSAGE_LOGGER.debugf(
 							"Forcing FetchTiming.IMMEDIATE for to-one association : %s.%s",
 							declaringType.getNavigableRole(),

@@ -22,7 +22,6 @@ import org.hibernate.persister.entity.EntityPersister;
 import org.hibernate.query.sqm.function.SqmFunctionRegistry;
 
 import static org.hibernate.metamodel.mapping.MappingModelCreationLogging.MAPPING_MODEL_CREATION_MESSAGE_LOGGER;
-import static org.hibernate.metamodel.mapping.MappingModelCreationLogging.MAPPING_MODEL_CREATION_TRACE_ENABLED;
 
 /**
  * @author Steve Ebersole
@@ -122,7 +121,7 @@ public class MappingModelCreationProcess {
 				exceptions.put( callbackEntry, e );
 
 				final String format = "Mapping-model creation encountered (possibly) transient error : %s";
-				if ( MAPPING_MODEL_CREATION_TRACE_ENABLED ) {
+				if ( MAPPING_MODEL_CREATION_MESSAGE_LOGGER.isTraceEnabled() ) {
 					MAPPING_MODEL_CREATION_MESSAGE_LOGGER.tracef( e, format, e );
 				}
 				else {

@@ -642,7 +642,7 @@ public abstract class AbstractCollectionPersister
 	}
 
 	protected void logStaticSQL() {
-		if ( !ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+		if ( !ModelMutationLogging.MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
 			return;
 		}
 

@@ -57,7 +57,6 @@ import org.hibernate.sql.model.jdbc.JdbcMutationOperation;
 import org.hibernate.sql.model.jdbc.JdbcUpdateMutation;
 
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
 
 /**
  * A {@link CollectionPersister} for {@linkplain jakarta.persistence.ElementCollection
@@ -155,7 +154,7 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
 				&& !isInverse();
 
 		if ( !performUpdates ) {
-			if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+			if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
 				MODEL_MUTATION_LOGGER.debugf(
 						"Skipping collection row updates - %s",
 						getRolePath()
@@ -169,7 +168,7 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
 
 	private InsertRowsCoordinator buildInsertRowCoordinator() {
 		if ( isInverse() || !isRowInsertEnabled() ) {
-			if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+			if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
 				MODEL_MUTATION_LOGGER.debugf(
 						"Skipping collection inserts - %s",
 						getRolePath()
@@ -183,7 +182,7 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
 
 	private DeleteRowsCoordinator buildDeleteRowCoordinator() {
 		if ( ! needsRemove() ) {
-			if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+			if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
 				MODEL_MUTATION_LOGGER.debugf(
 						"Skipping collection row deletions - %s",
 						getRolePath()
@@ -197,7 +196,7 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
 
 	private RemoveCoordinator buildDeleteAllCoordinator() {
 		if ( ! needsRemove() ) {
-			if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+			if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
 				MODEL_MUTATION_LOGGER.debugf(
 						"Skipping collection removals - %s",
 						getRolePath()

@@ -82,7 +82,6 @@ import org.hibernate.sql.model.jdbc.JdbcUpdateMutation;
 
 import static org.hibernate.internal.util.collections.CollectionHelper.arrayList;
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
 import static org.hibernate.sql.model.ast.builder.TableUpdateBuilder.NULL;
 
 /**
@@ -441,7 +440,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
 
 	private InsertRowsCoordinator buildInsertCoordinator() {
 		if ( isInverse() || !isRowInsertEnabled() ) {
-			if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+			if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
 				MODEL_MUTATION_LOGGER.debugf(
 						"Skipping collection (re)creation - %s",
 						getRolePath()
@@ -459,7 +458,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
 
 	private UpdateRowsCoordinator buildUpdateCoordinator() {
 		if ( !isRowDeleteEnabled() && !isRowInsertEnabled() ) {
-			if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+			if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
 				MODEL_MUTATION_LOGGER.debugf(
 						"Skipping collection row updates - %s",
 						getRolePath()
@@ -477,7 +476,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
 
 	private DeleteRowsCoordinator buildDeleteCoordinator() {
 		if ( !needsRemove() ) {
-			if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+			if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
 				MODEL_MUTATION_LOGGER.debugf(
 						"Skipping collection row deletions - %s",
 						getRolePath()
@@ -502,7 +501,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
 
 	private RemoveCoordinator buildDeleteAllCoordinator() {
 		if ( ! needsRemove() ) {
-			if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+			if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
 				MODEL_MUTATION_LOGGER.debugf(
 						"Skipping collection removals - %s",
 						getRolePath()

@@ -23,7 +23,6 @@ import org.hibernate.sql.model.MutationType;
 import org.hibernate.sql.model.jdbc.JdbcMutationOperation;
 
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
 
 /**
  * @author Steve Ebersole
@@ -62,7 +61,7 @@ public class DeleteRowsCoordinatorStandard implements DeleteRowsCoordinator {
 			operationGroup = createOperationGroup();
 		}
 
-		if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+		if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
 			MODEL_MUTATION_LOGGER.debugf(
 					"Deleting removed collection rows - %s : %s",
 					mutationTarget.getRolePath(),

@@ -26,7 +26,6 @@ import org.hibernate.sql.model.MutationType;
 import org.hibernate.sql.model.jdbc.JdbcMutationOperation;
 
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
 
 /**
  * OneToMany delete coordinator if the element is a {@link org.hibernate.persister.entity.UnionSubclassEntityPersister}.
@@ -58,7 +57,7 @@ public class DeleteRowsCoordinatorTablePerSubclass implements DeleteRowsCoordina
 
 	@Override
 	public void deleteRows(PersistentCollection<?> collection, Object key, SharedSessionContractImplementor session) {
-		if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+		if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
 			MODEL_MUTATION_LOGGER.debugf(
 					"Deleting removed collection rows - %s : %s",
 					mutationTarget.getRolePath(),

@@ -23,7 +23,6 @@ import org.hibernate.sql.model.MutationType;
 import org.hibernate.sql.model.jdbc.JdbcMutationOperation;
 
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
 
 /**
  * @author Steve Ebersole
@@ -68,7 +67,7 @@ public class InsertRowsCoordinatorStandard implements InsertRowsCoordinator {
 			operationGroup = createOperationGroup();
 		}
 
-		if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+		if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
 			MODEL_MUTATION_LOGGER.debugf(
 					"Inserting collection rows - %s : %s",
 					mutationTarget.getRolePath(),

@@ -26,7 +26,6 @@ import org.hibernate.sql.model.MutationType;
 import org.hibernate.sql.model.jdbc.JdbcMutationOperation;
 
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
 
 /**
  * OneToMany insert coordinator if the element is a {@link org.hibernate.persister.entity.UnionSubclassEntityPersister}.
@@ -67,7 +66,8 @@ public class InsertRowsCoordinatorTablePerSubclass implements InsertRowsCoordina
 			Object id,
 			EntryFilter entryChecker,
 			SharedSessionContractImplementor session) {
-		if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+		final boolean loggerDebugEnabled = MODEL_MUTATION_LOGGER.isDebugEnabled();
+		if ( loggerDebugEnabled ) {
 			MODEL_MUTATION_LOGGER.debugf(
 					"Inserting collection rows - %s : %s",
 					mutationTarget.getRolePath(),
@@ -81,11 +81,13 @@ public class InsertRowsCoordinatorTablePerSubclass implements InsertRowsCoordina
 		final Iterator<?> entries = collection.entries( collectionDescriptor );
 		collection.preInsert( collectionDescriptor );
 		if ( !entries.hasNext() ) {
-			MODEL_MUTATION_LOGGER.debugf(
-					"No collection rows to insert - %s : %s",
-					mutationTarget.getRolePath(),
-					id
-			);
+			if ( loggerDebugEnabled ) {
+				MODEL_MUTATION_LOGGER.debugf(
+						"No collection rows to insert - %s : %s",
+						mutationTarget.getRolePath(),
+						id
+				);
+			}
 			return;
 		}
 		final MutationExecutor[] executors = new MutationExecutor[subclassEntries.length];
@@ -124,11 +126,13 @@ public class InsertRowsCoordinatorTablePerSubclass implements InsertRowsCoordina
 				entryCount++;
 			}
 
-			MODEL_MUTATION_LOGGER.debugf(
-					"Done inserting `%s` collection rows : %s",
-					entryCount,
-					mutationTarget.getRolePath()
-			);
+			if ( loggerDebugEnabled ) {
+				MODEL_MUTATION_LOGGER.debugf(
+						"Done inserting `%s` collection rows : %s",
+						entryCount,
+						mutationTarget.getRolePath()
+				);
+			}
 
 		}
 		finally {

@@ -20,8 +20,6 @@ import org.hibernate.sql.model.ast.MutatingTableReference;
 import org.hibernate.sql.model.jdbc.JdbcMutationOperation;
 
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_TRACE_ENABLED;
 
 /**
  * Handles complete removal of a collection by its key
@@ -76,7 +74,7 @@ public class RemoveCoordinatorStandard implements RemoveCoordinator {
 
 	@Override
 	public void deleteAllRows(Object key, SharedSessionContractImplementor session) {
-		if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+		if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
 			MODEL_MUTATION_LOGGER.debugf(
 					"Deleting collection - %s : %s",
 					mutationTarget.getRolePath(),
@@ -124,7 +122,7 @@ public class RemoveCoordinatorStandard implements RemoveCoordinator {
 		assert mutationTarget.getTargetPart() != null;
 		assert mutationTarget.getTargetPart().getKeyDescriptor() != null;
 
-		if ( MODEL_MUTATION_LOGGER_TRACE_ENABLED ) {
+		if ( MODEL_MUTATION_LOGGER.isTraceEnabled() ) {
 			MODEL_MUTATION_LOGGER.tracef( "Starting RemoveCoordinator#buildOperationGroup - %s", mutationTarget.getRolePath() );
 		}
 

@@ -22,8 +22,6 @@ import org.hibernate.sql.model.MutationType;
 import org.hibernate.sql.model.ast.MutatingTableReference;
 
 import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
-import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_TRACE_ENABLED;
 
 /**
  * OneToMany remove coordinator if the element is a {@link org.hibernate.persister.entity.UnionSubclassEntityPersister}.
@@ -67,7 +65,7 @@ public class RemoveCoordinatorTablePerSubclass implements RemoveCoordinator {
 
 	@Override
 	public void deleteAllRows(Object key, SharedSessionContractImplementor session) {
-		if ( MODEL_MUTATION_LOGGER_DEBUG_ENABLED ) {
+		if ( MODEL_MUTATION_LOGGER.isDebugEnabled() ) {
 			MODEL_MUTATION_LOGGER.debugf(
 					"Deleting collection - %s : %s",
 					mutationTarget.getRolePath(),
@@ -130,7 +128,7 @@ public class RemoveCoordinatorTablePerSubclass implements RemoveCoordinator {
 		assert mutationTarget.getTargetPart() != null;
 		assert mutationTarget.getTargetPart().getKeyDescriptor() != null;
 
-		if ( MODEL_MUTATION_LOGGER_TRACE_ENABLED ) {
+		if ( MODEL_MUTATION_LOGGER.isTraceEnabled() ) {
 			MODEL_MUTATION_LOGGER.tracef( "Starting RemoveCoordinator#buildOperationGroup - %s", mutationTarget.getRolePath() );
 		}
 

@@ -36,9 +36,6 @@ public interface QueryLogging extends BasicLogger {
 	Logger QUERY_LOGGER = Logger.getLogger( LOGGER_NAME );
 	QueryLogging QUERY_MESSAGE_LOGGER = Logger.getMessageLogger( QueryLogging.class, LOGGER_NAME );
 
-	boolean TRACE_ENABLED = QUERY_LOGGER.isTraceEnabled();
-	boolean DEBUG_ENABLED = QUERY_LOGGER.isDebugEnabled();
-
 	static String subLoggerName(String subName) {
 		return LOGGER_NAME + '.' + subName;
 	}

@@ -34,9 +34,6 @@ public interface HqlLogging extends BasicLogger {
 
 	HqlLogging QUERY_LOGGER = Logger.getMessageLogger( HqlLogging.class, LOGGER_NAME );
 
-	boolean TRACE_ENABLED = QUERY_LOGGER.isTraceEnabled();
-	boolean DEBUG_ENABLED = QUERY_LOGGER.isDebugEnabled();
-
 	static String subLoggerName(String subName) {
 		return LOGGER_NAME + '.' + subName;
 	}

@@ -124,9 +124,7 @@ public class DomainResultCreationStateImpl
 	}
 
 	public void disallowPositionalSelections() {
-		if ( ResultsLogger.DEBUG_ENABLED ) {
-			ResultsLogger.RESULTS_LOGGER.debugf( "Disallowing positional selections : %s", stateIdentifier );
-		}
+		ResultsLogger.RESULTS_LOGGER.debugf( "Disallowing positional selections : %s", stateIdentifier );
 		this.allowPositionalSelections = false;
 	}
 

@@ -21,7 +21,4 @@ public interface SqlTreeCreationLogger {
 	String LOGGER_NAME = SubSystemLogging.BASE + ".sql.ast.create";
 
 	Logger LOGGER = Logger.getLogger( LOGGER_NAME );
-
-	boolean DEBUG_ENABLED = LOGGER.isDebugEnabled();
-	boolean TRACE_ENABLED = LOGGER.isTraceEnabled();
 }

@@ -36,7 +36,7 @@ import org.hibernate.sql.ast.tree.update.UpdateStatement;
  */
 public class SqlTreePrinter {
 	public static void logSqlAst(Statement sqlAstStatement) {
-		if ( ! SqlAstTreeLogger.DEBUG_ENABLED ) {
+		if ( ! SqlAstTreeLogger.INSTANCE.isDebugEnabled() ) {
 			return;
 		}
 

@@ -30,9 +30,7 @@ public class SqlAliasBaseImpl implements SqlAliasBase {
 	public String generateNewAlias() {
 		synchronized (this) {
 			final String alias = stem + "_" + ( aliasCount++ );
-			if ( SqlTreeCreationLogger.DEBUG_ENABLED ) {
-				SqlTreeCreationLogger.LOGGER.debugf( "Created new SQL alias : %s", alias );
-			}
+			SqlTreeCreationLogger.LOGGER.debugf( "Created new SQL alias : %s", alias );
 			return alias;
 		}
 	}

@@ -33,6 +33,4 @@ public interface SqlAstTreeLogger extends BasicLogger {
 	 */
 	SqlAstTreeLogger INSTANCE = Logger.getMessageLogger( SqlAstTreeLogger.class, LOGGER_NAME );
 
-	boolean DEBUG_ENABLED = INSTANCE.isDebugEnabled();
-	boolean TRACE_ENABLED = INSTANCE.isTraceEnabled();
 }

@@ -24,7 +24,4 @@ public final class ModelMutationLogging {
 	public static final String NAME = SubSystemLogging.BASE + ".jdbc.mutation";
 
 	public static final Logger MODEL_MUTATION_LOGGER = Logger.getLogger( NAME );
-
-	public static final boolean MODEL_MUTATION_LOGGER_TRACE_ENABLED = MODEL_MUTATION_LOGGER.isTraceEnabled();
-	public static final boolean MODEL_MUTATION_LOGGER_DEBUG_ENABLED = MODEL_MUTATION_LOGGER.isDebugEnabled();
 }

@@ -38,9 +38,6 @@ public interface ResultsLogger extends BasicLogger {
 
 	// todo (6.0) : make sure sql result processing classes use this logger
 
-	boolean TRACE_ENABLED = RESULTS_LOGGER.isTraceEnabled();
-	boolean DEBUG_ENABLED = RESULTS_LOGGER.isDebugEnabled();
-
 	static String subLoggerName(String subName) {
 		return LOGGER_NAME + "." + subName;
 	}

@@ -17,7 +17,6 @@ import org.hibernate.sql.results.ResultsLogger;
 import org.jboss.logging.Logger;
 
 import static org.hibernate.sql.results.graph.DomainResultGraphPrinter.Logging.AST_LOGGER;
-import static org.hibernate.sql.results.graph.DomainResultGraphPrinter.Logging.TRACE_ENABLED;
 
 /**
  * Printer for DomainResult graphs
@@ -32,8 +31,6 @@ public class DomainResultGraphPrinter {
 	interface Logging {
 		String LOGGER_NAME = ResultsLogger.LOGGER_NAME + ".graph.AST";
 		Logger AST_LOGGER = Logger.getLogger( LOGGER_NAME );
-		boolean DEBUG_ENABLED = AST_LOGGER.isDebugEnabled();
-		boolean TRACE_ENABLED = AST_LOGGER.isTraceEnabled();
 	}
 
 	public static void logDomainResultGraph(List<DomainResult<?>> domainResults) {
@@ -41,7 +38,7 @@ public class DomainResultGraphPrinter {
 	}
 
 	public static void logDomainResultGraph(String header, List<DomainResult<?>> domainResults) {
-		if ( ! Logging.DEBUG_ENABLED ) {
+		if ( !AST_LOGGER.isDebugEnabled() ) {
 			return;
 		}
 
@@ -69,7 +66,7 @@ public class DomainResultGraphPrinter {
 
 		AST_LOGGER.debug( buffer.toString() );
 
-		if ( TRACE_ENABLED ) {
+		if ( AST_LOGGER.isTraceEnabled() ) {
 			AST_LOGGER.tracef( new Exception(), "Stack trace calling DomainResultGraphPrinter" );
 		}
 	}

@@ -58,9 +58,7 @@ public class BasicResultAssembler<J> implements DomainResultAssembler<J> {
 			JdbcValuesSourceProcessingOptions options) {
 		final Object jdbcValue = extractRawValue( rowProcessingState );
 
-		if ( ResultsLogger.DEBUG_ENABLED ) {
-			ResultsLogger.RESULTS_LOGGER.debugf( "Extracted JDBC value [%d] - [%s]", valuesArrayPosition, jdbcValue );
-		}
+		ResultsLogger.RESULTS_LOGGER.debugf( "Extracted JDBC value [%d] - [%s]", valuesArrayPosition, jdbcValue );
 
 		if ( valueConverter != null ) {
 			if ( jdbcValue != null ) {

@@ -26,7 +26,4 @@ public interface CollectionLoadingLogger extends BasicLogger {
 	 * Static access to the logging instance
 	 */
 	Logger COLL_LOAD_LOGGER = LoadingLogger.subLogger( LOGGER_NAME );
-
-	boolean TRACE_ENABLED = COLL_LOAD_LOGGER.isTraceEnabled();
-	boolean DEBUG_ENABLED = COLL_LOAD_LOGGER.isDebugEnabled();
 }

@@ -80,7 +80,7 @@ public abstract class AbstractCollectionInitializer implements CollectionInitial
 				collectionKeyValue
 		);
 
-		if ( CollectionLoadingLogger.DEBUG_ENABLED ) {
+		if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isDebugEnabled() ) {
 			CollectionLoadingLogger.COLL_LOAD_LOGGER.debugf(
 					"(%s) Current row collection key : %s",
 					this.getClass().getSimpleName(),

@@ -70,7 +70,7 @@ public abstract class AbstractImmediateCollectionInitializer extends AbstractCol
 			return;
 		}
 
-		if ( CollectionLoadingLogger.TRACE_ENABLED ) {
+		if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isTraceEnabled() ) {
 			COLL_LOAD_LOGGER.tracef(
 					"(%s) Beginning Initializer#resolveInstance for collection : %s",
 					getSimpleConcreteImplName(),
@@ -93,7 +93,7 @@ public abstract class AbstractImmediateCollectionInitializer extends AbstractCol
 		if ( existingLoadingEntry != null ) {
 			collectionInstance = existingLoadingEntry.getCollectionInstance();
 
-			if ( CollectionLoadingLogger.DEBUG_ENABLED ) {
+			if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isDebugEnabled() ) {
 				COLL_LOAD_LOGGER.debugf(
 						"(%s) Found existing loading collection entry [%s]; using loading collection instance - %s",
 						getSimpleConcreteImplName(),
@@ -108,7 +108,7 @@ public abstract class AbstractImmediateCollectionInitializer extends AbstractCol
 			}
 			else {
 				// the entity is already being loaded elsewhere
-				if ( CollectionLoadingLogger.DEBUG_ENABLED ) {
+				if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isDebugEnabled() ) {
 					COLL_LOAD_LOGGER.debugf(
 							"(%s) Collection [%s] being loaded by another initializer [%s] - skipping processing",
 							getSimpleConcreteImplName(),
@@ -130,7 +130,7 @@ public abstract class AbstractImmediateCollectionInitializer extends AbstractCol
 			// it is already initialized we have nothing to do
 
 			if ( collectionInstance.wasInitialized() ) {
-				if ( CollectionLoadingLogger.DEBUG_ENABLED ) {
+				if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isDebugEnabled() ) {
 					COLL_LOAD_LOGGER.debugf(
 							"(%s) Found existing collection instance [%s] in Session; skipping processing - [%s]",
 							getSimpleConcreteImplName(),
@@ -155,7 +155,7 @@ public abstract class AbstractImmediateCollectionInitializer extends AbstractCol
 			// it is already initialized we have nothing to do
 
 			if ( collectionInstance.wasInitialized() ) {
-				if ( CollectionLoadingLogger.DEBUG_ENABLED ) {
+				if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isDebugEnabled() ) {
 					COLL_LOAD_LOGGER.debugf(
 							"(%s) Found existing unowned collection instance [%s] in Session; skipping processing - [%s]",
 							getSimpleConcreteImplName(),
@@ -184,7 +184,7 @@ public abstract class AbstractImmediateCollectionInitializer extends AbstractCol
 					session
 			);
 
-			if ( CollectionLoadingLogger.DEBUG_ENABLED ) {
+			if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isDebugEnabled() ) {
 				COLL_LOAD_LOGGER.debugf(
 						"(%s) Created new collection wrapper [%s] : %s",
 						getSimpleConcreteImplName(),
@@ -203,7 +203,7 @@ public abstract class AbstractImmediateCollectionInitializer extends AbstractCol
 		}
 
 		if ( responsibility != null ) {
-			if ( CollectionLoadingLogger.DEBUG_ENABLED ) {
+			if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isDebugEnabled() ) {
 				COLL_LOAD_LOGGER.debugf(
 						"(%s) Responsible for loading collection [%s] : %s",
 						getSimpleConcreteImplName(),
@@ -285,7 +285,7 @@ public abstract class AbstractImmediateCollectionInitializer extends AbstractCol
 
 		if ( collectionValueKey != null ) {
 			// the row contains an element in the collection...
-			if ( CollectionLoadingLogger.DEBUG_ENABLED ) {
+			if ( CollectionLoadingLogger.COLL_LOAD_LOGGER.isDebugEnabled() ) {
 				COLL_LOAD_LOGGER.debugf(
 						"(%s) Reading element from row for collection [%s] -> %s",
 						getSimpleConcreteImplName(),

@@ -31,7 +31,4 @@ public interface EmbeddableLoadingLogger extends BasicLogger {
 	 */
 	Logger EMBEDDED_LOAD_LOGGER = LoadingLogger.subLogger( LOGGER_NAME );
 
-
-	boolean TRACE_ENABLED = EMBEDDED_LOAD_LOGGER.isTraceEnabled();
-	boolean DEBUG_ENABLED = EMBEDDED_LOAD_LOGGER.isDebugEnabled();
 }

@@ -272,7 +272,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
 		// AbstractEntityPersister#getSequentialSelect in 5.2
 
 		if ( entityKey == null ) {
-			if ( EntityLoadingLogging.TRACE_ENABLED ) {
+			if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isTraceEnabled() ) {
 				EntityLoadingLogging.ENTITY_LOADING_LOGGER.tracef(
 						"(%s) Beginning Initializer#resolveKey process for entity : %s",
 						StringHelper.collapse( this.getClass().getName() ),
@@ -295,7 +295,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
 			assert missing;
 		}
 		else {
-			if ( EntityLoadingLogging.DEBUG_ENABLED ) {
+			if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
 				EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
 						"(%s) Hydrated EntityKey (%s): %s",
 						getSimpleConcreteImplName(),
@@ -480,7 +480,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
 			LoadingEntityEntry existingLoadingEntry,
 			Object entityIdentifier) {
 
-		if ( EntityLoadingLogging.TRACE_ENABLED ) {
+		if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isTraceEnabled() ) {
 			EntityLoadingLogging.ENTITY_LOADING_LOGGER.tracef(
 					"(%s) Beginning Initializer#resolveInstance process for entity (%s) : %s",
 					StringHelper.collapse( this.getClass().getName() ),
@@ -603,7 +603,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
 
 	private void setIsOwningInitializer(Object entityIdentifier,LoadingEntityEntry existingLoadingEntry) {
 		if ( existingLoadingEntry != null ) {
-			if ( EntityLoadingLogging.DEBUG_ENABLED ) {
+			if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
 				EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
 						"(%s) Found existing loading entry [%s] - using loading instance",
 						getSimpleConcreteImplName(),
@@ -633,7 +633,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
 		}
 		else {
 			// the entity is already being loaded elsewhere
-			if ( EntityLoadingLogging.DEBUG_ENABLED ) {
+			if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
 				EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
 						"(%s) Entity [%s] being loaded by another initializer [%s] - skipping processing",
 						getSimpleConcreteImplName(),
@@ -671,7 +671,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
 
 	protected Object instantiateEntity(Object entityIdentifier, SharedSessionContractImplementor session) {
 		final Object instance = session.instantiate( concreteDescriptor, entityKey.getIdentifier() );
-		if ( EntityLoadingLogging.DEBUG_ENABLED ) {
+		if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
 			EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
 					"(%s) Created new entity instance [%s] : %s",
 					getSimpleConcreteImplName(),
@@ -815,7 +815,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
 		final SharedSessionContractImplementor session = rowProcessingState.getSession();
 		final PersistenceContext persistenceContext = session.getPersistenceContextInternal();
 
-		if ( EntityLoadingLogging.TRACE_ENABLED ) {
+		if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isTraceEnabled() ) {
 			EntityLoadingLogging.ENTITY_LOADING_LOGGER.tracef(
 					"(%s) Beginning Initializer#initializeInstance process for entity %s",
 					getSimpleConcreteImplName(),
@@ -871,7 +871,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
 
 		concreteDescriptor.afterInitialize( toInitialize, session );
 
-		if ( EntityLoadingLogging.DEBUG_ENABLED ) {
+		if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
 			EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
 					"(%s) Done materializing entityInstance : %s",
 					getSimpleConcreteImplName(),
@@ -957,7 +957,7 @@ public abstract class AbstractEntityInitializer extends AbstractFetchParentAcces
 			EntityDataAccess cacheAccess) {
 		final SessionFactoryImplementor factory = session.getFactory();
 
-		if ( EntityLoadingLogging.DEBUG_ENABLED ) {
+		if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
 			EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
 					"(%S) Adding entityInstance to second-level cache: %s",
 					getSimpleConcreteImplName(),

@@ -22,7 +22,4 @@ import org.jboss.logging.Logger;
 public interface EntityLoadingLogging {
 	String LOGGER_NAME = LoadingLogger.LOGGER_NAME + ".entity";
 	Logger ENTITY_LOADING_LOGGER = Logger.getLogger( LOGGER_NAME );
-
-	boolean TRACE_ENABLED = ENTITY_LOADING_LOGGER.isTraceEnabled();
-	boolean DEBUG_ENABLED = ENTITY_LOADING_LOGGER.isDebugEnabled();
 }

@@ -120,7 +120,7 @@ public class EntitySelectFetchInitializer extends AbstractFetchParentAccess impl
 			return;
 		}
 
-		if ( EntityLoadingLogging.TRACE_ENABLED ) {
+		if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isTraceEnabled() ) {
 			EntityLoadingLogging.ENTITY_LOADING_LOGGER.tracef(
 					"(%s) Beginning Initializer#resolveInstance process for entity (%s) : %s",
 					StringHelper.collapse( this.getClass().getName() ),
@@ -144,7 +144,7 @@ public class EntitySelectFetchInitializer extends AbstractFetchParentAccess impl
 		final LoadingEntityEntry existingLoadingEntry = loadContexts.findLoadingEntityEntry( entityKey );
 
 		if ( existingLoadingEntry != null ) {
-			if ( EntityLoadingLogging.DEBUG_ENABLED ) {
+			if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
 				EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
 						"(%s) Found existing loading entry [%s] - using loading instance",
 						CONCRETE_NAME,
@@ -159,7 +159,7 @@ public class EntitySelectFetchInitializer extends AbstractFetchParentAccess impl
 			final EntityInitializer entityInitializer = existingLoadingEntry.getEntityInitializer();
 			if ( entityInitializer != this ) {
 				// the entity is already being loaded elsewhere
-				if ( EntityLoadingLogging.DEBUG_ENABLED ) {
+				if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
 					EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
 							"(%s) Entity [%s] being loaded by another initializer [%s] - skipping processing",
 							CONCRETE_NAME,
@@ -180,7 +180,7 @@ public class EntitySelectFetchInitializer extends AbstractFetchParentAccess impl
 			}
 		}
 
-		if ( EntityLoadingLogging.DEBUG_ENABLED ) {
+		if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
 			EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
 					"(%s) Invoking session#internalLoad for entity (%s) : %s",
 					CONCRETE_NAME,
@@ -206,7 +206,7 @@ public class EntitySelectFetchInitializer extends AbstractFetchParentAccess impl
 			);
 		}
 
-		if ( EntityLoadingLogging.DEBUG_ENABLED ) {
+		if ( EntityLoadingLogging.ENTITY_LOADING_LOGGER.isDebugEnabled() ) {
 			EntityLoadingLogging.ENTITY_LOADING_LOGGER.debugf(
 					"(%s) Entity [%s] : %s has being loaded by session.internalLoad.",
 					CONCRETE_NAME,

@@ -146,7 +146,7 @@ public class ResultsHelper {
 	}
 
 	private static void logInitializers(Map<NavigablePath, Initializer> initializerMap) {
-		if ( ! ResultsLogger.DEBUG_ENABLED ) {
+		if ( ! ResultsLogger.RESULTS_MESSAGE_LOGGER.isDebugEnabled() ) {
 			return;
 		}
 
@@ -246,9 +246,7 @@ public class ResultsHelper {
 
 		if ( session.getLoadQueryInfluencers().hasEnabledFilters() && collectionDescriptor.isAffectedByEnabledFilters( session ) ) {
 			// some filters affecting the collection are enabled on the session, so do not do the put into the cache.
-			if ( LOG.isDebugEnabled() ) {
-				LOG.debug( "Refusing to add to cache due to enabled filters" );
-			}
+			LOG.debug( "Refusing to add to cache due to enabled filters" );
 			// todo : add the notion of enabled filters to the cache key to differentiate filtered collections from non-filtered;
 			// DefaultInitializeCollectionEventHandler.initializeCollectionFromCache() (which makes sure to not read from
 			// cache with enabled filters).
@@ -273,9 +271,7 @@ public class ResultsHelper {
 				}
 			}
 			if ( collectionOwner == null ) {
-				if ( LOG.isDebugEnabled() ) {
-					LOG.debugf( "Unable to resolve owner of loading collection for second level caching. Refusing to add to cache.");
-				}
+				LOG.debugf( "Unable to resolve owner of loading collection for second level caching. Refusing to add to cache.");
 				return;
 			}
 		}

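The two ResultsHelper hunks above go a step further and drop the guard entirely. That is safe here because debug/debugf already check the level internally and the message is a constant, so there is nothing expensive to skip; a guard still earns its keep when building the log arguments is itself costly. A hedged sketch of that trade-off follows (class and helper names are hypothetical, not from this commit):

import org.jboss.logging.Logger;

// Illustration only; not Hibernate code.
class CacheWriteLoggingExample {
    private static final Logger LOG = Logger.getLogger( CacheWriteLoggingExample.class );

    void skipBecauseOfFilters() {
        // Constant, cheap message: debug() performs the level check itself,
        // so an outer isDebugEnabled() adds nothing.
        LOG.debug( "Refusing to add to cache due to enabled filters" );
    }

    void skipForKey(Object collectionKey) {
        // Guard kept on purpose: building the argument is the expensive part.
        if ( LOG.isDebugEnabled() ) {
            LOG.debugf( "Skipping cache put for %s", describe( collectionKey ) );
        }
    }

    private String describe(Object key) {
        return String.valueOf( key ); // stand-in for a costly rendering
    }
}
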
@@ -19,49 +19,46 @@ public interface JdbcExtractingLogging {
 
 	Logger LOGGER = Logger.getLogger( NAME );
 
-	boolean TRACE_ENABLED = LOGGER.isTraceEnabled();
-	boolean DEBUG_ENABLED = LOGGER.isDebugEnabled();
-
 	static void logExtracted(int jdbcPosition, int typeCode, Object value) {
-		assert TRACE_ENABLED;
-
-		JdbcExtractingLogging.LOGGER.tracef(
-				"extracted value (%s:%s) -> [%s]",
-				jdbcPosition,
-				JdbcTypeNameMapper.getTypeName( typeCode ),
-				value
-		);
+		if ( LOGGER.isTraceEnabled() ) {
+			JdbcExtractingLogging.LOGGER.tracef(
+					"extracted value (%s:%s) -> [%s]",
+					jdbcPosition,
+					JdbcTypeNameMapper.getTypeName( typeCode ),
+					value
+			);
+		}
 	}
 
 	static void logNullExtracted(int jdbcPosition, int typeCode) {
-		assert TRACE_ENABLED;
-
-		JdbcExtractingLogging.LOGGER.tracef(
-				"extracted value (%s:%s) -> [null]",
-				jdbcPosition,
-				JdbcTypeNameMapper.getTypeName( typeCode )
-		);
+		if ( LOGGER.isTraceEnabled() ) {
+			JdbcExtractingLogging.LOGGER.tracef(
+					"extracted value (%s:%s) -> [null]",
+					jdbcPosition,
+					JdbcTypeNameMapper.getTypeName( typeCode )
+			);
+		}
 	}
 
 	static void logExtracted(String callableParamName, int typeCode, Object value) {
-		assert TRACE_ENABLED;
-
-		JdbcExtractingLogging.LOGGER.tracef(
-				"extracted value (%s:%s) -> [%s]",
-				callableParamName,
-				JdbcTypeNameMapper.getTypeName( typeCode ),
-				value
-		);
+		if ( LOGGER.isTraceEnabled() ) {
+			JdbcExtractingLogging.LOGGER.tracef(
+					"extracted value (%s:%s) -> [%s]",
+					callableParamName,
+					JdbcTypeNameMapper.getTypeName( typeCode ),
+					value
+			);
+		}
 	}
 
 	static void logNullExtracted(String callableParamName, int typeCode) {
-		assert TRACE_ENABLED;
-
-		JdbcExtractingLogging.LOGGER.tracef(
-				"extracted value (%s:%s) -> [null]",
-				callableParamName,
-				JdbcTypeNameMapper.getTypeName( typeCode )
-		);
+		if ( LOGGER.isTraceEnabled() ) {
+			JdbcExtractingLogging.LOGGER.tracef(
+					"extracted value (%s:%s) -> [null]",
+					callableParamName,
+					JdbcTypeNameMapper.getTypeName( typeCode )
+			);
+		}
 	}
 
 }

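The JdbcExtractingLogging rewrite above also changes the contract of the static helpers: instead of asserting that the caller had already consulted a cached TRACE_ENABLED constant, each helper now guards itself with the live LOGGER.isTraceEnabled(), so it is safe to call regardless of what the caller checked. A minimal sketch of that shape, using a hypothetical interface name:

import org.jboss.logging.Logger;

// Illustration only; mirrors the reworked helper style, names are hypothetical.
interface ExtractionTraceExample {
    Logger LOGGER = Logger.getLogger( "org.hibernate.orm.example.jdbc.extract" );

    static void logExtracted(int position, Object value) {
        // Previously: assert TRACE_ENABLED; (caller had to pre-check a cached flag).
        // Now the helper checks the live level itself.
        if ( LOGGER.isTraceEnabled() ) {
            LOGGER.tracef( "extracted value (%s) -> [%s]", position, value );
        }
    }
}
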
@@ -43,7 +43,7 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J>, Serializab
 	public J extract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
 		final J value = doExtract( rs, paramIndex, options );
 		if ( value == null || rs.wasNull() ) {
-			if ( JdbcExtractingLogging.TRACE_ENABLED ) {
+			if ( JdbcExtractingLogging.LOGGER.isTraceEnabled() ) {
 				JdbcExtractingLogging.logNullExtracted(
 						paramIndex,
 						getJdbcType().getDefaultSqlTypeCode()
@@ -52,7 +52,7 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J>, Serializab
 			return null;
 		}
 		else {
-			if ( JdbcExtractingLogging.TRACE_ENABLED ) {
+			if ( JdbcExtractingLogging.LOGGER.isTraceEnabled() ) {
 				JdbcExtractingLogging.logExtracted(
 						paramIndex,
 						getJdbcType().getDefaultSqlTypeCode(),
@@ -79,7 +79,7 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J>, Serializab
 	public J extract(CallableStatement statement, int paramIndex, WrapperOptions options) throws SQLException {
 		final J value = doExtract( statement, paramIndex, options );
 		if ( value == null || statement.wasNull() ) {
-			if ( JdbcExtractingLogging.TRACE_ENABLED ) {
+			if ( JdbcExtractingLogging.LOGGER.isTraceEnabled() ) {
 				JdbcExtractingLogging.LOGGER.tracef(
 						"extracted procedure output parameter ([%s] : [%s]) - [null]",
 						paramIndex,
@@ -89,7 +89,7 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J>, Serializab
 			return null;
 		}
 		else {
-			if ( JdbcExtractingLogging.TRACE_ENABLED ) {
+			if ( JdbcExtractingLogging.LOGGER.isTraceEnabled() ) {
 				JdbcExtractingLogging.LOGGER.tracef(
 						"extracted procedure output parameter ([%s] : [%s]) - [%s]",
 						paramIndex,
@@ -117,7 +117,7 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J>, Serializab
 	public J extract(CallableStatement statement, String paramName, WrapperOptions options) throws SQLException {
 		final J value = doExtract( statement, paramName, options );
 		if ( value == null || statement.wasNull() ) {
-			if ( JdbcExtractingLogging.TRACE_ENABLED ) {
+			if ( JdbcExtractingLogging.LOGGER.isTraceEnabled() ) {
 				JdbcExtractingLogging.LOGGER.tracef(
 						"extracted named procedure output parameter ([%s] : [%s]) - [null]",
 						paramName,
@@ -127,7 +127,7 @@ public abstract class BasicExtractor<J> implements ValueExtractor<J>, Serializab
 			return null;
 		}
 		else {
-			if ( JdbcExtractingLogging.TRACE_ENABLED ) {
+			if ( JdbcExtractingLogging.LOGGER.isTraceEnabled() ) {
 				JdbcExtractingLogging.LOGGER.tracef(
 						"extracted named procedure output parameter ([%s] : [%s]) - [%s]",
 						paramName,

@@ -136,7 +136,7 @@ public class UserTypeSqlTypeAdapter<J> implements JdbcType {
 	}
 
 	private void logExtracted(int paramIndex, J extracted) {
-		if ( ! JdbcExtractingLogging.TRACE_ENABLED ) {
+		if ( ! JdbcExtractingLogging.LOGGER.isTraceEnabled() ) {
 			return;
 		}
 
@@ -149,7 +149,7 @@ public class UserTypeSqlTypeAdapter<J> implements JdbcType {
 	}
 
 	private void logExtracted(String paramName, J extracted) {
-		if ( ! JdbcExtractingLogging.TRACE_ENABLED ) {
+		if ( ! JdbcExtractingLogging.LOGGER.isTraceEnabled() ) {
 			return;
 		}
 