fix some logging performance issues

Nathan Xu 2020-03-17 18:01:06 -04:00 committed by Steve Ebersole
parent 233b8daffb
commit 4c856725f8
70 changed files with 386 additions and 264 deletions
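
This commit applies two recurring patterns. Where a log call built its message by string concatenation, it now uses JBoss Logging's printf-style variants (tracef/debugf), which skip formatting entirely when the level is disabled. Where an argument is itself expensive to produce (toString(), getAbsolutePath(), Arrays.toString(...)), the whole call is wrapped in an explicit isDebugEnabled()/isTraceEnabled() guard, since even tracef must evaluate its arguments before it can check the level. A minimal sketch of both patterns against the org.jboss.logging API (the Example class and its methods are hypothetical, not part of the commit):

import java.util.Arrays;

import org.jboss.logging.Logger;

public class Example {
    private static final Logger log = Logger.getLogger( Example.class );

    void cheapArguments(Object url, Object jarUrl) {
        // Concatenation builds the full String even when TRACE is disabled:
        // log.trace( "JAR URL from URL Entry: " + url + " >> " + jarUrl );

        // tracef defers formatting until the level is known to be enabled:
        log.tracef( "JAR URL from URL Entry: %s >> %s", url, jarUrl );
    }

    void expensiveArguments(Object[] orderedFields) {
        // Arrays.toString() would run before tracef could check the level,
        // so the guard avoids that cost when DEBUG is disabled:
        if ( log.isDebugEnabled() ) {
            log.debugf( "Persistent fields: %s", Arrays.toString( orderedFields ) );
        }
    }
}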


@@ -93,7 +93,7 @@ public class ArchiveHelper {
"Unable to determine JAR Url from " + url + ". Cause: " + e.getMessage()
);
}
log.trace( "JAR URL from URL Entry: " + url + " >> " + jarUrl );
log.tracef( "JAR URL from URL Entry: %s >> %s", url, jarUrl );
return jarUrl;
}


@@ -130,12 +130,14 @@ public class StandardArchiveDescriptorFactory implements ArchiveDescriptorFactor
}
catch (MalformedURLException e) {
// allow to pass through to return the original URL
log.debugf(
e,
"Unable to adjust relative <jar-file/> URL [%s] relative to root URL [%s]",
filePart,
rootUrlFile.getAbsolutePath()
);
if ( log.isDebugEnabled() ) {
log.debugf(
e,
"Unable to adjust relative <jar-file/> URL [%s] relative to root URL [%s]",
filePart,
rootUrlFile.getAbsolutePath()
);
}
}
return url;


@@ -715,7 +715,7 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector
log.tracev( "Import: {0} -> {1}", importName, entityName );
String old = imports.put( importName, entityName );
if ( old != null ) {
log.debug( "import name [" + importName + "] overrode previous [{" + old + "}]" );
log.debugf( "import name [%s] overrode previous [{%s}]", importName, old );
}
}


@@ -96,7 +96,7 @@ public class MetadataBuilderImpl implements MetadataBuilderImplementor, TypeCont
return ( StandardServiceRegistry ) serviceRegistry;
}
else if ( BootstrapServiceRegistry.class.isInstance( serviceRegistry ) ) {
log.debugf(
log.debug(
"ServiceRegistry passed to MetadataBuilder was a BootstrapServiceRegistry; this likely won't end well " +
"if attempt is made to build SessionFactory"
);


@@ -57,7 +57,9 @@ public class MappingBinder extends AbstractBinder {
Origin origin) {
final String rootElementLocalName = rootElementStartEvent.getName().getLocalPart();
if ( "hibernate-mapping".equals( rootElementLocalName ) ) {
log.debugf( "Performing JAXB binding of hbm.xml document : %s", origin.toString() );
if ( log.isDebugEnabled() ) {
log.debugf( "Performing JAXB binding of hbm.xml document : %s", origin.toString() );
}
XMLEventReader hbmReader = new HbmEventReader( staxEventReader, xmlEventFactory );
JaxbHbmHibernateMapping hbmBindings = jaxb( hbmReader, MappingXsdSupport.INSTANCE.hbmXsd().getSchema(), hbmJaxbContext(), origin );


@@ -96,15 +96,17 @@ public class AttributeConverterManager implements ConverterAutoApplyHandler {
List<ConverterDescriptor> matches = new ArrayList<>();
for ( ConverterDescriptor descriptor : converterDescriptors() ) {
log.debugf(
"Checking auto-apply AttributeConverter [%s] (domain-type=%s) for match against %s : %s.%s (type=%s)",
descriptor.getAttributeConverterClass().getName(),
descriptor.getDomainValueResolvedType().getSignature(),
conversionSite.getSiteDescriptor(),
xProperty.getDeclaringClass().getName(),
xProperty.getName(),
xProperty.getType().getName()
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Checking auto-apply AttributeConverter [%s] (domain-type=%s) for match against %s : %s.%s (type=%s)",
descriptor.getAttributeConverterClass().getName(),
descriptor.getDomainValueResolvedType().getSignature(),
conversionSite.getSiteDescriptor(),
xProperty.getDeclaringClass().getName(),
xProperty.getName(),
xProperty.getType().getName()
);
}
final ConverterDescriptor match = matcher.apply( descriptor.getAutoApplyDescriptor() );


@@ -268,11 +268,13 @@ public class ScanningCoordinator {
continue;
}
log.debugf(
"Unable to resolve class [%s] named in persistence unit [%s]",
unresolvedListedClassName,
scanEnvironment.getRootUrl()
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Unable to resolve class [%s] named in persistence unit [%s]",
unresolvedListedClassName,
scanEnvironment.getRootUrl()
);
}
}
}
}


@@ -48,11 +48,13 @@ public class Namespace {
.toPhysicalSchemaName( name.getSchema(), jdbcEnvironment )
);
log.debugf(
"Created database namespace [logicalName=%s, physicalName=%s]",
name.toString(),
physicalName.toString()
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Created database namespace [logicalName=%s, physicalName=%s]",
name.toString(),
physicalName.toString()
);
}
}
public Name getName() {


@@ -43,7 +43,7 @@ class FilterDefinitionBinder {
if ( content instanceof String ) {
final String contentString = content.toString().trim();
if ( StringHelper.isNotEmpty( contentString ) ) {
if ( condition != null ) {
if ( condition != null && log.isDebugEnabled() ) {
log.debugf(
"filter-def [name=%s, origin=%s] defined multiple conditions, accepting arbitrary one",
jaxbFilterDefinitionMapping.getName(),


@@ -132,11 +132,13 @@ public class HbmMetadataSourceProcessorImpl implements MetadataSourceProcessor {
hierarchy_loop : for ( EntityHierarchySourceImpl entityHierarchy : entityHierarchies ) {
for ( String entityName : entityHierarchy.getContainedEntityNames() ) {
if ( processedEntityNames.contains( entityName ) ) {
log.debugf(
"Skipping HBM processing of entity hierarchy [%s], as at least one entity [%s] has been processed",
entityHierarchy.getRoot().getEntityNamingSource().getEntityName(),
entityName
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Skipping HBM processing of entity hierarchy [%s], as at least one entity [%s] has been processed",
entityHierarchy.getRoot().getEntityNamingSource().getEntityName(),
entityName
);
}
continue hierarchy_loop;
}
}


@@ -3288,11 +3288,13 @@ public class ModelBinder {
prop.setValue( collectionBinding.getKey() );
referenced.addProperty( prop );
log.debugf(
"Added virtual backref property [%s] : %s",
prop.getName(),
pluralAttributeSource.getAttributeRole().getFullPath()
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Added virtual backref property [%s] : %s",
prop.getName(),
pluralAttributeSource.getAttributeRole().getFullPath()
);
}
}
}
@@ -3388,11 +3390,13 @@ public class ModelBinder {
}
protected void bindCollectionElement() {
log.debugf(
"Binding [%s] element type for a [%s]",
getPluralAttributeSource().getElementSource().getNature(),
getPluralAttributeSource().getNature()
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Binding [%s] element type for a [%s]",
getPluralAttributeSource().getElementSource().getNature(),
getPluralAttributeSource().getNature()
);
}
if ( getPluralAttributeSource().getElementSource() instanceof PluralAttributeElementSourceBasic ) {
final PluralAttributeElementSourceBasic elementSource =
(PluralAttributeElementSourceBasic) getPluralAttributeSource().getElementSource();
@@ -3580,10 +3584,12 @@ public class ModelBinder {
for ( FilterSource filterSource : elementSource.getFilterSources() ) {
if ( filterSource.getName() == null ) {
log.debugf(
"Encountered filter with no name associated with many-to-many [%s]; skipping",
getPluralAttributeSource().getAttributeRole().getFullPath()
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Encountered filter with no name associated with many-to-many [%s]; skipping",
getPluralAttributeSource().getAttributeRole().getFullPath()
);
}
continue;
}
@@ -4224,7 +4230,7 @@ public class ModelBinder {
@Override
public void process() {
log.debugf( "Binding natural-id UniqueKey for entity : " + entityBinding.getEntityName() );
log.debugf( "Binding natural-id UniqueKey for entity : %s", entityBinding.getEntityName() );
final List<Identifier> columnNames = new ArrayList<>();


@@ -37,11 +37,13 @@ public class TypeDefinitionBinder {
context.getMetadataCollector().getTypeConfiguration()
);
log.debugf(
"Processed type-definition : %s -> %s",
definition.getName(),
definition.getTypeImplementorClass().getName()
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Processed type-definition : %s -> %s",
definition.getName(),
definition.getTypeImplementorClass().getName()
);
}
context.getTypeDefinitionRegistry().register( definition );
}


@@ -70,7 +70,9 @@ public class StrategySelectorBuilder {
public <T> void addExplicitStrategyRegistration(StrategyRegistration<T> strategyRegistration) {
if ( !strategyRegistration.getStrategyRole().isInterface() ) {
// not good form...
log.debug( "Registering non-interface strategy : " + strategyRegistration.getStrategyRole().getName() );
if ( log.isDebugEnabled() ) {
log.debugf( "Registering non-interface strategy : %s", strategyRegistration.getStrategyRole().getName() );
}
}
if ( ! strategyRegistration.getStrategyRole().isAssignableFrom( strategyRegistration.getStrategyImplementation() ) ) {


@@ -58,12 +58,14 @@ final class BiDirectionalAssociationHandler implements Implementation {
}
String mappedBy = getMappedBy( persistentField, targetEntity, enhancementContext );
if ( mappedBy == null || mappedBy.isEmpty() ) {
log.infof(
"Bi-directional association not managed for field [%s#%s]: Could not find target field in [%s]",
managedCtClass.getName(),
persistentField.getName(),
targetEntity.getCanonicalName()
);
if ( log.isInfoEnabled() ) {
log.infof(
"Bi-directional association not managed for field [%s#%s]: Could not find target field in [%s]",
managedCtClass.getName(),
persistentField.getName(),
targetEntity.getCanonicalName()
);
}
return implementation;
}
@@ -102,11 +104,13 @@ final class BiDirectionalAssociationHandler implements Implementation {
if ( persistentField.hasAnnotation( ManyToMany.class ) ) {
if ( persistentField.getType().asErasure().isAssignableTo( Map.class ) || targetType.isAssignableTo( Map.class ) ) {
log.infof(
"Bi-directional association not managed for field [%s#%s]: @ManyToMany in java.util.Map attribute not supported ",
managedCtClass.getName(),
persistentField.getName()
);
if ( log.isInfoEnabled() ) {
log.infof(
"Bi-directional association not managed for field [%s#%s]: @ManyToMany in java.util.Map attribute not supported ",
managedCtClass.getName(),
persistentField.getName()
);
}
return implementation;
}
@@ -146,11 +150,13 @@ final class BiDirectionalAssociationHandler implements Implementation {
}
if ( targetClass == null ) {
log.infof(
"Bi-directional association not managed for field [%s#%s]: Could not find target type",
managedCtClass.getName(),
persistentField.getName()
);
if ( log.isInfoEnabled() ) {
log.infof(
"Bi-directional association not managed for field [%s#%s]: Could not find target type",
managedCtClass.getName(),
persistentField.getName()
);
}
return null;
}
else if ( !targetClass.resolve( TypeDescription.class ).represents( void.class ) ) {
@@ -231,13 +237,15 @@ final class BiDirectionalAssociationHandler implements Implementation {
if ( context.isPersistentField( annotatedF )
&& target.getName().equals( getMappedByNotManyToMany( annotatedF ) )
&& target.getDeclaringType().asErasure().isAssignableTo( entityType( annotatedF.getType() ) ) ) {
log.debugf(
"mappedBy association for field [%s#%s] is [%s#%s]",
target.getDeclaringType().asErasure().getName(),
target.getName(),
targetEntity.getName(),
f.getName()
);
if ( log.isDebugEnabled() ) {
log.debugf(
"mappedBy association for field [%s#%s] is [%s#%s]",
target.getDeclaringType().asErasure().getName(),
target.getName(),
targetEntity.getName(),
f.getName()
);
}
return f.getName();
}
}


@@ -76,13 +76,15 @@ final class FieldAccessEnhancer implements AsmVisitorWrapper.ForDeclaredMethods.
&& !field.hasAnnotation( Id.class )
&& !field.getName().equals( "this$0" ) ) {
log.debugf(
"Extended enhancement: Transforming access to field [%s.%s] from method [%s#%s]",
field.getType().asErasure(),
field.getName(),
field.getName(),
name
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Extended enhancement: Transforming access to field [%s.%s] from method [%s#%s]",
field.getType().asErasure(),
field.getName(),
field.getName(),
name
);
}
switch ( opcode ) {
case Opcodes.GETFIELD:


@@ -90,7 +90,13 @@ final class PersistentAttributeTransformer implements AsmVisitorWrapper.ForDecla
}
AnnotatedFieldDescription[] orderedFields = enhancementContext.order( persistentFieldList.toArray( new AnnotatedFieldDescription[0] ) );
log.debugf( "Persistent fields for entity %s: %s", managedCtClass.getName(), Arrays.toString( orderedFields ) );
if ( log.isDebugEnabled() ) {
log.debugf(
"Persistent fields for entity %s: %s",
managedCtClass.getName(),
Arrays.toString( orderedFields )
);
}
return new PersistentAttributeTransformer( managedCtClass, enhancementContext, classPool, orderedFields );
}


@@ -94,7 +94,13 @@ public class PersistentAttributesEnhancer extends EnhancerImpl {
}
CtField[] orderedFields = enhancementContext.order( persistentFieldList.toArray( new CtField[0] ) );
log.debugf( "Persistent fields for entity %s: %s", managedCtClass.getName(), Arrays.toString( orderedFields ) );
if ( log.isDebugEnabled() ) {
log.debugf(
"Persistent fields for entity %s: %s",
managedCtClass.getName(),
Arrays.toString( orderedFields )
);
}
return orderedFields;
}


@@ -114,10 +114,12 @@ public class RegionFactoryInitiator implements StandardServiceInitiator<RegionFa
return registeredFactory;
}
else {
LOG.debugf(
"Cannot default RegionFactory based on registered strategies as `%s` RegionFactory strategies were registered",
implementors
);
if ( LOG.isDebugEnabled() ) {
LOG.debugf(
"Cannot default RegionFactory based on registered strategies as `%s` RegionFactory strategies were registered",
implementors
);
}
}
return NoCachingRegionFactory.INSTANCE;


@@ -2142,7 +2142,9 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
else {
// most likely the <result-class/> this code used to handle. I have left the code here,
// but commented it out for now. I'll just log a warning for now.
LOG.debug( "Encountered unrecognized sql-result-set-mapping sub-element : " + resultElement.getName() );
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Encountered unrecognized sql-result-set-mapping sub-element : " + resultElement.getName() );
}
// String clazzName = subelement.attributeValue( "result-class" );
// if ( StringHelper.isNotEmpty( clazzName ) ) {


@@ -1329,7 +1329,7 @@ public abstract class AbstractHANADialect extends Dialect {
private long getLockWaitTimeoutInSeconds(int timeoutInMilliseconds) {
Duration duration = Duration.ofMillis( timeoutInMilliseconds );
long timeoutInSeconds = duration.getSeconds();
if ( duration.getNano() != 0 ) {
if ( duration.getNano() != 0 && LOG.isInfoEnabled() ) {
LOG.info( "Changing the query timeout from " + timeoutInMilliseconds + " ms to " + timeoutInSeconds
+ " s, because HANA requires the timeout in seconds" );
}


@@ -494,13 +494,7 @@ public class EntityEntryContext {
final Method deserializeMethod = entityEntryClass.getDeclaredMethod( "deserialize", ObjectInputStream.class, PersistenceContext.class );
entry = (EntityEntry) deserializeMethod.invoke( null, ois, rtn );
}
catch (NoSuchMethodException e) {
log.errorf( "Enable to deserialize [%s]", entityEntryClassName );
}
catch (InvocationTargetException e) {
log.errorf( "Enable to deserialize [%s]", entityEntryClassName );
}
catch (IllegalAccessException e) {
catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
log.errorf( "Enable to deserialize [%s]", entityEntryClassName );
}


@@ -255,43 +255,45 @@ public class StatisticalLoggingSessionEventListener extends BaseSessionEventList
@Override
public void end() {
log.infof(
"Session Metrics {\n" +
" %s nanoseconds spent acquiring %s JDBC connections;\n" +
" %s nanoseconds spent releasing %s JDBC connections;\n" +
" %s nanoseconds spent preparing %s JDBC statements;\n" +
" %s nanoseconds spent executing %s JDBC statements;\n" +
" %s nanoseconds spent executing %s JDBC batches;\n" +
" %s nanoseconds spent performing %s L2C puts;\n" +
" %s nanoseconds spent performing %s L2C hits;\n" +
" %s nanoseconds spent performing %s L2C misses;\n" +
" %s nanoseconds spent executing %s flushes (flushing a total of %s entities and %s collections);\n" +
" %s nanoseconds spent executing %s partial-flushes (flushing a total of %s entities and %s collections)\n" +
"}",
jdbcConnectionAcquisitionTime,
jdbcConnectionAcquisitionCount,
jdbcConnectionReleaseTime,
jdbcConnectionReleaseCount,
jdbcPrepareStatementTime,
jdbcPrepareStatementCount,
jdbcExecuteStatementTime,
jdbcExecuteStatementCount,
jdbcExecuteBatchTime,
jdbcExecuteBatchCount,
cachePutTime,
cachePutCount,
cacheHitTime,
cacheHitCount,
cacheMissTime,
cacheMissCount,
flushTime,
flushCount,
flushEntityCount,
flushCollectionCount,
partialFlushTime,
partialFlushCount,
partialFlushEntityCount,
partialFlushCollectionCount
);
if ( log.isInfoEnabled() ) {
log.infof(
"Session Metrics {\n" +
" %s nanoseconds spent acquiring %s JDBC connections;\n" +
" %s nanoseconds spent releasing %s JDBC connections;\n" +
" %s nanoseconds spent preparing %s JDBC statements;\n" +
" %s nanoseconds spent executing %s JDBC statements;\n" +
" %s nanoseconds spent executing %s JDBC batches;\n" +
" %s nanoseconds spent performing %s L2C puts;\n" +
" %s nanoseconds spent performing %s L2C hits;\n" +
" %s nanoseconds spent performing %s L2C misses;\n" +
" %s nanoseconds spent executing %s flushes (flushing a total of %s entities and %s collections);\n" +
" %s nanoseconds spent executing %s partial-flushes (flushing a total of %s entities and %s collections)\n" +
"}",
jdbcConnectionAcquisitionTime,
jdbcConnectionAcquisitionCount,
jdbcConnectionReleaseTime,
jdbcConnectionReleaseCount,
jdbcPrepareStatementTime,
jdbcPrepareStatementCount,
jdbcExecuteStatementTime,
jdbcExecuteStatementCount,
jdbcExecuteBatchTime,
jdbcExecuteBatchCount,
cachePutTime,
cachePutCount,
cacheHitTime,
cacheHitCount,
cacheMissTime,
cacheMissCount,
flushTime,
flushCount,
flushEntityCount,
flushCollectionCount,
partialFlushTime,
partialFlushCount,
partialFlushEntityCount,
partialFlushCollectionCount
);
}
}
}


@@ -80,7 +80,7 @@ public class DefaultIdentifierGeneratorFactory
public void register(String strategy, Class generatorClass) {
LOG.debugf( "Registering IdentifierGenerator strategy [%s] -> [%s]", strategy, generatorClass.getName() );
final Class previous = generatorStrategyToClassNameMap.put( strategy, generatorClass );
if ( previous != null ) {
if ( previous != null && LOG.isDebugEnabled() ) {
LOG.debugf( " - overriding [%s]", previous.getName() );
}
}


@@ -43,7 +43,9 @@ public class IntegratorServiceImpl implements IntegratorService {
}
private void addIntegrator(Integrator integrator) {
LOG.debugf( "Adding Integrator [%s].", integrator.getClass().getName() );
if ( LOG.isDebugEnabled() ) {
LOG.debugf( "Adding Integrator [%s].", integrator.getClass().getName() );
}
integrators.add( integrator );
}


@@ -1501,7 +1501,9 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
* @throws IOException Can be thrown by the stream
*/
private void writeObject(ObjectOutputStream out) throws IOException {
LOG.debugf( "Serializing: %s", getUuid() );
if ( LOG.isDebugEnabled() ) {
LOG.debugf( "Serializing: %s", getUuid() );
}
out.defaultWriteObject();
LOG.trace( "Serialized" );
}
@@ -1517,7 +1519,9 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
LOG.trace( "Deserializing" );
in.defaultReadObject();
LOG.debugf( "Deserialized: %s", getUuid() );
if ( LOG.isDebugEnabled() ) {
LOG.debugf( "Deserialized: %s", getUuid() );
}
}
/**


@@ -196,7 +196,9 @@ public class SessionFactoryRegistry {
private final NamespaceChangeListener listener = new NamespaceChangeListener() {
@Override
public void objectAdded(NamingEvent evt) {
LOG.debugf( "A factory was successfully bound to name: %s", evt.getNewBinding().getName() );
if ( LOG.isDebugEnabled() ) {
LOG.debugf( "A factory was successfully bound to name: %s", evt.getNewBinding().getName() );
}
}
@Override


@@ -2959,12 +2959,12 @@ public class SessionImpl
checkOpen();
if ( !( value instanceof Serializable ) ) {
log.warnf( "Property '" + propertyName + "' is not serializable, value won't be set." );
log.warnf( "Property '%s' is not serializable, value won't be set.", propertyName );
return;
}
if ( propertyName == null ) {
log.warnf( "Property having key null is illegal; value won't be set." );
log.warn( "Property having key null is illegal; value won't be set." );
return;
}


@@ -313,7 +313,7 @@ public class StatelessSessionImpl extends AbstractSharedSessionContract implemen
if ( entityMetamodel.hasSubclasses() ) {
// entities with subclasses that define a ProxyFactory can create
// a HibernateProxy.
LOG.debugf( "Creating a HibernateProxy for to-one association with subclasses to honor laziness" );
LOG.debug( "Creating a HibernateProxy for to-one association with subclasses to honor laziness" );
return createProxy( entityKey );
}
return bytecodeEnhancementMetadata.createEnhancedProxy( entityKey, false, this );


@@ -93,12 +93,14 @@ public class HibernatePersistenceProvider implements PersistenceProvider {
}
for ( ParsedPersistenceXmlDescriptor persistenceUnit : units ) {
log.debugf(
"Checking persistence-unit [name=%s, explicit-provider=%s] against incoming persistence unit name [%s]",
persistenceUnit.getName(),
persistenceUnit.getProviderClassName(),
persistenceUnitName
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Checking persistence-unit [name=%s, explicit-provider=%s] against incoming persistence unit name [%s]",
persistenceUnit.getName(),
persistenceUnit.getProviderClassName(),
persistenceUnitName
);
}
final boolean matches = persistenceUnitName == null || persistenceUnit.getName().equals( persistenceUnitName );
if ( !matches ) {
@@ -136,14 +138,18 @@ public class HibernatePersistenceProvider implements PersistenceProvider {
*/
@Override
public EntityManagerFactory createContainerEntityManagerFactory(PersistenceUnitInfo info, Map properties) {
log.tracef( "Starting createContainerEntityManagerFactory : %s", info.getPersistenceUnitName() );
if ( log.isTraceEnabled() ) {
log.tracef( "Starting createContainerEntityManagerFactory : %s", info.getPersistenceUnitName() );
}
return getEntityManagerFactoryBuilder( info, properties ).build();
}
@Override
public void generateSchema(PersistenceUnitInfo info, Map map) {
log.tracef( "Starting generateSchema : PUI.name=%s", info.getPersistenceUnitName() );
if ( log.isTraceEnabled() ) {
log.tracef( "Starting generateSchema : PUI.name=%s", info.getPersistenceUnitName() );
}
final EntityManagerFactoryBuilder builder = getEntityManagerFactoryBuilder( info, map );
builder.generateSchema();


@@ -515,8 +515,9 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
if ( keyString.startsWith( JACC_PREFIX ) ) {
if( !JACC_CONTEXT_ID.equals( keyString ) && !JACC_ENABLED.equals( keyString )) {
if ( jaccContextId == null ) {
LOG.debug(
"Found JACC permission grant [%s] in properties, but no JACC context id was specified; ignoring"
LOG.debugf(
"Found JACC permission grant [%s] in properties, but no JACC context id was specified; ignoring",
keyString
);
}
else {
@@ -677,7 +678,7 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
if ( txnType == null ) {
// is it more appropriate to have this be based on bootstrap entry point (EE vs SE)?
LOG.debugf( "PersistenceUnitTransactionType not specified - falling back to RESOURCE_LOCAL" );
LOG.debug( "PersistenceUnitTransactionType not specified - falling back to RESOURCE_LOCAL" );
txnType = PersistenceUnitTransactionType.RESOURCE_LOCAL;
}


@@ -526,7 +526,9 @@ public class PersistenceXmlParser {
}
public void warning(SAXParseException warn) {
LOG.trace( extractInfo( warn ) );
if ( LOG.isTraceEnabled() ) {
LOG.trace( extractInfo( warn ) );
}
}
}


@@ -82,11 +82,13 @@ public final class ProviderChecker {
final String persistenceUnitRequestedProvider = extractProviderName( persistenceUnit );
if ( persistenceUnitRequestedProvider != null ) {
log.debugf(
"Persistence-unit [%s] requested PersistenceProvider [%s]",
persistenceUnit.getName(),
persistenceUnitRequestedProvider
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Persistence-unit [%s] requested PersistenceProvider [%s]",
persistenceUnit.getName(),
persistenceUnitRequestedProvider
);
}
return persistenceUnitRequestedProvider;
}


@@ -82,7 +82,7 @@ public class PersistenceUnitUtilImpl implements PersistenceUnitUtil, Serializabl
}
}
else {
log.debugf(
log.debug(
"javax.persistence.PersistenceUnitUtil.getIdentifier is only intended to work with enhanced entities " +
"(although Hibernate also adapts this support to its proxies); " +
"however the passed entity was not enhanced (nor a proxy).. may not be able to read identifier"


@@ -81,11 +81,11 @@ public class FlushModeTypeHelper {
}
try {
log.debug( "Attempting to interpret external setting [" + externalName + "] as FlushMode name" );
log.debugf( "Attempting to interpret external setting [%s] as FlushMode name", externalName );
return FlushMode.valueOf( externalName.toUpperCase( Locale.ROOT) );
}
catch ( IllegalArgumentException e ) {
log.debug( "Attempting to interpret external setting [" + externalName + "] as FlushModeType name" );
log.debugf( "Attempting to interpret external setting [%s] as FlushModeType name", externalName );
}
try {


@@ -193,7 +193,9 @@ class DatabaseSnapshotExecutor {
}
Object[] loadDatabaseSnapshot(Object id, SharedSessionContractImplementor session) {
log.tracef( "Getting current persistent state for `%s#%s`", entityDescriptor.getEntityName(), id );
if ( log.isTraceEnabled() ) {
log.tracef( "Getting current persistent state for `%s#%s`", entityDescriptor.getEntityName(), id );
}
final JdbcParameterBindings jdbcParameterBindings = new JdbcParameterBindingsImpl(
entityDescriptor.getIdentifierMapping().getJdbcTypeCount( sessionFactory.getTypeConfiguration() )


@@ -425,7 +425,9 @@ public class LoaderSelectBuilder {
}
private List<Fetch> visitFetches(FetchParent fetchParent, QuerySpec querySpec, LoaderSqlAstCreationState creationState) {
log.tracef( "Starting visitation of FetchParent's Fetchables : %s", fetchParent.getNavigablePath() );
if ( log.isTraceEnabled() ) {
log.tracef( "Starting visitation of FetchParent's Fetchables : %s", fetchParent.getNavigablePath() );
}
final List<Fetch> fetches = new ArrayList<>();


@@ -101,7 +101,9 @@ public class MultiIdEntityLoaderStandardImpl<T> implements MultiIdEntityLoader<T
Object[] ids,
SharedSessionContractImplementor session,
MultiLoadOptions loadOptions) {
log.tracef( "#performOrderedMultiLoad(`%s`, ..)", entityDescriptor.getEntityName() );
if ( log.isTraceEnabled() ) {
log.tracef( "#performOrderedMultiLoad(`%s`, ..)", entityDescriptor.getEntityName() );
}
assert loadOptions.isOrderReturnEnabled();
@@ -235,7 +237,9 @@ public class MultiIdEntityLoaderStandardImpl<T> implements MultiIdEntityLoader<T
final int numberOfIdsInBatch = idsInBatch.size();
log.tracef( "#loadEntitiesById(`%s`, `%s`, ..)", entityDescriptor.getEntityName(), numberOfIdsInBatch );
if ( log.isTraceEnabled() ) {
log.tracef( "#loadEntitiesById(`%s`, `%s`, ..)", entityDescriptor.getEntityName(), numberOfIdsInBatch );
}
final List<JdbcParameter> jdbcParameters = new ArrayList<>( numberOfIdsInBatch * idJdbcTypeCount);
@@ -378,7 +382,9 @@ public class MultiIdEntityLoaderStandardImpl<T> implements MultiIdEntityLoader<T
assert !loadOptions.isOrderReturnEnabled();
assert ids != null;
log.tracef( "#performUnorderedMultiLoad(`%s`, ..)", entityDescriptor.getEntityName() );
if ( log.isTraceEnabled() ) {
log.tracef( "#performUnorderedMultiLoad(`%s`, ..)", entityDescriptor.getEntityName() );
}
final List<T> result = CollectionHelper.arrayList( ids.length );


@@ -190,7 +190,7 @@ public class BasicValue extends SimpleValue implements SqlTypeDescriptorIndicato
}
if ( this.column == incomingColumn ) {
log.debugf( "Skipping column re-registration: " + getTable().getName() + "." + column.getText() );
log.debugf( "Skipping column re-registration: %s.%s", getTable().getName(), column.getText() );
return;
}
@@ -364,7 +364,7 @@ public class BasicValue extends SimpleValue implements SqlTypeDescriptorIndicato
final TypeDefinition autoAppliedTypeDef = getBuildingContext().getTypeDefinitionRegistry()
.resolveAutoApplied( (BasicJavaDescriptor<?>) jtd );
if ( autoAppliedTypeDef != null ) {
log.debugf( "BasicValue resolution matched auto-applied type-definition" );
log.debug( "BasicValue resolution matched auto-applied type-definition" );
return autoAppliedTypeDef.resolve( getTypeParameters(), null, getBuildingContext() );
}


@@ -32,11 +32,13 @@ public class PrimaryKey extends Constraint {
final Column next = columnIterator.next();
if ( next.getCanonicalName().equals( column.getCanonicalName() ) ) {
next.setNullable( false );
log.debugf(
"Forcing column [%s] to be non-null as it is part of the primary key for table [%s]",
column.getCanonicalName(),
getTableNameForLogging( column )
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Forcing column [%s] to be non-null as it is part of the primary key for table [%s]",
column.getCanonicalName(),
getTableNameForLogging( column )
);
}
}
}
super.addColumn( column );


@@ -260,11 +260,13 @@ public class Table implements RelationalModel, Serializable, Exportable {
for ( Column c : primaryKey.getColumns() ) {
if ( c.getCanonicalName().equals( column.getCanonicalName() ) ) {
column.setNullable( false );
log.debugf(
"Forcing column [%s] to be non-null as it is part of the primary key for table [%s]",
column.getCanonicalName(),
getNameIdentifier().getCanonicalName()
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Forcing column [%s] to be non-null as it is part of the primary key for table [%s]",
column.getCanonicalName(),
getNameIdentifier().getCanonicalName()
);
}
}
}
}


@@ -95,7 +95,7 @@ public class AttributeFactory {
LOG.tracef( "Skipping synthetic property %s(%s)", ownerType.getTypeName(), property.getName() );
return null;
}
LOG.trace( "Building attribute [" + ownerType.getTypeName() + "." + property.getName() + "]" );
LOG.tracef( "Building attribute [%s.%s]", ownerType.getTypeName(), property.getName() );
final AttributeContext<X> attributeContext = wrap( ownerType, property );
final AttributeMetadata<X, Y> attributeMetadata = determineAttributeMetadata(
attributeContext,
@@ -149,7 +149,7 @@ public class AttributeFactory {
public <X, Y> SingularPersistentAttribute<X, Y> buildIdAttribute(
IdentifiableDomainType<X> ownerType,
Property property) {
LOG.trace( "Building identifier attribute [" + ownerType.getTypeName() + "." + property.getName() + "]" );
LOG.tracef( "Building identifier attribute [%s.%s]", ownerType.getTypeName(), property.getName() );
// ownerType = Entity(Person)
// MetadataContext#containerRoleStack -> Person
@@ -185,7 +185,7 @@ public class AttributeFactory {
public <X, Y> SingularAttributeImpl<X, Y> buildVersionAttribute(
IdentifiableDomainType<X> ownerType,
Property property) {
LOG.trace( "Building version attribute [" + ownerType.getTypeName() + "." + property.getName() + "]" );
LOG.tracef( "Building version attribute [%s.%s]", ownerType.getTypeName(), property.getName() );
final SingularAttributeMetadata<X, Y> attributeMetadata = (SingularAttributeMetadata<X, Y>) determineAttributeMetadata(
wrap( ownerType, property ),
@@ -354,14 +354,14 @@ public class AttributeFactory {
final Property propertyMapping = attributeContext.getPropertyMapping();
final String propertyName = propertyMapping.getName();
LOG.trace( "Starting attribute metadata determination [" + propertyName + "]" );
LOG.tracef( "Starting attribute metadata determination [%s]", propertyName );
final Member member = memberResolver.resolveMember( attributeContext, context );
LOG.trace( " Determined member [" + member + "]" );
LOG.tracef( " Determined member [%s]", member );
final Value value = propertyMapping.getValue();
final org.hibernate.type.Type type = value.getType();
LOG.trace( " Determined type [name=" + type.getName() + ", class=" + type.getClass().getName() + "]" );
LOG.tracef( " Determined type [name=%s, class=%s]", type.getName(), type.getClass().getName() );
if ( type.isAnyType() ) {
return new SingularAttributeMetadataImpl<>(


@@ -166,11 +166,13 @@ public class InflightRuntimeMetamodel {
// this part handles an odd case in the Hibernate test suite where we map an interface
// as the class and the proxy. I cannot think of a real life use case for that
// specific test, but..
log.debugf(
"Entity [%s] mapped same interface [%s] as class and proxy",
cp.getEntityName(),
cp.getMappedClass()
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Entity [%s] mapped same interface [%s] as class and proxy",
cp.getEntityName(),
cp.getMappedClass()
);
}
}
else {
final String old = entityProxyInterfaceMap.put( cp.getConcreteProxyClass(), cp.getEntityName() );


@@ -293,11 +293,13 @@ public class PluralAttributeMappingImpl extends AbstractAttributeMapping impleme
};
if ( hasOrder ) {
log.debugf(
"Translating order-by fragment [%s] for collection role : %s",
bootDescriptor.getOrderBy(),
collectionDescriptor.getRole()
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Translating order-by fragment [%s] for collection role : %s",
bootDescriptor.getOrderBy(),
collectionDescriptor.getRole()
);
}
orderByFragment = OrderByFragmentTranslator.translate(
bootDescriptor.getOrderBy(),
this,
@@ -306,11 +308,13 @@ public class PluralAttributeMappingImpl extends AbstractAttributeMapping impleme
}
if ( hasManyToManyOrder ) {
log.debugf(
"Translating many-to-many order-by fragment [%s] for collection role : %s",
bootDescriptor.getOrderBy(),
collectionDescriptor.getRole()
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Translating many-to-many order-by fragment [%s] for collection role : %s",
bootDescriptor.getOrderBy(),
collectionDescriptor.getRole()
);
}
manyToManyOrderByFragment = OrderByFragmentTranslator.translate(
bootDescriptor.getManyToManyOrdering(),
this,


@@ -52,7 +52,13 @@ public class OrderByFragmentTranslator {
String fragment,
PluralAttributeMapping pluralAttributeMapping,
TranslationContext context) {
LOG.tracef( "Beginning parsing of order-by fragment [%s] : %s", pluralAttributeMapping.getCollectionDescriptor().getRole(), fragment );
if ( LOG.isTraceEnabled() ) {
LOG.tracef(
"Beginning parsing of order-by fragment [%s] : %s",
pluralAttributeMapping.getCollectionDescriptor().getRole(),
fragment
);
}
final OrderingParser.OrderByFragmentContext parseTree = buildParseTree( context, fragment );


@@ -257,11 +257,13 @@ public class MappingMetamodelImpl implements MappingMetamodel, MetamodelImplemen
// this part handles an odd case in the Hibernate test suite where we map an interface
// as the class and the proxy. I cannot think of a real life use case for that
// specific test, but..
log.debugf(
"Entity [%s] mapped same interface [%s] as class and proxy",
cp.getEntityName(),
cp.getMappedClass()
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Entity [%s] mapped same interface [%s] as class and proxy",
cp.getEntityName(),
cp.getMappedClass()
);
}
}
else {
final String old = entityProxyInterfaceMap.put( cp.getConcreteProxyClass(), cp.getEntityName() );


@@ -464,7 +464,7 @@ public class SemanticQueryBuilder extends HqlParserBaseVisitor implements SqmCre
selectClause = visitSelectClause( ctx.selectClause() );
}
else {
log.debugf( "Encountered implicit select clause : " + ctx.getText() );
log.debugf( "Encountered implicit select clause : %s", ctx.getText() );
selectClause = buildInferredSelectClause( sqmQuerySpec.getFromClause() );
}
sqmQuerySpec.setSelectClause( selectClause );


@@ -744,7 +744,7 @@ public abstract class AbstractProducedQuery<R> implements QueryImplementor<R> {
throw new IllegalArgumentException( "Could not resolve parameter by position - " + position, e );
}
LOGGER.debugf( "Checking whether positional parameter [%s] is bound : %s", (Integer) position, (Boolean) binding.isBound() );
LOGGER.debugf( "Checking whether positional parameter [%s] is bound : %s", (Integer) position, (Boolean) binding.isBound() );
if ( !binding.isBound() ) {
throw new IllegalStateException( "Parameter value not yet bound : " + position );
}


@@ -121,7 +121,7 @@ public class QueryEngine {
serviceRegistry.getService( ConfigurationService.class ).getSettings(),
false
);
if ( showSQLFunctions ) {
if ( showSQLFunctions && LOG_HQL_FUNCTIONS.isInfoEnabled() ) {
sqmFunctionRegistry.getFunctionsByName().forEach(
entry -> LOG_HQL_FUNCTIONS.info( entry.getValue().getSignature( entry.getKey() ) )
);


@@ -32,7 +32,7 @@ public class SqmFunctionRegistry {
private final Map<String,String> alternateKeyMap = new TreeMap<>( CASE_INSENSITIVE_ORDER );
public SqmFunctionRegistry() {
log.tracef( "SqmFunctionRegistry created" );
log.trace( "SqmFunctionRegistry created" );
}
public Map<String, SqmFunctionDescriptor> getFunctions() {


@@ -65,7 +65,12 @@ public class MatchingIdSelectionHelper {
MultiTableSqmMutationConverter sqmConverter,
SessionFactoryImplementor sessionFactory) {
final EntityDomainType entityDomainType = sqmStatement.getTarget().getModel();
log.tracef( "Starting generation of entity-id SQM selection - %s", entityDomainType.getHibernateEntityName() );
if ( log.isTraceEnabled() ) {
log.tracef(
"Starting generation of entity-id SQM selection - %s",
entityDomainType.getHibernateEntityName()
);
}
final QuerySpec idSelectionQuery = new QuerySpec( true, 1 );
@@ -122,8 +127,12 @@ public class MatchingIdSelectionHelper {
MultiTableSqmMutationConverter sqmConverter,
SessionFactoryImplementor sessionFactory) {
final EntityDomainType entityDomainType = sqmStatement.getTarget().getModel();
log.tracef( "Starting generation of entity-id SQM selection - %s", entityDomainType.getHibernateEntityName() );
if ( log.isTraceEnabled() ) {
log.tracef(
"Starting generation of entity-id SQM selection - %s",
entityDomainType.getHibernateEntityName()
);
}
final QuerySpec idSelectionQuery = new QuerySpec( true, 1 );


@@ -270,7 +270,7 @@ public class RestrictedDeleteExecutionDelegate implements TableBasedDeleteHandle
JdbcParameterBindings jdbcParameterBindings,
ExecutionContext executionContext) {
assert targetTableReference != null;
log.trace( "deleteFromNonRootTable - " + targetTableReference.getTableExpression() );
log.tracef( "deleteFromNonRootTable - %s", targetTableReference.getTableExpression() );
/*
* delete from sub_table
@@ -449,7 +449,7 @@ public class RestrictedDeleteExecutionDelegate implements TableBasedDeleteHandle
Supplier<Consumer<ColumnConsumer>> tableKeyColumnVisitationSupplier,
QuerySpec idTableSubQuery,
ExecutionContext executionContext) {
log.trace( "deleteFromTableUsingIdTable - " + tableExpression );
log.tracef( "deleteFromTableUsingIdTable - %s", tableExpression );
final SessionFactoryImplementor factory = executionContext.getSession().getFactory();


@@ -65,7 +65,12 @@ public class TableBasedDeleteHandler
@Override
public int execute(ExecutionContext executionContext) {
log.tracef( "Starting multi-table delete execution - %s", getSqmDeleteOrUpdateStatement().getRoot().getModel().getName() );
if ( log.isTraceEnabled() ) {
log.tracef(
"Starting multi-table delete execution - %s",
getSqmDeleteOrUpdateStatement().getRoot().getModel().getName()
);
}
return resolveDelegate( executionContext ).execute( executionContext );
}


@@ -102,7 +102,12 @@ public class TableBasedUpdateHandler
@Override
public int execute(ExecutionContext executionContext) {
log.tracef( "Starting multi-table update execution - %s", getSqmDeleteOrUpdateStatement().getRoot().getModel().getName() );
if ( log.isTraceEnabled() ) {
log.tracef(
"Starting multi-table update execution - %s",
getSqmDeleteOrUpdateStatement().getRoot().getModel().getName()
);
}
return resolveDelegate( executionContext ).execute( executionContext );
}


@@ -570,7 +570,9 @@ public abstract class BaseSqmToSqlAstConverter
}
protected void consumeExplicitJoins(SqmFrom<?,?> sqmFrom, TableGroup lhsTableGroup) {
log.tracef( "Visiting explicit joins for `%s`", sqmFrom.getNavigablePath() );
if ( log.isTraceEnabled() ) {
log.tracef( "Visiting explicit joins for `%s`", sqmFrom.getNavigablePath() );
}
sqmFrom.visitSqmJoins(
sqmJoin -> consumeExplicitJoin( sqmJoin, lhsTableGroup )
@@ -783,11 +785,15 @@ public abstract class BaseSqmToSqlAstConverter
}
private void consumeImplicitJoins(SqmPath<?> sqmPath, TableGroup tableGroup) {
log.tracef( "Visiting implicit joins for `%s`", sqmPath.getNavigablePath() );
if ( log.isTraceEnabled() ) {
log.tracef( "Visiting implicit joins for `%s`", sqmPath.getNavigablePath() );
}
sqmPath.visitImplicitJoinPaths(
joinedPath -> {
log.tracef( "Starting implicit join handling for `%s`", joinedPath.getNavigablePath() );
if ( log.isTraceEnabled() ) {
log.tracef( "Starting implicit join handling for `%s`", joinedPath.getNavigablePath() );
}
assert getFromClauseAccess().findTableGroup( joinedPath.getLhs().getNavigablePath() ) == tableGroup;


@@ -42,7 +42,7 @@ public class FromClauseIndex extends SimpleFromClauseAccessImpl {
if ( sqmPath.getExplicitAlias() != null ) {
final TableGroup previousAliasReg = tableGroupByAliasXref.put( sqmPath.getExplicitAlias(), tableGroup );
if ( previousAliasReg != null ) {
if ( previousAliasReg != null && log.isDebugEnabled() ) {
log.debugf(
"Encountered previous TableGroup registration [%s] for alias : %s",
previousAliasReg,


@@ -81,11 +81,13 @@ public abstract class AbstractSqmAttributeJoin<O,T>
}
public void setJoinPredicate(SqmPredicate predicate) {
log.tracef(
"Setting join predicate [%s] (was [%s])",
predicate.toString(),
this.onClausePredicate == null ? "<null>" : this.onClausePredicate.toString()
);
if ( log.isTraceEnabled() ) {
log.tracef(
"Setting join predicate [%s] (was [%s])",
predicate.toString(),
this.onClausePredicate == null ? "<null>" : this.onClausePredicate.toString()
);
}
this.onClausePredicate = predicate;
}


@@ -126,7 +126,7 @@ public class SqmDynamicInstantiation<T>
public void addArgument(SqmDynamicInstantiationArgument argument) {
if ( instantiationTarget.getNature() == LIST ) {
// really should not have an alias...
if ( argument.getAlias() != null ) {
if ( argument.getAlias() != null && log.isDebugEnabled() ) {
log.debugf(
"Argument [%s] for dynamic List instantiation declared an 'injection alias' [%s] " +
"but such aliases are ignored for dynamic List instantiations",


@@ -25,7 +25,7 @@ public class CdiBeanContainerImmediateAccessImpl extends AbstractCdiBeanContaine
private final BeanManager beanManager;
private CdiBeanContainerImmediateAccessImpl(BeanManager beanManager) {
log.debugf( "Standard access requested to CDI BeanManager : " + beanManager );
log.debugf( "Standard access requested to CDI BeanManager : %s", beanManager );
this.beanManager = beanManager;
}


@@ -96,7 +96,7 @@ public class ContainerManagedLifecycleStrategy implements BeanLifecycleStrategy
throw e;
}
catch (Exception e) {
log.debugf( "Error resolving CDI bean - using fallback" );
log.debug( "Error resolving CDI bean - using fallback" );
this.beanInstance = produceFallbackInstance();
this.instance = null;
}


@@ -125,7 +125,7 @@ public class JpaCompliantLifecycleStrategy implements BeanLifecycleStrategy {
throw e;
}
catch (Exception e) {
log.debugf( "Error resolving CDI bean [%s] - using fallback" );
log.debugf( "Error resolving CDI bean [%s] - using fallback", beanType.getName() );
this.beanInstance = fallbackProducer.produceBeanInstance( beanType );
try {
@@ -222,7 +222,7 @@ public class JpaCompliantLifecycleStrategy implements BeanLifecycleStrategy {
this.beanInstance = bean.create( creationalContext );
}
catch (Exception e) {
log.debugf( "Error resolving CDI bean [%s] - using fallback" );
log.debugf( "Error resolving CDI bean [%s] - using fallback", beanName );
this.beanInstance = fallbackProducer.produceBeanInstance( beanName, beanType );
try {


@@ -257,13 +257,13 @@ public class JdbcResourceLocalTransactionCoordinatorImpl implements TransactionC
public void commit() {
try {
if ( rollbackOnly ) {
log.debugf( "On commit, transaction was marked for roll-back only, rolling back" );
log.debug( "On commit, transaction was marked for roll-back only, rolling back" );
try {
rollback();
if ( jpaCompliance.isJpaTransactionComplianceEnabled() ) {
log.debugf( "Throwing RollbackException on roll-back of transaction marked rollback-only on commit" );
log.debug( "Throwing RollbackException on roll-back of transaction marked rollback-only on commit" );
throw new RollbackException( "Transaction was marked for rollback-only" );
}


@@ -50,7 +50,7 @@ public class EmbeddableForeignKeyResultImpl<T>
super( embeddableValuedModelPart.getEmbeddableTypeDescriptor(), navigablePath );
this.resultVariable = resultVariable;
fetches = new ArrayList<>();
MutableInteger index = new MutableInteger( 0 );
MutableInteger index = new MutableInteger();
embeddableValuedModelPart.visitFetchables(
fetchable -> {


@@ -65,7 +65,7 @@ public class DynamicInstantiation<T> implements DomainResultProducer {
if ( List.class.equals( getTargetJavaTypeDescriptor().getJavaType() ) ) {
// really should not have an alias...
if ( alias != null ) {
if ( alias != null && log.isDebugEnabled() ) {
log.debugf(
"Argument [%s] for dynamic List instantiation declared an 'injection alias' [%s] " +
"but such aliases are ignored for dynamic List instantiations",


@@ -146,12 +146,14 @@ public class DynamicInstantiationResultImpl<R> implements DynamicInstantiationRe
argumentReader.getAssembledJavaTypeDescriptor()
);
if ( !assignmentCompatible ) {
log.debugf(
"Skipping constructor for dynamic-instantiation match due to argument mismatch [%s] : %s -> %s",
i,
constructor.getParameterTypes()[i].getName(),
argumentTypeDescriptor.getJavaType().getName()
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Skipping constructor for dynamic-instantiation match due to argument mismatch [%s] : %s -> %s",
i,
constructor.getParameterTypes()[i].getName(),
argumentTypeDescriptor.getJavaType().getName()
);
}
continue constructor_loop;
}
}
@@ -164,11 +166,12 @@ public class DynamicInstantiationResultImpl<R> implements DynamicInstantiationRe
);
}
log.debugf(
"Could not locate appropriate constructor for dynamic instantiation of [%s]; attempting bean-injection instantiation",
javaTypeDescriptor.getJavaType().getName()
);
if ( log.isDebugEnabled() ) {
log.debugf(
"Could not locate appropriate constructor for dynamic instantiation of [%s]; attempting bean-injection instantiation",
javaTypeDescriptor.getJavaType().getName()
);
}
if ( ! areAllArgumentsAliased ) {
throw new IllegalStateException(


@@ -80,7 +80,7 @@ public class LoadContexts {
*/
public void cleanup() {
if ( ! jdbcValuesSourceProcessingStateStack.isEmpty() ) {
log.debugf( "LoadContexts still contained JdbcValuesSourceProcessingState registrations on cleanup" );
log.debug( "LoadContexts still contained JdbcValuesSourceProcessingState registrations on cleanup" );
}
jdbcValuesSourceProcessingStateStack.clear();
}


@@ -162,11 +162,13 @@ public class EnumType<T extends Enum>
this.enumValueConverter = interpretParameters( parameters );
}
LOG.debugf(
"Using %s-based conversion for Enum %s",
isOrdinal() ? "ORDINAL" : "NAMED",
enumClass.getName()
);
if ( LOG.isDebugEnabled() ) {
LOG.debugf(
"Using %s-based conversion for Enum %s",
isOrdinal() ? "ORDINAL" : "NAMED",
enumClass.getName()
);
}
}
private BasicJavaDescriptor<?> resolveRelationalJavaTypeDescriptor(


@@ -55,7 +55,7 @@ public class AttributeConverterTypeAdapter<T> extends AbstractSingleColumnStanda
? new AttributeConverterMutabilityPlanImpl<>( attributeConverter )
: ImmutableMutabilityPlan.INSTANCE;
log.debug( "Created AttributeConverterTypeAdapter -> " + name );
log.debugf( "Created AttributeConverterTypeAdapter -> %s", name );
// throw new UnsupportedOperationException( );
}


@@ -127,9 +127,8 @@ public class JavaTypeDescriptorRegistry implements Serializable {
if ( !AttributeConverter.class.isAssignableFrom( cls ) ) {
log.debugf(
"Could not find matching JavaTypeDescriptor for requested Java class [%s]; using fallback. " +
"This means Hibernate does not know how to perform certain basic operations in relation to this Java type." +
"",
"Could not find matching JavaTypeDescriptor for requested Java class [%s]; using fallback. " +
"This means Hibernate does not know how to perform certain basic operations in relation to this Java type.",
cls.getName()
);
checkEqualsAndHashCode( cls );


@@ -51,7 +51,7 @@ public class RegistryHelper {
// find the first "assignable" match
for ( Map.Entry<Class, JavaTypeDescriptor> entry : descriptorsByClass.entrySet() ) {
if ( entry.getKey().isAssignableFrom( cls ) ) {
log.debugf( "Using cached JavaTypeDescriptor instance for Java class [%s]", cls.getName() );
log.debugf( "Using cached JavaTypeDescriptor instance for Java class [%s]", cls.getName() );
return entry.getValue();
}
}


@@ -43,14 +43,14 @@ public class SqlTypeDescriptorRegistry implements SqlTypeDescriptorBaseline.Base
public void addDescriptor(SqlTypeDescriptor sqlTypeDescriptor) {
final SqlTypeDescriptor previous = descriptorMap.put( sqlTypeDescriptor.getSqlType(), sqlTypeDescriptor );
if ( previous != null && previous != sqlTypeDescriptor ) {
log.debugf( "addDescriptor(%s) replaced previous registration", sqlTypeDescriptor, previous );
log.debugf( "addDescriptor(%s) replaced previous registration(%s)", sqlTypeDescriptor, previous );
}
}
public void addDescriptor(int typeCode, SqlTypeDescriptor sqlTypeDescriptor) {
final SqlTypeDescriptor previous = descriptorMap.put( typeCode, sqlTypeDescriptor );
if ( previous != null && previous != sqlTypeDescriptor ) {
log.debugf( "addDescriptor(%i, %s) replaced previous registration", typeCode, sqlTypeDescriptor, previous );
log.debugf( "addDescriptor(%d, %s) replaced previous registration(%s)", typeCode, sqlTypeDescriptor, previous );
}
}