Replaced references to slf4j with references to new org.jboss.logging.Logger-based implementations, and internationalized (i18n'd) the log messages where it was clear how to do so.

JPAV 2010-12-21 15:51:02 -06:00
parent 126592a55d
commit a9b1425f3f
281 changed files with 13610 additions and 6690 deletions
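
For orientation, the pattern applied throughout the hunks below looks roughly like this. The class and message names in this sketch are illustrative placeholders, not code from the commit; only the jboss-logging API usage (the @MessageLogger/@LogMessage/@Message annotations and the Logger.getMessageLogger lookup) mirrors what the diff itself shows:

import static org.jboss.logging.Logger.Level.INFO;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;

public class RegionStarter {

    // Typed logger backed by a jboss-logging generated (or proxied) implementation
    // of the message interface declared below.
    private static final Logger LOG =
            org.jboss.logging.Logger.getMessageLogger( Logger.class, RegionStarter.class.getName() );

    public void start(String regionName) {
        // Replaces an slf4j call such as: log.info( "starting cache at region: " + regionName )
        LOG.startingRegion( regionName );
    }

    /**
     * Interface defining messages that may be logged by the outer class;
     * jboss-logging-tools can generate localized implementations from it.
     */
    @MessageLogger
    interface Logger {
        @LogMessage( level = INFO )
        @Message( value = "Starting cache at region: %s" )
        void startingRegion( String regionName );
    }
}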

View File

@ -61,6 +61,8 @@ libraries = [
jacc: 'org.jboss.javaee:jboss-jacc-api:1.1.0.GA',
// logging
logging: 'org.jboss.logging:jboss-logging:3.0.0.Beta4',
logging_tools: 'org.jboss.logging:jboss-logging-tools:1.0.0.Beta1',
slf4j_api: 'org.slf4j:slf4j-api:' + slf4jVersion,
slf4j_simple: 'org.slf4j:slf4j-simple:' + slf4jVersion,
jcl_slf4j: 'org.slf4j:jcl-over-slf4j:' + slf4jVersion,
@ -105,6 +107,8 @@ subprojects { subProject ->
// appropriately inject the common dependencies into each sub-project
dependencies {
compile(libraries.logging)
compile(libraries.logging_tools)
compile( libraries.slf4j_api )
testCompile( libraries.junit )
testRuntime( libraries.slf4j_simple )
@ -132,6 +136,7 @@ subprojects { subProject ->
test {
systemProperties['hibernate.test.validatefailureexpected'] = true
// systemProperties['jboss.i18n.generate-proxies'] = true
maxHeapSize = "1024m"
}

View File

@ -17,7 +17,9 @@ dependencies {
transitive = true
}
provided( libraries.ant )
provided( libraries.jacc )
provided( libraries.jacc ) {
transitive = false
}
provided( libraries.validation )
testCompile( libraries.validation )
testCompile( libraries.validator ) {

View File

@ -24,8 +24,10 @@
*/
package org.hibernate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.jboss.logging.Logger.Level.ERROR;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Indicates failure of an assertion: a possible bug in Hibernate.
@ -34,20 +36,30 @@ import org.slf4j.LoggerFactory;
*/
public class AssertionFailure extends RuntimeException {
private static final Logger log = LoggerFactory.getLogger( AssertionFailure.class );
private static final long serialVersionUID = 1L;
private static final String MESSAGE = "an assertion failure occured" +
" (this may indicate a bug in Hibernate, but is more likely due" +
" to unsafe use of the session)";
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, AssertionFailure.class.getName());
public AssertionFailure(String s) {
super( s );
log.error( MESSAGE, this );
}
public AssertionFailure( String s ) {
super(s);
LOG.failed(this);
}
public AssertionFailure(String s, Throwable t) {
super( s, t );
log.error( MESSAGE, t );
}
public AssertionFailure( String s,
Throwable t ) {
super(s, t);
LOG.failed(t);
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger {
@LogMessage( level = ERROR )
@Message( value = "an assertion failure occured" + " (this may indicate a bug in Hibernate, but is more likely due"
+ " to unsafe use of the session): %s" )
void failed( Throwable throwable );
}
}

View File

@ -25,13 +25,11 @@
package org.hibernate.bytecode.cglib;
import java.lang.reflect.Modifier;
import net.sf.cglib.beans.BulkBean;
import net.sf.cglib.beans.BulkBeanException;
import net.sf.cglib.reflect.FastClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.bytecode.BytecodeProvider;
import org.hibernate.bytecode.Logger;
import org.hibernate.bytecode.ProxyFactoryFactory;
import org.hibernate.bytecode.ReflectionOptimizer;
import org.hibernate.bytecode.util.FieldFilter;
@ -46,70 +44,66 @@ import org.hibernate.util.StringHelper;
*/
@Deprecated
public class BytecodeProviderImpl implements BytecodeProvider {
private static final Logger log = LoggerFactory.getLogger( BytecodeProviderImpl.class );
public BytecodeProviderImpl() {
log.warn( "Per HHH-5451 support for cglib as a bytecode provider has been deprecated." );
}
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Logger.class.getPackage().getName());
public ProxyFactoryFactory getProxyFactoryFactory() {
return new ProxyFactoryFactoryImpl();
}
public BytecodeProviderImpl() {
LOG.deprecated();
}
public ReflectionOptimizer getReflectionOptimizer(
Class clazz,
String[] getterNames,
String[] setterNames,
Class[] types) {
FastClass fastClass;
BulkBean bulkBean;
try {
fastClass = FastClass.create( clazz );
bulkBean = BulkBean.create( clazz, getterNames, setterNames, types );
if ( !clazz.isInterface() && !Modifier.isAbstract( clazz.getModifiers() ) ) {
if ( fastClass == null ) {
bulkBean = null;
}
else {
//test out the optimizer:
Object instance = fastClass.newInstance();
bulkBean.setPropertyValues( instance, bulkBean.getPropertyValues( instance ) );
}
}
}
catch( Throwable t ) {
fastClass = null;
bulkBean = null;
String message = "reflection optimizer disabled for: " +
clazz.getName() +
" [" +
StringHelper.unqualify( t.getClass().getName() ) +
": " +
t.getMessage();
public ProxyFactoryFactory getProxyFactoryFactory() {
return new ProxyFactoryFactoryImpl();
}
if (t instanceof BulkBeanException ) {
int index = ( (BulkBeanException) t ).getIndex();
if (index >= 0) {
message += " (property " + setterNames[index] + ")";
}
}
public ReflectionOptimizer getReflectionOptimizer(
Class clazz,
String[] getterNames,
String[] setterNames,
Class[] types) {
FastClass fastClass;
BulkBean bulkBean;
try {
fastClass = FastClass.create( clazz );
bulkBean = BulkBean.create( clazz, getterNames, setterNames, types );
if ( !clazz.isInterface() && !Modifier.isAbstract( clazz.getModifiers() ) ) {
if ( fastClass == null ) {
bulkBean = null;
}
else {
//test out the optimizer:
Object instance = fastClass.newInstance();
bulkBean.setPropertyValues( instance, bulkBean.getPropertyValues( instance ) );
}
}
}
catch( Throwable t ) {
fastClass = null;
bulkBean = null;
if (LOG.isDebugEnabled()) {
int index = 0;
if (t instanceof BulkBeanException) index = ((BulkBeanException)t).getIndex();
if (index >= 0) LOG.reflectionOptimizerDisabledForBulkException(clazz.getName(),
StringHelper.unqualify(t.getClass().getName()),
t.getMessage(),
setterNames[index]);
else LOG.reflectionOptimizerDisabled(clazz.getName(),
StringHelper.unqualify(t.getClass().getName()),
t.getMessage());
}
}
log.debug( message );
}
if ( fastClass != null && bulkBean != null ) {
return new ReflectionOptimizerImpl(
new InstantiationOptimizerAdapter( fastClass ),
new AccessOptimizerAdapter( bulkBean, clazz )
);
}
else {
return null;
}
}
public org.hibernate.bytecode.ClassTransformer getTransformer(org.hibernate.bytecode.util.ClassFilter classFilter, FieldFilter fieldFilter) {
return new CglibClassTransformer( classFilter, fieldFilter );
}
if ( fastClass != null && bulkBean != null ) {
return new ReflectionOptimizerImpl(
new InstantiationOptimizerAdapter( fastClass ),
new AccessOptimizerAdapter( bulkBean, clazz )
);
}
else {
return null;
}
}
public org.hibernate.bytecode.ClassTransformer getTransformer(org.hibernate.bytecode.util.ClassFilter classFilter, FieldFilter fieldFilter) {
return new CglibClassTransformer( classFilter, fieldFilter );
}
}
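
The shared org.hibernate.bytecode.Logger interface imported by this hunk and the three bytecode hunks that follow is not part of this excerpt. A plausible reconstruction from the call sites alone might look as follows; the method names and parameter counts come from the diff, while the message text, the levels (chosen to match the slf4j calls they replace), and the "extends BasicLogger" (suggested by the LOG.isDebugEnabled() calls) are assumptions:

import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.ERROR;
import static org.jboss.logging.Logger.Level.WARN;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;

@MessageLogger
public interface Logger extends BasicLogger {

    // BytecodeProviderImpl constructor: replaces the cglib deprecation warning
    @LogMessage( level = WARN )
    @Message( value = "Per HHH-5451 support for cglib as a bytecode provider has been deprecated." )
    void deprecated();

    // Replaces the concatenated "reflection optimizer disabled for: ..." debug message
    @LogMessage( level = DEBUG )
    @Message( value = "Reflection optimizer disabled for %s [%s: %s]" )
    void reflectionOptimizerDisabled( String className, String exceptionName, String exceptionMessage );

    @LogMessage( level = DEBUG )
    @Message( value = "Reflection optimizer disabled for %s [%s: %s (property %s)]" )
    void reflectionOptimizerDisabledForBulkException( String className, String exceptionName,
                                                      String exceptionMessage, String property );

    @LogMessage( level = ERROR )
    @Message( value = "Unable to read class: %s" )
    void unableToReadClass( String message );

    @LogMessage( level = DEBUG )
    @Message( value = "Enhancing %s" )
    void enhancingClass( String className );

    @LogMessage( level = ERROR )
    @Message( value = "Unable to transform class: %s" )
    void unableToTransformClass( String message );

    @LogMessage( level = ERROR )
    @Message( value = "Unable to build enhancement metamodel for %s" )
    void unableToBuildEnhancementMetamodel( String className );
}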

View File

@ -24,28 +24,26 @@
*/
package org.hibernate.bytecode.cglib;
import java.security.ProtectionDomain;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.security.ProtectionDomain;
import net.sf.cglib.core.ClassNameReader;
import net.sf.cglib.core.DebuggingClassWriter;
import net.sf.cglib.transform.ClassReaderGenerator;
import net.sf.cglib.transform.ClassTransformer;
import net.sf.cglib.transform.TransformingClassGenerator;
import net.sf.cglib.transform.ClassReaderGenerator;
import net.sf.cglib.transform.impl.InterceptFieldEnabled;
import net.sf.cglib.transform.impl.InterceptFieldFilter;
import net.sf.cglib.transform.impl.InterceptFieldTransformer;
import net.sf.cglib.core.ClassNameReader;
import net.sf.cglib.core.DebuggingClassWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.bytecode.AbstractClassTransformerImpl;
import org.hibernate.bytecode.util.FieldFilter;
import org.hibernate.bytecode.util.ClassFilter;
import org.hibernate.HibernateException;
import org.objectweb.asm.Type;
import org.hibernate.bytecode.AbstractClassTransformerImpl;
import org.hibernate.bytecode.Logger;
import org.hibernate.bytecode.util.ClassFilter;
import org.hibernate.bytecode.util.FieldFilter;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.Type;
/**
* Enhance the classes allowing them to implements InterceptFieldEnabled
@ -58,13 +56,14 @@ import org.objectweb.asm.ClassWriter;
@Deprecated
public class CglibClassTransformer extends AbstractClassTransformerImpl {
private static Logger log = LoggerFactory.getLogger( CglibClassTransformer.class.getName() );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Logger.class.getPackage().getName());
public CglibClassTransformer(ClassFilter classFilter, FieldFilter fieldFilter) {
super( classFilter, fieldFilter );
}
protected byte[] doTransform(
@Override
protected byte[] doTransform(
ClassLoader loader,
String className,
Class classBeingRedefined,
@ -75,7 +74,7 @@ public class CglibClassTransformer extends AbstractClassTransformerImpl {
reader = new ClassReader( new ByteArrayInputStream( classfileBuffer ) );
}
catch (IOException e) {
log.error( "Unable to read class", e );
LOG.unableToReadClass(e.getMessage());
throw new HibernateException( "Unable to read class: " + e.getMessage() );
}
@ -83,9 +82,7 @@ public class CglibClassTransformer extends AbstractClassTransformerImpl {
ClassWriter w = new DebuggingClassWriter( ClassWriter.COMPUTE_MAXS );
ClassTransformer t = getClassTransformer( names );
if ( t != null ) {
if ( log.isDebugEnabled() ) {
log.debug( "Enhancing " + className );
}
LOG.enhancingClass(className);
ByteArrayOutputStream out;
byte[] result;
try {
@ -99,7 +96,7 @@ public class CglibClassTransformer extends AbstractClassTransformerImpl {
out.close();
}
catch (Exception e) {
log.error( "Unable to transform class", e );
LOG.unableToTransformClass(e.getMessage());
throw new HibernateException( "Unable to transform class: " + e.getMessage() );
}
return result;

View File

@ -25,11 +25,9 @@
package org.hibernate.bytecode.javassist;
import java.lang.reflect.Modifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.bytecode.BytecodeProvider;
import org.hibernate.bytecode.ClassTransformer;
import org.hibernate.bytecode.Logger;
import org.hibernate.bytecode.ProxyFactoryFactory;
import org.hibernate.bytecode.ReflectionOptimizer;
import org.hibernate.bytecode.util.ClassFilter;
@ -43,7 +41,7 @@ import org.hibernate.util.StringHelper;
*/
public class BytecodeProviderImpl implements BytecodeProvider {
private static final Logger log = LoggerFactory.getLogger( BytecodeProviderImpl.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Logger.class.getPackage().getName());
public ProxyFactoryFactory getProxyFactoryFactory() {
return new ProxyFactoryFactoryImpl();
@ -73,21 +71,17 @@ public class BytecodeProviderImpl implements BytecodeProvider {
catch ( Throwable t ) {
fastClass = null;
bulkAccessor = null;
String message = "reflection optimizer disabled for: " +
clazz.getName() +
" [" +
StringHelper.unqualify( t.getClass().getName() ) +
": " +
t.getMessage();
if ( t instanceof BulkAccessorException ) {
int index = ( ( BulkAccessorException ) t ).getIndex();
if ( index >= 0 ) {
message += " (property " + setterNames[index] + ")";
}
}
log.debug( message );
if (LOG.isDebugEnabled()) {
int index = 0;
if (t instanceof BulkAccessorException) index = ((BulkAccessorException)t).getIndex();
if (index >= 0) LOG.reflectionOptimizerDisabledForBulkException(clazz.getName(),
StringHelper.unqualify(t.getClass().getName()),
t.getMessage(),
setterNames[index]);
else LOG.reflectionOptimizerDisabled(clazz.getName(),
StringHelper.unqualify(t.getClass().getName()),
t.getMessage());
}
}
if ( fastClass != null && bulkAccessor != null ) {
@ -104,5 +98,4 @@ public class BytecodeProviderImpl implements BytecodeProvider {
public ClassTransformer getTransformer(ClassFilter classFilter, FieldFilter fieldFilter) {
return new JavassistClassTransformer( classFilter, fieldFilter );
}
}

View File

@ -30,12 +30,10 @@ import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.security.ProtectionDomain;
import javassist.bytecode.ClassFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.HibernateException;
import org.hibernate.bytecode.AbstractClassTransformerImpl;
import org.hibernate.bytecode.Logger;
import org.hibernate.bytecode.util.ClassFilter;
/**
@ -47,13 +45,14 @@ import org.hibernate.bytecode.util.ClassFilter;
*/
public class JavassistClassTransformer extends AbstractClassTransformerImpl {
private static Logger log = LoggerFactory.getLogger( JavassistClassTransformer.class.getName() );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Logger.class.getPackage().getName());
public JavassistClassTransformer(ClassFilter classFilter, org.hibernate.bytecode.util.FieldFilter fieldFilter) {
super( classFilter, fieldFilter );
}
protected byte[] doTransform(
@Override
protected byte[] doTransform(
ClassLoader loader,
String className,
Class classBeingRedefined,
@ -65,14 +64,12 @@ public class JavassistClassTransformer extends AbstractClassTransformerImpl {
classfile = new ClassFile( new DataInputStream( new ByteArrayInputStream( classfileBuffer ) ) );
}
catch (IOException e) {
log.error( "Unable to build enhancement metamodel for " + className );
LOG.unableToBuildEnhancementMetamodel(className);
return classfileBuffer;
}
FieldTransformer transformer = getFieldTransformer( classfile );
if ( transformer != null ) {
if ( log.isDebugEnabled() ) {
log.debug( "Enhancing " + className );
}
LOG.enhancingClass("Enhancing " + className);
DataOutputStream out = null;
try {
transformer.transform( classfile );
@ -82,7 +79,7 @@ public class JavassistClassTransformer extends AbstractClassTransformerImpl {
return byteStream.toByteArray();
}
catch (Exception e) {
log.error( "Unable to transform class", e );
LOG.unableToTransformClass(e.getMessage());
throw new HibernateException( "Unable to transform class: " + e.getMessage() );
}
finally {

View File

@ -25,13 +25,9 @@
package org.hibernate.cache;
import java.util.Properties;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.cfg.Environment;
import org.hibernate.internal.util.jndi.JndiHelper;
import org.hibernate.util.StringHelper;
@ -44,7 +40,8 @@ import org.hibernate.util.StringHelper;
*/
public abstract class AbstractJndiBoundCacheProvider implements CacheProvider {
private static final Logger log = LoggerFactory.getLogger( AbstractJndiBoundCacheProvider.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Logger.class.getPackage().getName());
private Object cache;
protected void prepare(Properties properties) {
@ -88,7 +85,7 @@ public abstract class AbstractJndiBoundCacheProvider implements CacheProvider {
}
catch (NamingException ne) {
String msg = "Unable to retreive Cache from JNDI [" + jndiNamespace + "]";
log.info( msg, ne );
LOG.unableToRetrieveCache(jndiNamespace, ne.getMessage());
throw new CacheException( msg );
}
finally {
@ -97,12 +94,12 @@ public abstract class AbstractJndiBoundCacheProvider implements CacheProvider {
ctx.close();
}
catch( NamingException ne ) {
log.info( "Unable to release initial context", ne );
LOG.unableToReleaseContext(ne.getMessage());
}
}
}
}
public Object getCache() {
return cache;
}

View File

@ -25,10 +25,6 @@
package org.hibernate.cache;
import java.util.Comparator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.cache.access.SoftLock;
/**
@ -43,9 +39,9 @@ import org.hibernate.cache.access.SoftLock;
*/
public class NonstrictReadWriteCache implements CacheConcurrencyStrategy {
private Cache cache;
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Logger.class.getPackage().getName());
private static final Logger log = LoggerFactory.getLogger( NonstrictReadWriteCache.class );
private Cache cache;
public NonstrictReadWriteCache() {
}
@ -62,16 +58,14 @@ public class NonstrictReadWriteCache implements CacheConcurrencyStrategy {
* Get the most recent version, if available.
*/
public Object get(Object key, long txTimestamp) throws CacheException {
if ( log.isDebugEnabled() ) {
log.debug( "Cache lookup: " + key );
}
LOG.lookup(key);
Object result = cache.get( key );
if ( result != null ) {
log.debug( "Cache hit" );
LOG.hit(key);
}
else {
log.debug( "Cache miss" );
LOG.miss(key);
}
return result;
}
@ -87,14 +81,10 @@ public class NonstrictReadWriteCache implements CacheConcurrencyStrategy {
Comparator versionComparator,
boolean minimalPut) throws CacheException {
if ( minimalPut && cache.get( key ) != null ) {
if ( log.isDebugEnabled() ) {
log.debug( "item already cached: " + key );
}
LOG.exists(key);
return false;
}
if ( log.isDebugEnabled() ) {
log.debug( "Caching: " + key );
}
LOG.caching(key);
cache.put( key, value );
return true;
@ -111,16 +101,12 @@ public class NonstrictReadWriteCache implements CacheConcurrencyStrategy {
}
public void remove(Object key) throws CacheException {
if ( log.isDebugEnabled() ) {
log.debug( "Removing: " + key );
}
LOG.removing(key);
cache.remove( key );
}
public void clear() throws CacheException {
if ( log.isDebugEnabled() ) {
log.debug( "Clearing" );
}
LOG.clearing();
cache.clear();
}
@ -129,7 +115,7 @@ public class NonstrictReadWriteCache implements CacheConcurrencyStrategy {
cache.destroy();
}
catch ( Exception e ) {
log.warn( "could not destroy cache", e );
LOG.unableToDestroyCache(e.getMessage());
}
}
@ -137,10 +123,7 @@ public class NonstrictReadWriteCache implements CacheConcurrencyStrategy {
* Invalidate the item
*/
public void evict(Object key) throws CacheException {
if ( log.isDebugEnabled() ) {
log.debug( "Invalidating: " + key );
}
LOG.invalidating(key);
cache.remove( key );
}
@ -163,10 +146,7 @@ public class NonstrictReadWriteCache implements CacheConcurrencyStrategy {
* Invalidate the item (again, for safety).
*/
public void release(Object key, SoftLock lock) throws CacheException {
if ( log.isDebugEnabled() ) {
log.debug( "Invalidating (again): " + key );
}
LOG.invalidating(key);
cache.remove( key );
}
@ -189,7 +169,8 @@ public class NonstrictReadWriteCache implements CacheConcurrencyStrategy {
return cache.getRegionName();
}
public String toString() {
@Override
public String toString() {
return cache + "(nonstrict-read-write)";
}
}
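
The org.hibernate.cache classes in this and the following hunks likewise share one package-level Logger, also absent from this excerpt. Note that the old "if ( log.isDebugEnabled() )" guards around simple messages disappear: with a message logger the level check and the "%s" formatting happen inside the generated implementation, so a bare LOG.lookup(key) is already cheap when DEBUG is off (guards remain only where extra work precedes logging, as in StandardQueryCache's trace helpers). A hedged fragment inferred from the call sites, with assumed message text and levels, could look like this:

import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.WARN;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;

@MessageLogger
public interface Logger extends BasicLogger {

    // Keys are passed as raw cache keys at the call sites, hence Object parameters.
    @LogMessage( level = DEBUG )
    @Message( value = "Cache lookup: %s" )
    void lookup( Object key );

    @LogMessage( level = DEBUG )
    @Message( value = "Cache hit: %s" )
    void hit( Object key );

    @LogMessage( level = DEBUG )
    @Message( value = "Cache miss: %s" )
    void miss( Object key );

    @LogMessage( level = WARN )
    @Message( value = "Unable to destroy cache: %s" )
    void unableToDestroyCache( String message );
}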

View File

@ -25,10 +25,6 @@
package org.hibernate.cache;
import java.util.Comparator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.cache.access.SoftLock;
/**
@ -36,12 +32,13 @@ import org.hibernate.cache.access.SoftLock;
* @see CacheConcurrencyStrategy
*/
public class ReadOnlyCache implements CacheConcurrencyStrategy {
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Logger.class.getPackage().getName());
private Cache cache;
private static final Logger log = LoggerFactory.getLogger(ReadOnlyCache.class);
public ReadOnlyCache() {}
public void setCache(Cache cache) {
this.cache=cache;
}
@ -53,46 +50,46 @@ public class ReadOnlyCache implements CacheConcurrencyStrategy {
public String getRegionName() {
return cache.getRegionName();
}
public synchronized Object get(Object key, long timestamp) throws CacheException {
Object result = cache.get(key);
if ( result!=null && log.isDebugEnabled() ) log.debug("Cache hit: " + key);
if (result != null) LOG.hit(key);
return result;
}
/**
* Unsupported!
*/
public SoftLock lock(Object key, Object version) {
log.error("Application attempted to edit read only item: " + key);
LOG.invalidEditOfReadOnlyItem(key);
throw new UnsupportedOperationException("Can't write to a readonly object");
}
public synchronized boolean put(
Object key,
Object value,
long timestamp,
Object version,
Object key,
Object value,
long timestamp,
Object version,
Comparator versionComparator,
boolean minimalPut)
boolean minimalPut)
throws CacheException {
if ( minimalPut && cache.get(key)!=null ) {
if ( log.isDebugEnabled() ) log.debug("item already cached: " + key);
LOG.exists(key);
return false;
}
if ( log.isDebugEnabled() ) log.debug("Caching: " + key);
LOG.caching(key);
cache.put(key, value);
return true;
}
/**
* Unsupported!
*/
public void release(Object key, SoftLock lock) {
log.error("Application attempted to edit read only item: " + key);
LOG.invalidEditOfReadOnlyItem(key);
//throw new UnsupportedOperationException("Can't write to a readonly object");
}
public void clear() throws CacheException {
cache.clear();
}
@ -100,13 +97,13 @@ public class ReadOnlyCache implements CacheConcurrencyStrategy {
public void remove(Object key) throws CacheException {
cache.remove(key);
}
public void destroy() {
try {
cache.destroy();
}
catch (Exception e) {
log.warn("could not destroy cache", e);
LOG.unableToDestroyCache(e.getMessage());
}
}
@ -114,15 +111,15 @@ public class ReadOnlyCache implements CacheConcurrencyStrategy {
* Unsupported!
*/
public boolean afterUpdate(Object key, Object value, Object version, SoftLock lock) throws CacheException {
log.error("Application attempted to edit read only item: " + key);
LOG.invalidEditOfReadOnlyItem(key);
throw new UnsupportedOperationException("Can't write to a readonly object");
}
/**
* Do nothing.
*/
public boolean afterInsert(Object key, Object value, Object version) throws CacheException {
if ( log.isDebugEnabled() ) log.debug("Caching after insert: " + key);
public boolean afterInsert(Object key, Object value, Object version) throws CacheException {
LOG.cachingAfterInsert(key);
cache.update(key, value);
return true;
}
@ -145,11 +142,12 @@ public class ReadOnlyCache implements CacheConcurrencyStrategy {
* Unsupported!
*/
public boolean update(Object key, Object value, Object currentVersion, Object previousVersion) {
log.error("Application attempted to edit read only item: " + key);
LOG.invalidEditOfReadOnlyItem(key);
throw new UnsupportedOperationException("Can't write to a readonly object");
}
public String toString() {
@Override
public String toString() {
return cache + "(read-only)";
}

View File

@ -26,10 +26,6 @@ package org.hibernate.cache;
import java.io.Serializable;
import java.util.Comparator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.cache.access.SoftLock;
/**
@ -50,7 +46,7 @@ import org.hibernate.cache.access.SoftLock;
*/
public class ReadWriteCache implements CacheConcurrencyStrategy {
private static final Logger log = LoggerFactory.getLogger(ReadWriteCache.class);
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Logger.class.getPackage().getName());
private Cache cache;
private int nextLockId;
@ -68,7 +64,7 @@ public class ReadWriteCache implements CacheConcurrencyStrategy {
public String getRegionName() {
return cache.getRegionName();
}
/**
* Generate an id for a new lock. Uniqueness per cache instance is very
* desirable but not absolutely critical. Must be called from one of the
@ -96,35 +92,16 @@ public class ReadWriteCache implements CacheConcurrencyStrategy {
* the data is versioned or timestamped.
*/
public synchronized Object get(Object key, long txTimestamp) throws CacheException {
if ( log.isTraceEnabled() ) log.trace("Cache lookup: " + key);
/*try {
cache.lock(key);*/
Lockable lockable = (Lockable) cache.get(key);
boolean gettable = lockable!=null && lockable.isGettable(txTimestamp);
if (gettable) {
if ( log.isTraceEnabled() ) log.trace("Cache hit: " + key);
return ( (Item) lockable ).getValue();
}
else {
if ( log.isTraceEnabled() ) {
if (lockable==null) {
log.trace("Cache miss: " + key);
}
else {
log.trace("Cached item was locked: " + key);
}
}
return null;
}
/*}
finally {
cache.unlock(key);
}*/
LOG.lookup(key);
Lockable lockable = (Lockable)cache.get(key);
boolean gettable = lockable != null && lockable.isGettable(txTimestamp);
if (gettable) {
LOG.hit(key);
return ((Item)lockable).getValue();
}
if (lockable == null) LOG.miss(key);
else LOG.locked(key);
return null;
}
/**
@ -135,8 +112,7 @@ public class ReadWriteCache implements CacheConcurrencyStrategy {
* item.
*/
public synchronized SoftLock lock(Object key, Object version) throws CacheException {
if ( log.isTraceEnabled() ) log.trace("Invalidating: " + key);
LOG.invalidating(key);
try {
cache.lock(key);
@ -163,37 +139,31 @@ public class ReadWriteCache implements CacheConcurrencyStrategy {
* version.
*/
public synchronized boolean put(
Object key,
Object value,
long txTimestamp,
Object version,
Object key,
Object value,
long txTimestamp,
Object version,
Comparator versionComparator,
boolean minimalPut)
boolean minimalPut)
throws CacheException {
if ( log.isTraceEnabled() ) log.trace("Caching: " + key);
LOG.caching(key);
try {
cache.lock(key);
Lockable lockable = (Lockable) cache.get(key);
boolean puttable = lockable==null ||
boolean puttable = lockable==null ||
lockable.isPuttable(txTimestamp, version, versionComparator);
if (puttable) {
cache.put( key, new Item( value, version, cache.nextTimestamp() ) );
if ( log.isTraceEnabled() ) log.trace("Cached: " + key);
LOG.cached(key);
return true;
}
else {
if ( log.isTraceEnabled() ) {
if ( lockable.isLock() ) {
log.trace("Item was locked: " + key);
}
else {
log.trace("Item was already cached: " + key);
}
}
if (lockable.isLock()) LOG.locked(key);
else LOG.exists(key);
return false;
}
}
@ -217,7 +187,7 @@ public class ReadWriteCache implements CacheConcurrencyStrategy {
* simultaneous lock).
*/
public synchronized void release(Object key, SoftLock clientLock) throws CacheException {
if ( log.isTraceEnabled() ) log.trace("Releasing: " + key);
LOG.releasing(key);
try {
cache.lock(key);
@ -236,7 +206,7 @@ public class ReadWriteCache implements CacheConcurrencyStrategy {
}
void handleLockExpiry(Object key) throws CacheException {
log.warn("An item was expired by the cache while it was locked (increase your cache timeout): " + key);
LOG.expired(key);
long ts = cache.nextTimestamp() + cache.getTimeout();
// create new lock that times out immediately
Lock lock = new Lock( ts, nextLockId(), null );
@ -257,7 +227,7 @@ public class ReadWriteCache implements CacheConcurrencyStrategy {
cache.destroy();
}
catch (Exception e) {
log.warn("could not destroy cache", e);
LOG.unableToDestroyCache(e.getMessage());
}
}
@ -265,10 +235,10 @@ public class ReadWriteCache implements CacheConcurrencyStrategy {
* Re-cache the updated state, if and only if there there are
* no other concurrent soft locks. Release our lock.
*/
public synchronized boolean afterUpdate(Object key, Object value, Object version, SoftLock clientLock)
public synchronized boolean afterUpdate(Object key, Object value, Object version, SoftLock clientLock)
throws CacheException {
if ( log.isTraceEnabled() ) log.trace("Updating: " + key);
LOG.updating(key);
try {
cache.lock(key);
@ -285,7 +255,7 @@ public class ReadWriteCache implements CacheConcurrencyStrategy {
else {
//recache the updated state
cache.update( key, new Item( value, version, cache.nextTimestamp() ) );
if ( log.isTraceEnabled() ) log.trace("Updated: " + key);
LOG.updated(key);
return true;
}
}
@ -304,17 +274,17 @@ public class ReadWriteCache implements CacheConcurrencyStrategy {
* Add the new item to the cache, checking that no other transaction has
* accessed the item.
*/
public synchronized boolean afterInsert(Object key, Object value, Object version)
public synchronized boolean afterInsert(Object key, Object value, Object version)
throws CacheException {
if ( log.isTraceEnabled() ) log.trace("Inserting: " + key);
LOG.inserting(key);
try {
cache.lock(key);
Lockable lockable = (Lockable) cache.get(key);
if (lockable==null) {
cache.update( key, new Item( value, version, cache.nextTimestamp() ) );
if ( log.isTraceEnabled() ) log.trace("Inserted: " + key);
LOG.inserted(key);
return true;
}
else {
@ -426,7 +396,8 @@ public class ReadWriteCache implements CacheConcurrencyStrategy {
return version!=null && comparator.compare(version, newVersion) < 0;
}
public String toString() {
@Override
public String toString() {
return "Item{version=" + version +
",freshTimestamp=" + freshTimestamp;
}
@ -482,7 +453,7 @@ public class ReadWriteCache implements CacheConcurrencyStrategy {
public boolean isPuttable(long txTimestamp, Object newVersion, Comparator comparator) {
if (timeout < txTimestamp) return true;
if (multiplicity>0) return false;
return version==null ?
return version==null ?
unlockTimestamp < txTimestamp :
comparator.compare(version, newVersion) < 0; //by requiring <, we rely on lock timeout in the case of an unsuccessful update!
}
@ -509,7 +480,8 @@ public class ReadWriteCache implements CacheConcurrencyStrategy {
public int getId() { return id; }
public String toString() {
@Override
public String toString() {
return "Lock{id=" + id +
",version=" + version +
",multiplicity=" + multiplicity +
@ -518,7 +490,8 @@ public class ReadWriteCache implements CacheConcurrencyStrategy {
}
public String toString() {
@Override
public String toString() {
return cache + "(read-write)";
}

View File

@ -28,12 +28,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import javax.persistence.EntityNotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.HibernateException;
import org.hibernate.UnresolvableObjectException;
import org.hibernate.cfg.Settings;
@ -52,7 +47,7 @@ import org.hibernate.type.TypeHelper;
*/
public class StandardQueryCache implements QueryCache {
private static final Logger log = LoggerFactory.getLogger( StandardQueryCache.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Logger.class.getPackage().getName());
private QueryResultsRegion cacheRegion;
private UpdateTimestampsCache updateTimestampsCache;
@ -73,7 +68,7 @@ public class StandardQueryCache implements QueryCache {
if ( prefix != null ) {
regionName = prefix + '.' + regionName;
}
log.info( "starting query cache at region: " + regionName );
LOG.startingQueryCache(regionName);
this.cacheRegion = settings.getRegionFactory().buildQueryResultsRegion( regionName, props );
this.updateTimestampsCache = updateTimestampsCache;
@ -92,14 +87,10 @@ public class StandardQueryCache implements QueryCache {
else {
Long ts = new Long( session.getFactory().getSettings().getRegionFactory().nextTimestamp());
if ( log.isDebugEnabled() ) {
log.debug( "caching query results in region: " + cacheRegion.getName() + "; timestamp=" + ts );
}
LOG.cachingQueryResults(cacheRegion.getName(), ts);
List cacheable = new ArrayList( result.size() + 1 );
if ( log.isTraceEnabled() ) {
logCachedResultDetails( key, null, returnTypes, cacheable );
}
logCachedResultDetails(key, null, returnTypes, cacheable);
cacheable.add( ts );
for ( Object aResult : result ) {
if ( returnTypes.length == 1 ) {
@ -110,9 +101,7 @@ public class StandardQueryCache implements QueryCache {
TypeHelper.disassemble( (Object[]) aResult, returnTypes, null, session, null )
);
}
if ( log.isTraceEnabled() ) {
logCachedResultRowDetails( returnTypes, aResult );
}
logCachedResultRowDetails(returnTypes, aResult);
}
cacheRegion.put( key, cacheable );
@ -127,27 +116,23 @@ public class StandardQueryCache implements QueryCache {
boolean isNaturalKeyLookup,
Set spaces,
SessionImplementor session) throws HibernateException {
if ( log.isDebugEnabled() ) {
log.debug( "checking cached query results in region: " + cacheRegion.getName() );
}
LOG.checkingQueryResults(cacheRegion.getName());
List cacheable = ( List ) cacheRegion.get( key );
if ( log.isTraceEnabled() ) {
logCachedResultDetails( key, spaces, returnTypes, cacheable );
}
logCachedResultDetails(key, spaces, returnTypes, cacheable);
if ( cacheable == null ) {
log.debug( "query results were not found in cache" );
LOG.queryResultsNotFound();
return null;
}
Long timestamp = ( Long ) cacheable.get( 0 );
if ( !isNaturalKeyLookup && !isUpToDate( spaces, timestamp ) ) {
log.debug( "cached query results were not up to date" );
LOG.queryResultsNotUpToDate();
return null;
}
log.debug( "returning cached query results" );
LOG.returningQueryResults();
for ( int i = 1; i < cacheable.size(); i++ ) {
if ( returnTypes.length == 1 ) {
returnTypes[0].beforeAssemble( ( Serializable ) cacheable.get( i ), session );
@ -167,9 +152,7 @@ public class StandardQueryCache implements QueryCache {
TypeHelper.assemble( ( Serializable[] ) cacheable.get( i ), returnTypes, session, null )
);
}
if ( log.isTraceEnabled() ) {
logCachedResultRowDetails( returnTypes, result.get( i - 1 ));
}
logCachedResultRowDetails(returnTypes, result.get(i - 1));
}
catch ( RuntimeException ex ) {
if ( isNaturalKeyLookup &&
@ -179,7 +162,7 @@ public class StandardQueryCache implements QueryCache {
// the uoe could occur while resolving
// associations, leaving the PC in an
// inconsistent state
log.debug( "could not reassemble cached result set" );
LOG.unableToReassembleResultSet();
cacheRegion.evict( key );
return null;
}
@ -192,9 +175,7 @@ public class StandardQueryCache implements QueryCache {
}
protected boolean isUpToDate(Set spaces, Long timestamp) {
if ( log.isDebugEnabled() ) {
log.debug( "Checking query spaces for up-to-dateness: " + spaces );
}
LOG.checkingQuerySpacesUpToDate(spaces);
return updateTimestampsCache.isUpToDate( spaces, timestamp );
}
@ -203,7 +184,7 @@ public class StandardQueryCache implements QueryCache {
cacheRegion.destroy();
}
catch ( Exception e ) {
log.warn( "could not destroy query cache: " + cacheRegion.getName(), e );
LOG.unableToDestroyQueryCache(cacheRegion.getName(), e.getMessage());
}
}
@ -211,19 +192,18 @@ public class StandardQueryCache implements QueryCache {
return cacheRegion;
}
public String toString() {
@Override
public String toString() {
return "StandardQueryCache(" + cacheRegion.getName() + ')';
}
private static void logCachedResultDetails(QueryKey key, Set querySpaces, Type[] returnTypes, List result) {
if ( ! log.isTraceEnabled() ) {
return;
}
log.trace( "key.hashCode="+key.hashCode() );
log.trace( "querySpaces="+querySpaces );
if (!LOG.isTraceEnabled()) return;
LOG.key(key.hashCode());
LOG.querySpaces(querySpaces);
if ( returnTypes == null || returnTypes.length == 0 ) {
log.trace( "unexpected returnTypes is "+( returnTypes == null ? "null" : "empty" )+
"! result"+( result == null ? " is null": ".size()=" + result.size() ) );
LOG.unexpectedReturnTypes(returnTypes == null ? "null" : "empty",
result == null ? " is null" : ".size()=" + result.size());
}
else {
StringBuffer returnTypeInfo = new StringBuffer();
@ -233,14 +213,12 @@ public class StandardQueryCache implements QueryCache {
.append(" class=" )
.append( returnTypes[ i ].getReturnedClass().getName() ).append(' ');
}
log.trace( " returnTypeInfo="+returnTypeInfo );
LOG.returnTypeInfo(returnTypeInfo.toString());
}
}
private static void logCachedResultRowDetails(Type[] returnTypes, Object result) {
if ( ! log.isTraceEnabled() ) {
return;
}
if (!LOG.isTraceEnabled()) return;
logCachedResultRowDetails(
returnTypes,
( result instanceof Object[] ? ( Object[] ) result : new Object[] { result } )
@ -248,37 +226,19 @@ public class StandardQueryCache implements QueryCache {
}
private static void logCachedResultRowDetails(Type[] returnTypes, Object[] tuple) {
if ( ! log.isTraceEnabled() ) {
return;
}
if (!LOG.isTraceEnabled()) return;
if ( tuple == null ) {
log.trace( " tuple is null; returnTypes is "+( returnTypes == null ? "null" : "Type["+returnTypes.length+"]" ) );
if ( returnTypes != null && returnTypes.length > 1 ) {
log.trace( "unexpected result tuple! "+
"tuple is null; should be Object["+returnTypes.length+"]!" );
}
LOG.nullTuple(returnTypes == null ? "null" : "Type[" + returnTypes.length + "]");
if (returnTypes != null && returnTypes.length > 1) LOG.unexpectedNullTupleResult(returnTypes.length);
}
else {
if ( returnTypes == null || returnTypes.length == 0 ) {
log.trace( "unexpected result tuple! "+
"tuple is non-null; returnTypes is "+( returnTypes == null ? "null" : "empty" ) );
}
log.trace( " tuple is Object["+tuple.length+
"]; returnTypes is Type["+returnTypes.length+"]" );
if ( tuple.length != returnTypes.length ) {
log.trace( "unexpected tuple length! transformer="+
" expected="+returnTypes.length+
" got="+tuple.length );
}
else {
for ( int j = 0; j < tuple.length; j++ ) {
if ( tuple[ j ] != null && ! returnTypes[ j ].getReturnedClass().isInstance( tuple[ j ] ) ) {
log.trace( "unexpected tuple value type! transformer="+
" expected="+returnTypes[ j ].getReturnedClass().getName()+
" got="+tuple[ j ].getClass().getName() );
}
}
}
if (returnTypes == null || returnTypes.length == 0) LOG.unexpectedNonNullTupleResult(returnTypes == null ? "null" : "empty");
LOG.tupleAndReturnTypes(tuple.length, returnTypes.length);
if (tuple.length != returnTypes.length) LOG.unexpectedTupleCount(returnTypes.length, tuple.length);
else for (int j = 0; j < tuple.length; j++) {
if (tuple[j] != null && !returnTypes[j].getReturnedClass().isInstance(tuple[j])) LOG.unexpectedTupleValueType(returnTypes[j].getReturnedClass().getName(),
tuple[j].getClass().getName());
}
}
}
}

View File

@ -25,10 +25,6 @@
package org.hibernate.cache;
import java.util.Comparator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.cache.access.SoftLock;
/**
@ -41,7 +37,7 @@ import org.hibernate.cache.access.SoftLock;
*/
public class TransactionalCache implements CacheConcurrencyStrategy {
private static final Logger log = LoggerFactory.getLogger( TransactionalCache.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Logger.class.getPackage().getName());
private Cache cache;
@ -50,13 +46,10 @@ public class TransactionalCache implements CacheConcurrencyStrategy {
}
public Object get(Object key, long txTimestamp) throws CacheException {
if ( log.isDebugEnabled() ) {
log.debug( "cache lookup: " + key );
}
LOG.lookup(key);
Object result = cache.read( key );
if ( log.isDebugEnabled() ) {
log.debug( result == null ? "cache miss" : "cache hit" );
}
if (result == null) LOG.miss(key);
else LOG.hit(key);
return result;
}
@ -68,14 +61,10 @@ public class TransactionalCache implements CacheConcurrencyStrategy {
Comparator versionComparator,
boolean minimalPut) throws CacheException {
if ( minimalPut && cache.read( key ) != null ) {
if ( log.isDebugEnabled() ) {
log.debug( "item already cached: " + key );
}
LOG.exists(key);
return false;
}
if ( log.isDebugEnabled() ) {
log.debug( "caching: " + key );
}
LOG.caching(key);
if ( cache instanceof OptimisticCache ) {
( ( OptimisticCache ) cache ).writeLoad( key, value, version );
}
@ -105,9 +94,7 @@ public class TransactionalCache implements CacheConcurrencyStrategy {
Object value,
Object currentVersion,
Object previousVersion) throws CacheException {
if ( log.isDebugEnabled() ) {
log.debug( "updating: " + key );
}
LOG.updating(key);
if ( cache instanceof OptimisticCache ) {
( ( OptimisticCache ) cache ).writeUpdate( key, value, currentVersion, previousVersion );
}
@ -121,9 +108,7 @@ public class TransactionalCache implements CacheConcurrencyStrategy {
Object key,
Object value,
Object currentVersion) throws CacheException {
if ( log.isDebugEnabled() ) {
log.debug( "inserting: " + key );
}
LOG.inserting(key);
if ( cache instanceof OptimisticCache ) {
( ( OptimisticCache ) cache ).writeInsert( key, value, currentVersion );
}
@ -138,14 +123,12 @@ public class TransactionalCache implements CacheConcurrencyStrategy {
}
public void remove(Object key) throws CacheException {
if ( log.isDebugEnabled() ) {
log.debug( "removing: " + key );
}
LOG.removing(key);
cache.remove( key );
}
public void clear() throws CacheException {
log.debug( "clearing" );
LOG.clearing();
cache.clear();
}
@ -154,7 +137,7 @@ public class TransactionalCache implements CacheConcurrencyStrategy {
cache.destroy();
}
catch ( Exception e ) {
log.warn( "could not destroy cache", e );
LOG.unableToDestroyCache(e.getMessage());
}
}
@ -187,7 +170,8 @@ public class TransactionalCache implements CacheConcurrencyStrategy {
return false;
}
public String toString() {
@Override
public String toString() {
return cache + "(transactional)";
}

View File

@ -28,10 +28,6 @@ import java.io.Serializable;
import java.util.Iterator;
import java.util.Properties;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.HibernateException;
import org.hibernate.cfg.Settings;
@ -47,14 +43,14 @@ import org.hibernate.cfg.Settings;
*/
public class UpdateTimestampsCache {
public static final String REGION_NAME = UpdateTimestampsCache.class.getName();
private static final Logger log = LoggerFactory.getLogger( UpdateTimestampsCache.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Logger.class.getPackage().getName());
private final TimestampsRegion region;
public UpdateTimestampsCache(Settings settings, Properties props) throws HibernateException {
String prefix = settings.getCacheRegionPrefix();
String regionName = prefix == null ? REGION_NAME : prefix + '.' + REGION_NAME;
log.info( "starting update timestamps cache at region: " + regionName );
LOG.startingUpdateTimestampsCache(regionName);
this.region = settings.getRegionFactory().buildTimestampsRegion( regionName, props );
}
@ -62,9 +58,7 @@ public class UpdateTimestampsCache {
//TODO: to handle concurrent writes correctly, this should return a Lock to the client
Long ts = new Long( region.nextTimestamp() + region.getTimeout() );
for ( int i=0; i<spaces.length; i++ ) {
if ( log.isDebugEnabled() ) {
log.debug( "Pre-invalidating space [" + spaces[i] + "]" );
}
LOG.preInvalidatingSpace(spaces[i]);
//put() has nowait semantics, is this really appropriate?
//note that it needs to be async replication, never local or sync
region.put( spaces[i], ts );
@ -77,9 +71,7 @@ public class UpdateTimestampsCache {
Long ts = new Long( region.nextTimestamp() );
//TODO: if lock.getTimestamp().equals(ts)
for ( int i=0; i<spaces.length; i++ ) {
if ( log.isDebugEnabled() ) {
log.debug( "Invalidating space [" + spaces[i] + "], timestamp: " + ts);
}
LOG.invalidatingSpace(spaces[i], ts);
//put() has nowait semantics, is this really appropriate?
//note that it needs to be async replication, never local or sync
region.put( spaces[i], ts );
@ -98,9 +90,7 @@ public class UpdateTimestampsCache {
//result = false; // safer
}
else {
if ( log.isDebugEnabled() ) {
log.debug("[" + space + "] last update timestamp: " + lastUpdate + ", result set timestamp: " + timestamp );
}
LOG.spaceLastUpdated(space, lastUpdate, timestamp);
if ( lastUpdate.longValue() >= timestamp.longValue() ) {
return false;
}
@ -118,15 +108,16 @@ public class UpdateTimestampsCache {
region.destroy();
}
catch (Exception e) {
log.warn("could not destroy UpdateTimestamps cache", e);
LOG.unableToDestroyUpdateTimestampsCache(region.getName(), e.getMessage());
}
}
public TimestampsRegion getRegion() {
return region;
}
public String toString() {
@Override
public String toString() {
return "UpdateTimestampeCache";
}

View File

@ -24,21 +24,18 @@
*/
package org.hibernate.cache.impl.bridge;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.cache.CollectionRegion;
import org.hibernate.cache.Cache;
import org.hibernate.cache.CacheDataDescription;
import org.hibernate.cache.OptimisticCache;
import org.hibernate.cache.CacheException;
import org.hibernate.cache.CacheConcurrencyStrategy;
import org.hibernate.cache.TransactionalCache;
import org.hibernate.cache.ReadWriteCache;
import org.hibernate.cache.CacheDataDescription;
import org.hibernate.cache.CacheException;
import org.hibernate.cache.CollectionRegion;
import org.hibernate.cache.NonstrictReadWriteCache;
import org.hibernate.cache.OptimisticCache;
import org.hibernate.cache.ReadOnlyCache;
import org.hibernate.cache.access.CollectionRegionAccessStrategy;
import org.hibernate.cache.ReadWriteCache;
import org.hibernate.cache.TransactionalCache;
import org.hibernate.cache.access.AccessType;
import org.hibernate.cache.access.CollectionRegionAccessStrategy;
import org.hibernate.cfg.Settings;
/**
@ -47,7 +44,8 @@ import org.hibernate.cfg.Settings;
* @author Steve Ebersole
*/
public class CollectionRegionAdapter extends BaseTransactionalDataRegionAdapter implements CollectionRegion {
private static final Logger log = LoggerFactory.getLogger( CollectionRegionAdapter.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Logger.class.getPackage().getName());
public CollectionRegionAdapter(Cache underlyingCache, Settings settings, CacheDataDescription metadata) {
super( underlyingCache, settings, metadata );
@ -59,9 +57,7 @@ public class CollectionRegionAdapter extends BaseTransactionalDataRegionAdapter
public CollectionRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
CacheConcurrencyStrategy ccs;
if ( AccessType.READ_ONLY.equals( accessType ) ) {
if ( metadata.isMutable() ) {
log.warn( "read-only cache configured for mutable collection [" + getName() + "]" );
}
if (metadata.isMutable()) LOG.readOnlyCacheConfiguredForMutableCollection(getName());
ccs = new ReadOnlyCache();
}
else if ( AccessType.READ_WRITE.equals( accessType ) ) {

View File

@ -24,21 +24,18 @@
*/
package org.hibernate.cache.impl.bridge;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.cache.EntityRegion;
import org.hibernate.cache.Cache;
import org.hibernate.cache.CacheDataDescription;
import org.hibernate.cache.OptimisticCache;
import org.hibernate.cache.CacheException;
import org.hibernate.cache.CacheConcurrencyStrategy;
import org.hibernate.cache.CacheDataDescription;
import org.hibernate.cache.CacheException;
import org.hibernate.cache.EntityRegion;
import org.hibernate.cache.NonstrictReadWriteCache;
import org.hibernate.cache.OptimisticCache;
import org.hibernate.cache.ReadOnlyCache;
import org.hibernate.cache.ReadWriteCache;
import org.hibernate.cache.NonstrictReadWriteCache;
import org.hibernate.cache.TransactionalCache;
import org.hibernate.cache.access.EntityRegionAccessStrategy;
import org.hibernate.cache.access.AccessType;
import org.hibernate.cache.access.EntityRegionAccessStrategy;
import org.hibernate.cfg.Settings;
/**
@ -47,7 +44,8 @@ import org.hibernate.cfg.Settings;
* @author Steve Ebersole
*/
public class EntityRegionAdapter extends BaseTransactionalDataRegionAdapter implements EntityRegion {
private static final Logger log = LoggerFactory.getLogger( EntityRegionAdapter.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Logger.class.getPackage().getName());
public EntityRegionAdapter(Cache underlyingCache, Settings settings, CacheDataDescription metadata) {
super( underlyingCache, settings, metadata );
@ -59,9 +57,7 @@ public class EntityRegionAdapter extends BaseTransactionalDataRegionAdapter impl
public EntityRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
CacheConcurrencyStrategy ccs;
if ( AccessType.READ_ONLY.equals( accessType ) ) {
if ( metadata.isMutable() ) {
log.warn( "read-only cache configured for mutable entity [" + getName() + "]" );
}
if (metadata.isMutable()) LOG.readOnlyCacheConfiguredForMutableCollection(getName());
ccs = new ReadOnlyCache();
}
else if ( AccessType.READ_WRITE.equals( accessType ) ) {
@ -79,5 +75,4 @@ public class EntityRegionAdapter extends BaseTransactionalDataRegionAdapter impl
ccs.setCache( underlyingCache );
return new EntityAccessStrategyAdapter( this, ccs, settings );
}
}

View File

@ -24,24 +24,20 @@
package org.hibernate.cache.impl.bridge;
import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.cache.RegionFactory;
import org.hibernate.cache.CacheProvider;
import org.hibernate.cache.CacheException;
import org.hibernate.cache.EntityRegion;
import org.hibernate.cache.CollectionRegion;
import org.hibernate.cache.QueryResultsRegion;
import org.hibernate.cache.NoCacheProvider;
import org.hibernate.cache.TimestampsRegion;
import org.hibernate.cache.CacheDataDescription;
import org.hibernate.cache.CacheException;
import org.hibernate.cache.CacheProvider;
import org.hibernate.cache.CollectionRegion;
import org.hibernate.cache.EntityRegion;
import org.hibernate.cache.NoCacheProvider;
import org.hibernate.cache.QueryResultsRegion;
import org.hibernate.cache.RegionFactory;
import org.hibernate.cache.TimestampsRegion;
import org.hibernate.cache.access.AccessType;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.util.ReflectHelper;
import org.hibernate.cfg.Environment;
import org.hibernate.cfg.Settings;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.util.ReflectHelper;
/**
* Acts as a bridge between the {@link RegionFactory} contract and the older
@ -51,14 +47,15 @@ import org.hibernate.cfg.Settings;
*/
public class RegionFactoryCacheProviderBridge implements RegionFactory {
public static final String DEF_PROVIDER = NoCacheProvider.class.getName();
private static final Logger log = LoggerFactory.getLogger( RegionFactoryCacheProviderBridge.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Logger.class.getPackage().getName());
private CacheProvider cacheProvider;
private Settings settings;
public RegionFactoryCacheProviderBridge(Properties properties) {
String providerClassName = ConfigurationHelper.getString( Environment.CACHE_PROVIDER, properties, DEF_PROVIDER );
log.info( "Cache provider: " + providerClassName );
LOG.cacheProvider(providerClassName);
try {
cacheProvider = ( CacheProvider ) ReflectHelper.classForName( providerClassName ).newInstance();
}

View File

@ -23,6 +23,10 @@
*/
package org.hibernate.cfg;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.INFO;
import static org.jboss.logging.Logger.Level.TRACE;
import static org.jboss.logging.Logger.Level.WARN;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Arrays;
@ -79,10 +83,6 @@ import javax.persistence.Table;
import javax.persistence.TableGenerator;
import javax.persistence.UniqueConstraint;
import javax.persistence.Version;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.AnnotationException;
import org.hibernate.AssertionFailure;
import org.hibernate.EntityMode;
@ -173,6 +173,10 @@ import org.hibernate.mapping.UnionSubclass;
import org.hibernate.persister.entity.JoinedSubclassEntityPersister;
import org.hibernate.persister.entity.SingleTableEntityPersister;
import org.hibernate.persister.entity.UnionSubclassEntityPersister;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* JSR 175 annotation binder which reads the annotations from classes, applies the
@ -185,25 +189,26 @@ import org.hibernate.persister.entity.UnionSubclassEntityPersister;
@SuppressWarnings("unchecked")
public final class AnnotationBinder {
/*
* Some design description
* I tried to remove any link to annotation except from the 2 first level of
* method call.
* It'll enable to:
* - facilitate annotation overriding
* - mutualize one day xml and annotation binder (probably a dream though)
* - split this huge class in smaller mapping oriented classes
*
* bindSomething usually create the mapping container and is accessed by one of the 2 first level method
* makeSomething usually create the mapping container and is accessed by bindSomething[else]
* fillSomething take the container into parameter and fill it.
*/
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
AnnotationBinder.class.getPackage().getName());
/*
* Some design description
* I tried to remove any link to annotation except from the 2 first level of
* method call.
* It'll enable to:
* - facilitate annotation overriding
* - mutualize one day xml and annotation binder (probably a dream though)
* - split this huge class in smaller mapping oriented classes
*
* bindSomething usually create the mapping container and is accessed by one of the 2 first level method
* makeSomething usually create the mapping container and is accessed by bindSomething[else]
* fillSomething take the container into parameter and fill it.
*/
private AnnotationBinder() {
}
private static final Logger log = LoggerFactory.getLogger( AnnotationBinder.class );
public static void bindDefaults(Mappings mappings) {
Map defaults = mappings.getReflectionManager().getDefaults();
{
@ -260,14 +265,14 @@ public final class AnnotationBinder {
pckg = mappings.getReflectionManager().packageForName( packageName );
}
catch ( ClassNotFoundException cnf ) {
log.warn( "Package not found or wo package-info.java: {}", packageName );
LOG.packageNotFound(packageName);
return;
}
if ( pckg.isAnnotationPresent( SequenceGenerator.class ) ) {
SequenceGenerator ann = pckg.getAnnotation( SequenceGenerator.class );
IdGenerator idGen = buildIdGenerator( ann, mappings );
mappings.addGenerator( idGen );
log.trace( "Add sequence generator with name: {}", idGen.getName() );
LOG.addSequenceGenerator(idGen.getName());
}
if ( pckg.isAnnotationPresent( TableGenerator.class ) ) {
TableGenerator ann = pckg.getAnnotation( TableGenerator.class );
@ -376,10 +381,10 @@ public final class AnnotationBinder {
idGen.addParam( org.hibernate.id.enhanced.TableGenerator.CONFIG_PREFER_SEGMENT_PER_ENTITY, "true" );
if ( !BinderHelper.isEmptyAnnotationValue( tabGen.catalog() ) ) {
idGen.addParam( org.hibernate.id.enhanced.TableGenerator.CATALOG, tabGen.catalog() );
idGen.addParam( PersistentIdentifierGenerator.CATALOG, tabGen.catalog() );
}
if ( !BinderHelper.isEmptyAnnotationValue( tabGen.schema() ) ) {
idGen.addParam( org.hibernate.id.enhanced.TableGenerator.SCHEMA, tabGen.schema() );
idGen.addParam( PersistentIdentifierGenerator.SCHEMA, tabGen.schema() );
}
if ( !BinderHelper.isEmptyAnnotationValue( tabGen.table() ) ) {
idGen.addParam( org.hibernate.id.enhanced.TableGenerator.TABLE_PARAM, tabGen.table() );
@ -408,9 +413,7 @@ public final class AnnotationBinder {
org.hibernate.id.enhanced.TableGenerator.INITIAL_PARAM,
String.valueOf( tabGen.initialValue() + 1 )
);
if ( tabGen.uniqueConstraints() != null && tabGen.uniqueConstraints().length > 0 ) {
log.warn( "Ignoring unique constraints specified on table generator [{}]", tabGen.name() );
}
if (tabGen.uniqueConstraints() != null && tabGen.uniqueConstraints().length > 0) LOG.tableGenerator(tabGen.name());
}
else {
idGen.setIdentifierGeneratorStrategy( MultipleHiLoPerTableGenerator.class.getName() );
@ -419,15 +422,13 @@ public final class AnnotationBinder {
idGen.addParam( MultipleHiLoPerTableGenerator.ID_TABLE, tabGen.table() );
}
if ( !BinderHelper.isEmptyAnnotationValue( tabGen.catalog() ) ) {
idGen.addParam( MultipleHiLoPerTableGenerator.CATALOG, tabGen.catalog() );
idGen.addParam( PersistentIdentifierGenerator.CATALOG, tabGen.catalog() );
}
if ( !BinderHelper.isEmptyAnnotationValue( tabGen.schema() ) ) {
idGen.addParam( MultipleHiLoPerTableGenerator.SCHEMA, tabGen.schema() );
idGen.addParam( PersistentIdentifierGenerator.SCHEMA, tabGen.schema() );
}
//FIXME implement uniqueconstrains
if ( tabGen.uniqueConstraints() != null && tabGen.uniqueConstraints().length > 0 ) {
log.warn( "Ignoring unique constraints specified on table generator [{}]", tabGen.name() );
}
if (tabGen.uniqueConstraints() != null && tabGen.uniqueConstraints().length > 0) LOG.ignoringTableGeneratorConstraints(tabGen.name());
if ( !BinderHelper.isEmptyAnnotationValue( tabGen.pkColumnName() ) ) {
idGen.addParam( MultipleHiLoPerTableGenerator.PK_COLUMN_NAME, tabGen.pkColumnName() );
@ -440,7 +441,7 @@ public final class AnnotationBinder {
}
idGen.addParam( TableHiLoGenerator.MAX_LO, String.valueOf( tabGen.allocationSize() - 1 ) );
}
log.trace( "Add table generator with name: {}", idGen.getName() );
LOG.addTableGenerator(idGen.getName());
}
else if ( ann instanceof SequenceGenerator ) {
SequenceGenerator seqGen = ( SequenceGenerator ) ann;
@ -449,10 +450,10 @@ public final class AnnotationBinder {
idGen.setIdentifierGeneratorStrategy( SequenceStyleGenerator.class.getName() );
if ( !BinderHelper.isEmptyAnnotationValue( seqGen.catalog() ) ) {
idGen.addParam( SequenceStyleGenerator.CATALOG, seqGen.catalog() );
idGen.addParam( PersistentIdentifierGenerator.CATALOG, seqGen.catalog() );
}
if ( !BinderHelper.isEmptyAnnotationValue( seqGen.schema() ) ) {
idGen.addParam( SequenceStyleGenerator.SCHEMA, seqGen.schema() );
idGen.addParam( PersistentIdentifierGenerator.SCHEMA, seqGen.schema() );
}
if ( !BinderHelper.isEmptyAnnotationValue( seqGen.sequenceName() ) ) {
idGen.addParam( SequenceStyleGenerator.SEQUENCE_PARAM, seqGen.sequenceName() );
@ -468,14 +469,9 @@ public final class AnnotationBinder {
}
//FIXME: work on initialValue() through SequenceGenerator.PARAMETERS
// steve : or just use o.h.id.enhanced.SequenceStyleGenerator
if ( seqGen.initialValue() != 1 ) {
log.warn(
"Hibernate does not support SequenceGenerator.initialValue() unless '{}' set",
Configuration.USE_NEW_ID_GENERATOR_MAPPINGS
);
}
if (seqGen.initialValue() != 1) LOG.unsupportedInitialValue(Configuration.USE_NEW_ID_GENERATOR_MAPPINGS);
idGen.addParam( SequenceHiLoGenerator.MAX_LO, String.valueOf( seqGen.allocationSize() - 1 ) );
log.trace( "Add sequence generator with name: {}", idGen.getName() );
LOG.addSequenceGenerator(idGen.getName());
}
}
else if ( ann instanceof GenericGenerator ) {
@ -486,7 +482,7 @@ public final class AnnotationBinder {
for ( Parameter parameter : params ) {
idGen.addParam( parameter.name(), parameter.value() );
}
log.trace( "Add generic generator with name: {}", idGen.getName() );
LOG.addGenericGenerator(idGen.getName());
}
else {
throw new AssertionFailure( "Unknown Generator annotation: " + ann );
@ -508,7 +504,7 @@ public final class AnnotationBinder {
Map<XClass, InheritanceState> inheritanceStatePerClass,
Mappings mappings) throws MappingException {
//@Entity and @MappedSuperclass on the same class leads to a NPE down the road
if ( clazzToProcess.isAnnotationPresent( Entity.class )
if ( clazzToProcess.isAnnotationPresent( Entity.class )
&& clazzToProcess.isAnnotationPresent( MappedSuperclass.class ) ) {
throw new AnnotationException( "An entity cannot be annotated with both @Entity and @MappedSuperclass: "
+ clazzToProcess.getName() );
@ -529,7 +525,7 @@ public final class AnnotationBinder {
return;
}
log.info( "Binding entity from annotated class: {}", clazzToProcess.getName() );
LOG.bindingEntityFromClass(clazzToProcess.getName());
PersistentClass superEntity = getSuperEntity(
clazzToProcess, inheritanceStatePerClass, mappings, inheritanceState
@ -596,15 +592,8 @@ public final class AnnotationBinder {
superEntity.getTable() :
null
);
}
else {
if ( clazzToProcess.isAnnotationPresent( Table.class ) ) {
log.warn(
"Illegal use of @Table in a subclass of a SINGLE_TABLE hierarchy: " + clazzToProcess
.getName()
);
}
}
} else if (clazzToProcess.isAnnotationPresent(Table.class)) LOG.invalidTableAnnotation(clazzToProcess.getName());
PropertyHolder propertyHolder = PropertyHolderBuilder.buildPropertyHolder(
clazzToProcess,
@ -673,11 +662,7 @@ public final class AnnotationBinder {
}
}
}
if ( onDeleteAnn != null && !onDeleteAppropriate ) {
log.warn(
"Inapropriate use of @OnDelete on entity, annotation ignored: {}", propertyHolder.getEntityName()
);
}
if (onDeleteAnn != null && !onDeleteAppropriate) LOG.invalidOnDeleteAnnotation(propertyHolder.getEntityName());
// try to find class level generators
HashMap<String, IdGenerator> classGenerators = buildLocalGenerators( clazzToProcess, mappings );
@ -753,12 +738,7 @@ public final class AnnotationBinder {
discriminatorType, discAnn, discFormulaAnn, mappings
);
}
if ( discAnn != null && inheritanceState.hasParents() ) {
log.warn(
"Discriminator column has to be defined in the root entity, it will be ignored in subclass: {}",
clazzToProcess.getName()
);
}
if (discAnn != null && inheritanceState.hasParents()) LOG.invalidDescriminatorAnnotation(clazzToProcess.getName());
String discrimValue = clazzToProcess.isAnnotationPresent( DiscriminatorValue.class ) ?
clazzToProcess.getAnnotation( DiscriminatorValue.class ).value() :
@ -766,7 +746,7 @@ public final class AnnotationBinder {
entityBinder.setDiscriminatorValue( discrimValue );
if ( clazzToProcess.isAnnotationPresent( ForceDiscriminator.class ) ) {
log.warn( "@ForceDiscriminator is deprecated use @DiscriminatorOptions instead." );
LOG.deprecatedForceDescriminatorAnnotation();
entityBinder.setForceDiscriminator( true );
}
@ -1016,7 +996,7 @@ public final class AnnotationBinder {
SharedCacheMode mode;
final Object value = mappings.getConfigurationProperties().get( "javax.persistence.sharedCache.mode" );
if ( value == null ) {
log.debug( "no value specified for 'javax.persistence.sharedCache.mode'; using UNSPECIFIED" );
LOG.sharedCacheModeNotFound();
mode = SharedCacheMode.UNSPECIFIED;
}
else {
@ -1028,10 +1008,7 @@ public final class AnnotationBinder {
mode = SharedCacheMode.valueOf( value.toString() );
}
catch ( Exception e ) {
log.debug(
"Unable to resolve given mode name [" + value.toString()
+ "]; using UNSPECIFIED : " + e.toString()
);
LOG.invalidSharedCacheMode(value, e);
mode = SharedCacheMode.UNSPECIFIED;
}
}
@ -1047,24 +1024,24 @@ public final class AnnotationBinder {
static void prepareDefaultCacheConcurrencyStrategy(Properties properties) {
if ( DEFAULT_CACHE_CONCURRENCY_STRATEGY != null ) {
log.trace( "Default cache concurrency strategy already defined" );
LOG.defaultCacheConcurrencyStrategyAlreadyDefined();
return;
}
if ( !properties.containsKey( Configuration.DEFAULT_CACHE_CONCURRENCY_STRATEGY ) ) {
log.trace( "Given properties did not contain any default cache concurrency strategy setting" );
LOG.defaultCacheConcurrencyStrategyNotFound();
return;
}
final String strategyName = properties.getProperty( Configuration.DEFAULT_CACHE_CONCURRENCY_STRATEGY );
log.trace( "Discovered default cache concurrency strategy via config [" + strategyName + "]" );
LOG.defaultCacheConcurrencyStrategyDiscovered(strategyName);
CacheConcurrencyStrategy strategy = CacheConcurrencyStrategy.parse( strategyName );
if ( strategy == null ) {
log.trace( "Discovered default cache concurrency strategy specified nothing" );
LOG.defaultCacheConcurrencyStrategySpecifiedNothing();
return;
}
log.debug( "Setting default cache concurrency strategy via config [" + strategy.name() + "]" );
LOG.defaultCacheConcurrencyStrategy(strategy.name());
DEFAULT_CACHE_CONCURRENCY_STRATEGY = strategy;
}
@ -1159,13 +1136,11 @@ public final class AnnotationBinder {
( Map<String, Join> ) null, ( PropertyHolder ) null, mappings
);
}
log.trace( "Subclass joined column(s) created" );
LOG.subclassJoinedColumnsCreated();
}
else {
if ( clazzToProcess.isAnnotationPresent( PrimaryKeyJoinColumns.class )
|| clazzToProcess.isAnnotationPresent( PrimaryKeyJoinColumn.class ) ) {
log.warn( "Root entity should not hold an PrimaryKeyJoinColum(s), will be ignored" );
}
if (clazzToProcess.isAnnotationPresent(PrimaryKeyJoinColumns.class)
|| clazzToProcess.isAnnotationPresent(PrimaryKeyJoinColumn.class)) LOG.invalidPrimaryKeyJoinColumnAnnotation();
}
return inheritanceJoinedColumns;
}
@ -1196,13 +1171,9 @@ public final class AnnotationBinder {
|| AnnotatedClassType.NONE.equals( classType ) //to be ignored
|| AnnotatedClassType.EMBEDDABLE.equals( classType ) //allow embeddable element declaration
) {
if ( AnnotatedClassType.NONE.equals( classType )
&& clazzToProcess.isAnnotationPresent( org.hibernate.annotations.Entity.class ) ) {
log.warn(
"Class annotated @org.hibernate.annotations.Entity but not javax.persistence.Entity "
+ "(most likely a user error): {}", clazzToProcess.getName()
);
}
if (AnnotatedClassType.NONE.equals(classType)
&& clazzToProcess.isAnnotationPresent(org.hibernate.annotations.Entity.class))
LOG.missingEntityAnnotation(clazzToProcess.getName());
return false;
}
@ -1271,7 +1242,7 @@ public final class AnnotationBinder {
params.put( param.name(), mappings.getTypeResolver().heuristicType( param.type() ) );
}
FilterDefinition def = new FilterDefinition( defAnn.name(), defAnn.defaultCondition(), params );
log.info( "Binding filter definition: {}", def.getFilterName() );
LOG.bindingFilterDefinition(def.getFilterName());
mappings.addFilterDefinition( def );
}
@ -1327,11 +1298,11 @@ public final class AnnotationBinder {
}
if ( !BinderHelper.isEmptyAnnotationValue( defAnn.name() ) ) {
log.info( "Binding type definition: {}", defAnn.name() );
LOG.bindingTypeDefinition(defAnn.name());
mappings.addTypeDef( defAnn.name(), defAnn.typeClass().getName(), params );
}
if ( !defAnn.defaultForType().equals( void.class ) ) {
log.info( "Binding type definition: {}", defAnn.defaultForType().getName() );
LOG.bindingTypeDefinition(defAnn.defaultForType().getName());
mappings.addTypeDef( defAnn.defaultForType().getName(), defAnn.typeClass().getName(), params );
}
@ -1355,7 +1326,7 @@ public final class AnnotationBinder {
discriminatorColumn.linkWithValue( discrim );
discrim.setTypeName( discriminatorColumn.getDiscriminatorTypeName() );
rootClass.setPolymorphic( true );
log.trace( "Setting discriminator for entity {}", rootClass.getEntityName() );
LOG.settingDiscriminator(rootClass.getEntityName());
}
}
@ -1474,9 +1445,7 @@ public final class AnnotationBinder {
* ordering does not matter
*/
log.trace(
"Processing annotations of {}.{}", propertyHolder.getEntityName(), inferredData.getPropertyName()
);
LOG.processingAnnotations(propertyHolder.getEntityName(), inferredData.getPropertyName());
final XProperty property = inferredData.getProperty();
if ( property.isAnnotationPresent( Parent.class ) ) {
@ -1540,7 +1509,7 @@ public final class AnnotationBinder {
+ propertyHolder.getEntityName()
);
}
log.trace( "{} is a version property", inferredData.getPropertyName() );
LOG.versionProperty(inferredData.getPropertyName());
RootClass rootClass = ( RootClass ) propertyHolder.getPersistentClass();
propertyBinder.setColumns( columns );
Property prop = propertyBinder.makePropertyValueAndBind();
@ -1564,10 +1533,7 @@ public final class AnnotationBinder {
SimpleValue simpleValue = ( SimpleValue ) prop.getValue();
simpleValue.setNullValue( "undefined" );
rootClass.setOptimisticLockMode( Versioning.OPTIMISTIC_LOCK_VERSION );
log.trace(
"Version name: {}, unsavedValue: {}", rootClass.getVersion().getName(),
( ( SimpleValue ) rootClass.getVersion().getValue() ).getNullValue()
);
LOG.version(rootClass.getVersion().getName(), ((SimpleValue)rootClass.getVersion().getValue()).getNullValue());
}
else {
final boolean forcePersist = property.isAnnotationPresent( MapsId.class )
@ -2132,7 +2098,7 @@ public final class AnnotationBinder {
}
private static void setVersionInformation(XProperty property, PropertyBinder propertyBinder) {
propertyBinder.getSimpleValueBinder().setVersion( true );
propertyBinder.getSimpleValueBinder().setVersion( true );
if(property.isAnnotationPresent( Source.class )) {
Source source = property.getAnnotation( Source.class );
propertyBinder.getSimpleValueBinder().setTimestampVersionType( source.value().typeName() );
@ -2175,9 +2141,7 @@ public final class AnnotationBinder {
} //a component must not have any generator
BinderHelper.makeIdGenerator( idValue, generatorType, generatorName, mappings, localGenerators );
log.trace(
"Bind {} on {}", ( isComponent ? "@EmbeddedId" : "@Id" ), inferredData.getPropertyName()
);
LOG.bindAnnotationToProperty((isComponent ? "@EmbeddedId" : "@Id"), inferredData.getPropertyName());
}
//TODO move that to collection binder?
@ -2359,7 +2323,7 @@ public final class AnnotationBinder {
*/
Component comp = createComponent( propertyHolder, inferredData, isComponentEmbedded, isIdentifierMapper, mappings );
String subpath = BinderHelper.getPath( propertyHolder, inferredData );
log.trace( "Binding component with path: {}", subpath );
LOG.bindingComponent(subpath);
PropertyHolder subHolder = PropertyHolderBuilder.buildPropertyHolder(
comp, subpath,
inferredData, propertyHolder, mappings
@ -2788,7 +2752,7 @@ public final class AnnotationBinder {
Mappings mappings) {
//column.getTable() => persistentClass.getTable()
final String propertyName = inferredData.getPropertyName();
log.trace( "Fetching {} with {}", propertyName, fetchMode );
LOG.fetching(propertyName, fetchMode);
boolean mapToPK = true;
if ( !trueOneToOne ) {
//try to find a hidden true one to one (FK == PK columns)
@ -3076,12 +3040,7 @@ public final class AnnotationBinder {
if ( superclassState.getType() != null ) {
final boolean mixingStrategy = state.getType() != null && !state.getType()
.equals( superclassState.getType() );
if ( nonDefault && mixingStrategy ) {
log.warn(
"Mixing inheritance strategy in a entity hierarchy is not allowed, ignoring sub strategy in: {}",
clazz.getName()
);
}
if (nonDefault && mixingStrategy) LOG.invalidSubStrategy(clazz.getName());
state.setType( superclassState.getType() );
}
}
@ -3117,4 +3076,143 @@ public final class AnnotationBinder {
return false;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Add generic generator with name: %s" )
void addGenericGenerator( String name );
@LogMessage( level = TRACE )
@Message( value = "Add sequence generator with name: %s" )
void addSequenceGenerator( String name );
@LogMessage( level = TRACE )
@Message( value = "Add table generator with name: %s" )
void addTableGenerator( String name );
@LogMessage( level = TRACE )
@Message( value = "Bind %s on %s" )
void bindAnnotationToProperty( String annotation,
String propertyName );
@LogMessage( level = TRACE )
@Message( value = "Binding component with path: %s" )
void bindingComponent( String subpath );
@LogMessage( level = INFO )
@Message( value = "Binding entity from annotated class: %s" )
void bindingEntityFromClass( String className );
@LogMessage( level = INFO )
@Message( value = "Binding filter definition: %s" )
void bindingFilterDefinition( String name );
@LogMessage( level = INFO )
@Message( value = "Binding type definition: %s" )
void bindingTypeDefinition( String name );
@LogMessage( level = DEBUG )
@Message( value = "Setting default cache concurrency strategy via config [%s]" )
void defaultCacheConcurrencyStrategy( String strategy );
@LogMessage( level = TRACE )
@Message( value = "Default cache concurrency strategy already defined" )
void defaultCacheConcurrencyStrategyAlreadyDefined();
@LogMessage( level = TRACE )
@Message( value = "Discovered default cache concurrency strategy via config [%s]" )
void defaultCacheConcurrencyStrategyDiscovered( String strategy );
@LogMessage( level = TRACE )
@Message( value = "Given properties did not contain any default cache concurrency strategy setting" )
void defaultCacheConcurrencyStrategyNotFound();
@LogMessage( level = TRACE )
@Message( value = "Discovered default cache concurrency strategy specified nothing" )
void defaultCacheConcurrencyStrategySpecifiedNothing();
@LogMessage( level = WARN )
@Message( value = "@ForceDiscriminator is deprecated use @DiscriminatorOptions instead." )
void deprecatedForceDescriminatorAnnotation();
@LogMessage( level = TRACE )
@Message( value = "Fetching %s with %s" )
void fetching( String propertyName,
FetchMode fetchMode );
@LogMessage( level = WARN )
@Message( value = "Ignoring unique constraints specified on table generator [%s]" )
void ignoringTableGeneratorConstraints(String name);
@LogMessage( level = WARN )
@Message( value = "Discriminator column has to be defined in the root entity, it will be ignored in subclass: %s" )
void invalidDescriminatorAnnotation( String className );
@LogMessage( level = WARN )
@Message( value = "Inapropriate use of @OnDelete on entity, annotation ignored: %s" )
void invalidOnDeleteAnnotation( String entityName );
@LogMessage( level = WARN )
@Message( value = "Root entity should not hold an PrimaryKeyJoinColum(s), will be ignored" )
void invalidPrimaryKeyJoinColumnAnnotation();
@LogMessage( level = DEBUG )
@Message( value = "Unable to resolve given mode name [%s]; using UNSPECIFIED : %s" )
void invalidSharedCacheMode( Object value,
Exception error );
@LogMessage( level = WARN )
@Message( value = "Mixing inheritance strategy in a entity hierarchy is not allowed, ignoring sub strategy in: %s" )
void invalidSubStrategy( String className );
@LogMessage( level = WARN )
@Message( value = "Illegal use of @Table in a subclass of a SINGLE_TABLE hierarchy: %s" )
void invalidTableAnnotation( String className );
@LogMessage( level = WARN )
@Message( value = "Class annotated @org.hibernate.annotations.Entity but not javax.persistence.Entity (most likely a user error): %s" )
void missingEntityAnnotation( String className );
@LogMessage( level = WARN )
@Message( value = "Package not found or wo package-info.java: %s" )
void packageNotFound( String packageName );
@LogMessage( level = TRACE )
@Message( value = "Processing annotations of %s.%s" )
void processingAnnotations( String entityName,
String propertyName );
@LogMessage( level = TRACE )
@Message( value = "Setting discriminator for entity %s" )
void settingDiscriminator( String entityName );
@LogMessage( level = DEBUG )
@Message( value = "No value specified for 'javax.persistence.sharedCache.mode'; using UNSPECIFIED" )
void sharedCacheModeNotFound();
@LogMessage( level = TRACE )
@Message( value = "Subclass joined column(s) created" )
void subclassJoinedColumnsCreated();
@LogMessage( level = WARN )
@Message( value = "%s" )
void tableGenerator( String name );
@LogMessage( level = WARN )
@Message( value = "Hibernate does not support SequenceGenerator.initialValue() unless '%s' set" )
void unsupportedInitialValue( String propertyName );
@LogMessage( level = TRACE )
@Message( value = "Version name: %s, unsavedValue: %s" )
void version( String name,
String nullValue );
@LogMessage( level = TRACE )
@Message( value = "%s is a version property" )
void versionProperty( String propertyName );
}
}
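The same logging pattern recurs in every class touched by this commit: a nested @MessageLogger interface declares the typed, i18n-able messages, and the outer class looks the implementation up once through org.jboss.logging.Logger.getMessageLogger(). A minimal, self-contained sketch of that pattern follows; the class name, message name, and message text are illustrative only and do not come from the Hibernate sources, and the concrete logger implementation is assumed to be supplied by the jboss-logging tooling at build or run time.

import static org.jboss.logging.Logger.Level.WARN;

import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;

public class ExampleBinder {

    // Looked up once per class; the category is the containing package,
    // matching the convention used throughout this commit.
    private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger( Logger.class,
            ExampleBinder.class.getPackage().getName() );

    public void bind(String entityName) {
        // Typed, format-free call replaces the old parameterized slf4j call.
        LOG.ignoringEntity( entityName );
    }

    /**
     * Interface defining messages that may be logged by the outer class
     */
    @MessageLogger
    interface Logger extends BasicLogger {

        @LogMessage( level = WARN )
        @Message( value = "Ignoring entity: %s" )
        void ignoringEntity( String entityName );
    }
}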

View File

@ -26,36 +26,11 @@ package org.hibernate.cfg;
import java.io.File;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.persistence.Embeddable;
import javax.persistence.Entity;
import javax.persistence.MappedSuperclass;
import javax.persistence.MapsId;
import org.dom4j.Document;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.AnnotationException;
import org.hibernate.DuplicateMappingException;
import org.hibernate.HibernateException;
import org.hibernate.Interceptor;
import org.hibernate.MappingException;
import org.hibernate.annotations.AnyMetaDef;
import org.hibernate.annotations.common.reflection.ReflectionManager;
import org.hibernate.annotations.common.reflection.XClass;
import org.hibernate.engine.NamedQueryDefinition;
import org.hibernate.engine.NamedSQLQueryDefinition;
import org.hibernate.engine.ResultSetMappingDefinition;
import org.hibernate.mapping.IdGenerator;
import org.hibernate.mapping.Join;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Table;
import org.hibernate.util.CollectionHelper;
/**
* Similar to the {@link Configuration} object but handles EJB3 and Hibernate
@ -68,7 +43,6 @@ import org.hibernate.util.CollectionHelper;
*/
@Deprecated
public class AnnotationConfiguration extends Configuration {
private Logger log = LoggerFactory.getLogger( AnnotationConfiguration.class );
public AnnotationConfiguration() {
super();

View File

@ -23,6 +23,7 @@
*/
package org.hibernate.cfg;
import static org.jboss.logging.Logger.Level.INFO;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@ -33,7 +34,6 @@ import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import org.hibernate.AnnotationException;
import org.hibernate.AssertionFailure;
import org.hibernate.MappingException;
@ -54,6 +54,7 @@ import org.hibernate.mapping.Column;
import org.hibernate.mapping.Component;
import org.hibernate.mapping.IdGenerator;
import org.hibernate.mapping.Join;
import org.hibernate.mapping.MappedSuperclass;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Property;
import org.hibernate.mapping.SimpleValue;
@ -61,10 +62,11 @@ import org.hibernate.mapping.SyntheticProperty;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.ToOne;
import org.hibernate.mapping.Value;
import org.hibernate.mapping.MappedSuperclass;
import org.hibernate.util.StringHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* @author Emmanuel Bernard
@ -72,7 +74,9 @@ import org.slf4j.LoggerFactory;
public class BinderHelper {
public static final String ANNOTATION_STRING_DEFAULT = "";
private static Logger log = LoggerFactory.getLogger( BinderHelper.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
BinderHelper.class.getPackage().getName());
private BinderHelper() {
}
@ -590,7 +594,7 @@ public class BinderHelper {
Ejb3Column[] metaColumns = Ejb3Column.buildColumnFromAnnotation(
new javax.persistence.Column[] { metaColumn }, null,
nullability, propertyHolder, inferredData, entityBinder.getSecondaryTables(), mappings
nullability, propertyHolder, inferredData, entityBinder.getSecondaryTables(), mappings
);
//set metaColumn to the right table
for (Ejb3Column column : metaColumns) {
@ -638,7 +642,7 @@ public class BinderHelper {
private static void bindAnyMetaDef(AnyMetaDef defAnn, Mappings mappings) {
if ( isEmptyAnnotationValue( defAnn.name() ) ) return; //don't map not named definitions
log.info( "Binding Any Meta definition: {}", defAnn.name() );
LOG.bindingAnyMetaDefinition(defAnn.name());
mappings.addAnyMetaDef( defAnn );
}
@ -692,4 +696,15 @@ public class BinderHelper {
return mappings.getPropertyAnnotatedWithMapsId( persistentXClass, propertyPath );
}
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = INFO )
@Message( value = "Binding Any Meta definition: %s" )
void bindingAnyMetaDefinition( String name );
}
}

View File

@ -23,18 +23,20 @@
*/
package org.hibernate.cfg;
import static org.jboss.logging.Logger.Level.DEBUG;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.MappingException;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.IndexedCollection;
import org.hibernate.mapping.OneToMany;
import org.hibernate.mapping.Selectable;
import org.hibernate.mapping.Value;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Collection second pass
@ -42,7 +44,10 @@ import org.hibernate.mapping.Value;
* @author Emmanuel Bernard
*/
public abstract class CollectionSecondPass implements SecondPass {
private static Logger log = LoggerFactory.getLogger( CollectionSecondPass.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
CollectionSecondPass.class.getPackage().getName());
Mappings mappings;
Collection collection;
private Map localInheritedMetas;
@ -59,13 +64,12 @@ public abstract class CollectionSecondPass implements SecondPass {
public void doSecondPass(java.util.Map persistentClasses)
throws MappingException {
if ( log.isDebugEnabled() )
log.debug( "Second pass for collection: " + collection.getRole() );
LOG.secondPass(collection.getRole());
secondPass( persistentClasses, localInheritedMetas ); // using local since the inheritedMetas at this point is not the correct map since it is always the empty map
collection.createAllKeys();
if ( log.isDebugEnabled() ) {
if (LOG.isDebugEnabled()) {
String msg = "Mapped collection key: " + columns( collection.getKey() );
if ( collection.isIndexed() )
msg += ", index: " + columns( ( (IndexedCollection) collection ).getIndex() );
@ -76,7 +80,7 @@ public abstract class CollectionSecondPass implements SecondPass {
else {
msg += ", element: " + columns( collection.getElement() );
}
log.debug( msg );
LOG.mappedCollection(msg);
}
}
@ -92,4 +96,19 @@ public abstract class CollectionSecondPass implements SecondPass {
}
return columns.toString();
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "%s" )
void mappedCollection( String message );
@LogMessage( level = DEBUG )
@Message( value = "Second pass for collection: %s" )
void secondPass( String role );
}
}
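Because the message interfaces extend BasicLogger, the level checks from the slf4j version remain available, so assembling an expensive argument can stay guarded exactly as before. A minimal sketch, assuming a hypothetical describeCollection() helper in place of the string building shown above:

// describeCollection(...) is a hypothetical helper standing in for the
// "Mapped collection key: ..." string assembly above; the guard keeps that
// work from running when DEBUG logging is disabled.
if ( LOG.isDebugEnabled() ) {
    LOG.mappedCollection( describeCollection( collection ) );
}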

View File

@ -23,6 +23,11 @@
*/
package org.hibernate.cfg;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.ERROR;
import static org.jboss.logging.Logger.Level.INFO;
import static org.jboss.logging.Logger.Level.TRACE;
import static org.jboss.logging.Logger.Level.WARN;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
@ -58,16 +63,10 @@ import java.util.zip.ZipEntry;
import javax.persistence.Embeddable;
import javax.persistence.Entity;
import javax.persistence.MapsId;
import org.dom4j.Attribute;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Element;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.EntityResolver;
import org.xml.sax.InputSource;
import org.hibernate.AnnotationException;
import org.hibernate.DuplicateMappingException;
import org.hibernate.EmptyInterceptor;
@ -153,7 +152,6 @@ import org.hibernate.mapping.UniqueKey;
import org.hibernate.proxy.EntityNotFoundDelegate;
import org.hibernate.secure.JACCConfiguration;
import org.hibernate.service.spi.ServicesRegistry;
import org.hibernate.service.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.tool.hbm2ddl.DatabaseMetadata;
import org.hibernate.tool.hbm2ddl.IndexMetadata;
import org.hibernate.tool.hbm2ddl.TableMetadata;
@ -177,6 +175,12 @@ import org.hibernate.util.xml.Origin;
import org.hibernate.util.xml.OriginImpl;
import org.hibernate.util.xml.XmlDocument;
import org.hibernate.util.xml.XmlDocumentImpl;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
import org.xml.sax.EntityResolver;
import org.xml.sax.InputSource;
/**
* An instance of <tt>Configuration</tt> allows the application
@ -196,7 +200,9 @@ import org.hibernate.util.xml.XmlDocumentImpl;
* @see org.hibernate.SessionFactory
*/
public class Configuration implements Serializable {
private static Logger log = LoggerFactory.getLogger( Configuration.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
Configuration.class.getPackage().getName());
/**
* Setting used to give the name of the default {@link org.hibernate.annotations.CacheConcurrencyStrategy}
@ -492,7 +498,7 @@ public class Configuration implements Serializable {
* have indicated a problem parsing the XML document, but that is now delayed until after {@link #buildMappings}
*/
public Configuration addFile(final File xmlFile) throws MappingException {
log.info( "Reading mappings from file: " + xmlFile.getPath() );
LOG.readingMappingsFromFile(xmlFile.getPath());
final String name = xmlFile.getAbsolutePath();
final InputSource inputSource;
try {
@ -561,10 +567,10 @@ public class Configuration implements Serializable {
return addCacheableFileStrictly( xmlFile );
}
catch ( SerializationException e ) {
log.warn( "Could not deserialize cache file: " + cachedFile.getPath() + " : " + e );
LOG.unableToDeserializeCache(cachedFile.getPath(), e);
}
catch ( FileNotFoundException e ) {
log.warn( "I/O reported cached file could not be found : " + cachedFile.getPath() + " : " + e );
LOG.cachedFileNotFound(cachedFile.getPath(), e);
}
final String name = xmlFile.getAbsolutePath();
@ -576,18 +582,14 @@ public class Configuration implements Serializable {
throw new MappingNotFoundException( "file", xmlFile.toString() );
}
log.info( "Reading mappings from file: " + xmlFile );
LOG.readingMappingsFromFile(xmlFile.getPath());
XmlDocument metadataXml = add( inputSource, "file", name );
try {
log.debug( "Writing cache file for: " + xmlFile + " to: " + cachedFile );
LOG.writingCacheFile(xmlFile, cachedFile);
SerializationHelper.serialize( ( Serializable ) metadataXml.getDocumentTree(), new FileOutputStream( cachedFile ) );
}
catch ( SerializationException e ) {
log.warn( "Could not write cached file: " + cachedFile, e );
}
catch ( FileNotFoundException e ) {
log.warn( "I/O reported error writing cached file : " + cachedFile.getPath(), e );
} catch (Exception e) {
LOG.unableToWriteCachedFile(cachedFile.getPath(), e.getMessage());
}
return this;
@ -621,7 +623,7 @@ public class Configuration implements Serializable {
throw new FileNotFoundException( "Cached file could not be found or could not be used" );
}
log.info( "Reading mappings from cache file: " + cachedFile );
LOG.readingCachedMappings(cachedFile);
Document document = ( Document ) SerializationHelper.deserialize( new FileInputStream( cachedFile ) );
add( new XmlDocumentImpl( document, "file", xmlFile.getAbsolutePath() ) );
return this;
@ -651,9 +653,7 @@ public class Configuration implements Serializable {
* given XML string
*/
public Configuration addXML(String xml) throws MappingException {
if ( log.isDebugEnabled() ) {
log.debug( "Mapping XML:\n" + xml );
}
LOG.mappingXml(xml);
final InputSource inputSource = new InputSource( new StringReader( xml ) );
add( inputSource, "string", "XML String" );
return this;
@ -670,9 +670,7 @@ public class Configuration implements Serializable {
public Configuration addURL(URL url) throws MappingException {
final String urlExternalForm = url.toExternalForm();
if ( log.isDebugEnabled() ) {
log.debug( "Reading mapping document from URL : {}", urlExternalForm );
}
LOG.readingMappingDocument(urlExternalForm);
try {
add( url.openStream(), "URL", urlExternalForm );
@ -693,7 +691,7 @@ public class Configuration implements Serializable {
inputStream.close();
}
catch ( IOException ignore ) {
log.trace( "Was unable to close input stream" );
LOG.unableToCloseInputStream();
}
}
}
@ -707,9 +705,7 @@ public class Configuration implements Serializable {
* the mapping document.
*/
public Configuration addDocument(org.w3c.dom.Document doc) throws MappingException {
if ( log.isDebugEnabled() ) {
log.debug( "Mapping document:\n" + doc );
}
LOG.mappingDocument(doc);
final Document document = xmlHelper.createDOMReader().read( doc );
add( new XmlDocumentImpl( document, "unknown", null ) );
@ -740,7 +736,7 @@ public class Configuration implements Serializable {
* processing the contained mapping document.
*/
public Configuration addResource(String resourceName, ClassLoader classLoader) throws MappingException {
log.info( "Reading mappings from resource: " + resourceName );
LOG.readingMappingsFromResource(resourceName);
InputStream resourceInputStream = classLoader.getResourceAsStream( resourceName );
if ( resourceInputStream == null ) {
throw new MappingNotFoundException( "resource", resourceName );
@ -759,7 +755,7 @@ public class Configuration implements Serializable {
* processing the contained mapping document.
*/
public Configuration addResource(String resourceName) throws MappingException {
log.info( "Reading mappings from resource : " + resourceName );
LOG.readingMappingsFromResource(resourceName);
ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
InputStream resourceInputStream = null;
if ( contextClassLoader != null ) {
@ -787,7 +783,7 @@ public class Configuration implements Serializable {
*/
public Configuration addClass(Class persistentClass) throws MappingException {
String mappingResourceName = persistentClass.getName().replace( '.', '/' ) + ".hbm.xml";
log.info( "Reading mappings from resource: " + mappingResourceName );
LOG.readingMappingsFromResource(mappingResourceName);
return addResource( mappingResourceName, persistentClass.getClassLoader() );
}
@ -815,13 +811,13 @@ public class Configuration implements Serializable {
* @throws MappingException in case there is an error in the mapping data
*/
public Configuration addPackage(String packageName) throws MappingException {
log.info( "Mapping package {}", packageName );
LOG.mappingPackage(packageName);
try {
AnnotationBinder.bindPackage( packageName, createMappings() );
return this;
}
catch ( MappingException me ) {
log.error( "Could not parse the package-level metadata [" + packageName + "]" );
LOG.unableToParseMetadata(packageName);
throw me;
}
}
@ -837,7 +833,7 @@ public class Configuration implements Serializable {
* processing the contained mapping documents.
*/
public Configuration addJar(File jar) throws MappingException {
log.info( "Searching for mapping documents in jar: " + jar.getName() );
LOG.searchingForMappingDocuments(jar.getName());
JarFile jarFile = null;
try {
try {
@ -853,7 +849,7 @@ public class Configuration implements Serializable {
while ( jarEntries.hasMoreElements() ) {
ZipEntry ze = (ZipEntry) jarEntries.nextElement();
if ( ze.getName().endsWith( ".hbm.xml" ) ) {
log.info( "Found mapping document in jar: " + ze.getName() );
LOG.foundMappingDocument(ze.getName());
try {
addInputStream( jarFile.getInputStream( ze ) );
}
@ -875,7 +871,7 @@ public class Configuration implements Serializable {
}
}
catch (IOException ioe) {
log.error("could not close jar", ioe);
LOG.unableToCloseJar(ioe.getMessage());
}
}
@ -1171,7 +1167,7 @@ public class Configuration implements Serializable {
while ( iter.hasNext() ) {
Table table = (Table) iter.next();
if ( table.isPhysicalTable() ) {
TableMetadata tableInfo = databaseMetadata.getTableMetadata(
table.getName(),
( table.getSchema() == null ) ? defaultSchema : table.getSchema(),
@ -1297,12 +1293,12 @@ public class Configuration implements Serializable {
String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG );
String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA );
Iterator iter = getTableMappings();
while ( iter.hasNext() ) {
Table table = (Table) iter.next();
if ( table.isPhysicalTable() ) {
TableMetadata tableInfo = databaseMetadata.getTableMetadata(
table.getName(),
@ -1349,7 +1345,7 @@ public class Configuration implements Serializable {
}
protected void secondPassCompile() throws MappingException {
log.trace( "Starting secondPassCompile() processing" );
LOG.startingSecondPassCompile();
//process default values first
{
@ -1436,7 +1432,7 @@ public class Configuration implements Serializable {
* an entity having a PK made of a ManyToOne ...).
*/
private void processFkSecondPassInOrder() {
log.debug( "processing fk mappings (*ToOne and JoinedSubclass)" );
LOG.processingForeignKeyMappings();
List<FkSecondPass> fkSecondPasses = getFKSecondPassesOnly();
if ( fkSecondPasses.size() == 0 ) {
@ -1645,9 +1641,7 @@ public class Configuration implements Serializable {
applyMethod = classValidator.getMethod( "apply", PersistentClass.class );
}
catch ( ClassNotFoundException e ) {
if ( !isValidatorNotPresentLogged ) {
log.info( "Hibernate Validator not found: ignoring" );
}
if (!isValidatorNotPresentLogged) LOG.validatorNotFound();
isValidatorNotPresentLogged = true;
}
catch ( NoSuchMethodException e ) {
@ -1665,7 +1659,7 @@ public class Configuration implements Serializable {
applyMethod.invoke( validator, persistentClazz );
}
catch ( Exception e ) {
log.warn( "Unable to apply constraints on DDL for " + className, e );
LOG.unableToApplyConstraints(className, e.getMessage());
}
}
}
@ -1678,10 +1672,10 @@ public class Configuration implements Serializable {
}
private void originalSecondPassCompile() throws MappingException {
log.debug( "processing extends queue" );
LOG.processingExtendsQueue();
processExtendsQueue();
log.debug( "processing collection mappings" );
LOG.processingCollectionMappings();
Iterator itr = secondPasses.iterator();
while ( itr.hasNext() ) {
SecondPass sp = (SecondPass) itr.next();
@ -1691,7 +1685,7 @@ public class Configuration implements Serializable {
}
}
log.debug( "processing native query and ResultSetMapping mappings" );
LOG.processingNativeQuery();
itr = secondPasses.iterator();
while ( itr.hasNext() ) {
SecondPass sp = (SecondPass) itr.next();
@ -1699,7 +1693,7 @@ public class Configuration implements Serializable {
itr.remove();
}
log.debug( "processing association property references" );
LOG.processingAssociationPropertyReferences();
itr = propertyReferences.iterator();
while ( itr.hasNext() ) {
@ -1721,7 +1715,7 @@ public class Configuration implements Serializable {
//TODO: Somehow add the newly created foreign keys to the internal collection
log.debug( "processing foreign key constraints" );
LOG.processingForeignKeyConstraints();
itr = getTableMappings();
Set done = new HashSet();
@ -1732,7 +1726,7 @@ public class Configuration implements Serializable {
}
private int processExtendsQueue() {
log.debug( "processing extends queue" );
LOG.processingExtendsQueue();
int added = 0;
ExtendsQueueEntry extendsQueueEntry = findPossibleExtends();
while ( extendsQueueEntry != null ) {
@ -1789,9 +1783,7 @@ public class Configuration implements Serializable {
" does not specify the referenced entity"
);
}
if ( log.isDebugEnabled() ) {
log.debug( "resolving reference to class: " + referencedEntityName );
}
LOG.resolvingReference(referencedEntityName);
PersistentClass referencedClass = classes.get( referencedEntityName );
if ( referencedClass == null ) {
throw new MappingException(
@ -1824,12 +1816,10 @@ public class Configuration implements Serializable {
* @throws HibernateException usually indicates an invalid configuration or invalid mapping information
*/
public SessionFactory buildSessionFactory(ServicesRegistry serviceRegistry) throws HibernateException {
log.debug( "Preparing to build session factory with filters : " + filterDefinitions );
LOG.debug("Preparing to build session factory with filters : " + filterDefinitions);
secondPassCompile();
if ( ! metadataSourceQueue.isEmpty() ) {
log.warn( "mapping metadata cache was not completely processed" );
}
if (!metadataSourceQueue.isEmpty()) LOG.incompleteMappingMetadataCacheProcessing();
enableLegacyHibernateValidator();
enableBeanValidation();
@ -1867,7 +1857,7 @@ public class Configuration implements Serializable {
}
catch ( ClassNotFoundException e ) {
//validator is not present
log.debug( "Legacy Validator not present in classpath, ignoring event listener registration" );
LOG.legacyValidatorNotFound();
}
if ( enableValidatorListeners && validateEventListenerClass != null ) {
//TODO so much duplication
@ -1950,7 +1940,7 @@ public class Configuration implements Serializable {
searchStartupClass = ReflectHelper.classForName( SEARCH_EVENT_LISTENER_REGISTERER_CLASS, getClass() );
}
catch ( ClassNotFoundException cnfe ) {
log.debug( "Search not present in classpath, ignoring event listener registration." );
LOG.searchNotFound();
return;
}
}
@ -1966,16 +1956,16 @@ public class Configuration implements Serializable {
enableSearchMethod.invoke( searchStartupInstance, getEventListeners(), getProperties() );
}
catch ( InstantiationException e ) {
log.debug( "Unable to instantiate {}, ignoring event listener registration.", SEARCH_STARTUP_CLASS );
LOG.unableToInstantiate(SEARCH_STARTUP_CLASS);
}
catch ( IllegalAccessException e ) {
log.debug( "Unable to instantiate {}, ignoring event listener registration.", SEARCH_STARTUP_CLASS );
LOG.unableToInstantiate(SEARCH_STARTUP_CLASS);
}
catch ( NoSuchMethodException e ) {
log.debug( "Method enableHibernateSearch() not found in {}.", SEARCH_STARTUP_CLASS );
LOG.methodNotFound(SEARCH_STARTUP_METHOD, SEARCH_STARTUP_CLASS);
}
catch ( InvocationTargetException e ) {
log.debug( "Unable to execute {}, ignoring event listener registration.", SEARCH_STARTUP_METHOD );
LOG.unableToExecute(SEARCH_STARTUP_METHOD);
}
}
@ -2089,7 +2079,7 @@ public class Configuration implements Serializable {
Element node = (Element) itr.next();
String name = node.attributeValue( "name" );
String value = node.getText().trim();
log.debug( name + "=" + value );
LOG.attribute(name, value);
properties.setProperty( name, value );
if ( !name.startsWith( "hibernate" ) ) {
properties.setProperty( "hibernate." + name, value );
@ -2127,7 +2117,7 @@ public class Configuration implements Serializable {
* @see #doConfigure(java.io.InputStream, String)
*/
public Configuration configure(String resource) throws HibernateException {
log.info( "configuring from resource: " + resource );
LOG.configuringFromResource(resource);
InputStream stream = getConfigurationInputStream( resource );
return doConfigure( stream, resource );
}
@ -2146,7 +2136,7 @@ public class Configuration implements Serializable {
* @throws HibernateException Generally indicates we cannot find the named resource
*/
protected InputStream getConfigurationInputStream(String resource) throws HibernateException {
log.info( "Configuration resource: " + resource );
LOG.configurationResource(resource);
return ConfigHelper.getResourceAsStream( resource );
}
@ -2163,7 +2153,7 @@ public class Configuration implements Serializable {
* @see #doConfigure(java.io.InputStream, String)
*/
public Configuration configure(URL url) throws HibernateException {
log.info( "configuring from url: " + url.toString() );
LOG.configuringFromUrl(url);
try {
return doConfigure( url.openStream(), url.toString() );
}
@ -2185,7 +2175,7 @@ public class Configuration implements Serializable {
* @see #doConfigure(java.io.InputStream, String)
*/
public Configuration configure(File configFile) throws HibernateException {
log.info( "configuring from file: " + configFile.getName() );
LOG.configuringFromFile(configFile.getName());
try {
return doConfigure( new FileInputStream( configFile ), configFile.toString() );
}
@ -2224,7 +2214,7 @@ public class Configuration implements Serializable {
stream.close();
}
catch (IOException ioe) {
log.warn( "could not close input stream for: " + resourceName, ioe );
LOG.unableToCloseInputStream(resourceName, ioe.getMessage());
}
}
return this;
@ -2240,7 +2230,7 @@ public class Configuration implements Serializable {
* @throws HibernateException if there is problem in accessing the file.
*/
public Configuration configure(org.w3c.dom.Document document) throws HibernateException {
log.info( "configuring from XML document" );
LOG.configuringFromXmlDocument();
return doConfigure( xmlHelper.createDOMReader().read( document ) );
}
@ -2268,8 +2258,8 @@ public class Configuration implements Serializable {
parseSecurity( secNode );
}
log.info( "Configured SessionFactory: " + name );
log.debug( "properties: " + properties );
LOG.configuredSessionFactory(name);
LOG.properties(properties);
return this;
}
@ -2314,27 +2304,27 @@ public class Configuration implements Serializable {
if ( resourceAttribute != null ) {
final String resourceName = resourceAttribute.getValue();
log.debug( "session-factory config [{}] named resource [{}] for mapping", name, resourceName );
LOG.sessionFactoryConfigResourceForMapping(name, resourceName);
addResource( resourceName );
}
else if ( fileAttribute != null ) {
final String fileName = fileAttribute.getValue();
log.debug( "session-factory config [{}] named file [{}] for mapping", name, fileName );
LOG.sessionFactoryConfigFileForMapping(name, fileName);
addFile( fileName );
}
else if ( jarAttribute != null ) {
final String jarFileName = jarAttribute.getValue();
log.debug( "session-factory config [{}] named jar file [{}] for mapping", name, jarFileName );
LOG.sessionFactoryConfigJarForMapping(name, jarFileName);
addJar( new File( jarFileName ) );
}
else if ( packageAttribute != null ) {
final String packageName = packageAttribute.getValue();
log.debug( "session-factory config [{}] named package [{}] for mapping", name, packageName );
LOG.sessionFactoryConfigPackageForMapping(name, packageName);
addPackage( packageName );
}
else if ( classAttribute != null ) {
final String className = classAttribute.getValue();
log.debug( "session-factory config [{}] named class [{}] for mapping", name, className );
LOG.sessionFactoryConfigClassForMapping(name, className);
try {
addAnnotatedClass( ReflectHelper.classForName( className ) );
@ -2353,8 +2343,8 @@ public class Configuration implements Serializable {
private void parseSecurity(Element secNode) {
String contextId = secNode.attributeValue( "context" );
setProperty(Environment.JACC_CONTEXTID, contextId);
log.info( "JACC contextID: " + contextId );
setProperty(Environment.JACC_CONTEXTID, contextId);
LOG.jaccContextId(contextId);
JACCConfiguration jcfg = new JACCConfiguration( contextId );
Iterator grantElements = secNode.elementIterator();
while ( grantElements.hasNext() ) {
@ -2377,7 +2367,7 @@ public class Configuration implements Serializable {
for ( int i = 0; i < listeners.size() ; i++ ) {
listenerClasses[i] = ( (Element) listeners.get( i ) ).attributeValue( "class" );
}
log.debug( "Event listeners: " + type + "=" + StringHelper.toString( listenerClasses ) );
LOG.eventListeners(type, StringHelper.toString(listenerClasses));
setListeners( type, listenerClasses );
}
@ -2387,7 +2377,7 @@ public class Configuration implements Serializable {
throw new MappingException( "No type specified for listener" );
}
String impl = element.attributeValue( "class" );
log.debug( "Event listener: " + type + "=" + impl );
LOG.eventListener(type, impl);
setListeners( type, new String[]{impl} );
}
@ -3129,17 +3119,9 @@ public class Configuration implements Serializable {
public void addImport(String entityName, String rename) throws DuplicateMappingException {
String existing = imports.put( rename, entityName );
if ( existing != null ) {
if ( existing.equals( entityName ) ) {
log.info( "duplicate import: {} -> {}", entityName, rename );
}
else {
throw new DuplicateMappingException(
"duplicate import: " + rename + " refers to both " + entityName +
" and " + existing + " (try using auto-import=\"false\")",
"import",
rename
);
}
if (existing.equals(entityName)) LOG.duplicateImport(entityName, rename);
else throw new DuplicateMappingException("duplicate import: " + rename + " refers to both " + entityName + " and "
+ existing + " (try using auto-import=\"false\")", "import", rename);
}
}
@ -3308,7 +3290,7 @@ public class Configuration implements Serializable {
public void addTypeDef(String typeName, String typeClass, Properties paramMap) {
TypeDef def = new TypeDef( typeClass, paramMap );
typeDefs.put( typeName, def );
log.debug( "Added " + typeName + " with class " + typeClass );
LOG.addedType(typeName, typeClass);
}
public Map getFilterDefinitions() {
@ -3470,7 +3452,7 @@ public class Configuration implements Serializable {
}
binding.addBinding( logicalName, physicalColumn );
}
public String getPhysicalColumnName(String logicalName, Table table) throws MappingException {
logicalName = logicalName.toLowerCase();
String finalName = null;
@ -3654,17 +3636,13 @@ public class Configuration implements Serializable {
public void addGenerator(IdGenerator generator) {
if ( !defaultNamedGenerators.contains( generator.getName() ) ) {
IdGenerator old = namedGenerators.put( generator.getName(), generator );
if ( old != null ) {
log.warn( "duplicate generator name {}", old.getName() );
}
if (old != null) LOG.duplicateGeneratorName(old.getName());
}
}
public void addGeneratorTable(String name, Properties params) {
Object old = generatorTables.put( name, params );
if ( old != null ) {
log.warn( "duplicate generator table: {}", name );
}
if (old != null) LOG.duplicateGeneratorTable(name);
}
public Properties getGeneratorTableProperties(String name, Map<String, Properties> localGeneratorTables) {
@ -3683,9 +3661,7 @@ public class Configuration implements Serializable {
public void addJoins(PersistentClass persistentClass, Map<String, Join> joins) {
Object old = Configuration.this.joins.put( persistentClass.getEntityName(), joins );
if ( old != null ) {
log.warn( "duplicate joins for class: {}", persistentClass.getEntityName() );
}
if (old != null) LOG.duplicateJoins(persistentClass.getEntityName());
}
public AnnotatedClassType getClassType(XClass clazz) {
@ -3938,7 +3914,7 @@ public class Configuration implements Serializable {
}
private void processHbmXmlQueue() {
log.debug( "Processing hbm.xml files" );
LOG.processingHbmFiles();
for ( Map.Entry<XmlDocument, Set<String>> entry : hbmMetadataToEntityNamesMap.entrySet() ) {
// Unfortunately we have to create a Mappings instance for each iteration here
processHbmXml( entry.getKey(), entry.getValue() );
@ -3954,7 +3930,7 @@ public class Configuration implements Serializable {
catch ( MappingException me ) {
throw new InvalidMappingException(
metadataXml.getOrigin().getType(),
metadataXml.getOrigin().getName(),
metadataXml.getOrigin().getName(),
me
);
}
@ -3968,7 +3944,7 @@ public class Configuration implements Serializable {
}
private void processAnnotatedClassesQueue() {
log.debug( "Process annotated classes" );
LOG.processAnnotatedClasses();
//bind classes in the correct order calculating some inheritance state
List<XClass> orderedClasses = orderAndFillHierarchy( annotatedClasses );
Mappings mappings = createMappings();
@ -4091,4 +4067,256 @@ public class Configuration implements Serializable {
public boolean isClass;
public boolean cacheLazy;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "Added %s with class %s" )
void addedType( String typeName,
String typeClass );
@LogMessage( level = DEBUG )
@Message( value = "%s=%s" )
void attribute( String name,
String value );
@LogMessage( level = DEBUG )
@Message( value = "Preparing to build session factory with filters : %s" )
void buildingSessionFactory( Map<String, FilterDefinition> filterDefinitions );
@LogMessage( level = WARN )
@Message( value = "I/O reported cached file could not be found : %s : %s" )
void cachedFileNotFound( String path,
FileNotFoundException error );
@LogMessage( level = INFO )
@Message( value = "Configuration resource: %s" )
void configurationResource( String resource );
@LogMessage( level = INFO )
@Message( value = "Configured SessionFactory: %s" )
void configuredSessionFactory( String name );
@LogMessage( level = INFO )
@Message( value = "Configuring from file: %s" )
void configuringFromFile( String file );
@LogMessage( level = INFO )
@Message( value = "Configuring from resource: %s" )
void configuringFromResource( String resource );
@LogMessage( level = INFO )
@Message( value = "Configuring from URL: %s" )
void configuringFromUrl( URL url );
@LogMessage( level = INFO )
@Message( value = "Configuring from XML document" )
void configuringFromXmlDocument();
@LogMessage( level = WARN )
@Message( value = "Duplicate generator name %s" )
void duplicateGeneratorName( String name );
@LogMessage( level = WARN )
@Message( value = "Duplicate generator table: %s" )
void duplicateGeneratorTable( String name );
@LogMessage( level = INFO )
@Message( value = "Duplicate import: %s -> %s" )
void duplicateImport( String entityName,
String rename );
@LogMessage( level = WARN )
@Message( value = "Duplicate joins for class: %s" )
void duplicateJoins( String entityName );
@LogMessage( level = DEBUG )
@Message( value = "Event listener: %s=%s" )
void eventListener( String type,
String className );
@LogMessage( level = DEBUG )
@Message( value = "Event listeners: %s=%s" )
void eventListeners( String type,
String listenerClasses );
@LogMessage( level = INFO )
@Message( value = "Found mapping document in jar: %s" )
void foundMappingDocument( String name );
@LogMessage( level = WARN )
@Message( value = "Mapping metadata cache was not completely processed" )
void incompleteMappingMetadataCacheProcessing();
@LogMessage( level = INFO )
@Message( value = "JACC contextID: %s" )
void jaccContextId( String contextId );
@LogMessage( level = DEBUG )
@Message( value = "Legacy Validator not present in classpath, ignoring event listener registration" )
void legacyValidatorNotFound();
@LogMessage( level = DEBUG )
@Message( value = "Mapping Document:\n%s" )
void mappingDocument( org.w3c.dom.Document document );
@LogMessage( level = INFO )
@Message( value = "Mapping Package %s" )
void mappingPackage( String packageName );
@LogMessage( level = DEBUG )
@Message( value = "Mapping XML:\n%s" )
void mappingXml( String xml );
@LogMessage( level = DEBUG )
@Message( value = "Method %s() not found in %s" )
void methodNotFound( String searchStartupMethod,
String searchStartupClass );
@LogMessage( level = DEBUG )
@Message( value = "Process annotated classes" )
void processAnnotatedClasses();
@LogMessage( level = DEBUG )
@Message( value = "Processing association property references" )
void processingAssociationPropertyReferences();
@LogMessage( level = DEBUG )
@Message( value = "Processing collection mappings" )
void processingCollectionMappings();
@LogMessage( level = DEBUG )
@Message( value = "Processing extends queue" )
void processingExtendsQueue();
@LogMessage( level = DEBUG )
@Message( value = "Processing foreign key constraints" )
void processingForeignKeyConstraints();
@LogMessage( level = DEBUG )
@Message( value = "Processing fk mappings (*ToOne and JoinedSubclass)" )
void processingForeignKeyMappings();
@LogMessage( level = DEBUG )
@Message( value = "Processing hbm.xml files" )
void processingHbmFiles();
@LogMessage( level = DEBUG )
@Message( value = "Processing native query and ResultSetMapping mappings" )
void processingNativeQuery();
@LogMessage( level = DEBUG )
@Message( value = "Properties: %s" )
void properties( Properties properties );
@LogMessage( level = INFO )
@Message( value = "Reading mappings from cache file: %s" )
void readingCachedMappings( File cachedFile );
@LogMessage( level = DEBUG )
@Message( value = "Reading mapping document from URL : %s" )
void readingMappingDocument( String urlExternalForm );
@LogMessage( level = INFO )
@Message( value = "Reading mappings from file: %s" )
void readingMappingsFromFile( String path );
@LogMessage( level = INFO )
@Message( value = "Reading mappings from resource: %s" )
void readingMappingsFromResource( String resourceName );
@LogMessage( level = DEBUG )
@Message( value = "Resolving reference to class: %s" )
void resolvingReference( String referencedEntityName );
@LogMessage( level = INFO )
@Message( value = "Searching for mapping documents in jar: %s" )
void searchingForMappingDocuments( String name );
@LogMessage( level = DEBUG )
@Message( value = "Search not present in classpath, ignoring event listener registration." )
void searchNotFound();
@LogMessage( level = DEBUG )
@Message( value = "Session-factory config [%s] named class [%s] for mapping" )
void sessionFactoryConfigClassForMapping( String configName,
String name );
@LogMessage( level = DEBUG )
@Message( value = "Session-factory config [%s] named file [%s] for mapping" )
void sessionFactoryConfigFileForMapping( String configName,
String name );
@LogMessage( level = DEBUG )
@Message( value = "Session-factory config [%s] named jar file [%s] for mapping" )
void sessionFactoryConfigJarForMapping( String configName,
String name );
@LogMessage( level = DEBUG )
@Message( value = "Session-factory config [%s] named package [%s] for mapping" )
void sessionFactoryConfigPackageForMapping( String configName,
String name );
@LogMessage( level = DEBUG )
@Message( value = "Session-factory config [%s] named resource [%s] for mapping" )
void sessionFactoryConfigResourceForMapping( String configName,
String name );
@LogMessage( level = TRACE )
@Message( value = "Starting secondPassCompile() processing" )
void startingSecondPassCompile();
@LogMessage( level = WARN )
@Message( value = "Unable to apply constraints on DDL for %s : %s" )
void unableToApplyConstraints( String className,
String message );
@LogMessage( level = TRACE )
@Message( value = "Was unable to close input stream" )
void unableToCloseInputStream();
@LogMessage( level = WARN )
@Message( value = "Could not close input stream for %s : %s" )
void unableToCloseInputStream( String resourceName,
String message );
@LogMessage( level = ERROR )
@Message( value = "Could not close jar: %s" )
void unableToCloseJar( String message );
@LogMessage( level = WARN )
@Message( value = "Could not deserialize cache file: %s : %s" )
void unableToDeserializeCache( String path,
SerializationException error );
@LogMessage( level = DEBUG )
@Message( value = "Unable to execute %s, ignoring event listener registration." )
void unableToExecute( String searchStartupMethod );
@LogMessage( level = DEBUG )
@Message( value = "Unable to instantiate %s, ignoring event listener registration." )
void unableToInstantiate( String searchStartupClass );
@LogMessage( level = ERROR )
@Message( value = "Could not parse the package-level metadata [%s]" )
void unableToParseMetadata( String packageName );
@LogMessage( level = WARN )
@Message( value = "I/O reported error writing cached file : %s: %s" )
void unableToWriteCachedFile( String path,
String message );
@LogMessage( level = INFO )
@Message( value = "Hibernate Validator not found: ignoring" )
void validatorNotFound();
@LogMessage( level = DEBUG )
@Message( value = "Writing cache file for: %s to: %s" )
void writingCacheFile( File xmlFile,
File cachedFile );
}
}
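For orientation, the pattern applied across these files looks roughly like the following minimal sketch. GreetingExample, greet(), and the "Greeting %s" message are hypothetical names used only for illustration and are not part of this commit: a nested @MessageLogger interface declares typed log methods, and org.jboss.logging.Logger.getMessageLogger() supplies an implementation generated by jboss-logging-tools (or a runtime proxy when jboss.i18n.generate-proxies is enabled).

import static org.jboss.logging.Logger.Level.INFO;

import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;

public class GreetingExample {

    // Typed logger; the implementation of the nested interface is generated
    // by jboss-logging-tools at build time or proxied at runtime.
    private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(
            Logger.class, GreetingExample.class.getName());

    public void greet(String name) {
        LOG.greeting(name); // e.g. greet("joe") logs "Greeting joe" at INFO
    }

    /**
     * Interface defining messages that may be logged by the outer class
     */
    @MessageLogger
    interface Logger {
        @LogMessage( level = INFO )
        @Message( value = "Greeting %s" )
        void greeting( String name );
    }
}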

View File

@ -23,13 +23,15 @@
*/
package org.hibernate.cfg;
import static org.jboss.logging.Logger.Level.TRACE;
import java.io.InputStream;
import org.hibernate.util.DTDEntityResolver;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
import org.xml.sax.EntityResolver;
import org.xml.sax.InputSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Resolve JPA xsd files locally
@ -39,7 +41,8 @@ import org.slf4j.LoggerFactory;
public class EJB3DTDEntityResolver extends DTDEntityResolver {
public static final EntityResolver INSTANCE = new EJB3DTDEntityResolver();
private final Logger log = LoggerFactory.getLogger( EJB3DTDEntityResolver.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
EJB3DTDEntityResolver.class.getPackage().getName());
boolean resolved = false;
@ -51,8 +54,9 @@ public class EJB3DTDEntityResolver extends DTDEntityResolver {
return resolved;
}
public InputSource resolveEntity(String publicId, String systemId) {
log.trace("Resolving XML entity {} : {}", publicId, systemId);
@Override
public InputSource resolveEntity(String publicId, String systemId) {
LOG.resolvingXmlEntity(publicId, systemId);
InputSource is = super.resolveEntity( publicId, systemId );
if ( is == null ) {
if ( systemId != null ) {
@ -88,25 +92,45 @@ public class EJB3DTDEntityResolver extends DTDEntityResolver {
private InputSource buildInputSource(String publicId, String systemId, InputStream dtdStream, boolean resolved) {
if ( dtdStream == null ) {
log.trace( "unable to locate [{}] on classpath", systemId );
LOG.unableToLocate(systemId);
return null;
}
else {
log.trace( "located [{}] in classpath", systemId );
InputSource source = new InputSource( dtdStream );
source.setPublicId( publicId );
source.setSystemId( systemId );
this.resolved = resolved;
return source;
}
LOG.located(systemId);
InputSource source = new InputSource(dtdStream);
source.setPublicId(publicId);
source.setSystemId(systemId);
this.resolved = resolved;
return source;
}
private InputStream getStreamFromClasspath(String fileName) {
log.trace(
"recognized JPA ORM namespace; attempting to resolve on classpath under org/hibernate/ejb"
);
LOG.resolvingFileName();
String path = "org/hibernate/ejb/" + fileName;
InputStream dtdStream = resolveInHibernateNamespace( path );
return dtdStream;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Located [%s] in classpath" )
void located( String systemId );
@LogMessage( level = TRACE )
@Message( value = "Recognized JPA ORM namespace; attempting to resolve on classpath under org/hibernate/ejb" )
void resolvingFileName();
@LogMessage( level = TRACE )
@Message( value = "Resolving XML entity %s : %s" )
void resolvingXmlEntity( String publicId,
String systemId );
@LogMessage( level = TRACE )
@Message( value = "Unable to locate [%s] on classpath" )
void unableToLocate( String systemId );
}
}

View File

@ -23,8 +23,8 @@
*/
package org.hibernate.cfg;
import static org.jboss.logging.Logger.Level.DEBUG;
import java.util.Map;
import org.hibernate.AnnotationException;
import org.hibernate.AssertionFailure;
import org.hibernate.annotations.ColumnTransformer;
@ -38,8 +38,10 @@ import org.hibernate.mapping.Join;
import org.hibernate.mapping.SimpleValue;
import org.hibernate.mapping.Table;
import org.hibernate.util.StringHelper;
import org.slf4j.LoggerFactory;
import org.slf4j.Logger;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Wrap state of an EJB3 @Column annotation
@ -48,7 +50,10 @@ import org.slf4j.Logger;
* @author Emmanuel Bernard
*/
public class Ejb3Column {
private static final Logger log = LoggerFactory.getLogger( Ejb3Column.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
Ejb3Column.class.getPackage().getName());
private Column mappingColumn;
private boolean insertable = true;
private boolean updatable = true;
@ -103,7 +108,7 @@ public class Ejb3Column {
public boolean isFormula() {
return StringHelper.isNotEmpty( formulaString );
}
public String getFormulaString() {
return formulaString;
}
@ -181,7 +186,7 @@ public class Ejb3Column {
public void bind() {
if ( StringHelper.isNotEmpty( formulaString ) ) {
log.debug( "binding formula {}", formulaString );
LOG.bindingFormula(formulaString);
formula = new Formula();
formula.setFormula( formulaString );
}
@ -189,7 +194,7 @@ public class Ejb3Column {
initMappingColumn(
logicalColumnName, propertyName, length, precision, scale, nullable, sqlType, unique, true
);
log.debug( "Binding column: " + toString());
LOG.bindingColumn(toString());
}
}
@ -218,7 +223,7 @@ public class Ejb3Column {
this.mappingColumn.setNullable( nullable );
this.mappingColumn.setSqlType( sqlType );
this.mappingColumn.setUnique( unique );
if(writeExpression != null && !writeExpression.matches("[^?]*\\?[^?]*")) {
throw new AnnotationException(
"@WriteExpression must contain exactly one value placeholder ('?') character: property ["
@ -420,7 +425,7 @@ public class Ejb3Column {
throw new AnnotationException( "AttributeOverride.column() should override all columns for now" );
}
actualCols = overriddenCols.length == 0 ? null : overriddenCols;
log.debug( "Column(s) overridden for property {}", inferredData.getPropertyName() );
LOG.columnsOverridden(inferredData.getPropertyName());
}
if ( actualCols == null ) {
columns = buildImplicitColumn(
@ -495,7 +500,7 @@ public class Ejb3Column {
}
private void processExpression(ColumnTransformer annotation) {
String nonNullLogicalColumnName = logicalColumnName != null ? logicalColumnName : ""; //use the default for annotations
String nonNullLogicalColumnName = logicalColumnName != null ? logicalColumnName : ""; //use the default for annotations
if ( annotation != null &&
( StringHelper.isEmpty( annotation.forColumn() )
|| annotation.forColumn().equals( nonNullLogicalColumnName ) ) ) {
@ -551,14 +556,14 @@ public class Ejb3Column {
public static void checkPropertyConsistency(Ejb3Column[] columns, String propertyName) {
int nbrOfColumns = columns.length;
if ( nbrOfColumns > 1 ) {
for (int currentIndex = 1; currentIndex < nbrOfColumns; currentIndex++) {
if (columns[currentIndex].isFormula() || columns[currentIndex - 1].isFormula()) {
continue;
}
if ( columns[currentIndex].isInsertable() != columns[currentIndex - 1].isInsertable() ) {
throw new AnnotationException(
"Mixing insertable and non insertable columns in a property is not allowed: " + propertyName
@ -581,7 +586,7 @@ public class Ejb3Column {
}
}
}
}
public void addIndex(Index index, boolean inSecondPass) {
@ -626,4 +631,23 @@ public class Ejb3Column {
sb.append( '}' );
return sb.toString();
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "Binding column: %s" )
void bindingColumn( String column );
@LogMessage( level = DEBUG )
@Message( value = "Binding formula %s" )
void bindingFormula( String formula );
@LogMessage( level = DEBUG )
@Message( value = "Column(s) overridden for property %s" )
void columnsOverridden( String propertyName );
}
}

View File

@ -23,6 +23,9 @@
*/
package org.hibernate.cfg;
import static org.jboss.logging.Logger.Level.ERROR;
import static org.jboss.logging.Logger.Level.INFO;
import static org.jboss.logging.Logger.Level.WARN;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
@ -32,15 +35,15 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.HibernateException;
import org.hibernate.Version;
import org.hibernate.bytecode.BytecodeProvider;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.util.ConfigHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
@ -532,7 +535,7 @@ public final class Environment {
public static final String JPAQL_STRICT_COMPLIANCE= "hibernate.query.jpaql_strict_compliance";
/**
* When using pooled {@link org.hibernate.id.enhanced.Optimizer optimizers}, prefer interpreting the
* When using pooled {@link org.hibernate.id.enhanced.Optimizer optimizers}, prefer interpreting the
* database value as the lower (lo) boundary. The default is to interpret it as the high boundary.
*/
public static final String PREFER_POOLED_VALUES_LO = "hibernate.id.optimizer.pooled.prefer_lo";
@ -566,7 +569,8 @@ public final class Environment {
private static final Map OBSOLETE_PROPERTIES = new HashMap();
private static final Map RENAMED_PROPERTIES = new HashMap();
private static final Logger log = LoggerFactory.getLogger(Environment.class);
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
Environment.class.getPackage().getName());
/**
* Issues warnings to the user when any obsolete or renamed property names are used.
@ -579,12 +583,10 @@ public final class Environment {
while ( iter.hasNext() ) {
final Object propertyName = iter.next();
Object newPropertyName = OBSOLETE_PROPERTIES.get( propertyName );
if ( newPropertyName != null ) {
log.warn( "Usage of obsolete property: " + propertyName + " no longer supported, use: " + newPropertyName );
}
if (newPropertyName != null) LOG.unsupportedProperty(propertyName, newPropertyName);
newPropertyName = RENAMED_PROPERTIES.get( propertyName );
if ( newPropertyName != null ) {
log.warn( "Property [" + propertyName + "] has been renamed to [" + newPropertyName + "]; update your properties appropriately" );
LOG.renamedProperty(propertyName, newPropertyName);
if ( ! props.containsKey( newPropertyName ) ) {
propertiesToAdd.put( newPropertyName, props.get( propertyName ) );
}
@ -595,7 +597,7 @@ public final class Environment {
static {
log.info( "Hibernate " + Version.getVersionString() );
LOG.version(Version.getVersionString());
RENAMED_PROPERTIES.put( "hibernate.cglib.use_reflection_optimizer", USE_REFLECTION_OPTIMIZER );
@ -613,29 +615,29 @@ public final class Environment {
InputStream stream = ConfigHelper.getResourceAsStream("/hibernate.properties");
try {
GLOBAL_PROPERTIES.load(stream);
log.info( "loaded properties from resource hibernate.properties: " + ConfigurationHelper.maskOut(GLOBAL_PROPERTIES, PASS) );
LOG.propertiesLoaded(ConfigurationHelper.maskOut(GLOBAL_PROPERTIES, PASS));
}
catch (Exception e) {
log.error("problem loading properties from hibernate.properties");
LOG.unableToLoadProperties();
}
finally {
try{
stream.close();
}
catch (IOException ioe){
log.error("could not close stream on hibernate.properties", ioe);
LOG.unableToCloseStream(ioe);
}
}
}
catch (HibernateException he) {
log.info("hibernate.properties not found");
LOG.propertiesNotFound();
}
try {
GLOBAL_PROPERTIES.putAll( System.getProperties() );
}
catch (SecurityException se) {
log.warn("could not copy system properties, system properties will be ignored");
LOG.unableToCopySystemProperties();
}
verifyProperties(GLOBAL_PROPERTIES);
@ -643,12 +645,8 @@ public final class Environment {
ENABLE_BINARY_STREAMS = ConfigurationHelper.getBoolean(USE_STREAMS_FOR_BINARY, GLOBAL_PROPERTIES);
ENABLE_REFLECTION_OPTIMIZER = ConfigurationHelper.getBoolean(USE_REFLECTION_OPTIMIZER, GLOBAL_PROPERTIES);
if (ENABLE_BINARY_STREAMS) {
log.info("using java.io streams to persist binary types");
}
if (ENABLE_REFLECTION_OPTIMIZER) {
log.info("using bytecode reflection optimizer");
}
if (ENABLE_BINARY_STREAMS) LOG.usingStreams();
if (ENABLE_REFLECTION_OPTIMIZER) LOG.usingReflectionOptimizer();
BYTECODE_PROVIDER_INSTANCE = buildBytecodeProvider( GLOBAL_PROPERTIES );
boolean getGeneratedKeysSupport;
@ -660,9 +658,7 @@ public final class Environment {
getGeneratedKeysSupport = false;
}
JVM_SUPPORTS_GET_GENERATED_KEYS = getGeneratedKeysSupport;
if (!JVM_SUPPORTS_GET_GENERATED_KEYS) {
log.info("JVM does not support Statement.getGeneratedKeys()");
}
if (!JVM_SUPPORTS_GET_GENERATED_KEYS) LOG.generatedKeysNotSupported();
boolean linkedHashSupport;
try {
@ -673,25 +669,17 @@ public final class Environment {
linkedHashSupport = false;
}
JVM_SUPPORTS_LINKED_HASH_COLLECTIONS = linkedHashSupport;
if (!JVM_SUPPORTS_LINKED_HASH_COLLECTIONS) {
log.info("JVM does not support LinkedHasMap, LinkedHashSet - ordered maps and sets disabled");
}
if (!JVM_SUPPORTS_LINKED_HASH_COLLECTIONS) LOG.linkedMapsAndSetsNotSupported();
long x = 123456789;
JVM_HAS_TIMESTAMP_BUG = new Timestamp(x).getTime() != x;
if (JVM_HAS_TIMESTAMP_BUG) {
log.info("using workaround for JVM bug in java.sql.Timestamp");
}
if (JVM_HAS_TIMESTAMP_BUG) LOG.usingTimestampWorkaround();
Timestamp t = new Timestamp(0);
t.setNanos(5 * 1000000);
JVM_HAS_JDK14_TIMESTAMP = t.getTime() == 5;
if (JVM_HAS_JDK14_TIMESTAMP) {
log.info("using JDK 1.4 java.sql.Timestamp handling");
}
else {
log.info("using pre JDK 1.4 java.sql.Timestamp handling");
}
if (JVM_HAS_JDK14_TIMESTAMP) LOG.usingJdk14TimestampHandling();
else LOG.usingPreJdk14TimestampHandling();
}
public static BytecodeProvider getBytecodeProvider() {
@ -718,7 +706,8 @@ public final class Environment {
*
* @deprecated Starting with 3.3 Hibernate requires JDK 1.4 or higher
*/
public static boolean jvmHasJDK14Timestamp() {
@Deprecated
public static boolean jvmHasJDK14Timestamp() {
return JVM_HAS_JDK14_TIMESTAMP;
}
@ -733,7 +722,8 @@ public final class Environment {
* @see java.util.LinkedHashSet
* @see java.util.LinkedHashMap
*/
public static boolean jvmSupportsLinkedHashCollections() {
@Deprecated
public static boolean jvmSupportsLinkedHashCollections() {
return JVM_SUPPORTS_LINKED_HASH_COLLECTIONS;
}
@ -747,7 +737,8 @@ public final class Environment {
* @see Statement
* @deprecated Starting with 3.3 Hibernate requires JDK 1.4 or higher
*/
public static boolean jvmSupportsGetGeneratedKeys() {
@Deprecated
public static boolean jvmSupportsGetGeneratedKeys() {
return JVM_SUPPORTS_GET_GENERATED_KEYS;
}
@ -806,7 +797,7 @@ public final class Environment {
public static BytecodeProvider buildBytecodeProvider(Properties properties) {
String provider = ConfigurationHelper.getString( BYTECODE_PROVIDER, properties, "javassist" );
log.info( "Bytecode provider name : " + provider );
LOG.bytecodeProvider(provider);
return buildBytecodeProvider( provider );
}
@ -818,8 +809,84 @@ public final class Environment {
return new org.hibernate.bytecode.cglib.BytecodeProviderImpl();
}
log.warn( "unrecognized bytecode provider [" + providerName + "], using javassist by default" );
LOG.unknownBytecodeProvider(providerName);
return new org.hibernate.bytecode.javassist.BytecodeProviderImpl();
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = INFO )
@Message( value = "Bytecode provider name : %s" )
void bytecodeProvider( String provider );
@LogMessage( level = INFO )
@Message( value = "JVM does not support Statement.getGeneratedKeys()" )
void generatedKeysNotSupported();
@LogMessage( level = INFO )
@Message( value = "JVM does not support LinkedHashMap, LinkedHashSet - ordered maps and sets disabled" )
void linkedMapsAndSetsNotSupported();
@LogMessage( level = INFO )
@Message( value = "Loaded properties from resource hibernate.properties: %s" )
void propertiesLoaded( Properties maskOut );
@LogMessage( level = INFO )
@Message( value = "hibernate.properties not found" )
void propertiesNotFound();
@LogMessage( level = WARN )
@Message( value = "Property [%s] has been renamed to [%s]; update your properties appropriately" )
void renamedProperty( Object propertyName,
Object newPropertyName );
@LogMessage( level = ERROR )
@Message( value = "Could not close stream on hibernate.properties: %s" )
void unableToCloseStream( IOException error );
@LogMessage( level = WARN )
@Message( value = "Could not copy system properties, system properties will be ignored" )
void unableToCopySystemProperties();
@LogMessage( level = ERROR )
@Message( value = "Problem loading properties from hibernate.properties" )
void unableToLoadProperties();
@LogMessage( level = WARN )
@Message( value = "unrecognized bytecode provider [%s], using javassist by default" )
void unknownBytecodeProvider( String providerName );
@LogMessage( level = WARN )
@Message( value = "Usage of obsolete property: %s no longer supported, use: %s" )
void unsupportedProperty( Object propertyName,
Object newPropertyName );
@LogMessage( level = INFO )
@Message( value = "Using JDK 1.4 java.sql.Timestamp handling" )
void usingJdk14TimestampHandling();
@LogMessage( level = INFO )
@Message( value = "Using pre JDK 1.4 java.sql.Timestamp handling" )
void usingPreJdk14TimestampHandling();
@LogMessage( level = INFO )
@Message( value = "Using bytecode reflection optimizer" )
void usingReflectionOptimizer();
@LogMessage( level = INFO )
@Message( value = "Using java.io streams to persist binary types" )
void usingStreams();
@LogMessage( level = INFO )
@Message( value = "Using workaround for JVM bug in java.sql.Timestamp" )
void usingTimestampWorkaround();
@LogMessage( level = INFO )
@Message( value = "Hibernate %s" )
void version( String versionString );
}
}

View File

@ -23,13 +23,15 @@
*/
package org.hibernate.cfg;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.INFO;
import static org.jboss.logging.Logger.Level.WARN;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Properties;
import java.util.StringTokenizer;
import org.dom4j.Attribute;
import org.dom4j.Document;
import org.dom4j.Element;
@ -52,6 +54,7 @@ import org.hibernate.mapping.Collection;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Component;
import org.hibernate.mapping.DependantValue;
import org.hibernate.mapping.FetchProfile;
import org.hibernate.mapping.Fetchable;
import org.hibernate.mapping.Filterable;
import org.hibernate.mapping.Formula;
@ -86,7 +89,6 @@ import org.hibernate.mapping.TypeDef;
import org.hibernate.mapping.UnionSubclass;
import org.hibernate.mapping.UniqueKey;
import org.hibernate.mapping.Value;
import org.hibernate.mapping.FetchProfile;
import org.hibernate.persister.entity.JoinedSubclassEntityPersister;
import org.hibernate.persister.entity.SingleTableEntityPersister;
import org.hibernate.persister.entity.UnionSubclassEntityPersister;
@ -97,9 +99,10 @@ import org.hibernate.util.JoinedIterator;
import org.hibernate.util.ReflectHelper;
import org.hibernate.util.StringHelper;
import org.hibernate.util.xml.XmlDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Walks an XML mapping document and produces the Hibernate configuration-time metamodel (the
@ -109,7 +112,8 @@ import org.slf4j.LoggerFactory;
*/
public final class HbmBinder {
private static final Logger log = LoggerFactory.getLogger( HbmBinder.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
HbmBinder.class.getPackage().getName());
/**
* Private constructor to disallow instantiation.
@ -230,7 +234,7 @@ public final class HbmBinder {
String rename = ( renameNode == null ) ?
StringHelper.unqualify( className ) :
renameNode.getValue();
log.debug( "Import: " + rename + " -> " + className );
LOG.bindImport( rename, className );
mappings.addImport( className, rename );
}
@ -347,10 +351,7 @@ public final class HbmBinder {
entity.setTable( table );
bindComment(table, node);
log.info(
"Mapping class: " + entity.getEntityName() +
" -> " + entity.getTable().getName()
);
LOG.mappingClass(entity.getEntityName(), entity.getTable().getName());
// MUTABLE
Attribute mutableNode = node.attribute( "mutable" );
@ -846,10 +847,7 @@ public final class HbmBinder {
);
unionSubclass.setTable( mytable );
log.info(
"Mapping union-subclass: " + unionSubclass.getEntityName() +
" -> " + unionSubclass.getTable().getName()
);
LOG.mappingUnionSubclass(unionSubclass.getEntityName(), unionSubclass.getTable().getName());
createClassProperties( node, unionSubclass, mappings, inheritedMetas );
@ -866,10 +864,7 @@ public final class HbmBinder {
.setEntityPersisterClass( SingleTableEntityPersister.class );
}
log.info(
"Mapping subclass: " + subclass.getEntityName() +
" -> " + subclass.getTable().getName()
);
LOG.mappingSubclass(subclass.getEntityName(), subclass.getTable().getName());
// properties
createClassProperties( node, subclass, mappings, inheritedMetas );
@ -925,10 +920,7 @@ public final class HbmBinder {
joinedSubclass.setTable( mytable );
bindComment(mytable, node);
log.info(
"Mapping joined-subclass: " + joinedSubclass.getEntityName() +
" -> " + joinedSubclass.getTable().getName()
);
LOG.mappingJoinedSubclass(joinedSubclass.getEntityName(), joinedSubclass.getTable().getName());
// KEY
Element keyNode = node.element( "key" );
@ -990,10 +982,8 @@ public final class HbmBinder {
join.setOptional( "true".equals( nullNode.getValue() ) );
}
log.info(
"Mapping class join: " + persistentClass.getEntityName() +
" -> " + join.getTable().getName()
);
LOG.mappingClassJoin(persistentClass.getEntityName(), join.getTable().getName());
// KEY
Element keyNode = node.element( "key" );
@ -1302,7 +1292,7 @@ public final class HbmBinder {
// properties generated on update can never be updateable...
if ( property.isUpdateable() && generation == PropertyGeneration.ALWAYS ) {
if ( updateNode == null ) {
// updateable only because the user did not specify
// updateable only because the user did not specify
// anything; just override it
property.setUpdateable( false );
}
@ -1328,13 +1318,13 @@ public final class HbmBinder {
property.setLazy( lazyNode != null && "true".equals( lazyNode.getValue() ) );
}
if ( log.isDebugEnabled() ) {
if (LOG.isDebugEnabled()) {
String msg = "Mapped property: " + property.getName();
String columns = columns( property.getValue() );
if ( columns.length() > 0 ) msg += " -> " + columns;
// TODO: this fails if we run with debug on!
// if ( model.getType()!=null ) msg += ", type: " + model.getType().getName();
log.debug( msg );
LOG.mappedProperty(msg);
}
property.setMetaAttributes( getMetas( node, inheritedMetas ) );
@ -1378,9 +1368,7 @@ public final class HbmBinder {
if ( Environment.jvmSupportsLinkedHashCollections() || ( collection instanceof Bag ) ) {
collection.setOrderBy( orderNode.getValue() );
}
else {
log.warn( "Attribute \"order-by\" ignored in JDK1.3 or less" );
}
else LOG.attributeIgnored();
}
Attribute whereNode = node.attribute( "where" );
if ( whereNode != null ) {
@ -1489,10 +1477,7 @@ public final class HbmBinder {
collection.setCollectionTable( table );
bindComment(table, node);
log.info(
"Mapping collection: " + collection.getRole() +
" -> " + collection.getCollectionTable().getName()
);
LOG.mappingCollection(collection.getRole(), collection.getCollectionTable().getName());
}
// SORT
@ -1755,7 +1740,7 @@ public final class HbmBinder {
}
column.setCustomWrite( customWrite );
column.setCustomRead( node.attributeValue( "read" ) );
Element comment = node.element("comment");
if (comment!=null) column.setComment( comment.getTextTrim() );
@ -2080,7 +2065,7 @@ public final class HbmBinder {
if ( mappings.getSchemaName() != null ) {
params.setProperty(
PersistentIdentifierGenerator.SCHEMA,
mappings.getObjectNameNormalizer().normalizeIdentifierQuoting( mappings.getSchemaName() )
mappings.getObjectNameNormalizer().normalizeIdentifierQuoting( mappings.getSchemaName() )
);
}
if ( mappings.getCatalogName() != null ) {
@ -2475,10 +2460,7 @@ public final class HbmBinder {
oneToMany.setAssociatedClass( persistentClass );
collection.setCollectionTable( persistentClass.getTable() );
log.info(
"Mapping collection: " + collection.getRole() +
" -> " + collection.getCollectionTable().getName()
);
LOG.mappingCollection(collection.getRole(), collection.getCollectionTable().getName());
}
// CHECK
@ -2624,11 +2606,7 @@ public final class HbmBinder {
if ( condition==null) {
throw new MappingException("no filter condition found for filter: " + name);
}
log.debug(
"Applying many-to-many filter [" + name +
"] as [" + condition +
"] to role [" + collection.getRole() + "]"
);
LOG.applyingManyToManyFilter(name, condition, collection.getRole());
collection.addManyToManyFilter( name, condition );
}
}
@ -2661,7 +2639,7 @@ public final class HbmBinder {
String queryName = queryElem.attributeValue( "name" );
if (path!=null) queryName = path + '.' + queryName;
String query = queryElem.getText();
log.debug( "Named query: " + queryName + " -> " + query );
LOG.namedQuery(queryName, query);
boolean cacheable = "true".equals( queryElem.attributeValue( "cacheable" ) );
String region = queryElem.attributeValue( "cache-region" );
@ -2771,7 +2749,7 @@ public final class HbmBinder {
(IdentifierCollection) collection,
persistentClasses,
mappings,
inheritedMetas
inheritedMetas
);
}
@ -2789,7 +2767,7 @@ public final class HbmBinder {
(Map) collection,
persistentClasses,
mappings,
inheritedMetas
inheritedMetas
);
}
@ -2808,7 +2786,7 @@ public final class HbmBinder {
}
}
static class ListSecondPass extends CollectionSecondPass {
ListSecondPass(Element node, Mappings mappings, List collection, java.util.Map inheritedMetas) {
super( node, mappings, collection, inheritedMetas );
@ -2821,7 +2799,7 @@ public final class HbmBinder {
(List) collection,
persistentClasses,
mappings,
inheritedMetas
inheritedMetas
);
}
@ -2961,10 +2939,10 @@ public final class HbmBinder {
if ( meta == null ) {
meta = new MetaAttribute( name );
map.put( name, meta );
} else if (meta == inheritedAttribute) { // overriding inherited meta attribute. HBX-621 & HBX-793
meta = new MetaAttribute( name );
map.put( name, meta );
}
} else if (meta == inheritedAttribute) { // overriding inherited meta attribute. HBX-621 & HBX-793
meta = new MetaAttribute( name );
map.put( name, meta );
}
meta.addValue( metaNode.getText() );
}
return map;
@ -2994,7 +2972,7 @@ public final class HbmBinder {
private static void parseFilterDef(Element element, Mappings mappings) {
String name = element.attributeValue( "name" );
log.debug( "Parsing filter-def [" + name + "]" );
LOG.parsingFilterDefinition(name);
String defaultCondition = element.getTextTrim();
if ( StringHelper.isEmpty( defaultCondition ) ) {
defaultCondition = element.attributeValue( "condition" );
@ -3005,12 +2983,12 @@ public final class HbmBinder {
final Element param = (Element) params.next();
final String paramName = param.attributeValue( "name" );
final String paramType = param.attributeValue( "type" );
log.debug( "adding filter parameter : " + paramName + " -> " + paramType );
LOG.addingFilterParameter(paramName, paramType);
final Type heuristicType = mappings.getTypeResolver().heuristicType( paramType );
log.debug( "parameter heuristic type : " + heuristicType );
LOG.parameterHeuristicType(heuristicType);
paramMappings.put( paramName, heuristicType );
}
log.debug( "Parsed filter-def [" + name + "]" );
LOG.parsedFilterDefinition(name);
FilterDefinition def = new FilterDefinition( name, defaultCondition, paramMappings );
mappings.addFilterDefinition( def );
}
@ -3033,7 +3011,7 @@ public final class HbmBinder {
if ( condition==null) {
throw new MappingException("no filter condition found for filter: " + name);
}
log.debug( "Applying filter [" + name + "] as [" + condition + "]" );
LOG.applyingFilter(name, condition);
filterable.addFilter( name, condition );
}
@ -3169,4 +3147,87 @@ public final class HbmBinder {
private static interface EntityElementHandler {
public void handleEntity(String entityName, String className, Mappings mappings);
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "Applying filter [%s] as [%s]" )
void applyingFilter( String name,
String condition );
@LogMessage( level = DEBUG )
@Message( value = "Adding filter parameter : %s -> %s" )
void addingFilterParameter( String paramName,
String paramType );
@LogMessage( level = DEBUG )
@Message( value = "Applying many-to-many filter [%s] as [%s] to role [%s]" )
void applyingManyToManyFilter( String name,
String condition,
String role );
@LogMessage( level = WARN )
@Message( value = "Attribute \"order-by\" ignored in JDK1.3 or less" )
void attributeIgnored();
@LogMessage( level = DEBUG )
@Message( value = "Import: %s -> %s" )
void bindImport( String rename,
String className );
@LogMessage( level = DEBUG )
@Message( value = "%s" )
void mappedProperty( String message );
@LogMessage( level = INFO )
@Message( value = "Mapping class: %s -> %s" )
void mappingClass( String entityName,
String name );
@LogMessage( level = INFO )
@Message( value = "Mapping class join: %s -> %s" )
void mappingClassJoin( String entityName,
String name );
@LogMessage( level = INFO )
@Message( value = "Mapping collection: %s -> %s" )
void mappingCollection( String entityName,
String name );
@LogMessage( level = INFO )
@Message( value = "Mapping joined-subclass: %s -> %s" )
void mappingJoinedSubclass( String entityName,
String name );
@LogMessage( level = INFO )
@Message( value = "Mapping subclass: %s -> %s" )
void mappingSubclass( String entityName,
String name );
@LogMessage( level = INFO )
@Message( value = "Mapping union-subclass: %s -> %s" )
void mappingUnionSubclass( String entityName,
String name );
@LogMessage( level = DEBUG )
@Message( value = "Named query: %s -> %s" )
void namedQuery( String queryName,
String query );
@LogMessage( level = DEBUG )
@Message( value = "Parameter heuristic type : %s" )
void parameterHeuristicType( Type heuristicType );
@LogMessage( level = DEBUG )
@Message( value = "Parsed filter-def [%s]" )
void parsedFilterDefinition( String name );
@LogMessage( level = DEBUG )
@Message( value = "Parsing filter-def [%s]" )
void parsingFilterDefinition( String name );
}
}

View File

@ -23,24 +23,29 @@
*/
package org.hibernate.cfg;
import java.util.Map;
import static org.jboss.logging.Logger.Level.DEBUG;
import java.util.ArrayList;
import java.util.Iterator;
import org.hibernate.MappingException;
import org.hibernate.util.StringHelper;
import org.hibernate.engine.NamedSQLQueryDefinition;
import org.hibernate.engine.ResultSetMappingDefinition;
import java.util.Map;
import org.dom4j.Attribute;
import org.dom4j.Element;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.MappingException;
import org.hibernate.engine.NamedSQLQueryDefinition;
import org.hibernate.engine.ResultSetMappingDefinition;
import org.hibernate.util.StringHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* @author Emmanuel Bernard
*/
public class NamedSQLQuerySecondPass extends ResultSetMappingBinder implements QuerySecondPass {
private static Logger log = LoggerFactory.getLogger( NamedSQLQuerySecondPass.class);
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
NamedSQLQuerySecondPass.class.getPackage().getName());
private Element queryElem;
private String path;
private Mappings mappings;
@ -115,7 +120,19 @@ public class NamedSQLQuerySecondPass extends ResultSetMappingBinder implements Q
);
}
log.debug( "Named SQL query: " + queryName + " -> " + namedQuery.getQueryString() );
LOG.namedSqlQuery(queryName, namedQuery.getQueryString());
mappings.addSQLQuery( queryName, namedQuery );
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "Named SQL query: %s -> %s" )
void namedSqlQuery( String queryName,
String namedQuery );
}
}

View File

@ -26,6 +26,7 @@
package org.hibernate.cfg;
import static org.jboss.logging.Logger.Level.WARN;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@ -37,10 +38,6 @@ import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.Transient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.AnnotationException;
import org.hibernate.MappingException;
import org.hibernate.annotations.ManyToAny;
@ -49,6 +46,10 @@ import org.hibernate.annotations.Type;
import org.hibernate.annotations.common.reflection.XClass;
import org.hibernate.annotations.common.reflection.XProperty;
import org.hibernate.util.StringHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* A helper class to keep the {@code XProperty}s of a class ordered by access type.
@ -57,7 +58,12 @@ import org.hibernate.util.StringHelper;
*/
class PropertyContainer {
private static final Logger log = LoggerFactory.getLogger( AnnotationBinder.class );
static {
System.setProperty("jboss.i18n.generate-proxies", "true");
}
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
PropertyContainer.class.getPackage().getName());
private final AccessType explicitClassDefinedAccessType;
@ -149,12 +155,8 @@ class PropertyContainer {
// the access type for this property is explicitly set to AccessType.FIELD, hence we have to
// use field access for this property even if the default access type for the class is AccessType.PROPERTY
AccessType accessType = AccessType.getAccessStrategy( access.value() );
if ( accessType == AccessType.FIELD ) {
propertyAccessMap.put( property.getName(), property );
}
else { // AccessType.PROPERTY
log.warn( "Placing @Access(AccessType.PROPERTY) on a field does not have any effect." );
}
if (accessType == AccessType.FIELD) propertyAccessMap.put(property.getName(), property);
else LOG.annotationHasNoEffect(AccessType.FIELD);
}
for ( XProperty property : propertyAccessMap.values() ) {
@ -168,12 +170,8 @@ class PropertyContainer {
// see "2.3.2 Explicit Access Type" of JPA 2 spec
// the access type for this property is explicitly set to AccessType.PROPERTY, hence we have to
// return use method access even if the default class access type is AccessType.FIELD
if ( accessType == AccessType.PROPERTY ) {
fieldAccessMap.put( property.getName(), property );
}
else { // AccessType.FIELD
log.warn( "Placing @Access(AccessType.FIELD) on a property does not have any effect." );
}
if (accessType == AccessType.PROPERTY) fieldAccessMap.put(property.getName(), property);
else LOG.annotationHasNoEffect(AccessType.PROPERTY);
}
}
@ -280,6 +278,17 @@ class PropertyContainer {
|| "net.sf.cglib.transform.impl.InterceptFieldCallback".equals( property.getType().getName() )
|| "org.hibernate.bytecode.javassist.FieldHandler".equals( property.getType().getName() );
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "Placing @Access(AccessType.%s) on a field does not have any effect." )
void annotationHasNoEffect( AccessType type );
}
}

View File

@ -23,22 +23,22 @@
*/
package org.hibernate.cfg;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.INFO;
import static org.jboss.logging.Logger.Level.WARN;
import java.io.Serializable;
import java.sql.SQLException;
import java.util.Map;
import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.ConnectionReleaseMode;
import org.hibernate.EntityMode;
import org.hibernate.HibernateException;
import org.hibernate.engine.jdbc.JdbcSupport;
import org.hibernate.bytecode.BytecodeProvider;
import org.hibernate.cache.QueryCacheFactory;
import org.hibernate.cache.RegionFactory;
import org.hibernate.cache.impl.NoCachingRegionFactory;
import org.hibernate.cache.impl.bridge.RegionFactoryCacheProviderBridge;
import org.hibernate.engine.jdbc.JdbcSupport;
import org.hibernate.engine.jdbc.batch.internal.BatchBuilder;
import org.hibernate.engine.jdbc.spi.ExtractedDatabaseMetaData;
import org.hibernate.engine.jdbc.spi.JdbcServices;
@ -51,6 +51,10 @@ import org.hibernate.transaction.TransactionManagerLookup;
import org.hibernate.transaction.TransactionManagerLookupFactory;
import org.hibernate.util.ReflectHelper;
import org.hibernate.util.StringHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Reads configuration properties and builds a {@link Settings} instance.
@ -58,8 +62,11 @@ import org.hibernate.util.StringHelper;
* @author Gavin King
*/
public class SettingsFactory implements Serializable {
private static final Logger log = LoggerFactory.getLogger( SettingsFactory.class );
private static final long serialVersionUID = -1194386144994524825L;
private static final long serialVersionUID = -1194386144994524825L;
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
SettingsFactory.class.getPackage().getName());
public static final String DEF_CACHE_REG_FACTORY = NoCachingRegionFactory.class.getName();
@ -96,42 +103,42 @@ public class SettingsFactory implements Serializable {
settings.setTransactionManagerLookup( createTransactionManagerLookup(properties) );
boolean flushBeforeCompletion = ConfigurationHelper.getBoolean(Environment.FLUSH_BEFORE_COMPLETION, properties);
log.info("Automatic flush during beforeCompletion(): " + enabledDisabled(flushBeforeCompletion) );
LOG.autoFlush(enabledDisabled(flushBeforeCompletion));
settings.setFlushBeforeCompletionEnabled(flushBeforeCompletion);
boolean autoCloseSession = ConfigurationHelper.getBoolean(Environment.AUTO_CLOSE_SESSION, properties);
log.info("Automatic session close at end of transaction: " + enabledDisabled(autoCloseSession) );
LOG.autoSessionClose(enabledDisabled(autoCloseSession));
settings.setAutoCloseSessionEnabled(autoCloseSession);
//JDBC and connection settings:
int batchSize = ConfigurationHelper.getInt(Environment.STATEMENT_BATCH_SIZE, properties, 0);
if ( !meta.supportsBatchUpdates() ) batchSize = 0;
if (batchSize>0) log.info("JDBC batch size: " + batchSize);
if (batchSize>0) LOG.jdbcBatchSize(batchSize);
settings.setJdbcBatchSize(batchSize);
boolean jdbcBatchVersionedData = ConfigurationHelper.getBoolean(Environment.BATCH_VERSIONED_DATA, properties, false);
if (batchSize>0) log.info("JDBC batch updates for versioned data: " + enabledDisabled(jdbcBatchVersionedData) );
if (batchSize > 0) LOG.jdbcBatchUpdates(enabledDisabled(jdbcBatchVersionedData));
settings.setJdbcBatchVersionedData(jdbcBatchVersionedData);
settings.setBatcherBuilder( createBatchBuilder(properties, batchSize) );
boolean useScrollableResultSets = ConfigurationHelper.getBoolean(Environment.USE_SCROLLABLE_RESULTSET, properties, meta.supportsScrollableResults());
log.info("Scrollable result sets: " + enabledDisabled(useScrollableResultSets) );
LOG.scrollableResultSets(enabledDisabled(useScrollableResultSets));
settings.setScrollableResultSetsEnabled(useScrollableResultSets);
boolean wrapResultSets = ConfigurationHelper.getBoolean(Environment.WRAP_RESULT_SETS, properties, false);
log.debug( "Wrap result sets: " + enabledDisabled(wrapResultSets) );
LOG.wrapResultSets(enabledDisabled(wrapResultSets));
settings.setWrapResultSetsEnabled(wrapResultSets);
boolean useGetGeneratedKeys = ConfigurationHelper.getBoolean(Environment.USE_GET_GENERATED_KEYS, properties, meta.supportsGetGeneratedKeys());
log.info("JDBC3 getGeneratedKeys(): " + enabledDisabled(useGetGeneratedKeys) );
LOG.jdbc3GeneratedKeys(enabledDisabled(useGetGeneratedKeys));
settings.setGetGeneratedKeysEnabled(useGetGeneratedKeys);
Integer statementFetchSize = ConfigurationHelper.getInteger(Environment.STATEMENT_FETCH_SIZE, properties);
if (statementFetchSize!=null) log.info("JDBC result set fetch size: " + statementFetchSize);
if (statementFetchSize != null) LOG.jdbcResultSetFetchSize(statementFetchSize);
settings.setJdbcFetchSize(statementFetchSize);
String releaseModeName = ConfigurationHelper.getString( Environment.RELEASE_CONNECTIONS, properties, "auto" );
log.info( "Connection release mode: " + releaseModeName );
LOG.connectionReleaseMode(releaseModeName);
ConnectionReleaseMode releaseMode;
if ( "auto".equals(releaseModeName) ) {
releaseMode = transactionFactory.getDefaultReleaseMode();
@ -139,8 +146,8 @@ public class SettingsFactory implements Serializable {
else {
releaseMode = ConnectionReleaseMode.parse( releaseModeName );
if ( releaseMode == ConnectionReleaseMode.AFTER_STATEMENT &&
! jdbcServices.getConnectionProvider().supportsAggressiveRelease() ) {
log.warn( "Overriding release mode as connection provider does not support 'after_statement'" );
! jdbcServices.getConnectionProvider().supportsAggressiveRelease() ) {
LOG.unsupportedAfterStatement();
releaseMode = ConnectionReleaseMode.AFTER_TRANSACTION;
}
}
@ -150,50 +157,50 @@ public class SettingsFactory implements Serializable {
String defaultSchema = properties.getProperty(Environment.DEFAULT_SCHEMA);
String defaultCatalog = properties.getProperty(Environment.DEFAULT_CATALOG);
if (defaultSchema!=null) log.info("Default schema: " + defaultSchema);
if (defaultCatalog!=null) log.info("Default catalog: " + defaultCatalog);
if (defaultSchema != null) LOG.defaultSchema(defaultSchema);
if (defaultCatalog != null) LOG.defaultCatalog(defaultCatalog);
settings.setDefaultSchemaName(defaultSchema);
settings.setDefaultCatalogName(defaultCatalog);
Integer maxFetchDepth = ConfigurationHelper.getInteger(Environment.MAX_FETCH_DEPTH, properties);
if (maxFetchDepth!=null) log.info("Maximum outer join fetch depth: " + maxFetchDepth);
if (maxFetchDepth != null) LOG.maxOuterJoinFetchDepth(maxFetchDepth);
settings.setMaximumFetchDepth(maxFetchDepth);
int batchFetchSize = ConfigurationHelper.getInt(Environment.DEFAULT_BATCH_FETCH_SIZE, properties, 1);
log.info("Default batch fetch size: " + batchFetchSize);
LOG.defaultBatchFetchSize(batchFetchSize);
settings.setDefaultBatchFetchSize(batchFetchSize);
boolean comments = ConfigurationHelper.getBoolean(Environment.USE_SQL_COMMENTS, properties);
log.info( "Generate SQL with comments: " + enabledDisabled(comments) );
LOG.generateSqlWithComments(enabledDisabled(comments));
settings.setCommentsEnabled(comments);
boolean orderUpdates = ConfigurationHelper.getBoolean(Environment.ORDER_UPDATES, properties);
log.info( "Order SQL updates by primary key: " + enabledDisabled(orderUpdates) );
LOG.orderSqlUpdatesByPrimaryKey(enabledDisabled(orderUpdates));
settings.setOrderUpdatesEnabled(orderUpdates);
boolean orderInserts = ConfigurationHelper.getBoolean(Environment.ORDER_INSERTS, properties);
log.info( "Order SQL inserts for batching: " + enabledDisabled( orderInserts ) );
LOG.orderSqlInsertsForBatching(enabledDisabled(orderInserts));
settings.setOrderInsertsEnabled( orderInserts );
//Query parser settings:
settings.setQueryTranslatorFactory( createQueryTranslatorFactory(properties) );
Map querySubstitutions = ConfigurationHelper.toMap(Environment.QUERY_SUBSTITUTIONS, " ,=;:\n\t\r\f", properties);
log.info("Query language substitutions: " + querySubstitutions);
Map querySubstitutions = ConfigurationHelper.toMap(Environment.QUERY_SUBSTITUTIONS, " ,=;:\n\t\r\f", properties);
LOG.queryLanguageSubstitutions(querySubstitutions);
settings.setQuerySubstitutions(querySubstitutions);
boolean jpaqlCompliance = ConfigurationHelper.getBoolean( Environment.JPAQL_STRICT_COMPLIANCE, properties, false );
settings.setStrictJPAQLCompliance( jpaqlCompliance );
log.info( "JPA-QL strict compliance: " + enabledDisabled( jpaqlCompliance ) );
LOG.jpaQlStrictCompliance(enabledDisabled(jpaqlCompliance));
// Second-level / query cache:
boolean useSecondLevelCache = ConfigurationHelper.getBoolean(Environment.USE_SECOND_LEVEL_CACHE, properties, true);
log.info( "Second-level cache: " + enabledDisabled(useSecondLevelCache) );
LOG.secondLevelCache(enabledDisabled(useSecondLevelCache));
settings.setSecondLevelCacheEnabled(useSecondLevelCache);
boolean useQueryCache = ConfigurationHelper.getBoolean(Environment.USE_QUERY_CACHE, properties);
log.info( "Query cache: " + enabledDisabled(useQueryCache) );
LOG.queryCache(enabledDisabled(useQueryCache));
settings.setQueryCacheEnabled(useQueryCache);
// The cache provider is needed when we either have second-level cache enabled
@ -203,16 +210,16 @@ public class SettingsFactory implements Serializable {
boolean useMinimalPuts = ConfigurationHelper.getBoolean(
Environment.USE_MINIMAL_PUTS, properties, settings.getRegionFactory().isMinimalPutsEnabledByDefault()
);
log.info( "Optimize cache for minimal puts: " + enabledDisabled(useMinimalPuts) );
LOG.optimizeCacheForMinimalPuts(enabledDisabled(useMinimalPuts));
settings.setMinimalPutsEnabled(useMinimalPuts);
String prefix = properties.getProperty(Environment.CACHE_REGION_PREFIX);
if ( StringHelper.isEmpty(prefix) ) prefix=null;
if (prefix!=null) log.info("Cache region prefix: "+ prefix);
if (prefix != null) LOG.cacheRegionPrefix(prefix);
settings.setCacheRegionPrefix(prefix);
boolean useStructuredCacheEntries = ConfigurationHelper.getBoolean(Environment.USE_STRUCTURED_CACHE, properties, false);
log.info( "Structured second-level cache entries: " + enabledDisabled(useStructuredCacheEntries) );
LOG.structuredSecondLevelCacheEntries(enabledDisabled(useStructuredCacheEntries));
settings.setStructuredCacheEntriesEnabled(useStructuredCacheEntries);
if (useQueryCache) settings.setQueryCacheFactory( createQueryCacheFactory(properties) );
@ -220,7 +227,7 @@ public class SettingsFactory implements Serializable {
//Statistics and logging:
boolean showSql = ConfigurationHelper.getBoolean(Environment.SHOW_SQL, properties);
if (showSql) log.info("Echoing all SQL to stdout");
if (showSql) LOG.echoingSql();
// settings.setShowSqlEnabled(showSql);
boolean formatSql = ConfigurationHelper.getBoolean(Environment.FORMAT_SQL, properties);
@ -229,11 +236,11 @@ public class SettingsFactory implements Serializable {
settings.setSqlStatementLogger( new SQLStatementLogger( showSql, formatSql ) );
boolean useStatistics = ConfigurationHelper.getBoolean(Environment.GENERATE_STATISTICS, properties);
log.info( "Statistics: " + enabledDisabled(useStatistics) );
LOG.statistics( enabledDisabled(useStatistics) );
settings.setStatisticsEnabled(useStatistics);
boolean useIdentifierRollback = ConfigurationHelper.getBoolean(Environment.USE_IDENTIFIER_ROLLBACK, properties);
log.info( "Deleted entity synthetic identifier rollback: " + enabledDisabled(useIdentifierRollback) );
LOG.deletedEntitySyntheticIdentifierRollback(enabledDisabled(useIdentifierRollback));
settings.setIdentifierRollbackEnabled(useIdentifierRollback);
//Schema export:
@ -249,15 +256,15 @@ public class SettingsFactory implements Serializable {
settings.setImportFiles( properties.getProperty( Environment.HBM2DDL_IMPORT_FILES ) );
EntityMode defaultEntityMode = EntityMode.parse( properties.getProperty( Environment.DEFAULT_ENTITY_MODE ) );
log.info( "Default entity-mode: " + defaultEntityMode );
LOG.defaultEntityMode(defaultEntityMode);
settings.setDefaultEntityMode( defaultEntityMode );
boolean namedQueryChecking = ConfigurationHelper.getBoolean( Environment.QUERY_STARTUP_CHECKING, properties, true );
log.info( "Named query checking : " + enabledDisabled( namedQueryChecking ) );
LOG.namedQueryChecking(enabledDisabled(namedQueryChecking));
settings.setNamedQueryStartupCheckingEnabled( namedQueryChecking );
boolean checkNullability = ConfigurationHelper.getBoolean(Environment.CHECK_NULLABILITY, properties, true);
log.info( "Check Nullability in Core (should be disabled when Bean Validation is on): " + enabledDisabled(checkNullability) );
LOG.checkNullability(enabledDisabled(checkNullability));
settings.setCheckNullability(checkNullability);
@ -278,7 +285,7 @@ public class SettingsFactory implements Serializable {
return new org.hibernate.bytecode.cglib.BytecodeProviderImpl();
}
else {
log.debug( "using javassist as bytecode provider by default" );
LOG.usingJavassist();
return new org.hibernate.bytecode.javassist.BytecodeProviderImpl();
}
}
@ -291,7 +298,7 @@ public class SettingsFactory implements Serializable {
String queryCacheFactoryClassName = ConfigurationHelper.getString(
Environment.QUERY_CACHE_FACTORY, properties, "org.hibernate.cache.StandardQueryCacheFactory"
);
log.info("Query cache factory: " + queryCacheFactoryClassName);
LOG.queryCacheFactory(queryCacheFactoryClassName);
try {
return (QueryCacheFactory) ReflectHelper.classForName(queryCacheFactoryClassName).newInstance();
}
@ -312,7 +319,7 @@ public class SettingsFactory implements Serializable {
if ( regionFactoryClassName == null ) {
regionFactoryClassName = DEF_CACHE_REG_FACTORY;
}
log.info( "Cache region factory : " + regionFactoryClassName );
LOG.cacheRegionFactory(regionFactoryClassName);
try {
try {
return (RegionFactory) ReflectHelper.classForName( regionFactoryClassName )
@ -321,10 +328,7 @@ public class SettingsFactory implements Serializable {
}
catch ( NoSuchMethodException nsme ) {
// no constructor accepting Properties found, try no arg constructor
log.debug(
regionFactoryClassName + " did not provide constructor accepting java.util.Properties; " +
"attempting no-arg constructor."
);
LOG.constructorWithPropertiesNotFound(regionFactoryClassName);
return (RegionFactory) ReflectHelper.classForName( regionFactoryClassName ).newInstance();
}
}
@ -337,7 +341,7 @@ public class SettingsFactory implements Serializable {
String className = ConfigurationHelper.getString(
Environment.QUERY_TRANSLATOR, properties, "org.hibernate.hql.ast.ASTQueryTranslatorFactory"
);
log.info("Query translator: " + className);
LOG.queryTranslator(className);
try {
return (QueryTranslatorFactory) ReflectHelper.classForName(className).newInstance();
}
@ -355,7 +359,7 @@ public class SettingsFactory implements Serializable {
: new BatchBuilder();
}
else {
log.info("Batch factory: " + batchBuilderClass);
LOG.batcherFactory(batchBuilderClass);
try {
batchBuilder = (BatchBuilder) ReflectHelper.classForName(batchBuilderClass).newInstance();
}
@ -375,4 +379,192 @@ public class SettingsFactory implements Serializable {
return TransactionManagerLookupFactory.getTransactionManagerLookup(properties);
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = INFO )
@Message( value = "Automatic flush during beforeCompletion(): %s" )
void autoFlush( String enabledDisabled );
@LogMessage( level = INFO )
@Message( value = "Automatic session close at end of transaction: %s" )
void autoSessionClose( String enabledDisabled );
@LogMessage( level = INFO )
@Message( value = "Batcher factory: %s" )
void batcherFactory( String batcherClass );
@LogMessage( level = INFO )
@Message( value = "Cache region factory : %s" )
void cacheRegionFactory( String regionFactoryClassName );
@LogMessage( level = INFO )
@Message( value = "Cache region prefix: %s" )
void cacheRegionPrefix( String prefix );
@LogMessage( level = INFO )
@Message( value = "Check Nullability in Core (should be disabled when Bean Validation is on): %s" )
void checkNullability( String enabledDisabled );
@LogMessage( level = INFO )
@Message( value = "Connection release mode: %s" )
void connectionReleaseMode( String releaseModeName );
@LogMessage( level = INFO )
@Message( value = "%s did not provide constructor accepting java.util.Properties; attempting no-arg constructor." )
void constructorWithPropertiesNotFound( String regionFactoryClassName );
@LogMessage( level = INFO )
// @formatter:off
@Message( value = "Database ->\n" +
" name : %s\n" +
" version : %s\n" +
" major : %s\n" +
" minor : %s" )
// @formatter:on
void database( String databaseProductName,
String databaseProductVersion,
int databaseMajorVersion,
int databaseMinorVersion );
@LogMessage( level = INFO )
@Message( value = "Default batch fetch size: %s" )
void defaultBatchFetchSize( int batchFetchSize );
@LogMessage( level = INFO )
@Message( value = "Default catalog: %s" )
void defaultCatalog( String defaultCatalog );
@LogMessage( level = INFO )
@Message( value = "Default entity-mode: %s" )
void defaultEntityMode( EntityMode defaultEntityMode );
@LogMessage( level = INFO )
@Message( value = "Default schema: %s" )
void defaultSchema( String defaultSchema );
@LogMessage( level = INFO )
@Message( value = "Deleted entity synthetic identifier rollback: %s" )
void deletedEntitySyntheticIdentifierRollback( String enabledDisabled );
@LogMessage( level = INFO )
// @formatter:off
@Message( value = "Driver ->\n" +
" name : %s\n" +
" version : %s\n" +
" major : %s\n" +
" minor : %s" )
// @formatter:on
void driver( String driverProductName,
String driverProductVersion,
int driverMajorVersion,
int driverMinorVersion );
@LogMessage( level = INFO )
@Message( value = "Echoing all SQL to stdout" )
void echoingSql();
@LogMessage( level = INFO )
@Message( value = "Generate SQL with comments: %s" )
void generateSqlWithComments( String enabledDisabled );
@LogMessage( level = INFO )
@Message( value = "JDBC3 getGeneratedKeys(): %s" )
void jdbc3GeneratedKeys( String enabledDisabled );
@LogMessage( level = INFO )
@Message( value = "JDBC batch size: %s" )
void jdbcBatchSize( int batchSize );
@LogMessage( level = INFO )
@Message( value = "JDBC batch updates for versioned data: %s" )
void jdbcBatchUpdates( String enabledDisabled );
@LogMessage( level = INFO )
@Message( value = "JDBC result set fetch size: %s" )
void jdbcResultSetFetchSize( Integer statementFetchSize );
@LogMessage( level = INFO )
@Message( value = "JPA-QL strict compliance: %s" )
void jpaQlStrictCompliance( String enabledDisabled );
@LogMessage( level = INFO )
@Message( value = "Maximum outer join fetch depth: %s" )
void maxOuterJoinFetchDepth( Integer maxFetchDepth );
@LogMessage( level = INFO )
@Message( value = "Named query checking : %s" )
void namedQueryChecking( String enabledDisabled );
@LogMessage( level = INFO )
@Message( value = "Optimize cache for minimal puts: %s" )
void optimizeCacheForMinimalPuts( String enabledDisabled );
@LogMessage( level = INFO )
@Message( value = "Order SQL inserts for batching: %s" )
void orderSqlInsertsForBatching( String enabledDisabled );
@LogMessage( level = INFO )
@Message( value = "Order SQL updates by primary key: %s" )
void orderSqlUpdatesByPrimaryKey( String enabledDisabled );
@LogMessage( level = INFO )
@Message( value = "Query cache: %s" )
void queryCache( String enabledDisabled );
@LogMessage( level = INFO )
@Message( value = "Query cache factory: %s" )
void queryCacheFactory( String queryCacheFactoryClassName );
@LogMessage( level = INFO )
@Message( value = "Query language substitutions: %s" )
void queryLanguageSubstitutions( Map<String, String> querySubstitutions );
@LogMessage( level = INFO )
@Message( value = "Query translator: %s" )
void queryTranslator( String className );
@LogMessage( level = INFO )
@Message( value = "Scrollable result sets: %s" )
void scrollableResultSets( String enabledDisabled );
@LogMessage( level = INFO )
@Message( value = "Second-level cache: %s" )
void secondLevelCache( String enabledDisabled );
@LogMessage( level = INFO )
@Message( value = "Statistics: %s" )
void statistics( String enabledDisabled );
@LogMessage( level = INFO )
@Message( value = "Structured second-level cache entries: %s" )
void structuredSecondLevelCacheEntries( String enabledDisabled );
@LogMessage( level = WARN )
@Message( value = "Could not obtain connection metadata: %s" )
void unableToObtainConnectionMetadata( SQLException error );
@LogMessage( level = WARN )
@Message( value = "Could not obtain connection to query metadata: %s" )
void unableToObtainConnectionToQueryMetadata( SQLException error );
@LogMessage( level = WARN )
@Message( value = "Overriding release mode as connection provider does not support 'after_statement'" )
void unsupportedAfterStatement();
@LogMessage( level = DEBUG )
@Message( value = "Using javassist as bytecode provider by default" )
void usingJavassist();
@LogMessage( level = WARN )
@Message( value = "Error building SQLExceptionConverter; using minimal converter" )
void usingMinimalConverter();
@LogMessage( level = INFO )
@Message( value = "Wrap result sets: %s" )
void wrapResultSets( String enabledDisabled );
}
}
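For readers not familiar with the jboss-logging API, the following is a minimal sketch of the pattern this commit applies in every class: the slf4j logger field is replaced by a typed logger obtained from org.jboss.logging.Logger.getMessageLogger(), and each log statement becomes a method on a nested @MessageLogger interface whose implementation is supplied by jboss-logging (typically generated by the jboss-logging-tools annotation processor, or created as a runtime proxy). The WidgetService class and its message below are invented for illustration and do not appear in the patch.

import static org.jboss.logging.Logger.Level.INFO;

import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;

public class WidgetService {

    // Same idiom as the converted classes: the nested interface is the logger type
    // and the package name is used as the logger category.
    private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
                                                                                WidgetService.class.getPackage().getName());

    public void start(String widgetName) {
        LOG.startingWidget(widgetName); // the %s in the message is bound to this argument
    }

    /**
     * Interface defining messages that may be logged by the outer class
     */
    @MessageLogger
    interface Logger extends BasicLogger {

        @LogMessage( level = INFO )
        @Message( value = "Starting widget %s" )
        void startingWidget( String widgetName );
    }
}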

View File

@ -23,6 +23,8 @@
*/
package org.hibernate.cfg.annotations;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.INFO;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
@ -42,10 +44,6 @@ import javax.persistence.ManyToMany;
import javax.persistence.MapKey;
import javax.persistence.MapKeyColumn;
import javax.persistence.OneToMany;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.AnnotationException;
import org.hibernate.FetchMode;
import org.hibernate.MappingException;
@ -112,6 +110,10 @@ import org.hibernate.mapping.SimpleValue;
import org.hibernate.mapping.SingleTableSubclass;
import org.hibernate.mapping.Table;
import org.hibernate.util.StringHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Base class for binding different types of collections to Hibernate configuration objects.
@ -121,7 +123,8 @@ import org.hibernate.util.StringHelper;
*/
@SuppressWarnings({"unchecked", "serial"})
public abstract class CollectionBinder {
private Logger log = LoggerFactory.getLogger( CollectionBinder.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
CollectionBinder.class.getPackage().getName());
protected Collection collection;
protected String propertyName;
@ -372,8 +375,9 @@ public abstract class CollectionBinder {
public void bind() {
this.collection = createCollection( propertyHolder.getPersistentClass() );
log.debug( "Collection role: {}", StringHelper.qualify( propertyHolder.getPath(), propertyName ) );
collection.setRole( StringHelper.qualify( propertyHolder.getPath(), propertyName ) );
String role = StringHelper.qualify(propertyHolder.getPath(), propertyName);
LOG.collectionRole(role);
collection.setRole(role);
collection.setNodeName( propertyName );
if ( (property.isAnnotationPresent( org.hibernate.annotations.MapKey.class )
@ -624,7 +628,8 @@ public abstract class CollectionBinder {
final TableBinder assocTableBinder,
final Mappings mappings) {
return new CollectionSecondPass( mappings, collection ) {
public void secondPass(java.util.Map persistentClasses, java.util.Map inheritedMetas) throws MappingException {
@Override
public void secondPass(java.util.Map persistentClasses, java.util.Map inheritedMetas) throws MappingException {
bindStarToManySecondPass(
persistentClasses, collType, fkJoinColumns, keyColumns, inverseColumns, elementColumns,
isEmbedded, property, unique, assocTableBinder, ignoreNotFound, mappings
@ -715,7 +720,7 @@ public abstract class CollectionBinder {
String hqlOrderBy,
Mappings mappings,
Map<XClass, InheritanceState> inheritanceStatePerClass) {
log.debug("Binding a OneToMany: {}.{} through a foreign key", propertyHolder.getEntityName(), propertyName);
LOG.bindingOneToMany(propertyHolder.getEntityName(), propertyName);
org.hibernate.mapping.OneToMany oneToMany = new org.hibernate.mapping.OneToMany( mappings, collection.getOwner() );
collection.setElement( oneToMany );
oneToMany.setReferencedEntityName( collectionType.getName() );
@ -742,9 +747,7 @@ public abstract class CollectionBinder {
column.setJoins( joins );
collection.setCollectionTable( column.getTable() );
}
log.info(
"Mapping collection: " + collection.getRole() + " -> " + collection.getCollectionTable().getName()
);
LOG.mappingCollection(collection.getRole(), collection.getCollectionTable().getName());
bindFilters( false );
bindCollectionSecondPass( collection, null, fkJoinColumns, cascadeDeleteEnabled, property, mappings );
if ( !collection.isInverse()
@ -972,7 +975,7 @@ public abstract class CollectionBinder {
//TODO check whether @ManyToOne @JoinTable in @IdClass used for @OrderBy works: doh!
table = "";
}
else if (pc == associatedClass
|| (associatedClass instanceof SingleTableSubclass && pc
.getMappedClass().isAssignableFrom(
@ -981,7 +984,7 @@ public abstract class CollectionBinder {
} else {
table = pc.getTable().getQuotedName() + ".";
}
Iterator propertyColumns = p.getColumnIterator();
while ( propertyColumns.hasNext() ) {
Selectable column = (Selectable) propertyColumns.next();
@ -1146,20 +1149,12 @@ public abstract class CollectionBinder {
PersistentClass collectionEntity = (PersistentClass) persistentClasses.get( collType.getName() );
boolean isCollectionOfEntities = collectionEntity != null;
ManyToAny anyAnn = property.getAnnotation( ManyToAny.class );
if ( log.isDebugEnabled() ) {
if (LOG.isDebugEnabled()) {
String path = collValue.getOwnerEntityName() + "." + joinColumns[0].getPropertyName();
if ( isCollectionOfEntities && unique ) {
log.debug( "Binding a OneToMany: {} through an association table", path );
}
else if ( isCollectionOfEntities ) {
log.debug( "Binding as ManyToMany: {}", path );
}
else if ( anyAnn != null ) {
log.debug( "Binding a ManyToAny: {}", path );
}
else {
log.debug( "Binding a collection of element: {}", path );
}
if (isCollectionOfEntities && unique) LOG.bindingOneToMany(path);
else if (isCollectionOfEntities) LOG.bindingManyToMany(path);
else if (anyAnn != null) LOG.bindingManyToAny(path);
else LOG.bindingCollection(path);
}
//check for user error
if ( !isCollectionOfEntities ) {
@ -1273,7 +1268,7 @@ public abstract class CollectionBinder {
element.setFetchMode( FetchMode.JOIN );
element.setLazy( false );
element.setIgnoreNotFound( ignoreNotFound );
// as per 11.1.38 of JPA-2 spec, default to primary key if no column is specified by @OrderBy.
// as per 11.1.38 of JPA-2 spec, default to primary key if no column is specified by @OrderBy.
if ( hqlOrderBy != null ) {
collValue.setManyToManyOrdering(
buildOrderByClauseFromHql( hqlOrderBy, collectionEntity, collValue.getRole() )
@ -1546,4 +1541,41 @@ public abstract class CollectionBinder {
public void setLocalGenerators(HashMap<String, IdGenerator> localGenerators) {
this.localGenerators = localGenerators;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "Binding a collection of element: %s" )
void bindingCollection( String property );
@LogMessage( level = DEBUG )
@Message( value = "Binding a ManyToAny: %s" )
void bindingManyToAny( String property );
@LogMessage( level = DEBUG )
@Message( value = "Binding as ManyToMany: %s" )
void bindingManyToMany( String property );
@LogMessage( level = DEBUG )
@Message( value = "Binding a OneToMany: %s through an association table" )
void bindingOneToMany( String property );
@LogMessage( level = DEBUG )
@Message( value = "Binding a OneToMany: %s.%s through a foreign key" )
void bindingOneToMany( String entity,
String property );
@LogMessage( level = DEBUG )
@Message( value = "Collection role: %s" )
void collectionRole( String role );
@LogMessage( level = INFO )
@Message( value = "Mapping collection: %s -> %s" )
void mappingCollection( String role,
String collectionTable );
}
}
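Two details of the CollectionBinder conversion above are easy to miss: slf4j's '{}' placeholders become printf-style '%s' markers in the @Message values, bound positionally to the logger method's parameters, and an explicit LOG.isDebugEnabled() guard is kept only where the argument (the concatenated path) is built purely for logging. Condensed to its essentials, using the names from this file:

// slf4j style (removed): the message text and '{}' placeholders live at the call site
log.debug( "Collection role: {}", role );

// jboss-logging style (added): the text lives on the @Message annotation,
// and the '%s' in "Collection role: %s" is filled from the argument
LOG.collectionRole( role );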

View File

@ -23,6 +23,9 @@
*/
package org.hibernate.cfg.annotations;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.INFO;
import static org.jboss.logging.Logger.Level.WARN;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
@ -34,7 +37,6 @@ import javax.persistence.JoinTable;
import javax.persistence.PrimaryKeyJoinColumn;
import javax.persistence.SecondaryTable;
import javax.persistence.SecondaryTables;
import org.hibernate.AnnotationException;
import org.hibernate.AssertionFailure;
import org.hibernate.EntityMode;
@ -67,10 +69,10 @@ import org.hibernate.cfg.BinderHelper;
import org.hibernate.cfg.Ejb3JoinColumn;
import org.hibernate.cfg.InheritanceState;
import org.hibernate.cfg.Mappings;
import org.hibernate.cfg.PropertyHolder;
import org.hibernate.cfg.ObjectNameSource;
import org.hibernate.cfg.NamingStrategy;
import org.hibernate.cfg.ObjectNameNormalizer;
import org.hibernate.cfg.ObjectNameSource;
import org.hibernate.cfg.PropertyHolder;
import org.hibernate.cfg.UniqueConstraintHolder;
import org.hibernate.engine.ExecuteUpdateResultCheckStyle;
import org.hibernate.engine.FilterDefinition;
@ -85,9 +87,10 @@ import org.hibernate.mapping.TableOwner;
import org.hibernate.mapping.Value;
import org.hibernate.util.ReflectHelper;
import org.hibernate.util.StringHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Stateful holder and processor for binding Entity information
@ -95,11 +98,13 @@ import org.slf4j.LoggerFactory;
* @author Emmanuel Bernard
*/
public class EntityBinder {
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
EntityBinder.class.getPackage().getName());
private String name;
private XClass annotatedClass;
private PersistentClass persistentClass;
private Mappings mappings;
private Logger log = LoggerFactory.getLogger( EntityBinder.class );
private String discriminatorValue = "";
private Boolean forceDiscriminator;
private Boolean insertableDiscriminator;
@ -124,8 +129,8 @@ public class EntityBinder {
private AccessType propertyAccessType = AccessType.DEFAULT;
private boolean wrapIdsInEmbeddedComponents;
private String subselect;
public boolean wrapIdsInEmbeddedComponents() {
return wrapIdsInEmbeddedComponents;
}
@ -237,14 +242,8 @@ public class EntityBinder {
}
}
else {
if ( explicitHibernateEntityAnnotation ) {
log.warn( "@org.hibernate.annotations.Entity used on a non root entity: ignored for {}",
annotatedClass.getName() );
}
if ( annotatedClass.isAnnotationPresent( Immutable.class ) ) {
log.warn( "@Immutable used on a non root entity: ignored for {}",
annotatedClass.getName() );
}
if (explicitHibernateEntityAnnotation) LOG.entityAnnotationOnNonRoot(annotatedClass.getName());
if (annotatedClass.isAnnotationPresent(Immutable.class)) LOG.immutableAnnotationOnNonRoot(annotatedClass.getName());
}
persistentClass.setOptimisticLockMode( getVersioning( optimisticLockType ) );
persistentClass.setSelectBeforeUpdate( selectBeforeUpdate );
@ -277,7 +276,7 @@ public class EntityBinder {
SQLDelete sqlDelete = annotatedClass.getAnnotation( SQLDelete.class );
SQLDeleteAll sqlDeleteAll = annotatedClass.getAnnotation( SQLDeleteAll.class );
Loader loader = annotatedClass.getAnnotation( Loader.class );
if ( sqlInsert != null ) {
persistentClass.setCustomSQLInsert( sqlInsert.sql().trim(), sqlInsert.callable(),
ExecuteUpdateResultCheckStyle.parse( sqlInsert.check().toString().toLowerCase() )
@ -305,18 +304,18 @@ public class EntityBinder {
if ( annotatedClass.isAnnotationPresent( Synchronize.class )) {
Synchronize synchronizedWith = annotatedClass.getAnnotation(Synchronize.class);
String [] tables = synchronizedWith.value();
for (String table : tables) {
persistentClass.addSynchronizedTable(table);
}
}
if ( annotatedClass.isAnnotationPresent(Subselect.class )) {
Subselect subselect = annotatedClass.getAnnotation(Subselect.class);
this.subselect = subselect.value();
}
}
//tuplizers
if ( annotatedClass.isAnnotationPresent( Tuplizers.class ) ) {
for (Tuplizer tuplizer : annotatedClass.getAnnotation( Tuplizers.class ).value()) {
@ -345,13 +344,8 @@ public class EntityBinder {
}
persistentClass.addFilter( filterName, cond );
}
}
else {
if ( filters.size() > 0 ) {
log.warn( "@Filter not allowed on subclasses (ignored): {}", persistentClass.getEntityName() );
}
}
log.debug( "Import with entity name {}", name );
} else if (filters.size() > 0) LOG.filterAnnotationOnSubclass(persistentClass.getEntityName());
LOG.importWithEntityName(name);
try {
mappings.addImport( persistentClass.getEntityName(), name );
String entityName = persistentClass.getEntityName();
@ -517,7 +511,7 @@ public class EntityBinder {
);
if ( persistentClass instanceof TableOwner ) {
log.info( "Bind entity {} on table {}", persistentClass.getEntityName(), table.getName() );
LOG.bindEntityOnTable(persistentClass.getEntityName(), table.getName());
( (TableOwner) persistentClass ).setTable( table );
}
else {
@ -747,9 +741,7 @@ public class EntityBinder {
//somehow keep joins() for later.
//Has to do the work later because it needs persistentClass id!
log.info(
"Adding secondary table to entity {} -> {}", persistentClass.getEntityName(), join.getTable().getName()
);
LOG.addingSecondaryTableToEntity(persistentClass.getEntityName(), join.getTable().getName());
org.hibernate.annotations.Table matchingTable = findMatchingComplimentTableAnnotation( join );
if ( matchingTable != null ) {
join.setSequentialSelect( FetchMode.JOIN != matchingTable.fetch() );
@ -930,4 +922,37 @@ public class EntityBinder {
return accessType;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = INFO )
@Message( value = "Adding secondary table to entity %s -> %s" )
void addingSecondaryTableToEntity( String entity,
String table );
@LogMessage( level = INFO )
@Message( value = "Bind entity %s on table %s" )
void bindEntityOnTable( String entity,
String table );
@LogMessage( level = WARN )
@Message( value = "@org.hibernate.annotations.Entity used on a non root entity: ignored for %s" )
void entityAnnotationOnNonRoot( String className );
@LogMessage( level = WARN )
@Message( value = "@Filter not allowed on subclasses (ignored): %s" )
void filterAnnotationOnSubclass( String className );
@LogMessage( level = WARN )
@Message( value = "@Immutable used on a non root entity: ignored for %s" )
void immutableAnnotationOnNonRoot( String className );
@LogMessage( level = DEBUG )
@Message( value = "Import with entity name %s" )
void importWithEntityName( String entity );
}
}

View File

@ -23,11 +23,8 @@
*/
package org.hibernate.cfg.annotations;
import static org.jboss.logging.Logger.Level.WARN;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.AnnotationException;
import org.hibernate.MappingException;
import org.hibernate.annotations.OrderBy;
@ -48,6 +45,10 @@ import org.hibernate.mapping.OneToMany;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.SimpleValue;
import org.hibernate.util.StringHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Bind a list to the underlying Hibernate configuration
@ -57,21 +58,25 @@ import org.hibernate.util.StringHelper;
*/
@SuppressWarnings({"unchecked", "serial"})
public class ListBinder extends CollectionBinder {
private Logger log = LoggerFactory.getLogger( ListBinder.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
ListBinder.class.getPackage().getName());
public ListBinder() {
}
protected Collection createCollection(PersistentClass persistentClass) {
@Override
protected Collection createCollection(PersistentClass persistentClass) {
return new org.hibernate.mapping.List( getMappings(), persistentClass );
}
public void setSqlOrderBy(OrderBy orderByAnn) {
if ( orderByAnn != null ) log.warn( "@OrderBy not allowed for a indexed collection, annotation ignored." );
@Override
public void setSqlOrderBy(OrderBy orderByAnn) {
if (orderByAnn != null) LOG.orderByAnnotationIndexedCollection();
}
public void setSort(Sort sortAnn) {
if ( sortAnn != null ) log.warn( "@Sort not allowed for a indexed collection, annotation ignored." );
@Override
public void setSort(Sort sortAnn) {
if (sortAnn != null) LOG.sortAnnotationIndexedCollection();
}
@Override
@ -90,7 +95,8 @@ public class ListBinder extends CollectionBinder {
final TableBinder assocTableBinder,
final Mappings mappings) {
return new CollectionSecondPass( mappings, ListBinder.this.collection ) {
public void secondPass(Map persistentClasses, Map inheritedMetas)
@Override
public void secondPass(Map persistentClasses, Map inheritedMetas)
throws MappingException {
bindStarToManySecondPass(
persistentClasses, collType, fkJoinColumns, keyColumns, inverseColumns, elementColumns,
@ -141,4 +147,19 @@ public class ListBinder extends CollectionBinder {
);
}
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "@OrderBy not allowed for an indexed collection, annotation ignored." )
void orderByAnnotationIndexedCollection();
@LogMessage( level = WARN )
@Message( value = "@Sort not allowed for an indexed collection, annotation ignored." )
void sortAnnotationIndexedCollection();
}
}

View File

@ -23,13 +23,11 @@
*/
package org.hibernate.cfg.annotations;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.TRACE;
import java.util.Map;
import javax.persistence.EmbeddedId;
import javax.persistence.Id;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.AnnotationException;
import org.hibernate.annotations.Generated;
import org.hibernate.annotations.GenerationTime;
@ -55,12 +53,18 @@ import org.hibernate.mapping.RootClass;
import org.hibernate.mapping.SimpleValue;
import org.hibernate.mapping.Value;
import org.hibernate.util.StringHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* @author Emmanuel Bernard
*/
public class PropertyBinder {
private Logger log = LoggerFactory.getLogger( PropertyBinder.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
PropertyBinder.class.getPackage().getName());
private String name;
private String returnedClassName;
private boolean lazy;
@ -172,7 +176,7 @@ public class PropertyBinder {
private Property makePropertyAndValue() {
validateBind();
log.debug( "binding property {} with lazy={}", name, lazy );
LOG.bindingPropertyWithLazy(name, lazy);
String containerClassName = holder == null ?
null :
holder.getClassName();
@ -252,7 +256,7 @@ public class PropertyBinder {
//used when the value is provided and the binding is done elsewhere
public Property makeProperty() {
validateMake();
log.debug( "Building property " + name );
LOG.buildingProperty(name);
Property prop = new Property();
prop.setName( name );
prop.setNodeName( name );
@ -309,7 +313,7 @@ public class PropertyBinder {
);
}
}
log.trace( "Cascading " + name + " with " + cascade );
LOG.cascadingProperty(name, cascade);
this.mappingProperty = prop;
return prop;
}
@ -337,4 +341,25 @@ public class PropertyBinder {
public void setInheritanceStatePerClass(Map<XClass, InheritanceState> inheritanceStatePerClass) {
this.inheritanceStatePerClass = inheritanceStatePerClass;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "Binder property %s with lazy=%s" )
void bindingPropertyWithLazy( String property,
boolean lazy );
@LogMessage( level = DEBUG )
@Message( value = "Building property %s" )
void buildingProperty( String property );
@LogMessage( level = TRACE )
@Message( value = "Cascading %s with %s" )
void cascadingProperty( String property,
String cascade );
}
}

View File

@ -23,6 +23,7 @@
*/
package org.hibernate.cfg.annotations;
import static org.jboss.logging.Logger.Level.INFO;
import java.util.HashMap;
import javax.persistence.NamedNativeQueries;
import javax.persistence.NamedNativeQuery;
@ -31,7 +32,6 @@ import javax.persistence.NamedQuery;
import javax.persistence.QueryHint;
import javax.persistence.SqlResultSetMapping;
import javax.persistence.SqlResultSetMappings;
import org.hibernate.AnnotationException;
import org.hibernate.AssertionFailure;
import org.hibernate.CacheMode;
@ -46,8 +46,10 @@ import org.hibernate.engine.NamedQueryDefinition;
import org.hibernate.engine.NamedSQLQueryDefinition;
import org.hibernate.engine.query.sql.NativeSQLQueryReturn;
import org.hibernate.engine.query.sql.NativeSQLQueryRootReturn;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Query binder
@ -55,7 +57,8 @@ import org.slf4j.LoggerFactory;
* @author Emmanuel Bernard
*/
public abstract class QueryBinder {
private static final Logger log = LoggerFactory.getLogger( QueryBinder.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
QueryBinder.class.getPackage().getName());
public static void bindQuery(NamedQuery queryAnn, Mappings mappings, boolean isDefault) {
if ( queryAnn == null ) return;
@ -83,7 +86,7 @@ public abstract class QueryBinder {
else {
mappings.addQuery( queryAnn.name(), query );
}
log.info( "Binding Named query: {} => {}", queryAnn.name(), queryAnn.query() );
LOG.bindingNamedQuery(queryAnn.name(), queryAnn.query());
}
@ -145,7 +148,7 @@ public abstract class QueryBinder {
else {
mappings.addSQLQuery( queryAnn.name(), query );
}
log.info( "Binding named native query: {} => {}", queryAnn.name(), queryAnn.query() );
LOG.bindingNamedNativeQuery(queryAnn.name(), queryAnn.query());
}
public static void bindNativeQuery(org.hibernate.annotations.NamedNativeQuery queryAnn, Mappings mappings) {
@ -199,7 +202,7 @@ public abstract class QueryBinder {
throw new NotYetImplementedException( "Pure native scalar queries are not yet supported" );
}
mappings.addSQLQuery( queryAnn.name(), query );
log.info( "Binding named native query: {} => {}", queryAnn.name(), queryAnn.query() );
LOG.bindingNamedNativeQuery(queryAnn.name(), queryAnn.query());
}
public static void bindQueries(NamedQueries queriesAnn, Mappings mappings, boolean isDefault) {
@ -248,7 +251,7 @@ public abstract class QueryBinder {
);
mappings.addQuery( queryAnn.name(), query );
if ( log.isInfoEnabled() ) log.info( "Binding named query: " + queryAnn.name() + " => " + queryAnn.query() );
LOG.bindingNamedQuery(queryAnn.name(), queryAnn.query());
}
private static FlushMode getFlushMode(FlushModeType flushModeType) {
@ -421,4 +424,21 @@ public abstract class QueryBinder {
}
return timeout;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = INFO )
@Message( value = "Binding named native query: %s => %s" )
void bindingNamedNativeQuery( String name,
String query );
@LogMessage( level = INFO )
@Message( value = "Binding named query: %s => %s" )
void bindingNamedQuery( String name,
String query );
}
}
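One side effect of the conversion is visible in QueryBinder above: the hand-written if ( log.isInfoEnabled() ) guard around the concatenated info message becomes a plain LOG.bindingNamedQuery( ... ) call, because no string is built at the call site; the logger implementation is expected to check the level before formatting the message. Sketched with the names from this file (surrounding declarations omitted):

// Before: concatenation happens at the call site, so it had to be guarded by hand
if ( log.isInfoEnabled() ) {
    log.info( "Binding named query: " + queryAnn.name() + " => " + queryAnn.query() );
}

// After: the arguments are passed as-is and "Binding named query: %s => %s"
// is only formatted if INFO is enabled
LOG.bindingNamedQuery( queryAnn.name(), queryAnn.query() );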

View File

@ -23,6 +23,7 @@
*/
package org.hibernate.cfg.annotations;
import static org.jboss.logging.Logger.Level.INFO;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
@ -34,7 +35,6 @@ import javax.persistence.ColumnResult;
import javax.persistence.EntityResult;
import javax.persistence.FieldResult;
import javax.persistence.SqlResultSetMapping;
import org.hibernate.LockMode;
import org.hibernate.MappingException;
import org.hibernate.cfg.BinderHelper;
@ -49,14 +49,18 @@ import org.hibernate.mapping.Property;
import org.hibernate.mapping.ToOne;
import org.hibernate.mapping.Value;
import org.hibernate.util.StringHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* @author Emmanuel Bernard
*/
public class ResultsetMappingSecondPass implements QuerySecondPass {
private Logger log = LoggerFactory.getLogger( ResultsetMappingSecondPass.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
ResultsetMappingSecondPass.class.getPackage().getName());
private SqlResultSetMapping ann;
private Mappings mappings;
private boolean isDefault;
@ -71,7 +75,7 @@ public class ResultsetMappingSecondPass implements QuerySecondPass {
//TODO add parameters checkings
if ( ann == null ) return;
ResultSetMappingDefinition definition = new ResultSetMappingDefinition( ann.name() );
log.info( "Binding resultset mapping: {}", definition.getName() );
LOG.bindingResultSetMapping(definition.getName());
int entityAliasIndex = 0;
@ -265,4 +269,15 @@ public class ResultsetMappingSecondPass implements QuerySecondPass {
}
return -1;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = INFO )
@Message( value = "Binding result set mapping: %s" )
void bindingResultSetMapping( String mapping );
}
}

View File

@ -23,12 +23,15 @@
*/
package org.hibernate.cfg.annotations;
import static org.jboss.logging.Logger.Level.WARN;
import org.hibernate.annotations.OrderBy;
import org.hibernate.cfg.Environment;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.PersistentClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Bind a set.
@ -36,7 +39,8 @@ import org.slf4j.LoggerFactory;
* @author Matthew Inger
*/
public class SetBinder extends CollectionBinder {
private final Logger log = LoggerFactory.getLogger( SetBinder.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
SetBinder.class.getPackage().getName());
public SetBinder() {
}
@ -45,19 +49,28 @@ public class SetBinder extends CollectionBinder {
super( sorted );
}
protected Collection createCollection(PersistentClass persistentClass) {
@Override
protected Collection createCollection(PersistentClass persistentClass) {
return new org.hibernate.mapping.Set( getMappings(), persistentClass );
}
public void setSqlOrderBy(OrderBy orderByAnn) {
@Override
public void setSqlOrderBy(OrderBy orderByAnn) {
// *annotation* binder, jdk 1.5, ... am i missing something?
if ( orderByAnn != null ) {
if ( Environment.jvmSupportsLinkedHashCollections() ) {
super.setSqlOrderBy( orderByAnn );
}
else {
log.warn( "Attribute \"order-by\" ignored in JDK1.3 or less" );
}
if (Environment.jvmSupportsLinkedHashCollections()) super.setSqlOrderBy(orderByAnn);
else LOG.orderByAttributeIgnored();
}
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "Attribute \"order-by\" ignored in JDK1.3 or less" )
void orderByAttributeIgnored();
}
}

View File

@ -23,6 +23,7 @@
*/
package org.hibernate.cfg.annotations;
import static org.jboss.logging.Logger.Level.DEBUG;
import java.io.Serializable;
import java.sql.Types;
import java.util.Calendar;
@ -34,10 +35,6 @@ import javax.persistence.MapKeyEnumerated;
import javax.persistence.MapKeyTemporal;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.AnnotationException;
import org.hibernate.AssertionFailure;
import org.hibernate.Hibernate;
@ -60,12 +57,18 @@ import org.hibernate.type.PrimitiveCharacterArrayClobType;
import org.hibernate.type.SerializableToBlobType;
import org.hibernate.type.WrappedMaterializedBlobType;
import org.hibernate.util.StringHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* @author Emmanuel Bernard
*/
public class SimpleValueBinder {
private Logger log = LoggerFactory.getLogger( SimpleValueBinder.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
SimpleValueBinder.class.getPackage().getName());
private String propertyName;
private String returnedClassName;
private Ejb3Column[] columns;
@ -300,7 +303,7 @@ public class SimpleValueBinder {
public SimpleValue make() {
validate();
log.debug( "building SimpleValue for {}", propertyName );
LOG.buildingSimpleValue(propertyName);
if ( table == null ) {
table = columns[0].getTable();
}
@ -338,7 +341,7 @@ public class SimpleValueBinder {
public void fillSimpleValue() {
log.debug( "Setting SimpleValue typeName for {}", propertyName );
LOG.settingSimpleValueTypeName(propertyName);
String type = BinderHelper.isEmptyAnnotationValue( explicitType ) ? returnedClassName : explicitType;
org.hibernate.mapping.TypeDef typeDef = mappings.getTypeDef( type );
@ -368,4 +371,19 @@ public class SimpleValueBinder {
public void setKey(boolean key) {
this.key = key;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "building SimpleValue for %s" )
void buildingSimpleValue( String propertyName );
@LogMessage( level = DEBUG )
@Message( value = "Setting SimpleValue typeName for %s" )
void settingSimpleValueTypeName( String propertyName );
}
}

View File

@ -23,23 +23,21 @@
*/
package org.hibernate.cfg.annotations;
import static org.jboss.logging.Logger.Level.DEBUG;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.persistence.UniqueConstraint;
import org.hibernate.AnnotationException;
import org.hibernate.AssertionFailure;
import org.hibernate.annotations.Index;
import org.hibernate.cfg.Mappings;
import org.hibernate.util.StringHelper;
import org.hibernate.util.CollectionHelper;
import org.hibernate.cfg.BinderHelper;
import org.hibernate.cfg.Ejb3JoinColumn;
import org.hibernate.cfg.IndexOrUniqueKeySecondPass;
import org.hibernate.cfg.Mappings;
import org.hibernate.cfg.NamingStrategy;
import org.hibernate.cfg.ObjectNameNormalizer;
import org.hibernate.cfg.ObjectNameSource;
import org.hibernate.cfg.NamingStrategy;
import org.hibernate.cfg.UniqueConstraintHolder;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.Column;
@ -51,8 +49,12 @@ import org.hibernate.mapping.SimpleValue;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.ToOne;
import org.hibernate.mapping.Value;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.util.CollectionHelper;
import org.hibernate.util.StringHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Table related operations
@ -62,7 +64,9 @@ import org.slf4j.LoggerFactory;
@SuppressWarnings("unchecked")
public class TableBinder {
//TODO move it to a getter/setter strategy
private static Logger log = LoggerFactory.getLogger( TableBinder.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
TableBinder.class.getPackage().getName());
private String schema;
private String catalog;
private String name;
@ -147,7 +151,7 @@ public class TableBinder {
// ownerEntity can be null when the table name is explicitly set
final String ownerObjectName = isJPA2ElementCollection && ownerEntity != null ?
StringHelper.unqualify( ownerEntity ) : unquotedOwnerTable;
final ObjectNameSource nameSource = buildNameContext(
final ObjectNameSource nameSource = buildNameContext(
ownerObjectName,
unquotedAssocTable );
@ -201,7 +205,7 @@ public class TableBinder {
return new AssociationTableNameSource( name, logicalName );
}
public static Table buildAndFillTable(
String schema,
String catalog,
@ -228,7 +232,7 @@ public class TableBinder {
catalog,
realTableName,
isAbstract,
subselect,
subselect,
denormalizedSuperTable
);
}
@ -237,7 +241,7 @@ public class TableBinder {
schema,
catalog,
realTableName,
subselect,
subselect,
isAbstract
);
}
@ -271,7 +275,8 @@ public class TableBinder {
*
* @deprecated Use {@link #buildAndFillTable} instead.
*/
@SuppressWarnings({ "JavaDoc" })
@Deprecated
@SuppressWarnings({ "JavaDoc" })
public static Table fillTable(
String schema,
String catalog,
@ -338,7 +343,7 @@ public class TableBinder {
* Get the columns of the mapped-by property
* copy them and link the copy to the actual value
*/
log.debug("Retrieving property {}.{}", associatedClass.getEntityName(), mappedByProperty);
LOG.retrievingProperty(associatedClass.getEntityName(), mappedByProperty);
final Property property = associatedClass.getRecursiveProperty( columns[0].getMappedBy() );
Iterator mappedByColumns;
@ -445,7 +450,7 @@ public class TableBinder {
Iterator idColItr = referencedEntity.getKey().getColumnIterator();
org.hibernate.mapping.Column col;
Table table = referencedEntity.getTable(); //works cause the pk has to be on the primary table
if ( !idColItr.hasNext() ) log.debug( "No column in the identifier!" );
if (!idColItr.hasNext()) LOG.noColumnInIdentifier();
while ( idColItr.hasNext() ) {
boolean match = false;
//for each PK column, find the associated FK column.
@ -491,7 +496,7 @@ public class TableBinder {
Ejb3JoinColumn[] columns,
SimpleValue value) {
for (Ejb3JoinColumn joinCol : columns) {
Column synthCol = (Column) columnIterator.next();
Column synthCol = (Column) columnIterator.next();
if ( joinCol.isNameDeferred() ) {
//this has to be the default value
joinCol.linkValueUsingDefaultColumnNaming( synthCol, referencedEntity, value );
@ -524,7 +529,8 @@ public class TableBinder {
/**
* @deprecated Use {@link #buildUniqueConstraintHolders} instead
*/
@SuppressWarnings({ "JavaDoc" })
@Deprecated
@SuppressWarnings({ "JavaDoc" })
public static List<String[]> buildUniqueConstraints(UniqueConstraint[] constraintsArray) {
List<String[]> result = new ArrayList<String[]>();
if ( constraintsArray.length != 0 ) {
@ -572,4 +578,20 @@ public class TableBinder {
this.propertyName = propertyName;
this.name = null;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "Retrieving property %s.%s" )
void retrievingProperty( String entityName,
String propertyName );
@LogMessage( level = DEBUG )
@Message( value = "No column in the identifier!" )
void noColumnInIdentifier();
}
}

View File

@ -24,6 +24,7 @@
package org.hibernate.cfg.annotations.reflection;
import static org.jboss.logging.Logger.Level.WARN;
import java.beans.Introspector;
import java.lang.annotation.Annotation;
import java.lang.reflect.AccessibleObject;
@ -115,12 +116,8 @@ import javax.persistence.TemporalType;
import javax.persistence.Transient;
import javax.persistence.UniqueConstraint;
import javax.persistence.Version;
import org.dom4j.Attribute;
import org.dom4j.Element;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.AnnotationException;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CollectionOfElements;
@ -132,6 +129,10 @@ import org.hibernate.annotations.common.reflection.Filter;
import org.hibernate.annotations.common.reflection.ReflectionUtil;
import org.hibernate.util.ReflectHelper;
import org.hibernate.util.StringHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Encapsulates the overriding of Java annotations from an EJB 3.0 descriptor.
@ -143,7 +144,8 @@ import org.hibernate.util.StringHelper;
*/
@SuppressWarnings("unchecked")
public class JPAOverridenAnnotationReader implements AnnotationReader {
private Logger log = LoggerFactory.getLogger( JPAOverridenAnnotationReader.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
JPAOverridenAnnotationReader.class.getPackage().getName());
private static final Map<Class, String> annotationToXml;
private static final String SCHEMA_VALIDATION = "Activate schema validation for more information";
private static final Filter FILTER = new Filter() {
@ -441,13 +443,7 @@ public class JPAOverridenAnnotationReader implements AnnotationReader {
}
for ( Element subelement : (List<Element>) element.elements() ) {
String propertyName = subelement.attributeValue( "name" );
if ( !properties.contains( propertyName ) ) {
log.warn(
"Property {} not found in class"
+ " but described in <mapping-file/> (possible typo error)",
StringHelper.qualify( className, propertyName )
);
}
if (!properties.contains(propertyName)) LOG.propertyNotFound(StringHelper.qualify(className, propertyName));
}
}
}
@ -2508,4 +2504,15 @@ public class JPAOverridenAnnotationReader implements AnnotationReader {
private Annotation[] getJavaAnnotations() {
return element.getAnnotations();
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "Property %s not found in class but described in <mapping-file/> (possible typo error)" )
void propertyNotFound( String property );
}
}

View File

@ -26,28 +26,30 @@
package org.hibernate.cfg.annotations.reflection;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.INFO;
import static org.jboss.logging.Logger.Level.WARN;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.persistence.AccessType;
import org.dom4j.Document;
import org.dom4j.Element;
import org.hibernate.AnnotationException;
import org.hibernate.util.StringHelper;
import org.slf4j.LoggerFactory;
import org.slf4j.Logger;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* @author Emmanuel Bernard
*/
public class XMLContext implements Serializable {
private Logger log = LoggerFactory.getLogger( XMLContext.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
XMLContext.class.getPackage().getName());
private Default globalDefaults;
private Map<String, Element> classOverriding = new HashMap<String, Element>();
private Map<String, Default> defaultsOverriding = new HashMap<String, Default>();
@ -90,7 +92,7 @@ public class XMLContext implements Serializable {
}
}
else {
log.warn( "Found more than one <persistence-unit-metadata>, subsequent ignored" );
LOG.duplicateMetadata();
}
}
@ -107,13 +109,13 @@ public class XMLContext implements Serializable {
setAccess( unitElement, entityMappingDefault );
defaultElements.add( root );
List<Element> entities = (List<Element>) root.elements( "entity" );
List<Element> entities = root.elements( "entity" );
addClass( entities, packageName, entityMappingDefault, addedClasses );
entities = (List<Element>) root.elements( "mapped-superclass" );
entities = root.elements( "mapped-superclass" );
addClass( entities, packageName, entityMappingDefault, addedClasses );
entities = (List<Element>) root.elements( "embeddable" );
entities = root.elements( "embeddable" );
addClass( entities, packageName, entityMappingDefault, addedClasses );
return addedClasses;
}
@ -157,7 +159,7 @@ public class XMLContext implements Serializable {
setAccess( access, localDefault );
defaultsOverriding.put( className, localDefault );
log.debug( "Adding XML overriding information for {}", className );
LOG.addingOverridingInformation(className);
addEntityListenerClasses( element, packageName, addedClasses );
}
}
@ -167,16 +169,13 @@ public class XMLContext implements Serializable {
Element listeners = element.element( "entity-listeners" );
if ( listeners != null ) {
@SuppressWarnings( "unchecked" )
List<Element> elements = (List<Element>) listeners.elements( "entity-listener" );
List<Element> elements = listeners.elements( "entity-listener" );
for (Element listener : elements) {
String listenerClassName = buildSafeClassName( listener.attributeValue( "class" ), packageName );
if ( classOverriding.containsKey( listenerClassName ) ) {
//maybe switch it to warn?
if ( "entity-listener".equals( classOverriding.get( listenerClassName ).getName() ) ) {
log.info(
"entity-listener duplication, first event definition will be used: {}",
listenerClassName
);
LOG.duplicateListener(listenerClassName);
continue;
}
else {
@ -187,7 +186,7 @@ public class XMLContext implements Serializable {
classOverriding.put( listenerClassName, listener );
}
}
log.debug( "Adding XML overriding information for listener: {}", localAddedClasses );
LOG.addingListenerOverridingInformation(localAddedClasses);
addedClasses.addAll( localAddedClasses );
return localAddedClasses;
}
@ -313,4 +312,27 @@ public class XMLContext implements Serializable {
public List<String> getDefaultEntityListeners() {
return defaultEntityListeners;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "Adding XML overriding information for %s" )
void addingOverridingInformation( String className );
@LogMessage( level = DEBUG )
@Message( value = "Adding XML overriding information for listeners: %s" )
void addingListenerOverridingInformation( List<String> classNames );
@LogMessage( level = INFO )
@Message( value = "entity-listener duplication, first event definition will be used: %s" )
void duplicateListener( String className );
@LogMessage( level = WARN )
@Message( value = "Found more than one <persistence-unit-metadata>, subsequent ignored" )
void duplicateMetadata();
}
}

View File

@ -23,6 +23,7 @@
*/
package org.hibernate.cfg.beanvalidation;
import static org.jboss.logging.Logger.Level.TRACE;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
@ -33,10 +34,6 @@ import javax.validation.TraversableResolver;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.EntityMode;
import org.hibernate.cfg.Configuration;
import org.hibernate.engine.SessionFactoryImplementor;
@ -48,6 +45,10 @@ import org.hibernate.event.PreInsertEventListener;
import org.hibernate.event.PreUpdateEvent;
import org.hibernate.event.PreUpdateEventListener;
import org.hibernate.persister.entity.EntityPersister;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Event listener used to enable Bean Validation for insert/update/delete events.
@ -59,7 +60,9 @@ import org.hibernate.persister.entity.EntityPersister;
public class BeanValidationEventListener implements
PreInsertEventListener, PreUpdateEventListener, PreDeleteEventListener, Initializable {
private static final Logger log = LoggerFactory.getLogger( BeanValidationEventListener.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
BeanValidationEventListener.class.getPackage().getName());
private ValidatorFactory factory;
private ConcurrentHashMap<EntityPersister, Set<String>> associationsPerEntityPersister =
new ConcurrentHashMap<EntityPersister, Set<String>>();
@ -139,9 +142,7 @@ public class BeanValidationEventListener implements
new HashSet<ConstraintViolation<?>>( constraintViolations.size() );
Set<String> classNames = new HashSet<String>();
for ( ConstraintViolation<?> violation : constraintViolations ) {
if ( log.isTraceEnabled() ) {
log.trace( violation.toString() );
}
LOG.violation(violation);
propagatedViolations.add( violation );
classNames.add( violation.getLeafBean().getClass().getName() );
}
@ -173,4 +174,15 @@ public class BeanValidationEventListener implements
toString.append( "]" );
return toString.toString();
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "%s" )
void violation( ConstraintViolation<?> violation );
}
}

View File

@ -23,6 +23,7 @@
*/
package org.hibernate.cfg.beanvalidation;
import static org.jboss.logging.Logger.Level.WARN;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
@ -42,10 +43,6 @@ import javax.validation.constraints.Size;
import javax.validation.metadata.BeanDescriptor;
import javax.validation.metadata.ConstraintDescriptor;
import javax.validation.metadata.PropertyDescriptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.MappingException;
@ -60,6 +57,10 @@ import org.hibernate.mapping.Property;
import org.hibernate.mapping.SingleTableSubclass;
import org.hibernate.util.ReflectHelper;
import org.hibernate.util.StringHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* @author Emmanuel Bernard
@ -67,7 +68,8 @@ import org.hibernate.util.StringHelper;
*/
class TypeSafeActivator {
private static final Logger logger = LoggerFactory.getLogger( TypeSafeActivator.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
TypeSafeActivator.class.getPackage().getName());
private static final String FACTORY_PROPERTY = "javax.persistence.validation.factory";
@ -127,8 +129,8 @@ class TypeSafeActivator {
try {
applyDDL( "", persistentClass, clazz, factory, groups, true );
}
catch ( Exception e ) {
logger.warn( "Unable to apply constraints on DDL for " + className, e );
catch (Exception e) {
LOG.unableToApplyConstraints(className, e.getMessage());
}
}
}
@ -245,8 +247,8 @@ class TypeSafeActivator {
if ( !( property.getPersistentClass() instanceof SingleTableSubclass ) ) {
//single table should not be forced to null
if ( !property.isComposite() ) { //composite should not add not-null on all columns
@SuppressWarnings("unchecked")
Iterator<Column> iter = (Iterator<Column>) property.getColumnIterator();
@SuppressWarnings( "unchecked" )
Iterator<Column> iter = property.getColumnIterator();
while ( iter.hasNext() ) {
iter.next().setNullable( false );
hasNotNull = true;
@ -386,4 +388,15 @@ class TypeSafeActivator {
return factory;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "Unable to apply constraints on DDL for %s\n%s" )
void unableToApplyConstraints( String className,
String message );
}
}

View File

@ -23,8 +23,9 @@
*/
package org.hibernate.cfg.search;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.INFO;
import java.util.Properties;
import org.hibernate.AnnotationException;
import org.hibernate.event.EventListeners;
import org.hibernate.event.PostCollectionRecreateEventListener;
@ -34,20 +35,23 @@ import org.hibernate.event.PostDeleteEventListener;
import org.hibernate.event.PostInsertEventListener;
import org.hibernate.event.PostUpdateEventListener;
import org.hibernate.util.ReflectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Helper methods initializing Hibernate Search event listeners.
*
*
* @deprecated as of release 3.4.0.CR2, replaced by Hibernate Search's {@link org.hibernate.search.cfg.EventListenerRegister}
* @author Emmanuel Bernard
* @author Hardy Ferentschik
*/
@Deprecated
@Deprecated
public class HibernateSearchEventListenerRegister {
private static final Logger log = LoggerFactory.getLogger(HibernateSearchEventListenerRegister.class);
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
HibernateSearchEventListenerRegister.class.getPackage().getName());
/**
* Class name of the class needed to enable Search.
@ -58,27 +62,26 @@ public class HibernateSearchEventListenerRegister {
* @deprecated as of release 3.4.0.CR2, replaced by Hibernate Search's {@link org.hibernate.search.cfg.EventListenerRegister#enableHibernateSearch(EventListeners, Properties)}
*/
@SuppressWarnings("unchecked")
@Deprecated
@Deprecated
public static void enableHibernateSearch(EventListeners eventListeners, Properties properties) {
// check whether search is explicitly enabled - if so there is nothing
// to do
String enableSearchListeners = properties.getProperty( "hibernate.search.autoregister_listeners" );
if("false".equalsIgnoreCase(enableSearchListeners )) {
log.info("Property hibernate.search.autoregister_listeners is set to false." +
" No attempt will be made to register Hibernate Search event listeners.");
LOG.willNotRegisterListeners();
return;
}
// add search events if the jar is available and class can be loaded
Class searchEventListenerClass = attemptToLoadSearchEventListener();
if ( searchEventListenerClass == null ) {
log.info("Unable to find {} on the classpath. Hibernate Search is not enabled.", FULL_TEXT_INDEX_EVENT_LISTENER_CLASS);
LOG.unableToFindListenerClass(FULL_TEXT_INDEX_EVENT_LISTENER_CLASS);
return;
}
Object searchEventListener = instantiateEventListener(searchEventListenerClass);
//TODO Generalize this. Pretty much the same code all the time. Reflecetion?
// TODO Generalize this. Pretty much the same code all the time. Reflection?
{
boolean present = false;
PostInsertEventListener[] listeners = eventListeners
@ -147,7 +150,7 @@ public class HibernateSearchEventListenerRegister {
eventListeners
.setPostDeleteEventListeners(new PostDeleteEventListener[] { (PostDeleteEventListener) searchEventListener });
}
}
}
{
boolean present = false;
PostCollectionRecreateEventListener[] listeners = eventListeners.getPostCollectionRecreateEventListeners();
@ -213,12 +216,12 @@ public class HibernateSearchEventListenerRegister {
new PostCollectionUpdateEventListener[] { (PostCollectionUpdateEventListener) searchEventListener }
);
}
}
}
}
/**
* Tries to load Hibernate Search event listener.
*
*
* @return An event listener instance in case the jar was available.
*/
private static Class<?> attemptToLoadSearchEventListener() {
@ -228,7 +231,7 @@ public class HibernateSearchEventListenerRegister {
FULL_TEXT_INDEX_EVENT_LISTENER_CLASS,
HibernateSearchEventListenerRegister.class);
} catch (ClassNotFoundException e) {
log.debug("Search not present in classpath, ignoring event listener registration.");
LOG.unableToFindListenerClass();
}
return searchEventListenerClass;
}
@ -243,4 +246,23 @@ public class HibernateSearchEventListenerRegister {
}
return searchEventListener;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = INFO )
@Message( value = "Property hibernate.search.autoregister_listeners is set to false. No attempt will be made to register Hibernate Search event listeners." )
void willNotRegisterListeners();
@LogMessage( level = INFO )
@Message( value = "Unable to find %s on the classpath. Hibernate Search is not enabled." )
void unableToFindListenerClass( String className );
@LogMessage( level = DEBUG )
@Message( value = "Search not present in classpath, ignoring event listener registration." )
void unableToFindListenerClass();
}
}

View File

@ -24,6 +24,7 @@
*/
package org.hibernate.collection;
import static org.jboss.logging.Logger.Level.ERROR;
import java.io.Serializable;
import java.lang.reflect.Array;
import java.sql.ResultSet;
@ -31,15 +32,16 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.EntityMode;
import org.hibernate.HibernateException;
import org.hibernate.loader.CollectionAliases;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.loader.CollectionAliases;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.type.Type;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* A persistent wrapper for an array. Lazy initialization
@ -51,7 +53,8 @@ import org.hibernate.type.Type;
public class PersistentArrayHolder extends AbstractPersistentCollection {
protected Object array;
private static final Logger log = LoggerFactory.getLogger(PersistentArrayHolder.class);
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
PersistentArrayHolder.class.getPackage().getName());
//just to help out during the load (ugly, i know)
private transient Class elementClass;
@ -73,7 +76,7 @@ public class PersistentArrayHolder extends AbstractPersistentCollection {
Array.set( result, i, persister.getElementType().deepCopy(elt, entityMode, persister.getFactory()) );
}
catch (IllegalArgumentException iae) {
log.error("Array element type error", iae);
LOG.invalidArrayElementType(iae.getMessage());
throw new HibernateException( "Array element type error", iae );
}
}
@ -84,7 +87,8 @@ public class PersistentArrayHolder extends AbstractPersistentCollection {
return Array.getLength( snapshot ) == 0;
}
public Collection getOrphans(Serializable snapshot, String entityName) throws HibernateException {
@Override
public Collection getOrphans(Serializable snapshot, String entityName) throws HibernateException {
Object[] sn = (Object[]) snapshot;
Object[] arr = (Object[]) array;
ArrayList result = new ArrayList();
@ -126,7 +130,8 @@ public class PersistentArrayHolder extends AbstractPersistentCollection {
}
return list.iterator();
}
public boolean empty() {
@Override
public boolean empty() {
return false;
}
@ -146,11 +151,13 @@ public class PersistentArrayHolder extends AbstractPersistentCollection {
return elements();
}
public void beginRead() {
@Override
public void beginRead() {
super.beginRead();
tempList = new ArrayList();
}
public boolean endRead() {
@Override
public boolean endRead() {
setInitialized();
array = Array.newInstance( elementClass, tempList.size() );
for ( int i=0; i<tempList.size(); i++) {
@ -164,7 +171,8 @@ public class PersistentArrayHolder extends AbstractPersistentCollection {
//if (tempList==null) throw new UnsupportedOperationException("Can't lazily initialize arrays");
}
public boolean isDirectlyAccessible() {
@Override
public boolean isDirectlyAccessible() {
return true;
}
@ -196,7 +204,8 @@ public class PersistentArrayHolder extends AbstractPersistentCollection {
}
public Object getValue() {
@Override
public Object getValue() {
return array;
}
@ -249,4 +258,14 @@ public class PersistentArrayHolder extends AbstractPersistentCollection {
return entry!=null;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = ERROR )
@Message( value = "Array element type error\n%s" )
void invalidArrayElementType( String message );
}
}

View File

@ -0,0 +1,256 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.connection;
import static org.jboss.logging.Logger.Level.ERROR;
import static org.jboss.logging.Logger.Level.INFO;
import static org.jboss.logging.Logger.Level.WARN;
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.hibernate.HibernateException;
import org.hibernate.cfg.Environment;
import org.hibernate.util.ReflectHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Instantiates a connection provider given either <tt>System</tt> properties or
* a <tt>java.util.Properties</tt> instance. The <tt>ConnectionProviderFactory</tt>
* first attempts to find a name of a <tt>ConnectionProvider</tt> subclass in the
* property <tt>hibernate.connection.provider_class</tt>. If missing, heuristics are used
* to choose either <tt>DriverManagerConnectionProvider</tt>,
* <tt>DatasourceConnectionProvider</tt>, <tt>C3P0ConnectionProvider</tt> or
* <tt>DBCPConnectionProvider</tt>.
*
* @author Gavin King
* @see ConnectionProvider
*/
public final class ConnectionProviderFactory {
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
ConnectionProviderFactory.class.getPackage().getName());
/**
* Instantiate a <tt>ConnectionProvider</tt> using <tt>System</tt> properties.
*
* @return The created connection provider.
*
* @throws HibernateException
*/
public static ConnectionProvider newConnectionProvider() throws HibernateException {
return newConnectionProvider( Environment.getProperties() );
}
/**
* Instantiate a <tt>ConnectionProvider</tt> using given properties.
* Method newConnectionProvider.
*
* @param properties hibernate <tt>SessionFactory</tt> properties
*
* @return ConnectionProvider
*
* @throws HibernateException
*/
public static ConnectionProvider newConnectionProvider(Properties properties) throws HibernateException {
return newConnectionProvider( properties, null );
}
/**
* Create a connection provider based on the given information.
*
* @param properties Properties being used to build the {@link org.hibernate.SessionFactory}.
* @param connectionProviderInjectionData Something to be injected in the connection provided
*
* @return The created connection provider
*
* @throws HibernateException
*/
public static ConnectionProvider newConnectionProvider(Properties properties, Map connectionProviderInjectionData)
throws HibernateException {
ConnectionProvider connections;
String providerClass = properties.getProperty( Environment.CONNECTION_PROVIDER );
if ( providerClass != null ) {
connections = initializeConnectionProviderFromConfig( providerClass );
}
else if ( c3p0ConfigDefined( properties ) && c3p0ProviderPresent() ) {
connections = initializeConnectionProviderFromConfig("org.hibernate.connection.C3P0ConnectionProvider");
}
else if ( properties.getProperty( Environment.DATASOURCE ) != null ) {
connections = new DatasourceConnectionProvider();
}
else if ( properties.getProperty( Environment.URL ) != null ) {
connections = new DriverManagerConnectionProvider();
}
else {
connections = new UserSuppliedConnectionProvider();
}
if ( connectionProviderInjectionData != null && connectionProviderInjectionData.size() != 0 ) {
//inject the data
try {
BeanInfo info = Introspector.getBeanInfo( connections.getClass() );
PropertyDescriptor[] descriptors = info.getPropertyDescriptors();
int size = descriptors.length;
for ( int index = 0; index < size; index++ ) {
String propertyName = descriptors[index].getName();
if ( connectionProviderInjectionData.containsKey( propertyName ) ) {
Method method = descriptors[index].getWriteMethod();
method.invoke(
connections, new Object[] { connectionProviderInjectionData.get( propertyName ) }
);
}
}
}
catch ( IntrospectionException e ) {
throw new HibernateException( "Unable to inject objects into the connection provider", e );
}
catch ( IllegalAccessException e ) {
throw new HibernateException( "Unable to inject objects into the connection provider", e );
}
catch ( InvocationTargetException e ) {
throw new HibernateException( "Unable to inject objects into the connection provider", e );
}
}
connections.configure( properties );
return connections;
}
private static boolean c3p0ProviderPresent() {
try {
ReflectHelper.classForName( "org.hibernate.connection.C3P0ConnectionProvider" );
}
catch ( ClassNotFoundException e ) {
LOG.c3p0PropertiesIgnored();
return false;
}
return true;
}
private static boolean c3p0ConfigDefined(Properties properties) {
Iterator iter = properties.keySet().iterator();
while ( iter.hasNext() ) {
String property = (String) iter.next();
if ( property.startsWith( "hibernate.c3p0" ) ) {
return true;
}
}
return false;
}
private static ConnectionProvider initializeConnectionProviderFromConfig(String providerClass) {
ConnectionProvider connections;
try {
LOG.initializingConnectionProvider(providerClass);
connections = (ConnectionProvider) ReflectHelper.classForName( providerClass ).newInstance();
}
catch ( Exception e ) {
LOG.unableToInstantiateConnectionProvider(e.getMessage());
throw new HibernateException( "Could not instantiate connection provider: " + providerClass );
}
return connections;
}
// cannot be instantiated
private ConnectionProviderFactory() {
throw new UnsupportedOperationException();
}
/**
* Transform JDBC connection properties.
*
* Properties passed in the form <tt>hibernate.connection.*</tt> are converted to the
* format accepted by <tt>DriverManager</tt> by trimming the leading "<tt>hibernate.connection</tt>" prefix.
*/
public static Properties getConnectionProperties(Properties properties) {
Iterator iter = properties.keySet().iterator();
Properties result = new Properties();
while ( iter.hasNext() ) {
String prop = (String) iter.next();
if ( prop.startsWith( Environment.CONNECTION_PREFIX ) && !SPECIAL_PROPERTIES.contains( prop ) ) {
result.setProperty(
prop.substring( Environment.CONNECTION_PREFIX.length() + 1 ),
properties.getProperty( prop )
);
}
}
String userName = properties.getProperty( Environment.USER );
if ( userName != null ) {
result.setProperty( "user", userName );
}
return result;
}
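// Illustrative example (an editor's sketch, not part of this commit): given the properties
//   hibernate.connection.characterEncoding=UTF-8   and   hibernate.connection.username=sa
// the method above would return a Properties holding characterEncoding=UTF-8 plus user=sa,
// while the keys listed in SPECIAL_PROPERTIES below (url, driver_class, pool_size, ...) are skipped.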
private static final Set SPECIAL_PROPERTIES;
static {
SPECIAL_PROPERTIES = new HashSet();
SPECIAL_PROPERTIES.add( Environment.DATASOURCE );
SPECIAL_PROPERTIES.add( Environment.URL );
SPECIAL_PROPERTIES.add( Environment.CONNECTION_PROVIDER );
SPECIAL_PROPERTIES.add( Environment.POOL_SIZE );
SPECIAL_PROPERTIES.add( Environment.ISOLATION );
SPECIAL_PROPERTIES.add( Environment.DRIVER );
SPECIAL_PROPERTIES.add( Environment.USER );
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "c3p0 properties is specified, but could not find org.hibernate.connection.C3P0ConnectionProvider from the classpath, these properties are going to be ignored." )
void c3p0PropertiesIgnored();
@LogMessage( level = INFO )
@Message( value = "Initializing connection provider: %s" )
void initializingConnectionProvider( String providerClass );
@LogMessage( level = ERROR )
@Message( value = "Could not instantiate connection provider\n%s" )
void unableToInstantiateConnectionProvider( String message );
}
}
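A minimal usage sketch for the factory above; the property values and the H2 URL are illustrative assumptions, not taken from this commit:

Properties props = new Properties();
// Explicit selection via hibernate.connection.provider_class (Environment.CONNECTION_PROVIDER).
props.setProperty( "hibernate.connection.provider_class", "org.hibernate.connection.DriverManagerConnectionProvider" );
props.setProperty( "hibernate.connection.url", "jdbc:h2:mem:test" );
ConnectionProvider provider = ConnectionProviderFactory.newConnectionProvider( props );
// With no provider_class set, the heuristics above fall back to C3P0ConnectionProvider (when
// hibernate.c3p0.* properties are present and the provider class is on the classpath), then to
// DatasourceConnectionProvider, DriverManagerConnectionProvider, and finally UserSuppliedConnectionProvider.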

View File

@ -0,0 +1,130 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.connection;
import static org.jboss.logging.Logger.Level.ERROR;
import static org.jboss.logging.Logger.Level.INFO;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import javax.sql.DataSource;
import org.hibernate.HibernateException;
import org.hibernate.cfg.Environment;
import org.hibernate.internal.util.jndi.JndiHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* A connection provider that uses a <tt>DataSource</tt> registered with JNDI.
* Hibernate will use this <tt>ConnectionProvider</tt> by default if the
* property <tt>hibernate.connection.datasource</tt> is set.
* @see ConnectionProvider
* @author Gavin King
*/
public class DatasourceConnectionProvider implements ConnectionProvider {
private DataSource ds;
private String user;
private String pass;
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
DatasourceConnectionProvider.class.getPackage().getName());
public DataSource getDataSource() {
return ds;
}
public void setDataSource(DataSource ds) {
this.ds = ds;
}
public void configure(Properties props) throws HibernateException {
String jndiName = props.getProperty( Environment.DATASOURCE );
if ( jndiName == null ) {
String msg = "datasource JNDI name was not specified by property " + Environment.DATASOURCE;
LOG.unspecifiedJndiName(msg);
throw new HibernateException( msg );
}
user = props.getProperty( Environment.USER );
pass = props.getProperty( Environment.PASS );
try {
ds = ( DataSource ) JndiHelper.getInitialContext( props ).lookup( jndiName );
}
catch ( Exception e ) {
LOG.datasourceNotFound(jndiName, e.getMessage());
throw new HibernateException( "Could not find datasource", e );
}
if ( ds == null ) {
throw new HibernateException( "Could not find datasource: " + jndiName );
}
LOG.usingDatasource(jndiName);
}
public Connection getConnection() throws SQLException {
if (user != null || pass != null) {
return ds.getConnection(user, pass);
}
else {
return ds.getConnection();
}
}
public void closeConnection(Connection conn) throws SQLException {
conn.close();
}
public void close() {}
/**
* @see ConnectionProvider#supportsAggressiveRelease()
*/
public boolean supportsAggressiveRelease() {
return true;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = ERROR )
@Message( value = "Could not find datasource: %s\n%s" )
void datasourceNotFound( String jndiName,
String message );
@LogMessage( level = ERROR )
@Message( value = "%s" )
void unspecifiedJndiName( String msg );
@LogMessage( level = INFO )
@Message( value = "Using datasource: %s" )
void usingDatasource( String jndiName );
}
}
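A configuration sketch for the JNDI-backed provider above; the JNDI name and credentials are illustrative assumptions:

Properties props = new Properties();
props.setProperty( "hibernate.connection.datasource", "java:comp/env/jdbc/MyDS" ); // Environment.DATASOURCE
props.setProperty( "hibernate.connection.username", "app_user" );                  // Environment.USER (optional)
props.setProperty( "hibernate.connection.password", "secret" );                    // Environment.PASS (optional)
ConnectionProvider provider = ConnectionProviderFactory.newConnectionProvider( props );
Connection connection = provider.getConnection(); // delegates to DataSource.getConnection(user, pass) when credentials are set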

View File

@ -0,0 +1,275 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.connection;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.ERROR;
import static org.jboss.logging.Logger.Level.INFO;
import static org.jboss.logging.Logger.Level.TRACE;
import static org.jboss.logging.Logger.Level.WARN;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Properties;
import org.hibernate.HibernateException;
import org.hibernate.cfg.Environment;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.util.ReflectHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* A connection provider that uses <tt>java.sql.DriverManager</tt>. This provider
* also implements a very rudimentary connection pool.
* @see ConnectionProvider
* @author Gavin King
*/
public class DriverManagerConnectionProvider implements ConnectionProvider {
private String url;
private Properties connectionProps;
private Integer isolation;
private final ArrayList pool = new ArrayList();
private int poolSize;
private int checkedOut = 0;
private boolean autocommit;
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
DriverManagerConnectionProvider.class.getPackage().getName());
public void configure(Properties props) throws HibernateException {
String driverClass = props.getProperty(Environment.DRIVER);
poolSize = ConfigurationHelper.getInt(Environment.POOL_SIZE, props, 20); //default pool size 20
LOG.usingHibernateConnectionPool();
LOG.hibernateConnectionPoolSize(poolSize);
autocommit = ConfigurationHelper.getBoolean(Environment.AUTOCOMMIT, props);
LOG.autocommitMode(autocommit);
isolation = ConfigurationHelper.getInteger(Environment.ISOLATION, props);
if (isolation!=null)
LOG.jdbcIsolationLevel(Environment.isolationLevelToString(isolation.intValue()));
if (driverClass == null) LOG.unspecifiedJdbcDriver(Environment.DRIVER);
else {
try {
// trying via forName() first to be as close to DriverManager's semantics
Class.forName(driverClass);
}
catch (ClassNotFoundException cnfe) {
try {
ReflectHelper.classForName(driverClass);
}
catch (ClassNotFoundException e) {
String msg = "JDBC Driver class not found: " + driverClass;
LOG.jdbcDriverClassNotFound(msg, e.getMessage());
throw new HibernateException(msg, e);
}
}
}
url = props.getProperty( Environment.URL );
if ( url == null ) {
String msg = "JDBC URL was not specified by property " + Environment.URL;
LOG.unspecifiedJdbcUrl(msg);
throw new HibernateException( msg );
}
connectionProps = ConnectionProviderFactory.getConnectionProperties( props );
LOG.usingDriver(driverClass, url);
// if debug logging is enabled, log the full connection properties (including the password); otherwise mask the password
if (LOG.isDebugEnabled()) LOG.connectionProperties(connectionProps);
else LOG.connectionProperties(ConfigurationHelper.maskOut(connectionProps, "password"));
}
public Connection getConnection() throws SQLException {
LOG.checkedOutConnections(checkedOut);
synchronized (pool) {
if ( !pool.isEmpty() ) {
int last = pool.size() - 1;
if (LOG.isTraceEnabled()) {
LOG.usingPooledJdbcConnection(last);
checkedOut++;
}
Connection pooled = (Connection) pool.remove(last);
if (isolation!=null) pooled.setTransactionIsolation( isolation.intValue() );
if ( pooled.getAutoCommit()!=autocommit ) pooled.setAutoCommit(autocommit);
return pooled;
}
}
LOG.openingNewJdbcConnection();
Connection conn = DriverManager.getConnection(url, connectionProps);
if (isolation!=null) conn.setTransactionIsolation( isolation.intValue() );
if ( conn.getAutoCommit()!=autocommit ) conn.setAutoCommit(autocommit);
LOG.createdConnection(url, conn.getTransactionIsolation());
if (LOG.isTraceEnabled()) checkedOut++;
return conn;
}
public void closeConnection(Connection conn) throws SQLException {
if (LOG.isDebugEnabled()) checkedOut--;
synchronized (pool) {
int currentSize = pool.size();
if ( currentSize < poolSize ) {
LOG.returningConnectionToPool(currentSize + 1);
pool.add(conn);
return;
}
}
LOG.closingJdbcConnection();
conn.close();
}
@Override
protected void finalize() {
close();
}
public void close() {
LOG.cleaningConnectionPool(url);
Iterator iter = pool.iterator();
while ( iter.hasNext() ) {
try {
( (Connection) iter.next() ).close();
}
catch (SQLException sqle) {
LOG.unableToClosePooledConnection(sqle.getMessage());
}
}
pool.clear();
}
/**
* @see ConnectionProvider#supportsAggressiveRelease()
*/
public boolean supportsAggressiveRelease() {
return false;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = INFO )
@Message( value = "Autocommit mode: %s" )
void autocommitMode( boolean autocommit );
@LogMessage( level = TRACE )
@Message( value = "Total checked-out connections: %d" )
void checkedOutConnections( int checkedOut );
@LogMessage( level = INFO )
@Message( value = "Cleaning up connection pool: %s" )
void cleaningConnectionPool( String url );
@LogMessage( level = DEBUG )
@Message( value = "Closing JDBC connection" )
void closingJdbcConnection();
@LogMessage( level = INFO )
@Message( value = "Connection properties: %s" )
void connectionProperties( Properties connectionProps );
@LogMessage( level = DEBUG )
@Message( value = "Created connection to: %s, Isolation Level: %d" )
void createdConnection( String url,
int transactionIsolation );
@LogMessage( level = INFO )
@Message( value = "Hibernate connection pool size: %s" )
void hibernateConnectionPoolSize( int poolSize );
@LogMessage( level = ERROR )
@Message( value = "%s\n%s" )
void jdbcDriverClassNotFound( String message,
String errorMessage );
@LogMessage( level = INFO )
@Message( value = "JDBC isolation level: %s" )
void jdbcIsolationLevel( String isolationLevelToString );
@LogMessage( level = DEBUG )
@Message( value = "Opening new JDBC connection" )
void openingNewJdbcConnection();
@LogMessage( level = TRACE )
@Message( value = "Returning connection to pool, pool size: %d" )
void returningConnectionToPool( int i );
@LogMessage( level = WARN )
@Message( value = "Problem closing pooled connection\n%s" )
void unableToClosePooledConnection( String message );
@LogMessage( level = WARN )
@Message( value = "No JDBC Driver class was specified by property %s" )
void unspecifiedJdbcDriver( String driver );
@LogMessage( level = ERROR )
@Message( value = "%s" )
void unspecifiedJdbcUrl( String message );
@LogMessage( level = INFO )
@Message( value = "Using driver: %s at URL: %s" )
void usingDriver( String driverClass,
String url );
@LogMessage( level = INFO )
@Message( value = "Using Hibernate built-in connection pool (not for production use!)" )
void usingHibernateConnectionPool();
@LogMessage( level = TRACE )
@Message( value = "Using pooled JDBC connection, pool size: %d" )
void usingPooledJdbcConnection( int last );
}
}
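A configuration sketch for the built-in pool above; the driver, URL and sizes are illustrative assumptions (and, as the INFO message notes, this pool is not meant for production use):

Properties props = new Properties();
props.setProperty( "hibernate.connection.driver_class", "org.h2.Driver" ); // Environment.DRIVER
props.setProperty( "hibernate.connection.url", "jdbc:h2:mem:test" );       // Environment.URL (required)
props.setProperty( "hibernate.connection.pool_size", "5" );                // Environment.POOL_SIZE, defaults to 20
props.setProperty( "hibernate.connection.autocommit", "false" );           // Environment.AUTOCOMMIT
props.setProperty( "hibernate.connection.isolation", "2" );                // Environment.ISOLATION (2 = READ_COMMITTED)
ConnectionProvider provider = ConnectionProviderFactory.newConnectionProvider( props );
Connection connection = provider.getConnection(); // returned to the pool by closeConnection() while the pool is below pool_size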

View File

@ -24,19 +24,21 @@
*/
package org.hibernate.context;
import org.hibernate.HibernateException;
import static org.jboss.logging.Logger.Level.DEBUG;
import java.util.Hashtable;
import java.util.Map;
import javax.transaction.Synchronization;
import javax.transaction.Transaction;
import javax.transaction.TransactionManager;
import org.hibernate.ConnectionReleaseMode;
import org.hibernate.HibernateException;
import org.hibernate.classic.Session;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.util.JTAHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.transaction.Transaction;
import javax.transaction.TransactionManager;
import javax.transaction.Synchronization;
import java.util.Map;
import java.util.Hashtable;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* An implementation of {@link CurrentSessionContext} which scopes the notion
@ -63,7 +65,8 @@ import java.util.Hashtable;
*/
public class JTASessionContext implements CurrentSessionContext {
private static final Logger log = LoggerFactory.getLogger( JTASessionContext.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
JTASessionContext.class.getPackage().getName());
protected final SessionFactoryImplementor factory;
private transient Map currentSessionMap = new Hashtable();
@ -118,7 +121,7 @@ public class JTASessionContext implements CurrentSessionContext {
currentSession.close();
}
catch ( Throwable ignore ) {
log.debug( "Unable to release generated current-session on failed synch registration", ignore );
LOG.unableToReleaseSession(ignore.getMessage());
}
throw new HibernateException( "Unable to register cleanup Synchronization with TransactionManager" );
}
@ -209,4 +212,15 @@ public class JTASessionContext implements CurrentSessionContext {
context.currentSessionMap.remove( transactionIdentifier );
}
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "Unable to release generated current-session on failed synch registration\n%s" )
void unableToReleaseSession( String message );
}
}

View File

@ -24,6 +24,9 @@
*/
package org.hibernate.context;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.TRACE;
import static org.jboss.logging.Logger.Level.WARN;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
@ -35,14 +38,15 @@ import java.lang.reflect.Proxy;
import java.util.HashMap;
import java.util.Map;
import javax.transaction.Synchronization;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.ConnectionReleaseMode;
import org.hibernate.HibernateException;
import org.hibernate.SessionFactory;
import org.hibernate.classic.Session;
import org.hibernate.engine.SessionFactoryImplementor;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* A {@link CurrentSessionContext} impl which scopes the notion of current
@ -72,7 +76,8 @@ import org.hibernate.engine.SessionFactoryImplementor;
*/
public class ThreadLocalSessionContext implements CurrentSessionContext {
private static final Logger log = LoggerFactory.getLogger( ThreadLocalSessionContext.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
ThreadLocalSessionContext.class.getPackage().getName());
private static final Class[] SESS_PROXY_INTERFACES = new Class[] {
org.hibernate.classic.Session.class,
org.hibernate.engine.SessionImplementor.class,
@ -204,20 +209,20 @@ public class ThreadLocalSessionContext implements CurrentSessionContext {
private static void cleanupAnyOrphanedSession(SessionFactory factory) {
Session orphan = doUnbind( factory, false );
if ( orphan != null ) {
log.warn( "Already session bound on call to bind(); make sure you clean up your sessions!" );
LOG.alreadySessionBound();
try {
if ( orphan.getTransaction() != null && orphan.getTransaction().isActive() ) {
try {
orphan.getTransaction().rollback();
}
catch( Throwable t ) {
log.debug( "Unable to rollback transaction for orphaned session", t );
LOG.unableToRollbackTransaction(t.getMessage());
}
}
orphan.close();
}
catch( Throwable t ) {
log.debug( "Unable to close orphaned session", t );
LOG.unableToCloseOrphanedSession(t.getMessage());
}
}
}
@ -330,7 +335,7 @@ public class ThreadLocalSessionContext implements CurrentSessionContext {
|| "isTransactionInProgress".equals( method.getName() )
|| "setFlushMode".equals( method.getName() )
|| "getSessionFactory".equals( method.getName() ) ) {
log.trace( "allowing method [" + method.getName() + "] in non-transacted context" );
LOG.allowingMethodInNonTransactedContext(method.getName());
}
else if ( "reconnect".equals( method.getName() )
|| "disconnect".equals( method.getName() ) ) {
@ -340,7 +345,7 @@ public class ThreadLocalSessionContext implements CurrentSessionContext {
throw new HibernateException( method.getName() + " is not valid without active transaction" );
}
}
log.trace( "allowing proxied method [" + method.getName() + "] to proceed to real session" );
LOG.allowingProxiedMethodInSession(method.getName());
return method.invoke( realSession, args );
}
catch ( InvocationTargetException e ) {
@ -384,4 +389,31 @@ public class ThreadLocalSessionContext implements CurrentSessionContext {
doBind( wrappedSession, factory );
}
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Allowing method [%s] in non-transacted context" )
void allowingMethodInNonTransactedContext( String name );
@LogMessage( level = TRACE )
@Message( value = "Allowing proxied method [%s] to proceed to real session" )
void allowingProxiedMethodInSession( String name );
@LogMessage( level = WARN )
@Message( value = "Already session bound on call to bind(); make sure you clean up your sessions!" )
void alreadySessionBound();
@LogMessage( level = DEBUG )
@Message( value = "Unable to close orphaned session\n%s" )
void unableToCloseOrphanedSession( String message );
@LogMessage( level = DEBUG )
@Message( value = "Unable to rollback transaction for orphaned session\n%s" )
void unableToRollbackTransaction( String message );
}
}
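A usage sketch for the thread-bound current session managed by the class above, assuming hibernate.current_session_context_class is set to "thread" (the factory and the unit of work are illustrative):

Session session = sessionFactory.getCurrentSession(); // proxied session bound to the calling thread
session.beginTransaction();
// ... unit of work against the current session ...
session.getTransaction().commit(); // transaction completion unbinds and closes the thread-bound session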

View File

@ -23,20 +23,21 @@
*/
package org.hibernate.dialect;
import static org.jboss.logging.Logger.Level.WARN;
import java.lang.reflect.Method;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.MappingException;
import org.hibernate.dialect.function.AnsiTrimFunction;
import org.hibernate.dialect.function.DerbyConcatFunction;
import org.hibernate.sql.CaseFragment;
import org.hibernate.sql.DerbyCaseFragment;
import org.hibernate.util.ReflectHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Hibernate Dialect for Cloudscape 10 - aka Derby. This implements both an
* Hibernate Dialect for Cloudscape 10 - aka Derby. This implements both an
* override for the identity column generator as well as for the case statement
* issue documented at:
* http://www.jroller.com/comments/kenlars99/Weblog/cloudscape_soon_to_be_derby
@ -44,7 +45,9 @@ import org.hibernate.util.ReflectHelper;
* @author Simon Johnston
*/
public class DerbyDialect extends DB2Dialect {
private static final Logger log = LoggerFactory.getLogger( DerbyDialect.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
DerbyDialect.class.getPackage().getName());
private int driverVersionMajor;
private int driverVersionMinor;
@ -67,7 +70,7 @@ public class DerbyDialect extends DB2Dialect {
driverVersionMinor = ( (Integer) minorVersionGetter.invoke( null, ReflectHelper.NO_PARAMS ) ).intValue();
}
catch ( Exception e ) {
log.warn( "Unable to load/access derby driver class sysinfo to check versions : " + e );
LOG.unableToLoadDerbyDriver(e.getMessage());
driverVersionMajor = -1;
driverVersionMinor = -1;
}
@ -77,22 +80,26 @@ public class DerbyDialect extends DB2Dialect {
return driverVersionMajor > 10 || ( driverVersionMajor == 10 && driverVersionMinor >= 5 );
}
public String getCrossJoinSeparator() {
@Override
public String getCrossJoinSeparator() {
return ", ";
}
/**
* Return the case statement modified for Cloudscape.
*/
public CaseFragment createCaseFragment() {
@Override
public CaseFragment createCaseFragment() {
return new DerbyCaseFragment();
}
public boolean dropConstraints() {
@Override
public boolean dropConstraints() {
return true;
}
public boolean supportsSequences() {
@Override
public boolean supportsSequences() {
// technically sequence support was added in 10.6.1.0...
//
// The problem though is that I am not exactly sure how to differentiate 10.6.1.0 from any other 10.6.x release.
@ -116,31 +123,37 @@ public class DerbyDialect extends DB2Dialect {
}
}
public boolean supportsLimit() {
@Override
public boolean supportsLimit() {
return isTenPointFiveReleaseOrNewer();
}
//HHH-4531
public boolean supportsCommentOn() {
@Override
public boolean supportsCommentOn() {
return false;
}
public boolean supportsLimitOffset() {
@Override
public boolean supportsLimitOffset() {
return isTenPointFiveReleaseOrNewer();
}
public String getForUpdateString() {
@Override
public String getForUpdateString() {
return " for update with rs";
}
public String getWriteLockString(int timeout) {
@Override
public String getWriteLockString(int timeout) {
return " for update with rs";
}
public String getReadLockString(int timeout) {
@Override
public String getReadLockString(int timeout) {
return " for read only with rs";
}
/**
* {@inheritDoc}
@ -155,7 +168,8 @@ public class DerbyDialect extends DB2Dialect {
* [WITH {RR|RS|CS|UR}]
* </pre>
*/
public String getLimitString(String query, final int offset, final int limit) {
@Override
public String getLimitString(String query, final int offset, final int limit) {
StringBuffer sb = new StringBuffer(query.length() + 50);
final String normalizedSelect = query.toLowerCase().trim();
@ -190,7 +204,8 @@ public class DerbyDialect extends DB2Dialect {
return sb.toString();
}
public boolean supportsVariableLimit() {
@Override
public boolean supportsVariableLimit() {
// we bind the limit and offset values directly into the sql...
return false;
}
@ -211,14 +226,27 @@ public class DerbyDialect extends DB2Dialect {
return i;
}
public String getQuerySequencesString() {
@Override
public String getQuerySequencesString() {
return null ;
}
// Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
public boolean supportsLobValueChangePropogation() {
@Override
public boolean supportsLobValueChangePropogation() {
return false;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "Unable to load/access derby driver class sysinfo to check versions : %s" )
void unableToLoadDerbyDriver( String message );
}
}

View File

@ -23,6 +23,7 @@
*/
package org.hibernate.dialect;
import static org.jboss.logging.Logger.Level.INFO;
import java.sql.CallableStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
@ -32,10 +33,6 @@ import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.HibernateException;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
@ -69,6 +66,10 @@ import org.hibernate.sql.JoinFragment;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.util.ReflectHelper;
import org.hibernate.util.StringHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Represents a dialect of SQL implemented by a particular RDBMS.
@ -83,7 +84,7 @@ import org.hibernate.util.StringHelper;
*/
public abstract class Dialect {
private static final Logger log = LoggerFactory.getLogger( Dialect.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Dialect.class.getPackage().getName());
public static final String DEFAULT_BATCH_SIZE = "15";
public static final String NO_BATCH = "0";
@ -105,7 +106,7 @@ public abstract class Dialect {
// constructors and factory methods ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
protected Dialect() {
log.info( "Using dialect: " + this );
LOG.usingDialect(this);
StandardAnsiSqlAggregationFunctions.primeFunctionMap( sqlFunctions );
// standard sql92 functions (can be overridden by subclasses)
@ -210,7 +211,8 @@ public abstract class Dialect {
return properties;
}
public String toString() {
@Override
public String toString() {
return getClass().getName();
}
@ -577,7 +579,8 @@ public abstract class Dialect {
* @throws MappingException If sequences are not supported.
* @deprecated Use {@link #getCreateSequenceString(String, int, int)} instead
*/
public String[] getCreateSequenceStrings(String sequenceName) throws MappingException {
@Deprecated
public String[] getCreateSequenceStrings(String sequenceName) throws MappingException {
return new String[] { getCreateSequenceString( sequenceName ) };
}
@ -1012,7 +1015,7 @@ public abstract class Dialect {
* dialect given the aliases of the columns to be write locked.
*
* @param aliases The columns to be write locked.
* @param lockOptions
* @param lockOptions
* @return The appropriate <tt>FOR UPDATE OF column_list</tt> clause string.
*/
public String getForUpdateString(String aliases, LockOptions lockOptions) {
@ -1169,7 +1172,7 @@ public abstract class Dialect {
* <li><i>null</i> - defer to the JDBC driver response in regards to
* {@link java.sql.DatabaseMetaData#dataDefinitionCausesTransactionCommit()}</li>
* </ul>
*
*
* @return see the result matrix above.
*/
public Boolean performTemporaryTableDDLInIsolation() {
@ -1925,13 +1928,24 @@ public abstract class Dialect {
return false;
}
/**
* Does this dialect support `count(distinct a,b)`?
*
* @return True if the database supports counting disintct tuples; false otherwise.
*/
/**
* Does this dialect support `count(distinct a,b)`?
*
* @return True if the database supports counting distinct tuples; false otherwise.
*/
public boolean supportsTupleDistinctCounts() {
// oddly most database in fact seem to, so true is the default.
return true;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = INFO )
@Message( value = "Using dialect: %s" )
void usingDialect( Dialect dialect );
}
}

View File

@ -23,12 +23,9 @@
*/
package org.hibernate.dialect;
import static org.jboss.logging.Logger.Level.WARN;
import java.sql.SQLException;
import java.sql.Types;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.function.AvgWithArgumentCastFunction;
import org.hibernate.dialect.function.NoArgSQLFunction;
@ -38,6 +35,10 @@ import org.hibernate.exception.TemplatedViolatedConstraintNameExtracter;
import org.hibernate.exception.ViolatedConstraintNameExtracter;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.util.ReflectHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* A dialect compatible with the H2 database.
@ -45,7 +46,9 @@ import org.hibernate.util.ReflectHelper;
* @author Thomas Mueller
*/
public class H2Dialect extends Dialect {
private static final Logger log = LoggerFactory.getLogger( H2Dialect.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
H2Dialect.class.getPackage().getName());
private String querySequenceString;
@ -62,13 +65,9 @@ public class H2Dialect extends Dialect {
if ( buildId < 32 ) {
querySequenceString = "select name from information_schema.sequences";
}
if ( !( majorVersion > 1 || minorVersion > 2 || buildId >= 139 ) ) {
log.warn(
"The {} version of H2 implements temporary table creation such that it commits " +
"current transaction; multi-table, bulk hql/jpaql will not work properly",
( majorVersion + "." + minorVersion + "." + buildId )
);
}
if (!(majorVersion > 1 || minorVersion > 2 || buildId >= 139)) LOG.unsupportedMultiTableBulkHqlJpaql(majorVersion,
minorVersion,
buildId);
}
catch ( Exception e ) {
// ignore (probably H2 not in the classpath)
@ -346,4 +345,17 @@ public class H2Dialect extends Dialect {
// see http://groups.google.com/group/h2-database/browse_thread/thread/562d8a49e2dabe99?hl=en
return true;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "The %d.%d.%d version of H2 implements temporary table creation such that it commits current transaction; multi-table, bulk hql/jpaql will not work properly" )
void unsupportedMultiTableBulkHqlJpaql( int majorVersion,
int minorVersion,
int buildId );
}
}

View File

@ -23,30 +23,37 @@
*/
package org.hibernate.dialect;
import static org.jboss.logging.Logger.Level.WARN;
import java.io.Serializable;
import java.sql.SQLException;
import java.sql.Types;
import java.io.Serializable;
import org.hibernate.JDBCException;
import org.hibernate.LockMode;
import org.hibernate.StaleObjectStateException;
import org.hibernate.JDBCException;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.function.AvgWithArgumentCastFunction;
import org.hibernate.dialect.function.SQLFunctionTemplate;
import org.hibernate.dialect.function.NoArgSQLFunction;
import org.hibernate.dialect.function.SQLFunctionTemplate;
import org.hibernate.dialect.function.StandardSQLFunction;
import org.hibernate.dialect.function.VarArgsSQLFunction;
import org.hibernate.dialect.lock.*;
import org.hibernate.dialect.lock.LockingStrategy;
import org.hibernate.dialect.lock.OptimisticForceIncrementLockingStrategy;
import org.hibernate.dialect.lock.OptimisticLockingStrategy;
import org.hibernate.dialect.lock.PessimisticForceIncrementLockingStrategy;
import org.hibernate.dialect.lock.PessimisticReadSelectLockingStrategy;
import org.hibernate.dialect.lock.PessimisticWriteSelectLockingStrategy;
import org.hibernate.dialect.lock.SelectLockingStrategy;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.exception.JDBCExceptionHelper;
import org.hibernate.exception.TemplatedViolatedConstraintNameExtracter;
import org.hibernate.exception.ViolatedConstraintNameExtracter;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.util.ReflectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* An SQL dialect compatible with HSQLDB (HyperSQL).
@ -61,7 +68,9 @@ import org.slf4j.LoggerFactory;
* @author Fred Toussi
*/
public class HSQLDialect extends Dialect {
private static final Logger log = LoggerFactory.getLogger( HSQLDialect.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
HSQLDialect.class.getPackage().getName());
/**
* version is 18 for 1.8 or 20 for 2.0
@ -590,9 +599,7 @@ public class HSQLDialect extends Dialect {
public void lock(Serializable id, Object version, Object object, int timeout, SessionImplementor session)
throws StaleObjectStateException, JDBCException {
if ( getLockMode().greaterThan( LockMode.READ ) ) {
log.warn( "HSQLDB supports only READ_UNCOMMITTED isolation" );
}
if (getLockMode().greaterThan(LockMode.READ)) LOG.hsqldbSupportsOnlyReadUncommittedIsolation();
super.lock( id, version, object, timeout, session );
}
}
@ -659,4 +666,15 @@ public class HSQLDialect extends Dialect {
public boolean supportsTupleDistinctCounts() {
return false;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "HSQLDB supports only READ_UNCOMMITTED isolation" )
void hsqldbSupportsOnlyReadUncommittedIsolation();
}
}

View File

@ -23,15 +23,11 @@
*/
package org.hibernate.dialect;
import static org.jboss.logging.Logger.Level.WARN;
import java.sql.CallableStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.HibernateException;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.function.NoArgSQLFunction;
@ -44,6 +40,10 @@ import org.hibernate.exception.TemplatedViolatedConstraintNameExtracter;
import org.hibernate.exception.ViolatedConstraintNameExtracter;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.util.ReflectHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* An SQL dialect for Oracle 9 (uses ANSI-style syntax where possible).
@ -51,13 +51,15 @@ import org.hibernate.util.ReflectHelper;
* @deprecated Use either Oracle9iDialect or Oracle10gDialect instead
* @author Gavin King, David Channon
*/
@Deprecated
public class Oracle9Dialect extends Dialect {
private static final Logger log = LoggerFactory.getLogger( Oracle9Dialect.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
Oracle9Dialect.class.getPackage().getName());
public Oracle9Dialect() {
super();
log.warn( "The Oracle9Dialect dialect has been deprecated; use either Oracle9iDialect or Oracle10gDialect instead" );
LOG.deprecatedOracle9Dialect();
registerColumnType( Types.BIT, "number(1,0)" );
registerColumnType( Types.BIGINT, "number(19,0)" );
registerColumnType( Types.SMALLINT, "number(5,0)" );
@ -124,7 +126,7 @@ public class Oracle9Dialect extends Dialect {
registerFunction( "current_date", new NoArgSQLFunction("current_date", StandardBasicTypes.DATE, false) );
registerFunction( "current_time", new NoArgSQLFunction("current_timestamp", StandardBasicTypes.TIME, false) );
registerFunction( "current_timestamp", new NoArgSQLFunction("current_timestamp", StandardBasicTypes.TIMESTAMP, false) );
registerFunction( "last_day", new StandardSQLFunction("last_day", StandardBasicTypes.DATE) );
registerFunction( "sysdate", new NoArgSQLFunction("sysdate", StandardBasicTypes.DATE, false) );
registerFunction( "systimestamp", new NoArgSQLFunction("systimestamp", StandardBasicTypes.TIMESTAMP, false) );
@ -211,14 +213,14 @@ public class Oracle9Dialect extends Dialect {
}
public String getLimitString(String sql, boolean hasOffset) {
sql = sql.trim();
boolean isForUpdate = false;
if ( sql.toLowerCase().endsWith(" for update") ) {
sql = sql.substring( 0, sql.length()-11 );
isForUpdate = true;
}
StringBuffer pagingSelect = new StringBuffer( sql.length()+100 );
if (hasOffset) {
pagingSelect.append("select * from ( select row_.*, rownum rownum_ from ( ");
@ -237,7 +239,7 @@ public class Oracle9Dialect extends Dialect {
if ( isForUpdate ) {
pagingSelect.append( " for update" );
}
return pagingSelect.toString();
}
@ -256,7 +258,7 @@ public class Oracle9Dialect extends Dialect {
public boolean useMaxForLimit() {
return true;
}
public boolean forUpdateOfColumns() {
return true;
}
@ -268,7 +270,7 @@ public class Oracle9Dialect extends Dialect {
public String getSelectGUIDString() {
return "select rawtohex(sys_guid()) from dual";
}
public ViolatedConstraintNameExtracter getViolatedConstraintNameExtracter() {
return EXTRACTER;
}
@ -298,7 +300,7 @@ public class Oracle9Dialect extends Dialect {
};
// not final-static to avoid possible classcast exceptions if using different oracle drivers.
int oracletypes_cursor_value = 0;
int oracletypes_cursor_value = 0;
public int registerResultSetOutParameter(java.sql.CallableStatement statement,int col) throws SQLException {
if(oracletypes_cursor_value==0) {
try {
@ -306,14 +308,14 @@ public class Oracle9Dialect extends Dialect {
oracletypes_cursor_value = types.getField("CURSOR").getInt(types.newInstance());
} catch (Exception se) {
throw new HibernateException("Problem while trying to load or access OracleTypes.CURSOR value",se);
}
}
}
// register the type of the out param - an Oracle specific type
statement.registerOutParameter(col, oracletypes_cursor_value);
col++;
return col;
}
public ResultSet getResultSet(CallableStatement ps) throws SQLException {
ps.execute();
return ( ResultSet ) ps.getObject( 1 );
@ -322,7 +324,7 @@ public class Oracle9Dialect extends Dialect {
public boolean supportsUnionAll() {
return true;
}
public boolean supportsCommentOn() {
return true;
}
@ -370,4 +372,15 @@ public class Oracle9Dialect extends Dialect {
public boolean supportsExistsInSelect() {
return false;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "The Oracle9Dialect dialect has been deprecated; use either Oracle9iDialect or Oracle10gDialect instead" )
void deprecatedOracle9Dialect();
}
}

View File

@ -23,15 +23,16 @@
*/
package org.hibernate.dialect;
import static org.jboss.logging.Logger.Level.WARN;
import java.sql.Types;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.sql.CaseFragment;
import org.hibernate.sql.DecodeCaseFragment;
import org.hibernate.sql.JoinFragment;
import org.hibernate.sql.OracleJoinFragment;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* An SQL dialect for Oracle, compatible with Oracle 8.
@ -39,13 +40,15 @@ import org.hibernate.sql.OracleJoinFragment;
* @deprecated Use Oracle8iDialect instead.
* @author Gavin King
*/
@Deprecated
public class OracleDialect extends Oracle9Dialect {
private static final Logger log = LoggerFactory.getLogger( OracleDialect.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
OracleDialect.class.getPackage().getName());
public OracleDialect() {
super();
log.warn( "The OracleDialect dialect has been deprecated; use Oracle8iDialect instead" );
LOG.deprecatedOracleDialect();
// Oracle8 and previous define only a "DATE" type which
// is used to represent all aspects of date/time
registerColumnType( Types.TIMESTAMP, "date" );
@ -53,14 +56,17 @@ public class OracleDialect extends Oracle9Dialect {
registerColumnType( Types.VARCHAR, 4000, "varchar2($l)" );
}
public JoinFragment createOuterJoinFragment() {
@Override
public JoinFragment createOuterJoinFragment() {
return new OracleJoinFragment();
}
public CaseFragment createCaseFragment() {
@Override
public CaseFragment createCaseFragment() {
return new DecodeCaseFragment();
}
public String getLimitString(String sql, boolean hasOffset) {
@Override
public String getLimitString(String sql, boolean hasOffset) {
sql = sql.trim();
boolean isForUpdate = false;
@ -68,7 +74,7 @@ public class OracleDialect extends Oracle9Dialect {
sql = sql.substring( 0, sql.length()-11 );
isForUpdate = true;
}
StringBuffer pagingSelect = new StringBuffer( sql.length()+100 );
if (hasOffset) {
pagingSelect.append("select * from ( select row_.*, rownum rownum_ from ( ");
@ -87,11 +93,12 @@ public class OracleDialect extends Oracle9Dialect {
if ( isForUpdate ) {
pagingSelect.append( " for update" );
}
return pagingSelect.toString();
}
public String getSelectClauseNullString(int sqlType) {
@Override
public String getSelectClauseNullString(int sqlType) {
switch(sqlType) {
case Types.VARCHAR:
case Types.CHAR:
@ -105,11 +112,24 @@ public class OracleDialect extends Oracle9Dialect {
}
}
public String getCurrentTimestampSelectString() {
@Override
public String getCurrentTimestampSelectString() {
return "select sysdate from dual";
}
public String getCurrentTimestampSQLFunctionName() {
@Override
public String getCurrentTimestampSQLFunctionName() {
return "sysdate";
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "The OracleDialect dialect has been deprecated; use Oracle8iDialect instead" )
void deprecatedOracleDialect();
}
}

View File

@ -23,21 +23,28 @@
*/
package org.hibernate.dialect;
import org.hibernate.dialect.function.NoArgSQLFunction;
import org.hibernate.dialect.function.StandardSQLFunction;
import org.hibernate.dialect.function.SQLFunctionTemplate;
import org.hibernate.dialect.lock.*;
import static org.jboss.logging.Logger.Level.INFO;
import java.sql.Types;
import org.hibernate.Hibernate;
import org.hibernate.LockMode;
import org.hibernate.dialect.function.NoArgSQLFunction;
import org.hibernate.dialect.function.SQLFunctionTemplate;
import org.hibernate.dialect.function.StandardSQLFunction;
import org.hibernate.dialect.lock.LockingStrategy;
import org.hibernate.dialect.lock.OptimisticForceIncrementLockingStrategy;
import org.hibernate.dialect.lock.OptimisticLockingStrategy;
import org.hibernate.dialect.lock.PessimisticForceIncrementLockingStrategy;
import org.hibernate.dialect.lock.PessimisticReadUpdateLockingStrategy;
import org.hibernate.dialect.lock.PessimisticWriteUpdateLockingStrategy;
import org.hibernate.dialect.lock.SelectLockingStrategy;
import org.hibernate.dialect.lock.UpdateLockingStrategy;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.sql.CaseFragment;
import org.hibernate.sql.DecodeCaseFragment;
import org.hibernate.type.StandardBasicTypes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* This is the Hibernate dialect for the Unisys 2200 Relational Database (RDMS).
@ -52,12 +59,14 @@ import org.slf4j.LoggerFactory;
* @author Ploski and Hanson
*/
public class RDMSOS2200Dialect extends Dialect {
private static Logger log = LoggerFactory.getLogger(RDMSOS2200Dialect.class);
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
RDMSOS2200Dialect.class.getPackage().getName());
public RDMSOS2200Dialect() {
super();
// Display the dialect version.
log.info("RDMSOS2200Dialect version: 1.0");
LOG.rdmsOs2200Dialect();
/**
* This section registers RDMS Built-in Functions (BIFs) with Hibernate.
@ -356,4 +365,15 @@ public class RDMSOS2200Dialect extends Dialect {
return new SelectLockingStrategy( lockable, lockMode );
}
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = INFO )
@Message( value = "RDMSOS2200Dialect version: 1.0" )
void rdmsOs2200Dialect();
}
}

View File

@ -23,21 +23,24 @@
*/
package org.hibernate.dialect.function;
import static org.jboss.logging.Logger.Level.WARN;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.engine.SessionFactoryImplementor;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Delegate for handling function "templates".
* Delegate for handling function "templates".
*
* @author Steve Ebersole
*/
public class TemplateRenderer {
private static final Logger log = LoggerFactory.getLogger( TemplateRenderer.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
TemplateRenderer.class.getPackage().getName());
private final String template;
private final String[] chunks;
@ -99,10 +102,8 @@ public class TemplateRenderer {
@SuppressWarnings({ "UnusedDeclaration" })
public String render(List args, SessionFactoryImplementor factory) {
int numberOfArguments = args.size();
if ( getAnticipatedNumberOfArguments() > 0 && numberOfArguments != getAnticipatedNumberOfArguments() ) {
log.warn( "Function template anticipated {} arguments, but {} arguments encountered",
getAnticipatedNumberOfArguments(), numberOfArguments );
}
if (getAnticipatedNumberOfArguments() > 0 && numberOfArguments != getAnticipatedNumberOfArguments()) LOG.missingArguments(getAnticipatedNumberOfArguments(),
numberOfArguments);
StringBuffer buf = new StringBuffer();
for ( int i = 0; i < chunks.length; ++i ) {
if ( i < paramIndexes.length ) {
@ -118,4 +119,16 @@ public class TemplateRenderer {
}
return buf.toString();
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "Function template anticipated %d arguments, but %d arguments encountered" )
void missingArguments( int anticipatedNumberOfArguments,
int numberOfArguments );
}
}
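A rough sketch of how the renderer above expands a template; the ?1/?2 placeholder syntax and the argument types are assumptions based on how SQLFunctionTemplate uses this class, not shown in this diff:

TemplateRenderer renderer = new TemplateRenderer( "substring(?1, ?2, ?3)" );
String fragment = renderer.render( java.util.Arrays.asList( "name", "1", "3" ), factory ); // factory: a SessionFactoryImplementor
// -> "substring(name, 1, 3)"; a mismatched argument count triggers the WARN message defined above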

View File

@ -24,15 +24,16 @@
package org.hibernate.dialect.lock;
import java.io.Serializable;
import org.hibernate.*;
import org.hibernate.event.EventSource;
import org.hibernate.HibernateException;
import org.hibernate.JDBCException;
import org.hibernate.LockMode;
import org.hibernate.OptimisticLockException;
import org.hibernate.StaleObjectStateException;
import org.hibernate.action.EntityVerifyVersionProcess;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.engine.EntityEntry;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.event.EventSource;
import org.hibernate.persister.entity.Lockable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An optimistic locking strategy that verifies that the version hasn't changed (prior to transaction commit).
@ -44,7 +45,6 @@ import org.slf4j.LoggerFactory;
* @author Scott Marlow
*/
public class OptimisticLockingStrategy implements LockingStrategy {
private static final Logger log = LoggerFactory.getLogger( OptimisticLockingStrategy.class );
private final Lockable lockable;
private final LockMode lockMode;

View File

@ -26,19 +26,16 @@ package org.hibernate.dialect.lock;
import java.io.Serializable;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import org.hibernate.HibernateException;
import org.hibernate.JDBCException;
import org.hibernate.LockMode;
import org.hibernate.PessimisticLockException;
import org.hibernate.StaleObjectStateException;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.sql.Update;
import org.hibernate.LockMode;
import org.hibernate.HibernateException;
import org.hibernate.StaleObjectStateException;
import org.hibernate.JDBCException;
import org.hibernate.PessimisticLockException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A pessimistic locking strategy where the locks are obtained through update statements.
@ -53,7 +50,9 @@ import org.slf4j.LoggerFactory;
* @author Scott Marlow
*/
public class PessimisticReadUpdateLockingStrategy implements LockingStrategy {
private static final Logger log = LoggerFactory.getLogger( PessimisticReadUpdateLockingStrategy.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
PessimisticReadUpdateLockingStrategy.class.getPackage().getName());
private final Lockable lockable;
private final LockMode lockMode;
@ -73,7 +72,7 @@ public class PessimisticReadUpdateLockingStrategy implements LockingStrategy {
throw new HibernateException( "[" + lockMode + "] not valid for update statement" );
}
if ( !lockable.isVersioned() ) {
log.warn( "write locks via update not supported for non-versioned entities [" + lockable.getEntityName() + "]" );
LOG.writeLocksNotSupported(lockable.getEntityName());
this.sql = null;
}
else {

View File

@ -26,19 +26,16 @@ package org.hibernate.dialect.lock;
import java.io.Serializable;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import org.hibernate.HibernateException;
import org.hibernate.JDBCException;
import org.hibernate.LockMode;
import org.hibernate.PessimisticLockException;
import org.hibernate.StaleObjectStateException;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.sql.Update;
import org.hibernate.LockMode;
import org.hibernate.HibernateException;
import org.hibernate.StaleObjectStateException;
import org.hibernate.JDBCException;
import org.hibernate.PessimisticLockException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A pessimistic locking strategy where the locks are obtained through update statements.
@ -53,7 +50,9 @@ import org.slf4j.LoggerFactory;
* @author Scott Marlow
*/
public class PessimisticWriteUpdateLockingStrategy implements LockingStrategy {
private static final Logger log = LoggerFactory.getLogger( PessimisticWriteUpdateLockingStrategy.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
PessimisticWriteUpdateLockingStrategy.class.getPackage().getName());
private final Lockable lockable;
private final LockMode lockMode;
@ -73,7 +72,7 @@ public class PessimisticWriteUpdateLockingStrategy implements LockingStrategy {
throw new HibernateException( "[" + lockMode + "] not valid for update statement" );
}
if ( !lockable.isVersioned() ) {
log.warn( "write locks via update not supported for non-versioned entities [" + lockable.getEntityName() + "]" );
LOG.writeLocksNotSupported(lockable.getEntityName());
this.sql = null;
}
else {

View File

@ -26,7 +26,6 @@ package org.hibernate.dialect.lock;
import java.io.Serializable;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import org.hibernate.HibernateException;
import org.hibernate.JDBCException;
import org.hibernate.LockMode;
@ -36,8 +35,6 @@ import org.hibernate.engine.SessionImplementor;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.sql.Update;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A locking strategy where the locks are obtained through update statements.
@ -49,7 +46,9 @@ import org.slf4j.LoggerFactory;
* @author Steve Ebersole
*/
public class UpdateLockingStrategy implements LockingStrategy {
private static final Logger log = LoggerFactory.getLogger( UpdateLockingStrategy.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
UpdateLockingStrategy.class.getPackage().getName());
private final Lockable lockable;
private final LockMode lockMode;
@ -69,7 +68,7 @@ public class UpdateLockingStrategy implements LockingStrategy {
throw new HibernateException( "[" + lockMode + "] not valid for update statement" );
}
if ( !lockable.isVersioned() ) {
log.warn( "write locks via update not supported for non-versioned entities [" + lockable.getEntityName() + "]" );
LOG.writeLocksNotSupported(lockable.getEntityName());
this.sql = null;
}
else {

View File

@ -0,0 +1,96 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.dialect.resolver;
import static org.jboss.logging.Logger.Level.WARN;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import org.hibernate.JDBCException;
import org.hibernate.dialect.Dialect;
import org.hibernate.exception.JDBCConnectionException;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* A templated resolver impl which delegates to the {@link #resolveDialectInternal} method
* and handles any thrown {@link SQLException}s.
*
* @author Steve Ebersole
*/
public abstract class AbstractDialectResolver implements DialectResolver {
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
AbstractDialectResolver.class.getPackage().getName());
/**
* {@inheritDoc}
* <p/>
* Here we template the resolution, delegating to {@link #resolveDialectInternal} and handling
* {@link java.sql.SQLException}s properly.
*/
public final Dialect resolveDialect(DatabaseMetaData metaData) {
try {
return resolveDialectInternal( metaData );
}
catch ( SQLException sqlException ) {
JDBCException jdbcException = BasicSQLExceptionConverter.INSTANCE.convert( sqlException );
if (jdbcException instanceof JDBCConnectionException) throw jdbcException;
LOG.unableToQueryDatabaseMetadata(BasicSQLExceptionConverter.MSG, sqlException.getMessage());
return null;
}
catch ( Throwable t ) {
LOG.unableToExecuteResolver(this, t.getMessage());
return null;
}
}
/**
* Perform the actual resolution without caring about handling {@link SQLException}s.
*
* @param metaData The database metadata
* @return The resolved dialect, or null if we could not resolve.
* @throws SQLException Indicates problems accessing the metadata.
*/
protected abstract Dialect resolveDialectInternal(DatabaseMetaData metaData) throws SQLException;
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "Error executing resolver [%s] : %s" )
void unableToExecuteResolver( AbstractDialectResolver abstractDialectResolver,
String message );
@LogMessage( level = WARN )
@Message( value = "%s : %s" )
void unableToQueryDatabaseMetadata( String message,
String errorMessage );
}
}
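As a usage note for the template above: a concrete resolver only implements resolveDialectInternal and inherits the SQLException handling. A minimal sketch follows (the class name and the single product check are illustrative, not part of this commit):
package org.hibernate.dialect.resolver; // sketch placed in the same package for brevity
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.H2Dialect;
// Hypothetical sub-resolver: recognizes only H2; returning null lets other
// registered resolvers try. SQLException handling is inherited from AbstractDialectResolver.
public class H2OnlyDialectResolver extends AbstractDialectResolver {
    @Override
    protected Dialect resolveDialectInternal(DatabaseMetaData metaData) throws SQLException {
        return "H2".equals( metaData.getDatabaseProductName() ) ? new H2Dialect() : null;
    }
}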

View File

@ -24,10 +24,12 @@
package org.hibernate.dialect.resolver;
import java.sql.SQLException;
import org.hibernate.JDBCException;
import org.hibernate.exception.SQLStateConverter;
import org.hibernate.exception.ViolatedConstraintNameExtracter;
import org.hibernate.JDBCException;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* A helper to centralize conversion of {@link java.sql.SQLException}s to {@link org.hibernate.JDBCException}s.
@ -35,8 +37,11 @@ import org.hibernate.JDBCException;
* @author Steve Ebersole
*/
public class BasicSQLExceptionConverter {
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
BasicSQLExceptionConverter.class.getPackage().getName());
public static final BasicSQLExceptionConverter INSTANCE = new BasicSQLExceptionConverter();
public static final String MSG = "Unable to query java.sql.DatabaseMetaData";
public static final String MSG = LOG.unableToQueryDatabaseMetadata();
private static final SQLStateConverter CONVERTER = new SQLStateConverter( new ConstraintNameExtracter() );
@ -58,4 +63,14 @@ public class BasicSQLExceptionConverter {
return "???";
}
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@Message( value = "Unable to query java.sql.DatabaseMetaData" )
String unableToQueryDatabaseMetadata();
}
}
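A brief, hedged usage sketch of the converter above (the failing JDBC call and the 'databaseMetaData' variable are illustrative, assumed to be in scope):
// Illustrative only: turn a raw SQLException into Hibernate's JDBCException
// via the singleton defined above.
try {
    databaseMetaData.getDatabaseMajorVersion(); // any JDBC call that may fail
}
catch ( SQLException sqlException ) {
    JDBCException jdbcException = BasicSQLExceptionConverter.INSTANCE.convert( sqlException );
    throw jdbcException;
}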

View File

@ -0,0 +1,181 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.dialect.resolver;
import static org.jboss.logging.Logger.Level.WARN;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.util.Properties;
import org.hibernate.HibernateException;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.Dialect;
import org.hibernate.util.ReflectHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* A factory for generating Dialect instances.
*
* @author Steve Ebersole
* @author Tomoto Shimizu Washio
*/
public class DialectFactory {
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
DialectFactory.class.getPackage().getName());
private static DialectResolverSet DIALECT_RESOLVERS = new DialectResolverSet();
static {
// register the standard dialect resolver
DIALECT_RESOLVERS.addResolver( new StandardDialectResolver() );
// register resolvers set via Environment property
String userSpecifiedResolverSetting = Environment.getProperties().getProperty( Environment.DIALECT_RESOLVERS );
if ( userSpecifiedResolverSetting != null ) {
String[] userSpecifiedResolvers = userSpecifiedResolverSetting.split( "\\s+" );
for ( int i = 0; i < userSpecifiedResolvers.length; i++ ) {
registerDialectResolver( userSpecifiedResolvers[i] );
}
}
}
/*package*/ static void registerDialectResolver(String resolverName) {
try {
DialectResolver resolver = ( DialectResolver ) ReflectHelper.classForName( resolverName ).newInstance();
DIALECT_RESOLVERS.addResolverAtFirst( resolver );
}
catch ( ClassNotFoundException cnfe ) {
LOG.dialectResolverNotFound(resolverName);
}
catch ( Exception e ) {
LOG.unableToInstantiateDialectResolver(e.getMessage());
}
}
/**
* Builds an appropriate Dialect instance.
* <p/>
* If a dialect is explicitly named in the incoming properties, it is used. Otherwise, it is
* determined by dialect resolvers based on the passed connection.
* <p/>
* An exception is thrown if a dialect was not explicitly set and no resolver could make
* the determination from the given connection.
*
* @param properties The configuration properties.
* @param connection The configured connection.
* @return The appropriate dialect instance.
* @throws HibernateException No dialect specified and no resolver could make the determination.
*/
public static Dialect buildDialect(Properties properties, Connection connection) throws HibernateException {
String dialectName = properties.getProperty( Environment.DIALECT );
if ( dialectName == null ) {
return determineDialect( connection );
}
else {
return constructDialect( dialectName );
}
}
public static Dialect buildDialect(Properties properties) {
String dialectName = properties.getProperty( Environment.DIALECT );
if ( dialectName == null ) {
throw new HibernateException( "'hibernate.dialect' must be set when no Connection available" );
}
return constructDialect( dialectName );
}
/**
* Determine the appropriate Dialect to use given the connection.
*
* @param connection The configured connection.
* @return The appropriate dialect instance.
*
* @throws HibernateException No connection given or no resolver could make
* the determination from the given connection.
*/
private static Dialect determineDialect(Connection connection) {
if ( connection == null ) {
throw new HibernateException( "Connection cannot be null when 'hibernate.dialect' not set" );
}
try {
final DatabaseMetaData databaseMetaData = connection.getMetaData();
final Dialect dialect = DIALECT_RESOLVERS.resolveDialect( databaseMetaData );
if ( dialect == null ) {
throw new HibernateException(
"Unable to determine Dialect to use [name=" + databaseMetaData.getDatabaseProductName() +
", majorVersion=" + databaseMetaData.getDatabaseMajorVersion() +
"]; user must register resolver or explicitly set 'hibernate.dialect'"
);
}
return dialect;
}
catch ( SQLException sqlException ) {
throw new HibernateException(
"Unable to access java.sql.DatabaseMetaData to determine appropriate Dialect to use",
sqlException
);
}
}
/**
* Returns a dialect instance given the name of the class to use.
*
* @param dialectName The name of the dialect class.
*
* @return The dialect instance.
*/
public static Dialect constructDialect(String dialectName) {
try {
return ( Dialect ) ReflectHelper.classForName( dialectName ).newInstance();
}
catch ( ClassNotFoundException cnfe ) {
throw new HibernateException( "Dialect class not found: " + dialectName, cnfe );
}
catch ( Exception e ) {
throw new HibernateException( "Could not instantiate dialect class", e );
}
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "Dialect resolver class not found: %s" )
void dialectResolverNotFound( String resolverName );
@LogMessage( level = WARN )
@Message( value = "Could not instantiate dialect resolver class : %s" )
void unableToInstantiateDialectResolver( String message );
}
}
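A hedged usage sketch of the factory above; the dialect class name is only an example, and 'connection' is assumed to be an open java.sql.Connection:
// Explicit dialect: instantiated directly from the property, no connection needed.
Properties props = new Properties();
props.setProperty( Environment.DIALECT, "org.hibernate.dialect.H2Dialect" );
Dialect byName = DialectFactory.buildDialect( props );
// No 'hibernate.dialect' set: the registered resolvers inspect the connection metadata.
props.remove( Environment.DIALECT );
Dialect resolved = DialectFactory.buildDialect( props, connection );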

View File

@ -0,0 +1,106 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.dialect.resolver;
import static org.jboss.logging.Logger.Level.INFO;
import java.sql.DatabaseMetaData;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.hibernate.dialect.Dialect;
import org.hibernate.exception.JDBCConnectionException;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* A {@link DialectResolver} implementation which coordinates resolution by delegating to its
* registered sub-resolvers. Sub-resolvers may be registered by calling either {@link #addResolver} or
* {@link #addResolverAtFirst}.
*
* @author Tomoto Shimizu Washio
*/
public class DialectResolverSet implements DialectResolver {
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
DialectResolverSet.class.getPackage().getName());
private List resolvers = new ArrayList();
/**
* {@inheritDoc}
*/
public Dialect resolveDialect(DatabaseMetaData metaData) {
Iterator i = resolvers.iterator();
while ( i.hasNext() ) {
final DialectResolver resolver = ( DialectResolver ) i.next();
try {
Dialect dialect = resolver.resolveDialect( metaData );
if ( dialect != null ) {
return dialect;
}
}
catch ( JDBCConnectionException e ) {
throw e;
}
catch ( Throwable t ) {
LOG.subResolverException(t.getMessage());
}
}
return null;
}
/**
* Add a resolver at the end of the underlying resolver list. The resolver added by this method is at lower
* priority than any other existing resolvers.
*
* @param resolver The resolver to add.
*/
public void addResolver(DialectResolver resolver) {
resolvers.add( resolver );
}
/**
* Add a resolver at the beginning of the underlying resolver list. The resolver added by this method is at higher
* priority than any other existing resolvers.
*
* @param resolver The resolver to add.
*/
public void addResolverAtFirst(DialectResolver resolver) {
resolvers.add( 0, resolver );
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = INFO )
@Message( value = "sub-resolver threw unexpected exception, continuing to next : %s" )
void subResolverException( String message );
}
}
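To make the delegation order concrete, a small hedged sketch (the custom resolver class is hypothetical; 'metaData' is assumed to be a java.sql.DatabaseMetaData):
// Resolvers are consulted in list order; addResolverAtFirst wins over addResolver.
DialectResolverSet resolvers = new DialectResolverSet();
resolvers.addResolver( new StandardDialectResolver() );
resolvers.addResolverAtFirst( new MyCustomDialectResolver() ); // hypothetical, consulted first
Dialect dialect = resolvers.resolveDialect( metaData ); // falls through to StandardDialectResolver if the custom resolver returns null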

View File

@ -0,0 +1,164 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.dialect.resolver;
import static org.jboss.logging.Logger.Level.WARN;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import org.hibernate.dialect.DB2Dialect;
import org.hibernate.dialect.DerbyDialect;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.InformixDialect;
import org.hibernate.dialect.Ingres10Dialect;
import org.hibernate.dialect.Ingres9Dialect;
import org.hibernate.dialect.IngresDialect;
import org.hibernate.dialect.MySQLDialect;
import org.hibernate.dialect.Oracle10gDialect;
import org.hibernate.dialect.Oracle8iDialect;
import org.hibernate.dialect.Oracle9iDialect;
import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.dialect.SQLServerDialect;
import org.hibernate.dialect.SybaseASE15Dialect;
import org.hibernate.dialect.SybaseAnywhereDialect;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* The standard Hibernate resolver.
*
* @author Steve Ebersole
*/
public class StandardDialectResolver extends AbstractDialectResolver{
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
StandardDialectResolver.class.getPackage().getName());
@Override
protected Dialect resolveDialectInternal(DatabaseMetaData metaData) throws SQLException {
String databaseName = metaData.getDatabaseProductName();
int databaseMajorVersion = metaData.getDatabaseMajorVersion();
if ( "HSQL Database Engine".equals( databaseName ) ) {
return new HSQLDialect();
}
if ( "H2".equals( databaseName ) ) {
return new H2Dialect();
}
if ( "MySQL".equals( databaseName ) ) {
return new MySQLDialect();
}
if ( "PostgreSQL".equals( databaseName ) ) {
return new PostgreSQLDialect();
}
if ( "Apache Derby".equals( databaseName ) ) {
return new DerbyDialect();
}
if ( "ingres".equalsIgnoreCase( databaseName ) ) {
switch( databaseMajorVersion ) {
case 9:
int databaseMinorVersion = metaData.getDatabaseMinorVersion();
if (databaseMinorVersion > 2) {
return new Ingres9Dialect();
}
return new IngresDialect();
case 10:
LOG.unsupportedIngresVersion();
return new Ingres10Dialect();
default:
LOG.unknownIngresVersion(databaseMajorVersion);
}
return new IngresDialect();
}
if ( databaseName.startsWith( "Microsoft SQL Server" ) ) {
return new SQLServerDialect();
}
if ( "Sybase SQL Server".equals( databaseName ) || "Adaptive Server Enterprise".equals( databaseName ) ) {
return new SybaseASE15Dialect();
}
if ( databaseName.startsWith( "Adaptive Server Anywhere" ) ) {
return new SybaseAnywhereDialect();
}
if ( "Informix Dynamic Server".equals( databaseName ) ) {
return new InformixDialect();
}
if ( databaseName.startsWith( "DB2/" ) ) {
return new DB2Dialect();
}
if ( "Oracle".equals( databaseName ) ) {
switch ( databaseMajorVersion ) {
case 11:
LOG.unsupportedOracleVersion();
return new Oracle10gDialect();
case 10:
return new Oracle10gDialect();
case 9:
return new Oracle9iDialect();
case 8:
return new Oracle8iDialect();
default:
LOG.unknownOracleVersion(databaseMajorVersion);
}
}
return null;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "Unknown Ingres major version [%s]; using Ingres 9.2 dialect" )
void unknownIngresVersion( int databaseMajorVersion );
@LogMessage( level = WARN )
@Message( value = "Unknown Oracle major version [%s]" )
void unknownOracleVersion( int databaseMajorVersion );
@LogMessage( level = WARN )
@Message( value = "Ingres 10 is not yet fully supported; using Ingres 9.3 dialect" )
void unsupportedIngresVersion();
@LogMessage( level = WARN )
@Message( value = "Oracle 11g is not yet fully supported; using Oracle 10g dialect" )
void unsupportedOracleVersion();
}
}

View File

@ -23,6 +23,9 @@
*/
package org.hibernate.engine;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.ERROR;
import static org.jboss.logging.Logger.Level.TRACE;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
@ -33,10 +36,6 @@ import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.action.AfterTransactionCompletionProcess;
@ -52,6 +51,10 @@ import org.hibernate.action.EntityUpdateAction;
import org.hibernate.action.Executable;
import org.hibernate.cache.CacheException;
import org.hibernate.type.Type;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Responsible for maintaining the queue of actions related to events.
@ -64,7 +67,8 @@ import org.hibernate.type.Type;
*/
public class ActionQueue {
private static final Logger log = LoggerFactory.getLogger( ActionQueue.class );
static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
ActionQueue.class.getPackage().getName());
private static final int INIT_QUEUE_LIST_SIZE = 5;
private SessionImplementor session;
@ -249,9 +253,7 @@ public class ActionQueue {
final Serializable[] spaces = action.getPropertySpaces();
for ( Serializable space : spaces ) {
if ( tableSpaces.contains( space ) ) {
if ( log.isDebugEnabled() ) {
log.debug( "changes must be flushed to space: " + space );
}
LOG.changesMustBeFlushed(space);
return true;
}
}
@ -299,7 +301,8 @@ public class ActionQueue {
*
* @return a string representation of the object.
*/
public String toString() {
@Override
public String toString() {
return new StringBuffer()
.append( "ActionQueue[insertions=" ).append( insertions )
.append( " updates=" ).append( updates )
@ -412,45 +415,45 @@ public class ActionQueue {
* @throws IOException Indicates an error writing to the stream
*/
public void serialize(ObjectOutputStream oos) throws IOException {
log.trace( "serializing action-queue" );
LOG.serializingActionQueue();
int queueSize = insertions.size();
log.trace( "starting serialization of [" + queueSize + "] insertions entries" );
LOG.serializingInsertions(queueSize);
oos.writeInt( queueSize );
for ( int i = 0; i < queueSize; i++ ) {
oos.writeObject( insertions.get( i ) );
}
queueSize = deletions.size();
log.trace( "starting serialization of [" + queueSize + "] deletions entries" );
LOG.serializingDeletions(queueSize);
oos.writeInt( queueSize );
for ( int i = 0; i < queueSize; i++ ) {
oos.writeObject( deletions.get( i ) );
}
queueSize = updates.size();
log.trace( "starting serialization of [" + queueSize + "] updates entries" );
LOG.serializingUpdates(queueSize);
oos.writeInt( queueSize );
for ( int i = 0; i < queueSize; i++ ) {
oos.writeObject( updates.get( i ) );
}
queueSize = collectionUpdates.size();
log.trace( "starting serialization of [" + queueSize + "] collectionUpdates entries" );
LOG.serializingCollectionUpdates(queueSize);
oos.writeInt( queueSize );
for ( int i = 0; i < queueSize; i++ ) {
oos.writeObject( collectionUpdates.get( i ) );
}
queueSize = collectionRemovals.size();
log.trace( "starting serialization of [" + queueSize + "] collectionRemovals entries" );
LOG.serializingCollectionRemovals(queueSize);
oos.writeInt( queueSize );
for ( int i = 0; i < queueSize; i++ ) {
oos.writeObject( collectionRemovals.get( i ) );
}
queueSize = collectionCreations.size();
log.trace( "starting serialization of [" + queueSize + "] collectionCreations entries" );
LOG.serializingCollectionCreations(queueSize);
oos.writeInt( queueSize );
for ( int i = 0; i < queueSize; i++ ) {
oos.writeObject( collectionCreations.get( i ) );
@ -473,46 +476,46 @@ public class ActionQueue {
public static ActionQueue deserialize(
ObjectInputStream ois,
SessionImplementor session) throws IOException, ClassNotFoundException {
log.trace( "deserializing action-queue" );
LOG.deserializingActionQueue();
ActionQueue rtn = new ActionQueue( session );
int queueSize = ois.readInt();
log.trace( "starting deserialization of [" + queueSize + "] insertions entries" );
LOG.deserializingInsertions(queueSize);
rtn.insertions = new ArrayList<Executable>( queueSize );
for ( int i = 0; i < queueSize; i++ ) {
rtn.insertions.add( ois.readObject() );
}
queueSize = ois.readInt();
log.trace( "starting deserialization of [" + queueSize + "] deletions entries" );
LOG.deserializingDeletions(queueSize);
rtn.deletions = new ArrayList<Executable>( queueSize );
for ( int i = 0; i < queueSize; i++ ) {
rtn.deletions.add( ois.readObject() );
}
queueSize = ois.readInt();
log.trace( "starting deserialization of [" + queueSize + "] updates entries" );
LOG.deserializingUpdates(queueSize);
rtn.updates = new ArrayList<Executable>( queueSize );
for ( int i = 0; i < queueSize; i++ ) {
rtn.updates.add( ois.readObject() );
}
queueSize = ois.readInt();
log.trace( "starting deserialization of [" + queueSize + "] collectionUpdates entries" );
LOG.deserializingCollectionUpdates(queueSize);
rtn.collectionUpdates = new ArrayList<Executable>( queueSize );
for ( int i = 0; i < queueSize; i++ ) {
rtn.collectionUpdates.add( ois.readObject() );
}
queueSize = ois.readInt();
log.trace( "starting deserialization of [" + queueSize + "] collectionRemovals entries" );
LOG.deserializingCollectionRemovals(queueSize);
rtn.collectionRemovals = new ArrayList<Executable>( queueSize );
for ( int i = 0; i < queueSize; i++ ) {
rtn.collectionRemovals.add( ois.readObject() );
}
queueSize = ois.readInt();
log.trace( "starting deserialization of [" + queueSize + "] collectionCreations entries" );
LOG.deserializingCollectionCreations(queueSize);
rtn.collectionCreations = new ArrayList<Executable>( queueSize );
for ( int i = 0; i < queueSize; i++ ) {
rtn.collectionCreations.add( ois.readObject() );
@ -591,7 +594,7 @@ public class ActionQueue {
process.doAfterTransactionCompletion( success, session );
}
catch ( CacheException ce ) {
log.error( "could not release a cache lock", ce );
LOG.unableToReleaseCacheLock(ce);
// continue loop
}
catch ( Exception e ) {
@ -730,4 +733,74 @@ public class ActionQueue {
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "Changes must be flushed to space: %s" )
void changesMustBeFlushed( Serializable space );
@LogMessage( level = ERROR )
@Message( value = "Could not release a cache lock : %s" )
void unableToReleaseCacheLock( CacheException ce );
@LogMessage( level = TRACE )
@Message( value = "Dedeserializing action-queue" )
void deserializingActionQueue();
@LogMessage( level = TRACE )
@Message( value = "Starting deserialization of [%d] collectionCreations entries" )
void deserializingCollectionCreations( int queueSize );
@LogMessage( level = TRACE )
@Message( value = "Starting deserialization of [%d] collectionRemovals entries" )
void deserializingCollectionRemovals( int queueSize );
@LogMessage( level = TRACE )
@Message( value = "Starting deserialization of [%d] collectionUpdates entries" )
void deserializingCollectionUpdates( int queueSize );
@LogMessage( level = TRACE )
@Message( value = "Starting deserialization of [%d] deletions entries" )
void deserializingDeletions( int queueSize );
@LogMessage( level = TRACE )
@Message( value = "Starting deserialization of [%d] insertions entries" )
void deserializingInsertions( int queueSize );
@LogMessage( level = TRACE )
@Message( value = "Starting deserialization of [%d] updates entries" )
void deserializingUpdates( int queueSize );
@LogMessage( level = TRACE )
@Message( value = "Serializing action-queue" )
void serializingActionQueue();
@LogMessage( level = TRACE )
@Message( value = "Starting serialization of [%d] collectionCreations entries" )
void serializingCollectionCreations( int queueSize );
@LogMessage( level = TRACE )
@Message( value = "Starting serialization of [%d] collectionRemovals entries" )
void serializingCollectionRemovals( int queueSize );
@LogMessage( level = TRACE )
@Message( value = "Starting serialization of [%d] collectionUpdates entries" )
void serializingCollectionUpdates( int queueSize );
@LogMessage( level = TRACE )
@Message( value = "Starting serialization of [%d] deletions entries" )
void serializingDeletions( int queueSize );
@LogMessage( level = TRACE )
@Message( value = "Starting serialization of [%d] insertions entries" )
void serializingInsertions( int queueSize );
@LogMessage( level = TRACE )
@Message( value = "Starting serialization of [%d] updates entries" )
void serializingUpdates( int queueSize );
}
}

View File

@ -23,14 +23,12 @@
*/
package org.hibernate.engine;
import static org.jboss.logging.Logger.Level.TRACE;
import java.io.Serializable;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Stack;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.EntityMode;
import org.hibernate.HibernateException;
import org.hibernate.collection.PersistentCollection;
@ -44,6 +42,10 @@ import org.hibernate.type.CompositeType;
import org.hibernate.type.EntityType;
import org.hibernate.type.Type;
import org.hibernate.util.CollectionHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Delegate responsible for, in conjunction with the various
@ -97,8 +99,7 @@ public final class Cascade {
*/
public static final int BEFORE_MERGE = 0;
private static final Logger log = LoggerFactory.getLogger( Cascade.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Cascade.class.getPackage().getName());
private int cascadeTo;
@ -141,9 +142,7 @@ public final class Cascade {
throws HibernateException {
if ( persister.hasCascades() || action.requiresNoCascadeChecking() ) { // performance opt
if ( log.isTraceEnabled() ) {
log.trace( "processing cascade " + action + " for: " + persister.getEntityName() );
}
LOG.processingCascade(action, persister.getEntityName());
Type[] types = persister.getPropertyTypes();
CascadeStyle[] cascadeStyles = persister.getPropertyCascadeStyles();
@ -179,9 +178,7 @@ public final class Cascade {
}
}
if ( log.isTraceEnabled() ) {
log.trace( "done processing cascade " + action + " for: " + persister.getEntityName() );
}
LOG.processingCascadeEnded(action, persister.getEntityName());
}
}
@ -250,10 +247,10 @@ public final class Cascade {
}
if ( loadedValue != null ) {
final String entityName = entry.getPersister().getEntityName();
if ( log.isTraceEnabled() ) {
if (LOG.isTraceEnabled()) {
final Serializable id = entry.getPersister().getIdentifier( loadedValue, eventSource );
final String description = MessageHelper.infoString( entityName, id );
log.trace( "deleting orphaned entity instance: " + description );
LOG.deletingOrphanedEntity(description);
}
eventSource.delete( entityName, loadedValue, false, new HashSet() );
}
@ -411,52 +408,44 @@ public final class Cascade {
// we can't cascade to non-embedded elements
boolean embeddedElements = eventSource.getEntityMode()!=EntityMode.DOM4J ||
( (EntityType) collectionType.getElementType( eventSource.getFactory() ) ).isEmbeddedInXML();
boolean reallyDoCascade = style.reallyDoCascade(action) &&
boolean reallyDoCascade = style.reallyDoCascade(action) &&
embeddedElements && child!=CollectionType.UNFETCHED_COLLECTION;
if ( reallyDoCascade ) {
if ( log.isTraceEnabled() ) {
log.trace( "cascade " + action + " for collection: " + collectionType.getRole() );
}
LOG.cascadeActionForCollection(action, collectionType.getRole());
Iterator iter = action.getCascadableChildrenIterator(eventSource, collectionType, child);
while ( iter.hasNext() ) {
cascadeProperty(
parent,
iter.next(),
iter.next(),
elemType,
style,
null,
anything,
isCascadeDeleteEnabled
anything,
isCascadeDeleteEnabled
);
}
if ( log.isTraceEnabled() ) {
log.trace( "done cascade " + action + " for collection: " + collectionType.getRole() );
}
LOG.cascadeActionForCollectionEnded(action, collectionType.getRole());
}
final boolean deleteOrphans = style.hasOrphanDelete() &&
action.deleteOrphans() &&
elemType.isEntityType() &&
final boolean deleteOrphans = style.hasOrphanDelete() &&
action.deleteOrphans() &&
elemType.isEntityType() &&
child instanceof PersistentCollection; //a newly instantiated collection can't have orphans
if ( deleteOrphans ) { // handle orphaned entities!!
if ( log.isTraceEnabled() ) {
log.trace( "deleting orphans for collection: " + collectionType.getRole() );
}
LOG.deletingOrphansForCollection(collectionType.getRole());
// we can do the cast since orphan-delete does not apply to:
// 1. newly instantiated collections
// 2. arrays (we can't track orphans for detached arrays)
final String entityName = collectionType.getAssociatedEntityName( eventSource.getFactory() );
deleteOrphans( entityName, (PersistentCollection) child );
if ( log.isTraceEnabled() ) {
log.trace( "done deleting orphans for collection: " + collectionType.getRole() );
}
LOG.deletingOrphansForCollectionEnded(collectionType.getRole());
}
}
@ -468,24 +457,60 @@ public final class Cascade {
final Collection orphans;
if ( pc.wasInitialized() ) {
CollectionEntry ce = eventSource.getPersistenceContext().getCollectionEntry(pc);
orphans = ce==null ?
orphans = ce==null ?
CollectionHelper.EMPTY_COLLECTION :
ce.getOrphans(entityName, pc);
}
else {
orphans = pc.getQueuedOrphans(entityName);
}
final Iterator orphanIter = orphans.iterator();
while ( orphanIter.hasNext() ) {
Object orphan = orphanIter.next();
if (orphan!=null) {
if ( log.isTraceEnabled() ) {
log.trace("deleting orphaned entity instance: " + entityName);
}
LOG.deletingOrphanedEntity(entityName);
eventSource.delete( entityName, orphan, false, new HashSet() );
}
}
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Cascade %s for collection: %s" )
void cascadeActionForCollection( CascadingAction action,
String role );
@LogMessage( level = TRACE )
@Message( value = "Done cascade %s for collection: %s" )
void cascadeActionForCollectionEnded( CascadingAction action,
String role );
@LogMessage( level = TRACE )
@Message( value = "Deleting orphaned entity instance: %s" )
void deletingOrphanedEntity( String description );
@LogMessage( level = TRACE )
@Message( value = "Deleting orphans for collection: %s" )
void deletingOrphansForCollection( String role );
@LogMessage( level = TRACE )
@Message( value = "Done deleting orphans for collection: %s" )
void deletingOrphansForCollectionEnded( String role );
@LogMessage( level = TRACE )
@Message( value = "Processing cascade %s for: %s" )
void processingCascade( CascadingAction action,
String entityName );
@LogMessage( level = TRACE )
@Message( value = "Done processing cascade %s for: %s" )
void processingCascadeEnded( CascadingAction action,
String entityName );
}
}

View File

@ -24,24 +24,26 @@
*/
package org.hibernate.engine;
import static org.jboss.logging.Logger.Level.TRACE;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.HibernateException;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.ReplicationMode;
import org.hibernate.TransientObjectException;
import org.hibernate.LockOptions;
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.collection.PersistentCollection;
import org.hibernate.event.EventSource;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.type.CollectionType;
import org.hibernate.type.Type;
import org.hibernate.type.EntityType;
import org.hibernate.type.Type;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* A session action that may be cascaded from parent entity to its children
@ -50,7 +52,8 @@ import org.hibernate.type.EntityType;
*/
public abstract class CascadingAction {
private static final Logger log = LoggerFactory.getLogger( CascadingAction.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
CascadingAction.class.getPackage().getName());
// the CascadingAction contract ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -137,22 +140,24 @@ public abstract class CascadingAction {
* @see org.hibernate.Session#delete(Object)
*/
public static final CascadingAction DELETE = new CascadingAction() {
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
@Override
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
if ( log.isTraceEnabled() ) {
log.trace("cascading to delete: " + entityName);
}
LOG.cascadingToDelete(entityName);
session.delete( entityName, child, isCascadeDeleteEnabled, ( Set ) anything );
}
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
@Override
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
// delete does cascade to uninitialized collections
return CascadingAction.getAllElementsIterator(session, collectionType, collection);
}
public boolean deleteOrphans() {
@Override
public boolean deleteOrphans() {
// orphans should be deleted during delete
return true;
}
public String toString() {
@Override
public String toString() {
return "ACTION_DELETE";
}
};
@ -161,11 +166,10 @@ public abstract class CascadingAction {
* @see org.hibernate.Session#lock(Object, LockMode)
*/
public static final CascadingAction LOCK = new CascadingAction() {
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
@Override
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
if ( log.isTraceEnabled() ) {
log.trace( "cascading to lock: " + entityName );
}
LOG.cascadingToLock(entityName);
LockMode lockMode = LockMode.NONE;
LockOptions lr = new LockOptions();
if ( anything instanceof LockOptions) {
@ -178,15 +182,18 @@ public abstract class CascadingAction {
lr.setLockMode(lockMode);
session.buildLockRequest(lr).lock(entityName, child);
}
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
@Override
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
// lock doesn't cascade to uninitialized collections
return getLoadedElementsIterator(session, collectionType, collection);
}
public boolean deleteOrphans() {
@Override
public boolean deleteOrphans() {
//TODO: should orphans really be deleted during lock???
return false;
}
public String toString() {
@Override
public String toString() {
return "ACTION_LOCK";
}
};
@ -195,21 +202,23 @@ public abstract class CascadingAction {
* @see org.hibernate.Session#refresh(Object)
*/
public static final CascadingAction REFRESH = new CascadingAction() {
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
@Override
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
if ( log.isTraceEnabled() ) {
log.trace( "cascading to refresh: " + entityName );
}
LOG.cascadingToRefresh(entityName);
session.refresh( child, (Map) anything );
}
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
@Override
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
// refresh doesn't cascade to uninitialized collections
return getLoadedElementsIterator(session, collectionType, collection);
}
public boolean deleteOrphans() {
@Override
public boolean deleteOrphans() {
return false;
}
public String toString() {
@Override
public String toString() {
return "ACTION_REFRESH";
}
};
@ -218,24 +227,27 @@ public abstract class CascadingAction {
* @see org.hibernate.Session#evict(Object)
*/
public static final CascadingAction EVICT = new CascadingAction() {
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
@Override
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
if ( log.isTraceEnabled() ) {
log.trace( "cascading to evict: " + entityName );
}
LOG.cascadingToEvict(entityName);
session.evict(child);
}
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
@Override
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
// evicts don't cascade to uninitialized collections
return getLoadedElementsIterator(session, collectionType, collection);
}
public boolean deleteOrphans() {
@Override
public boolean deleteOrphans() {
return false;
}
public boolean performOnLazyProperty() {
@Override
public boolean performOnLazyProperty() {
return false;
}
public String toString() {
@Override
public String toString() {
return "ACTION_EVICT";
}
};
@ -244,25 +256,28 @@ public abstract class CascadingAction {
* @see org.hibernate.Session#saveOrUpdate(Object)
*/
public static final CascadingAction SAVE_UPDATE = new CascadingAction() {
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
@Override
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
if ( log.isTraceEnabled() ) {
log.trace( "cascading to saveOrUpdate: " + entityName );
}
LOG.cascadingToSaveOrUpdate(entityName);
session.saveOrUpdate(entityName, child);
}
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
@Override
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
// saves / updates don't cascade to uninitialized collections
return getLoadedElementsIterator(session, collectionType, collection);
}
public boolean deleteOrphans() {
@Override
public boolean deleteOrphans() {
// orphans should be deleted during save/update
return true;
}
public boolean performOnLazyProperty() {
@Override
public boolean performOnLazyProperty() {
return false;
}
public String toString() {
@Override
public String toString() {
return "ACTION_SAVE_UPDATE";
}
};
@ -271,23 +286,25 @@ public abstract class CascadingAction {
* @see org.hibernate.Session#merge(Object)
*/
public static final CascadingAction MERGE = new CascadingAction() {
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
@Override
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
if ( log.isTraceEnabled() ) {
log.trace( "cascading to merge: " + entityName );
}
LOG.cascadingToMerge(entityName);
session.merge( entityName, child, (Map) anything );
}
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
@Override
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
// merges don't cascade to uninitialized collections
// //TODO: perhaps this does need to cascade after all....
return getLoadedElementsIterator(session, collectionType, collection);
}
public boolean deleteOrphans() {
@Override
public boolean deleteOrphans() {
// orphans should not be deleted during merge??
return false;
}
public String toString() {
@Override
public String toString() {
return "ACTION_MERGE";
}
};
@ -297,22 +314,24 @@ public abstract class CascadingAction {
*/
public static final CascadingAction SAVE_UPDATE_COPY = new CascadingAction() {
// for deprecated saveOrUpdateCopy()
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
@Override
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
if ( log.isTraceEnabled() ) {
log.trace( "cascading to saveOrUpdateCopy: " + entityName );
}
LOG.cascadingToSaveOrUpdateCopy(entityName);
session.saveOrUpdateCopy( entityName, child, (Map) anything );
}
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
@Override
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
// saves / updates don't cascade to uninitialized collections
return getLoadedElementsIterator(session, collectionType, collection);
}
public boolean deleteOrphans() {
@Override
public boolean deleteOrphans() {
// orphans should not be deleted during copy??
return false;
}
public String toString() {
@Override
public String toString() {
return "ACTION_SAVE_UPDATE_COPY";
}
};
@ -321,24 +340,27 @@ public abstract class CascadingAction {
* @see org.hibernate.Session#persist(Object)
*/
public static final CascadingAction PERSIST = new CascadingAction() {
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
@Override
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
if ( log.isTraceEnabled() ) {
log.trace( "cascading to persist: " + entityName );
}
LOG.cascadingToPersist(entityName);
session.persist( entityName, child, (Map) anything );
}
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
@Override
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
// persists don't cascade to uninitialized collections
return CascadingAction.getAllElementsIterator(session, collectionType, collection);
}
public boolean deleteOrphans() {
@Override
public boolean deleteOrphans() {
return false;
}
public boolean performOnLazyProperty() {
@Override
public boolean performOnLazyProperty() {
return false;
}
public String toString() {
@Override
public String toString() {
return "ACTION_PERSIST";
}
};
@ -349,24 +371,27 @@ public abstract class CascadingAction {
* @see org.hibernate.Session#persist(Object)
*/
public static final CascadingAction PERSIST_ON_FLUSH = new CascadingAction() {
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
@Override
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
if ( log.isTraceEnabled() ) {
log.trace( "cascading to persistOnFlush: " + entityName );
}
LOG.cascadingToPersistOnFlush(entityName);
session.persistOnFlush( entityName, child, (Map) anything );
}
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
@Override
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
// persists don't cascade to uninitialized collections
return CascadingAction.getLoadedElementsIterator(session, collectionType, collection);
}
public boolean deleteOrphans() {
@Override
public boolean deleteOrphans() {
return true;
}
public boolean requiresNoCascadeChecking() {
@Override
public boolean requiresNoCascadeChecking() {
return true;
}
public void noCascade(
@Override
public void noCascade(
EventSource session,
Object child,
Object parent,
@ -393,7 +418,8 @@ public abstract class CascadingAction {
}
}
}
public boolean performOnLazyProperty() {
@Override
public boolean performOnLazyProperty() {
return false;
}
@ -402,7 +428,8 @@ public abstract class CascadingAction {
return entry != null && (entry.getStatus() == Status.MANAGED || entry.getStatus() == Status.READ_ONLY);
}
public String toString() {
@Override
public String toString() {
return "ACTION_PERSIST_ON_FLUSH";
}
};
@ -411,21 +438,23 @@ public abstract class CascadingAction {
* @see org.hibernate.Session#replicate(Object, org.hibernate.ReplicationMode)
*/
public static final CascadingAction REPLICATE = new CascadingAction() {
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
@Override
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
if ( log.isTraceEnabled() ) {
log.trace( "cascading to replicate: " + entityName );
}
LOG.cascadingToReplicate(entityName);
session.replicate( entityName, child, (ReplicationMode) anything );
}
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
@Override
public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
// replicate does cascade to uninitialized collections
return getLoadedElementsIterator(session, collectionType, collection);
}
public boolean deleteOrphans() {
@Override
public boolean deleteOrphans() {
return false; //I suppose?
}
public String toString() {
@Override
public String toString() {
return "ACTION_REPLICATE";
}
};
@ -469,4 +498,50 @@ public abstract class CascadingAction {
return !(collection instanceof PersistentCollection) || ( (PersistentCollection) collection ).wasInitialized();
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Cascading to delete: %s" )
void cascadingToDelete( String entityName );
@LogMessage( level = TRACE )
@Message( value = "Cascading to evict: %s" )
void cascadingToEvict( String entityName );
@LogMessage( level = TRACE )
@Message( value = "Cascading to lock: %s" )
void cascadingToLock( String entityName );
@LogMessage( level = TRACE )
@Message( value = "Cascading to merge: %s" )
void cascadingToMerge( String entityName );
@LogMessage( level = TRACE )
@Message( value = "Cascading to persist: %s" )
void cascadingToPersist( String entityName );
@LogMessage( level = TRACE )
@Message( value = "Cascading to persist on flush: %s" )
void cascadingToPersistOnFlush( String entityName );
@LogMessage( level = TRACE )
@Message( value = "Cascading to refresh: %s" )
void cascadingToRefresh( String entityName );
@LogMessage( level = TRACE )
@Message( value = "Cascading to replicate: %s" )
void cascadingToReplicate( String entityName );
@LogMessage( level = TRACE )
@Message( value = "Cascading to save or update: %s" )
void cascadingToSaveOrUpdate( String entityName );
@LogMessage( level = TRACE )
@Message( value = "Cascading to save or update copy: %s" )
void cascadingToSaveOrUpdateCopy( String entityName );
}
}
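The anonymous-constant pattern above extends naturally; a hedged sketch of one more action as it would appear inside CascadingAction (the TOUCH name and its no-op behaviour are invented for illustration):
// Hypothetical extra constant, declared next to the ones above inside CascadingAction.
public static final CascadingAction TOUCH = new CascadingAction() {
    @Override
    public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
            throws HibernateException {
        LOG.trace( "Cascading (no-op touch) to: " + entityName ); // plain BasicLogger call, no dedicated i18n message
    }
    @Override
    public Iterator getCascadableChildrenIterator(EventSource session, CollectionType collectionType, Object collection) {
        return getLoadedElementsIterator( session, collectionType, collection );
    }
    @Override
    public boolean deleteOrphans() {
        return false;
    }
    @Override
    public String toString() {
        return "ACTION_TOUCH";
    }
};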

View File

@ -24,45 +24,48 @@
*/
package org.hibernate.engine;
import java.io.Serializable;
import java.io.ObjectOutputStream;
import static org.jboss.logging.Logger.Level.DEBUG;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Collection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.MappingException;
import org.hibernate.collection.PersistentCollection;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.pretty.MessageHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* We need an entry to tell us all about the current state
* of a collection with respect to its persistent state
*
*
* @author Gavin King
*/
public final class CollectionEntry implements Serializable {
private static final Logger log = LoggerFactory.getLogger(CollectionEntry.class);
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
CollectionEntry.class.getPackage().getName());
//ATTRIBUTES MAINTAINED BETWEEN FLUSH CYCLES
// session-start/post-flush persistent state
private Serializable snapshot;
// allow the CollectionSnapshot to be serialized
private String role;
// "loaded" means the reference that is consistent
// "loaded" means the reference that is consistent
// with the current database state
private transient CollectionPersister loadedPersister;
private Serializable loadedKey;
// ATTRIBUTES USED ONLY DURING FLUSH CYCLE
// during flush, we navigate the object graph to
// collections and decide what to do with them
private transient boolean reached;
@ -73,8 +76,8 @@ public final class CollectionEntry implements Serializable {
// if we instantiate a collection during the flush() process,
// we must ignore it for the rest of the flush()
private transient boolean ignore;
// "current" means the reference that was found during flush()
// "current" means the reference that was found during flush()
private transient CollectionPersister currentPersister;
private transient Serializable currentKey;
@ -87,7 +90,7 @@ public final class CollectionEntry implements Serializable {
ignore = false;
collection.clearDirty(); //a newly wrapped collection is NOT dirty (or we get unnecessary version updates)
snapshot = persister.isMutable() ?
collection.getSnapshot(persister) :
null;
@ -98,15 +101,15 @@ public final class CollectionEntry implements Serializable {
* For collections just loaded from the database
*/
public CollectionEntry(
final PersistentCollection collection,
final CollectionPersister loadedPersister,
final Serializable loadedKey,
final PersistentCollection collection,
final CollectionPersister loadedPersister,
final Serializable loadedKey,
final boolean ignore
) {
this.ignore=ignore;
//collection.clearDirty()
this.loadedKey = loadedKey;
setLoadedPersister(loadedPersister);
@ -124,11 +127,11 @@ public final class CollectionEntry implements Serializable {
ignore = false;
//collection.clearDirty()
this.loadedKey = loadedKey;
setLoadedPersister(loadedPersister);
}
/**
* For initialized detached collections
*/
@ -141,7 +144,7 @@ public final class CollectionEntry implements Serializable {
loadedKey = collection.getKey();
setLoadedPersister( factory.getCollectionPersister( collection.getRole() ) );
snapshot = collection.getStoredSnapshot();
snapshot = collection.getStoredSnapshot();
}
/**
@ -168,40 +171,36 @@ public final class CollectionEntry implements Serializable {
* of the collection elements, if necessary
*/
private void dirty(PersistentCollection collection) throws HibernateException {
boolean forceDirty = collection.wasInitialized() &&
!collection.isDirty() && //optimization
getLoadedPersister() != null &&
getLoadedPersister().isMutable() && //optimization
( collection.isDirectlyAccessible() || getLoadedPersister().getElementType().isMutable() ) && //optimization
!collection.equalsSnapshot( getLoadedPersister() );
if ( forceDirty ) {
collection.dirty();
}
}
public void preFlush(PersistentCollection collection) throws HibernateException {
boolean nonMutableChange = collection.isDirty() &&
getLoadedPersister()!=null &&
boolean nonMutableChange = collection.isDirty() &&
getLoadedPersister()!=null &&
!getLoadedPersister().isMutable();
if (nonMutableChange) {
throw new HibernateException(
"changed an immutable collection instance: " +
"changed an immutable collection instance: " +
MessageHelper.collectionInfoString( getLoadedPersister().getRole(), getLoadedKey() )
);
}
dirty(collection);
if ( log.isDebugEnabled() && collection.isDirty() && getLoadedPersister() != null ) {
log.debug(
"Collection dirty: " +
MessageHelper.collectionInfoString( getLoadedPersister().getRole(), getLoadedKey() )
);
}
if (LOG.isDebugEnabled() && collection.isDirty() && getLoadedPersister() != null) LOG.collectionDirty(MessageHelper.collectionInfoString(getLoadedPersister().getRole(),
getLoadedKey()));
setDoupdate(false);
setDoremove(false);
@ -229,22 +228,22 @@ public final class CollectionEntry implements Serializable {
}
collection.setSnapshot(loadedKey, role, snapshot);
}
/**
* Called after execution of an action
*/
public void afterAction(PersistentCollection collection) {
loadedKey = getCurrentKey();
setLoadedPersister( getCurrentPersister() );
boolean resnapshot = collection.wasInitialized() &&
boolean resnapshot = collection.wasInitialized() &&
( isDoremove() || isDorecreate() || isDoupdate() );
if ( resnapshot ) {
snapshot = loadedPersister==null || !loadedPersister.isMutable() ?
null :
snapshot = loadedPersister==null || !loadedPersister.isMutable() ?
null :
collection.getSnapshot(loadedPersister); //re-snapshot
}
collection.postAction();
}
@ -264,7 +263,7 @@ public final class CollectionEntry implements Serializable {
loadedPersister = persister;
setRole( persister == null ? null : persister.getRole() );
}
void afterDeserialize(SessionFactoryImplementor factory) {
loadedPersister = ( factory == null ? null : factory.getCollectionPersister(role) );
}
@ -336,7 +335,7 @@ public final class CollectionEntry implements Serializable {
public void setCurrentKey(Serializable currentKey) {
this.currentKey = currentKey;
}
/**
* This is only available late during the flush cycle
*/
@ -352,11 +351,12 @@ public final class CollectionEntry implements Serializable {
this.role = role;
}
public String toString() {
String result = "CollectionEntry" +
@Override
public String toString() {
String result = "CollectionEntry" +
MessageHelper.collectionInfoString( loadedPersister.getRole(), loadedKey );
if (currentPersister!=null) {
result += "->" +
result += "->" +
MessageHelper.collectionInfoString( currentPersister.getRole(), currentKey );
}
return result;
@ -365,7 +365,7 @@ public final class CollectionEntry implements Serializable {
/**
* Get the collection orphans (entities which were removed from the collection)
*/
public Collection getOrphans(String entityName, PersistentCollection collection)
public Collection getOrphans(String entityName, PersistentCollection collection)
throws HibernateException {
if (snapshot==null) {
throw new AssertionFailure("no collection snapshot for orphan delete");
@ -377,7 +377,7 @@ public final class CollectionEntry implements Serializable {
//TODO: does this really need to be here?
// does the collection already have
// it's own up-to-date snapshot?
return collection.wasInitialized() &&
return collection.wasInitialized() &&
( getLoadedPersister()==null || getLoadedPersister().isMutable() ) &&
collection.isSnapshotEmpty( getSnapshot() );
}
@ -417,4 +417,15 @@ public final class CollectionEntry implements Serializable {
( session == null ? null : session.getFactory() )
);
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "Collection dirty: %s" )
void collectionDirty( String collectionInfoString );
}
}
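The nested Logger interface above is the pattern this commit repeats for every converted class: declare a @MessageLogger interface next to the code that logs, obtain a typed implementation through org.jboss.logging.Logger.getMessageLogger, and call plain methods instead of concatenating strings. The stand-alone sketch below uses hypothetical names (MessageLoggerSketch, DemoLogger, itemsProcessed) and assumes an implementation of the annotated interface can be resolved at runtime (for example one generated by the jboss-logging-tools annotation processor); it only shows the moving parts in isolation and is not Hibernate code.

import static org.jboss.logging.Logger.Level.INFO;

import org.jboss.logging.LogMessage;
import org.jboss.logging.Logger;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;

public class MessageLoggerSketch {

    /**
     * Interface defining messages that may be logged by the outer class
     */
    @MessageLogger
    interface DemoLogger {
        @LogMessage( level = INFO )
        @Message( value = "Processed %d items for %s" )
        void itemsProcessed( int count, String owner );
    }

    private static final DemoLogger LOG = Logger.getMessageLogger( DemoLogger.class,
                                                                   MessageLoggerSketch.class.getPackage().getName() );

    public static void main(String[] args) {
        // The implementation substitutes the arguments into the printf-style
        // @Message template and logs the result at the declared INFO level.
        LOG.itemsProcessed( 3, "demo" );
    }
}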

View File

@ -24,8 +24,9 @@
*/
package org.hibernate.engine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.TRACE;
import java.io.Serializable;
import org.hibernate.AssertionFailure;
import org.hibernate.EntityMode;
import org.hibernate.HibernateException;
@ -33,8 +34,10 @@ import org.hibernate.collection.PersistentCollection;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.type.CollectionType;
import java.io.Serializable;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Implements book-keeping for the collection persistence by reachability algorithm
@ -42,9 +45,10 @@ import java.io.Serializable;
*/
public final class Collections {
private Collections() {}
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
Collections.class.getPackage().getName());
private static final Logger log = LoggerFactory.getLogger(Collections.class);
private Collections() {}
/**
* record the fact that this collection was dereferenced
@ -71,15 +75,9 @@ public final class Collections {
CollectionEntry entry = persistenceContext.getCollectionEntry(coll);
final CollectionPersister loadedPersister = entry.getLoadedPersister();
if ( log.isDebugEnabled() && loadedPersister != null )
log.debug(
"Collection dereferenced: " +
MessageHelper.collectionInfoString(
loadedPersister,
entry.getLoadedKey(),
session.getFactory()
)
);
if (LOG.isDebugEnabled() && loadedPersister != null) LOG.collectionDereferenced(MessageHelper.collectionInfoString(loadedPersister,
entry.getLoadedKey(),
session.getFactory()));
// do a check
boolean hasOrphanDelete = loadedPersister != null &&
@ -135,14 +133,9 @@ public final class Collections {
final PersistenceContext persistenceContext = session.getPersistenceContext();
CollectionEntry entry = persistenceContext.getCollectionEntry(coll);
log.debug(
"Found collection with unloaded owner: " +
MessageHelper.collectionInfoString(
entry.getLoadedPersister(),
entry.getLoadedKey(),
session.getFactory()
)
);
LOG.foundCollectionWithUnloadedOwner(MessageHelper.collectionInfoString(entry.getLoadedPersister(),
entry.getLoadedKey(),
session.getFactory()));
entry.setCurrentPersister( entry.getLoadedPersister() );
entry.setCurrentKey( entry.getLoadedKey() );
@ -151,14 +144,14 @@ public final class Collections {
}
/**
* Initialize the role of the collection.
*
* @param collection The collection to be updated by reachibility.
* @param type The type of the collection.
* @param entity The owner of the collection.
* @throws HibernateException
*/
/**
* Initialize the role of the collection.
*
* @param collection The collection to be updated by reachability.
* @param type The type of the collection.
* @param entity The owner of the collection.
* @throws HibernateException
*/
public static void processReachableCollection(
PersistentCollection collection,
CollectionType type,
@ -178,7 +171,7 @@ public final class Collections {
);
}
// The CollectionEntry.isReached() stuff is just to detect any silly users
// The CollectionEntry.isReached() stuff is just to detect any silly users
// who set up circular or shared references between/to collections.
if ( ce.isReached() ) {
// We've been here before
@ -194,15 +187,18 @@ public final class Collections {
ce.setCurrentPersister(persister);
ce.setCurrentKey( type.getKeyOfOwner(entity, session) ); //TODO: better to pass the id in as an argument?
if ( log.isDebugEnabled() ) {
log.debug(
"Collection found: " +
MessageHelper.collectionInfoString( persister, ce.getCurrentKey(), factory ) +
", was: " +
MessageHelper.collectionInfoString( ce.getLoadedPersister(), ce.getLoadedKey(), factory ) +
( collection.wasInitialized() ? " (initialized)" : " (uninitialized)" )
);
}
if (LOG.isDebugEnabled()) {
if (collection.wasInitialized()) LOG.collectionFound(MessageHelper.collectionInfoString(persister,
ce.getCurrentKey(),
factory),
MessageHelper.collectionInfoString(ce.getLoadedPersister(),
ce.getLoadedKey(),
factory),
LOG.initialized());
else LOG.collectionFound(MessageHelper.collectionInfoString(persister, ce.getCurrentKey(), factory),
MessageHelper.collectionInfoString(ce.getLoadedPersister(), ce.getLoadedKey(), factory),
LOG.uninitialized());
}
prepareCollectionForUpdate( collection, ce, session.getEntityMode(), factory );
@ -259,7 +255,7 @@ public final class Collections {
if ( loadedPersister != null ) {
entry.setDoremove(true); // we will need to remove ye olde entries
if ( entry.isDorecreate() ) {
log.trace( "Forcing collection initialization" );
LOG.forcingCollectionInitialization();
collection.forceInitialization(); // force initialize!
}
}
@ -273,4 +269,34 @@ public final class Collections {
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "Collection dereferenced: %s" )
void collectionDereferenced( String collectionInfoString );
@LogMessage( level = DEBUG )
@Message( value = "Collection found: %s, was: %s (%s)" )
void collectionFound( String collectionInfoString,
String collectionInfoString2,
String initialized );
@LogMessage( level = TRACE )
@Message( value = "Forcing collection initialization" )
void forcingCollectionInitialization();
@LogMessage( level = DEBUG )
@Message( value = "Found collection with unloaded owner: %s" )
void foundCollectionWithUnloadedOwner( String collectionInfoString );
@Message( value = "initialized" )
String initialized();
@Message( value = "uninitialized" )
String uninitialized();
}
}
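One detail worth noting in the Collections.Logger interface above: initialized() and uninitialized() carry a @Message but no @LogMessage, so the generated implementation returns the formatted (and localizable) string rather than logging it, which is how collectionFound() gets its third argument. A minimal sketch of the same idea, with hypothetical names (StateLogger, role/state parameters), assuming jboss-logging is on the classpath:

import static org.jboss.logging.Logger.Level.DEBUG;

import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;

@MessageLogger
interface StateLogger {

    // With @LogMessage the method logs; the %s placeholders are filled from the arguments.
    @LogMessage( level = DEBUG )
    @Message( value = "Collection found: %s (%s)" )
    void collectionFound( String role, String state );

    // Without @LogMessage the method simply returns the message text, so it can be
    // composed into other calls, e.g.
    //   LOG.collectionFound( role, wasInitialized ? LOG.initialized() : LOG.uninitialized() );
    @Message( value = "initialized" )
    String initialized();

    @Message( value = "uninitialized" )
    String uninitialized();
}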

View File

@ -24,94 +24,109 @@
*/
package org.hibernate.engine;
import static org.jboss.logging.Logger.Level.TRACE;
import java.io.Serializable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* A strategy for determining if an identifier value is an identifier of
* a new transient instance or a previously persistent transient instance.
* The strategy is determined by the <tt>unsaved-value</tt> attribute in
* the mapping file.
*
*
* @author Gavin King
*/
public class IdentifierValue {
private static final Logger log = LoggerFactory.getLogger(IdentifierValue.class);
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
IdentifierValue.class.getPackage().getName());
private final Serializable value;
/**
* Always assume the transient instance is newly instantiated
*/
public static final IdentifierValue ANY = new IdentifierValue() {
public final Boolean isUnsaved(Serializable id) {
log.trace("id unsaved-value strategy ANY");
@Override
public final Boolean isUnsaved(Serializable id) {
LOG.idUnsavedValueStrategy("ANY");
return Boolean.TRUE;
}
public Serializable getDefaultValue(Serializable currentValue) {
@Override
public Serializable getDefaultValue(Serializable currentValue) {
return currentValue;
}
public String toString() {
@Override
public String toString() {
return "SAVE_ANY";
}
};
/**
* Never assume the transient instance is newly instantiated
*/
public static final IdentifierValue NONE = new IdentifierValue() {
public final Boolean isUnsaved(Serializable id) {
log.trace("id unsaved-value strategy NONE");
@Override
public final Boolean isUnsaved(Serializable id) {
LOG.idUnsavedValueStrategy("NONE");
return Boolean.FALSE;
}
public Serializable getDefaultValue(Serializable currentValue) {
@Override
public Serializable getDefaultValue(Serializable currentValue) {
return currentValue;
}
public String toString() {
@Override
public String toString() {
return "SAVE_NONE";
}
};
/**
* Assume the transient instance is newly instantiated if the identifier
* is null.
*/
public static final IdentifierValue NULL = new IdentifierValue() {
public final Boolean isUnsaved(Serializable id) {
log.trace("id unsaved-value strategy NULL");
@Override
public final Boolean isUnsaved(Serializable id) {
LOG.idUnsavedValueStrategy("NULL");
return id==null ? Boolean.TRUE : Boolean.FALSE;
}
public Serializable getDefaultValue(Serializable currentValue) {
@Override
public Serializable getDefaultValue(Serializable currentValue) {
return null;
}
public String toString() {
@Override
public String toString() {
return "SAVE_NULL";
}
};
/**
* Assume nothing.
*/
public static final IdentifierValue UNDEFINED = new IdentifierValue() {
public final Boolean isUnsaved(Serializable id) {
log.trace("id unsaved-value strategy UNDEFINED");
@Override
public final Boolean isUnsaved(Serializable id) {
LOG.idUnsavedValueStrategy("UNDEFINED");
return null;
}
public Serializable getDefaultValue(Serializable currentValue) {
@Override
public Serializable getDefaultValue(Serializable currentValue) {
return null;
}
public String toString() {
@Override
public String toString() {
return "UNDEFINED";
}
};
protected IdentifierValue() {
this.value = null;
}
/**
* Assume the transient instance is newly instantiated if
* its identifier is null or equal to <tt>value</tt>
@ -119,20 +134,36 @@ public class IdentifierValue {
public IdentifierValue(Serializable value) {
this.value = value;
}
/**
* Does the given identifier belong to a new instance?
*/
public Boolean isUnsaved(Serializable id) {
if ( log.isTraceEnabled() ) log.trace("id unsaved-value: " + value);
LOG.idUnsavedValue(value);
return id==null || id.equals(value) ? Boolean.TRUE : Boolean.FALSE;
}
public Serializable getDefaultValue(Serializable currentValue) {
return value;
}
public String toString() {
@Override
public String toString() {
return "identifier unsaved-value: " + value;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "ID unsaved-value: %s" )
void idUnsavedValue( Serializable value );
@LogMessage( level = TRACE )
@Message( value = "ID unsaved-value strategy %s" )
void idUnsavedValueStrategy( String name );
}
}
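For readers less familiar with this corner of the engine, the predefined strategies above differ only in how isUnsaved() answers. The usage sketch below is not part of the patch; it assumes an implementation of the nested Logger interface is resolvable at runtime, since every isUnsaved() call also logs the strategy name through LOG.

import java.io.Serializable;

import org.hibernate.engine.IdentifierValue;

public class IdentifierValueSketch {
    public static void main(String[] args) {
        Serializable id = Long.valueOf( 42 );

        System.out.println( IdentifierValue.NULL.isUnsaved( null ) );    // TRUE  - a null id means "new instance"
        System.out.println( IdentifierValue.NULL.isUnsaved( id ) );      // FALSE - a non-null id means "already saved"
        System.out.println( IdentifierValue.ANY.isUnsaved( id ) );       // TRUE  - always treated as new
        System.out.println( IdentifierValue.NONE.isUnsaved( id ) );      // FALSE - never treated as new
        System.out.println( IdentifierValue.UNDEFINED.isUnsaved( id ) ); // null  - assume nothing, defer to other checks
    }
}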

View File

@ -24,15 +24,17 @@
*/
package org.hibernate.engine;
import org.hibernate.HibernateException;
import org.hibernate.type.Type;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.jboss.logging.Logger.Level.DEBUG;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Map;
import java.util.Iterator;
import java.util.Map;
import org.hibernate.HibernateException;
import org.hibernate.type.Type;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Centralizes the commonality regarding binding of parameter values into
@ -40,12 +42,13 @@ import java.util.Iterator;
* <p/>
* Ideally would like to move to the parameter handling as it is done in
* the hql.ast package.
*
*
* @author Steve Ebersole
*/
public class ParameterBinder {
private static final Logger log = LoggerFactory.getLogger( ParameterBinder.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
ParameterBinder.class.getPackage().getName());
public static interface NamedParameterSource {
public int[] getNamedParameterLocations(String name);
@ -119,19 +122,26 @@ public class ParameterBinder {
TypedValue typedval = ( TypedValue ) e.getValue();
int[] locations = source.getNamedParameterLocations( name );
for ( int i = 0; i < locations.length; i++ ) {
if ( log.isDebugEnabled() ) {
log.debug( "bindNamedParameters() " +
typedval.getValue() + " -> " + name +
" [" + ( locations[i] + start ) + "]" );
}
LOG.bindNamedParameters(typedval.getValue(), name, locations[i] + start);
typedval.getType().nullSafeSet( ps, typedval.getValue(), locations[i] + start, session );
}
result += locations.length;
}
return result;
}
else {
return 0;
}
return 0;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "bindNamedParameters() %s -> %s [%d]" )
void bindNamedParameters( Object value,
String name,
int i );
}
}
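Note that the old if ( log.isDebugEnabled() ) guard around the bind message was dropped here. That is safe as long as the generated message logger checks the level before formatting the template, which is how jboss-logging loggers behave; a guard is still worth keeping when computing the arguments themselves is expensive, which is why other changes in this commit keep isDebugEnabled() around MessageHelper.collectionInfoString()/infoString() calls. A sketch of the distinction, with hypothetical names (GuardSketch, DemoLogger, bindValue):

import static org.jboss.logging.Logger.Level.DEBUG;

import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Logger;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;

public class GuardSketch {

    // Extending BasicLogger exposes isDebugEnabled() and friends on the typed logger.
    @MessageLogger
    interface DemoLogger extends BasicLogger {
        @LogMessage( level = DEBUG )
        @Message( value = "bind %s -> %s [%d]" )
        void bindValue( Object value, String name, int position );
    }

    private static final DemoLogger LOG = Logger.getMessageLogger( DemoLogger.class,
                                                                   GuardSketch.class.getPackage().getName() );

    static void bind(Object value, String name, int position) {
        // Cheap arguments: no guard needed; the logger checks the level before formatting.
        LOG.bindValue( value, name, position );

        // Expensive arguments: keep a guard so the argument is not computed when DEBUG is off.
        if ( LOG.isDebugEnabled() ) {
            LOG.bindValue( value.toString().toUpperCase(), name, position );
        }
    }

    public static void main(String[] args) {
        bind( Integer.valueOf( 1 ), "id", 0 );
    }
}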

View File

@ -24,6 +24,7 @@
*/
package org.hibernate.engine;
import static org.jboss.logging.Logger.Level.TRACE;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
@ -31,27 +32,29 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.HibernateException;
import org.hibernate.LockOptions;
import org.hibernate.QueryException;
import org.hibernate.ScrollMode;
import org.hibernate.LockOptions;
import org.hibernate.impl.FilterImpl;
import org.hibernate.dialect.Dialect;
import org.hibernate.hql.classic.ParserHelper;
import org.hibernate.impl.FilterImpl;
import org.hibernate.pretty.Printer;
import org.hibernate.transform.ResultTransformer;
import org.hibernate.type.Type;
import org.hibernate.util.ArrayHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* @author Gavin King
*/
public final class QueryParameters {
private static final Logger log = LoggerFactory.getLogger( QueryParameters.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
QueryParameters.class.getPackage().getName());
private Type[] positionalParameterTypes;
private Object[] positionalParameterValues;
@ -278,15 +281,9 @@ public final class QueryParameters {
public void traceParameters(SessionFactoryImplementor factory) throws HibernateException {
Printer print = new Printer( factory );
if ( positionalParameterValues.length != 0 ) {
log.trace(
"parameters: " +
print.toString( positionalParameterTypes, positionalParameterValues )
);
}
if ( namedParameters != null ) {
log.trace( "named parameters: " + print.toString( namedParameters ) );
}
if (positionalParameterValues.length != 0) LOG.parameters(print.toString(positionalParameterTypes,
positionalParameterValues));
if (namedParameters != null) LOG.namedParameters(print.toString(namedParameters));
}
public boolean isCacheable() {
@ -419,7 +416,7 @@ public final class QueryParameters {
public boolean isReadOnly(SessionImplementor session) {
return ( isReadOnlyInitialized ?
isReadOnly() :
session.getPersistenceContext().isDefaultReadOnly()
session.getPersistenceContext().isDefaultReadOnly()
);
}
@ -567,5 +564,18 @@ public final class QueryParameters {
return copy;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Named parameters: %s" )
void namedParameters( String string );
@LogMessage( level = TRACE )
@Message( value = "Parameters: %s" )
void parameters( String string );
}
}

View File

@ -24,6 +24,9 @@
*/
package org.hibernate.engine;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.TRACE;
import static org.jboss.logging.Logger.Level.WARN;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
@ -35,10 +38,8 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.collections.map.AbstractReferenceMap;
import org.apache.commons.collections.map.ReferenceMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.AssertionFailure;
import org.hibernate.Hibernate;
import org.hibernate.HibernateException;
@ -47,16 +48,20 @@ import org.hibernate.MappingException;
import org.hibernate.NonUniqueObjectException;
import org.hibernate.PersistentObjectException;
import org.hibernate.TransientObjectException;
import org.hibernate.engine.loading.LoadContexts;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.collection.PersistentCollection;
import org.hibernate.engine.loading.LoadContexts;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.proxy.LazyInitializer;
import org.hibernate.tuple.ElementWrapper;
import org.hibernate.util.IdentityMap;
import org.hibernate.util.MarkerObject;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* A <tt>PersistenceContext</tt> represents the state of persistent "stuff" which
@ -74,59 +79,59 @@ public class StatefulPersistenceContext implements PersistenceContext {
public static final Object NO_ROW = new MarkerObject( "NO_ROW" );
private static final Logger log = LoggerFactory.getLogger( StatefulPersistenceContext.class );
private static final Logger PROXY_WARN_LOG = LoggerFactory.getLogger( StatefulPersistenceContext.class.getName() + ".ProxyWarnLog" );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
StatefulPersistenceContext.class.getPackage().getName());
private static final int INIT_COLL_SIZE = 8;
private SessionImplementor session;
// Loaded entity instances, by EntityKey
private Map entitiesByKey;
// Loaded entity instances, by EntityUniqueKey
private Map entitiesByUniqueKey;
// Identity map of EntityEntry instances, by the entity instance
private Map entityEntries;
// Entity proxies, by EntityKey
private Map proxiesByKey;
// Snapshots of current database state for entities
// that have *not* been loaded
private Map entitySnapshotsByKey;
// Identity map of array holder ArrayHolder instances, by the array instance
private Map arrayHolders;
// Identity map of CollectionEntry instances, by the collection wrapper
private Map collectionEntries;
// Collection wrappers, by the CollectionKey
private Map collectionsByKey; //key=CollectionKey, value=PersistentCollection
// Set of EntityKeys of deleted objects
private HashSet nullifiableEntityKeys;
// properties that we have tried to load, and not found in the database
private HashSet nullAssociations;
// A list of collection wrappers that were instantiating during result set
// processing, that we will need to initialize at the end of the query
private List nonlazyCollections;
// A container for collections we load up when the owning entity is not
// yet loaded ... for now, this is purely transient!
private Map unownedCollections;
// Parent entities cache by their child for cascading
// May be empty or not contains all relation
// May be empty or not contains all relation
private Map parentsByChild;
private int cascading = 0;
private int loadCounter = 0;
private boolean flushing = false;
private boolean defaultReadOnly = false;
private boolean hasNonReadOnlyEntities = false;
@ -145,7 +150,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
entitiesByKey = new HashMap( INIT_COLL_SIZE );
entitiesByUniqueKey = new HashMap( INIT_COLL_SIZE );
proxiesByKey = new ReferenceMap( ReferenceMap.HARD, ReferenceMap.WEAK );
proxiesByKey = new ReferenceMap( AbstractReferenceMap.HARD, AbstractReferenceMap.WEAK );
entitySnapshotsByKey = new HashMap( INIT_COLL_SIZE );
entityEntries = IdentityMap.instantiateSequenced( INIT_COLL_SIZE );
@ -153,7 +158,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
collectionsByKey = new HashMap( INIT_COLL_SIZE );
arrayHolders = IdentityMap.instantiate( INIT_COLL_SIZE );
parentsByChild = IdentityMap.instantiateSequenced( INIT_COLL_SIZE );
nullifiableEntityKeys = new HashSet();
initTransientState();
@ -167,7 +172,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
public boolean isStateless() {
return false;
}
public SessionImplementor getSession() {
return session;
}
@ -185,7 +190,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
unownedCollections.put(key, collection);
}
public PersistentCollection useUnownedCollection(CollectionKey key) {
if (unownedCollections==null) {
return null;
@ -194,7 +199,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
return (PersistentCollection) unownedCollections.remove(key);
}
}
/**
* Get the <tt>BatchFetchQueue</tt>, instantiating one if
* necessary.
@ -256,12 +261,12 @@ public class StatefulPersistenceContext implements PersistenceContext {
public boolean hasNonReadOnlyEntities() {
return hasNonReadOnlyEntities;
}
public void setEntryStatus(EntityEntry entry, Status status) {
entry.setStatus(status);
setHasNonReadOnlyEnties(status);
}
private void setHasNonReadOnlyEnties(Status status) {
if ( status==Status.DELETED || status==Status.MANAGED || status==Status.SAVING ) {
hasNonReadOnlyEntities = true;
@ -279,7 +284,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Get the current state of the entity as known to the underlying
* database, or null if there is no corresponding row
* database, or null if there is no corresponding row
*/
public Object[] getDatabaseSnapshot(Serializable id, EntityPersister persister)
throws HibernateException {
@ -361,7 +366,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
/**
* Get the entity instance associated with the given
* Get the entity instance associated with the given
* <tt>EntityKey</tt>
*/
public Object getEntity(EntityKey key) {
@ -449,12 +454,12 @@ public class StatefulPersistenceContext implements PersistenceContext {
final LockMode lockMode,
final boolean existsInDatabase,
final EntityPersister persister,
final boolean disableVersionIncrement,
final boolean disableVersionIncrement,
boolean lazyPropertiesAreUnfetched
) {
addEntity( entityKey, entity );
return addEntry(
entity,
status,
@ -465,14 +470,14 @@ public class StatefulPersistenceContext implements PersistenceContext {
lockMode,
existsInDatabase,
persister,
disableVersionIncrement,
disableVersionIncrement,
lazyPropertiesAreUnfetched
);
}
/**
* Generates an appropriate EntityEntry instance and adds it
* Generates an appropriate EntityEntry instance and adds it
* to the event source's internal caches.
*/
public EntityEntry addEntry(
@ -485,9 +490,9 @@ public class StatefulPersistenceContext implements PersistenceContext {
final LockMode lockMode,
final boolean existsInDatabase,
final EntityPersister persister,
final boolean disableVersionIncrement,
final boolean disableVersionIncrement,
boolean lazyPropertiesAreUnfetched) {
EntityEntry e = new EntityEntry(
status,
loadedState,
@ -502,7 +507,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
lazyPropertiesAreUnfetched
);
entityEntries.put(entity, e);
setHasNonReadOnlyEnties(status);
return e;
}
@ -514,7 +519,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
public boolean containsProxy(Object entity) {
return proxiesByKey.containsValue( entity );
}
/**
* Takes the given object and, if it represents a proxy, reassociates it with this event source.
*
@ -526,7 +531,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
if ( value instanceof ElementWrapper ) {
value = ( (ElementWrapper) value ).getElement();
}
if ( !Hibernate.isInitialized(value) ) {
HibernateProxy proxy = (HibernateProxy) value;
LazyInitializer li = proxy.getHibernateLazyInitializer();
@ -540,15 +545,15 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* If a deleted entity instance is re-saved, and it has a proxy, we need to
* reset the identifier of the proxy
* reset the identifier of the proxy
*/
public void reassociateProxy(Object value, Serializable id) throws MappingException {
if ( value instanceof ElementWrapper ) {
value = ( (ElementWrapper) value ).getElement();
}
if ( value instanceof HibernateProxy ) {
if ( log.isDebugEnabled() ) log.debug("setting proxy identifier: " + id);
LOG.settingProxyIdentifier(id);
HibernateProxy proxy = (HibernateProxy) value;
LazyInitializer li = proxy.getHibernateLazyInitializer();
li.setIdentifier(id);
@ -583,7 +588,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
if ( maybeProxy instanceof ElementWrapper ) {
maybeProxy = ( (ElementWrapper) maybeProxy ).getElement();
}
if ( maybeProxy instanceof HibernateProxy ) {
HibernateProxy proxy = (HibernateProxy) maybeProxy;
LazyInitializer li = proxy.getHibernateLazyInitializer();
@ -611,7 +616,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
if ( maybeProxy instanceof ElementWrapper ) {
maybeProxy = ( (ElementWrapper) maybeProxy ).getElement();
}
if ( maybeProxy instanceof HibernateProxy ) {
HibernateProxy proxy = (HibernateProxy) maybeProxy;
LazyInitializer li = proxy.getHibernateLazyInitializer();
@ -654,18 +659,12 @@ public class StatefulPersistenceContext implements PersistenceContext {
*/
public Object narrowProxy(Object proxy, EntityPersister persister, EntityKey key, Object object)
throws HibernateException {
boolean alreadyNarrow = persister.getConcreteProxyClass( session.getEntityMode() )
.isAssignableFrom( proxy.getClass() );
if ( !alreadyNarrow ) {
if ( PROXY_WARN_LOG.isWarnEnabled() ) {
PROXY_WARN_LOG.warn(
"Narrowing proxy to " +
persister.getConcreteProxyClass( session.getEntityMode() ) +
" - this operation breaks =="
);
}
if (LOG.isEnabled(WARN)) LOG.narrowingProxy(persister.getConcreteProxyClass(session.getEntityMode()));
if ( object != null ) {
proxiesByKey.remove(key);
@ -685,19 +684,19 @@ public class StatefulPersistenceContext implements PersistenceContext {
( ( HibernateProxy ) proxy ).getHibernateLazyInitializer().setReadOnly( readOnlyOrig );
}
return proxy;
}
}
}
else {
if ( object != null ) {
LazyInitializer li = ( (HibernateProxy) proxy ).getHibernateLazyInitializer();
li.setImplementation(object);
}
return proxy;
}
}
/**
@ -705,7 +704,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* third argument (the entity associated with the key) if no proxy exists. Init
* the proxy to the target implementation, if necessary.
*/
public Object proxyFor(EntityPersister persister, EntityKey key, Object impl)
public Object proxyFor(EntityPersister persister, EntityKey key, Object impl)
throws HibernateException {
if ( !persister.hasProxy() ) return impl;
Object proxy = proxiesByKey.get(key);
@ -845,7 +844,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* add an (initialized) collection that was created by another session and passed
* into update() (ie. one with a snapshot and existing state on the database)
*/
public void addInitializedDetachedCollection(CollectionPersister collectionPersister, PersistentCollection collection)
public void addInitializedDetachedCollection(CollectionPersister collectionPersister, PersistentCollection collection)
throws HibernateException {
if ( collection.isUnreferenced() ) {
//treat it just like a new collection
@ -867,14 +866,14 @@ public class StatefulPersistenceContext implements PersistenceContext {
addCollection(collection, ce, id);
return ce;
}
/**
* Get the collection instance associated with the <tt>CollectionKey</tt>
*/
public PersistentCollection getCollection(CollectionKey collectionKey) {
return (PersistentCollection) collectionsByKey.get(collectionKey);
}
/**
* Register a collection for non-lazy loading at the end of the
* two-phase load
@ -890,7 +889,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
*/
public void initializeNonLazyCollections() throws HibernateException {
if ( loadCounter == 0 ) {
log.debug( "initializing non-lazy collections" );
LOG.initializingNonLazyCollections();
//do this work only at the very highest level of the load
loadCounter++; //don't let this method be called recursively
try {
@ -917,7 +916,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Register a <tt>PersistentCollection</tt> object for an array.
* Associates a holder with an array - MUST be called after loading
* Associates a holder with an array - MUST be called after loading
* array, since the array instance is not created until endLoad().
*/
public void addCollectionHolder(PersistentCollection holder) {
@ -999,7 +998,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Record the fact that an entity does not exist in the database
*
*
* @param key the primary key of the entity
*/
/*public void addNonExistantEntityKey(EntityKey key) {
@ -1008,7 +1007,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Record the fact that an entity does not exist in the database
*
*
* @param key a unique key of the entity
*/
/*public void addNonExistantEntityUniqueKey(EntityUniqueKey key) {
@ -1019,7 +1018,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
nonExistantEntityKeys.remove(key);
}*/
/**
/**
* Retrieve the set of EntityKeys representing nullifiable references
*/
public HashSet getNullifiableEntityKeys() {
@ -1104,7 +1103,8 @@ public class StatefulPersistenceContext implements PersistenceContext {
*
* @return a string representation of the object.
*/
public String toString() {
@Override
public String toString() {
return new StringBuffer()
.append("PersistenceContext[entityKeys=")
.append(entitiesByKey.keySet())
@ -1258,7 +1258,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
.getEntityPersister(entity);
CollectionPersister cp = session.getFactory()
.getCollectionPersister(entity + '.' + property);
// try cache lookup first
Object parent = parentsByChild.get(childEntity);
if (parent != null) {
@ -1290,9 +1290,9 @@ public class StatefulPersistenceContext implements PersistenceContext {
EntityEntry ee = (EntityEntry) me.getValue();
if ( persister.isSubclassEntityName( ee.getEntityName() ) ) {
Object instance = me.getKey();
Object index = getIndexInParent(property, childEntity, persister, cp, instance);
if (index==null && mergeMap!=null) {
Object unmergedInstance = mergeMap.get(instance);
Object unmergedChild = mergeMap.get(childEntity);
@ -1300,20 +1300,20 @@ public class StatefulPersistenceContext implements PersistenceContext {
index = getIndexInParent(property, unmergedChild, persister, cp, unmergedInstance);
}
}
if (index!=null) return index;
}
}
return null;
}
private Object getIndexInParent(
String property,
Object childEntity,
EntityPersister persister,
String property,
Object childEntity,
EntityPersister persister,
CollectionPersister collectionPersister,
Object potentialParent
){
){
Object collection = persister.getPropertyValue( potentialParent, property, session.getEntityMode() );
if ( collection!=null && Hibernate.isInitialized(collection) ) {
return collectionPersister.getCollectionType().indexOf(collection, childEntity);
@ -1322,7 +1322,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
return null;
}
}
/**
* Record the fact that the association belonging to the keyed
* entity is null.
@ -1330,14 +1330,14 @@ public class StatefulPersistenceContext implements PersistenceContext {
public void addNullProperty(EntityKey ownerKey, String propertyName) {
nullAssociations.add( new AssociationKey(ownerKey, propertyName) );
}
/**
* Is the association property belonging to the keyed entity null?
*/
public boolean isPropertyNull(EntityKey ownerKey, String propertyName) {
return nullAssociations.contains( new AssociationKey(ownerKey, propertyName) );
}
private void clearNullProperties() {
nullAssociations.clear();
}
@ -1435,13 +1435,13 @@ public class StatefulPersistenceContext implements PersistenceContext {
* @throws IOException serialization errors.
*/
public void serialize(ObjectOutputStream oos) throws IOException {
log.trace( "serializing persistent-context" );
LOG.serializingPersistentContext();
oos.writeBoolean( defaultReadOnly );
oos.writeBoolean( hasNonReadOnlyEntities );
oos.writeInt( entitiesByKey.size() );
log.trace( "starting serialization of [" + entitiesByKey.size() + "] entitiesByKey entries" );
LOG.serializingEntitiesByKey(entitiesByKey.size());
Iterator itr = entitiesByKey.entrySet().iterator();
while ( itr.hasNext() ) {
Map.Entry entry = ( Map.Entry ) itr.next();
@ -1450,7 +1450,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
oos.writeInt( entitiesByUniqueKey.size() );
log.trace( "starting serialization of [" + entitiesByUniqueKey.size() + "] entitiesByUniqueKey entries" );
LOG.serializingEntitiesByUniqueKey(entitiesByUniqueKey.size());
itr = entitiesByUniqueKey.entrySet().iterator();
while ( itr.hasNext() ) {
Map.Entry entry = ( Map.Entry ) itr.next();
@ -1459,7 +1459,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
oos.writeInt( proxiesByKey.size() );
log.trace( "starting serialization of [" + proxiesByKey.size() + "] proxiesByKey entries" );
LOG.serializingProxiesByKey(proxiesByKey.size());
itr = proxiesByKey.entrySet().iterator();
while ( itr.hasNext() ) {
Map.Entry entry = ( Map.Entry ) itr.next();
@ -1468,7 +1468,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
oos.writeInt( entitySnapshotsByKey.size() );
log.trace( "starting serialization of [" + entitySnapshotsByKey.size() + "] entitySnapshotsByKey entries" );
LOG.serializingEntitySnapshotsByKey(entitySnapshotsByKey.size());
itr = entitySnapshotsByKey.entrySet().iterator();
while ( itr.hasNext() ) {
Map.Entry entry = ( Map.Entry ) itr.next();
@ -1477,7 +1477,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
oos.writeInt( entityEntries.size() );
log.trace( "starting serialization of [" + entityEntries.size() + "] entityEntries entries" );
LOG.serializingEntityEntries(entityEntries.size());
itr = entityEntries.entrySet().iterator();
while ( itr.hasNext() ) {
Map.Entry entry = ( Map.Entry ) itr.next();
@ -1486,7 +1486,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
oos.writeInt( collectionsByKey.size() );
log.trace( "starting serialization of [" + collectionsByKey.size() + "] collectionsByKey entries" );
LOG.serializingCollectionsByKey(collectionsByKey.size());
itr = collectionsByKey.entrySet().iterator();
while ( itr.hasNext() ) {
Map.Entry entry = ( Map.Entry ) itr.next();
@ -1495,7 +1495,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
oos.writeInt( collectionEntries.size() );
log.trace( "starting serialization of [" + collectionEntries.size() + "] collectionEntries entries" );
LOG.serializingCollectionEntries(collectionEntries.size());
itr = collectionEntries.entrySet().iterator();
while ( itr.hasNext() ) {
Map.Entry entry = ( Map.Entry ) itr.next();
@ -1504,7 +1504,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
oos.writeInt( arrayHolders.size() );
log.trace( "starting serialization of [" + arrayHolders.size() + "] arrayHolders entries" );
LOG.serializingArrayHolders(arrayHolders.size());
itr = arrayHolders.entrySet().iterator();
while ( itr.hasNext() ) {
Map.Entry entry = ( Map.Entry ) itr.next();
@ -1513,7 +1513,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
oos.writeInt( nullifiableEntityKeys.size() );
log.trace( "starting serialization of [" + nullifiableEntityKeys.size() + "] nullifiableEntityKeys entries" );
LOG.serializingNullifiableEntityKeys(nullifiableEntityKeys.size());
itr = nullifiableEntityKeys.iterator();
while ( itr.hasNext() ) {
EntityKey entry = ( EntityKey ) itr.next();
@ -1524,7 +1524,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
public static StatefulPersistenceContext deserialize(
ObjectInputStream ois,
SessionImplementor session) throws IOException, ClassNotFoundException {
log.trace( "deserializing persistent-context" );
LOG.deserializingPersistentContext();
StatefulPersistenceContext rtn = new StatefulPersistenceContext( session );
// during deserialization, we need to reconnect all proxies and
@ -1538,44 +1538,41 @@ public class StatefulPersistenceContext implements PersistenceContext {
rtn.hasNonReadOnlyEntities = ois.readBoolean();
int count = ois.readInt();
log.trace( "staring deserialization of [" + count + "] entitiesByKey entries" );
LOG.deserializingEntitiesByKey(count);
rtn.entitiesByKey = new HashMap( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) {
rtn.entitiesByKey.put( EntityKey.deserialize( ois, session ), ois.readObject() );
}
count = ois.readInt();
log.trace( "staring deserialization of [" + count + "] entitiesByUniqueKey entries" );
LOG.deserializingEntitiesByUniqueKey(count);
rtn.entitiesByUniqueKey = new HashMap( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) {
rtn.entitiesByUniqueKey.put( EntityUniqueKey.deserialize( ois, session ), ois.readObject() );
}
count = ois.readInt();
log.trace( "staring deserialization of [" + count + "] proxiesByKey entries" );
rtn.proxiesByKey = new ReferenceMap( ReferenceMap.HARD, ReferenceMap.WEAK, count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count, .75f );
LOG.deserializingProxiesByKey(count);
rtn.proxiesByKey = new ReferenceMap( AbstractReferenceMap.HARD, AbstractReferenceMap.WEAK, count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count, .75f );
for ( int i = 0; i < count; i++ ) {
EntityKey ek = EntityKey.deserialize( ois, session );
Object proxy = ois.readObject();
if ( proxy instanceof HibernateProxy ) {
( ( HibernateProxy ) proxy ).getHibernateLazyInitializer().setSession( session );
rtn.proxiesByKey.put( ek, proxy );
}
else {
log.trace( "encountered prunded proxy" );
}
} else LOG.encounteredPrunedProxy();
// otherwise, the proxy was pruned during the serialization process
}
count = ois.readInt();
log.trace( "staring deserialization of [" + count + "] entitySnapshotsByKey entries" );
LOG.deserializingEntitySnapshotsByKey(count);
rtn.entitySnapshotsByKey = new HashMap( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) {
rtn.entitySnapshotsByKey.put( EntityKey.deserialize( ois, session ), ois.readObject() );
}
count = ois.readInt();
log.trace( "staring deserialization of [" + count + "] entityEntries entries" );
LOG.deserializingEntityEntries(count);
rtn.entityEntries = IdentityMap.instantiateSequenced( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) {
Object entity = ois.readObject();
@ -1584,14 +1581,14 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
count = ois.readInt();
log.trace( "staring deserialization of [" + count + "] collectionsByKey entries" );
LOG.deserializingCollectionsByKey(count);
rtn.collectionsByKey = new HashMap( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) {
rtn.collectionsByKey.put( CollectionKey.deserialize( ois, session ), ois.readObject() );
}
count = ois.readInt();
log.trace( "staring deserialization of [" + count + "] collectionEntries entries" );
LOG.deserializingCollectionEntries(count);
rtn.collectionEntries = IdentityMap.instantiateSequenced( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) {
final PersistentCollection pc = ( PersistentCollection ) ois.readObject();
@ -1601,14 +1598,14 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
count = ois.readInt();
log.trace( "staring deserialization of [" + count + "] arrayHolders entries" );
LOG.deserializingArrayHolders(count);
rtn.arrayHolders = IdentityMap.instantiate( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) {
rtn.arrayHolders.put( ois.readObject(), ois.readObject() );
}
count = ois.readInt();
log.trace( "staring deserialization of [" + count + "] nullifiableEntityKeys entries" );
LOG.deserializingNullifiableEntityKeys(count);
rtn.nullifiableEntityKeys = new HashSet();
for ( int i = 0; i < count; i++ ) {
rtn.nullifiableEntityKeys.add( EntityKey.deserialize( ois, session ) );
@ -1628,7 +1625,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
public void addChildParent(Object child, Object parent) {
parentsByChild.put(child, parent);
}
/**
* @see org.hibernate.engine.PersistenceContext#removeChildParent(java.lang.Object)
*/
@ -1680,4 +1677,106 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Starting deserialization of [%d] arrayHolders entries" )
void deserializingArrayHolders( int size );
@LogMessage( level = TRACE )
@Message( value = "Starting deserialization of [%d] collectionEntries entries" )
void deserializingCollectionEntries( int size );
@LogMessage( level = TRACE )
@Message( value = "Starting deserialization of [%d] collectionsByKey entries" )
void deserializingCollectionsByKey( int size );
@LogMessage( level = TRACE )
@Message( value = "Starting deserialization of [%d] entitiesByKey entries" )
void deserializingEntitiesByKey( int size );
@LogMessage( level = TRACE )
@Message( value = "Starting deserialization of [%d] entitiesByUniqueKey entries" )
void deserializingEntitiesByUniqueKey( int size );
@LogMessage( level = TRACE )
@Message( value = "Starting deserialization of [%d] entityEntries entries" )
void deserializingEntityEntries( int size );
@LogMessage( level = TRACE )
@Message( value = "Starting deserialization of [%d] entitySnapshotsByKey entries" )
void deserializingEntitySnapshotsByKey( int size );
@LogMessage( level = TRACE )
@Message( value = "Starting deserialization of [%d] nullifiableEntityKey entries" )
void deserializingNullifiableEntityKeys( int size );
@LogMessage( level = TRACE )
@Message( value = "Serializing persistent-context" )
void deserializingPersistentContext();
@LogMessage( level = TRACE )
@Message( value = "Starting deserialization of [%d] proxiesByKey entries" )
void deserializingProxiesByKey( int size );
@LogMessage( level = TRACE )
@Message( value = "Encountered prunded proxy" )
void encounteredPrunedProxy();
@LogMessage( level = DEBUG )
@Message( value = "Initializing non-lazy collections" )
void initializingNonLazyCollections();
@LogMessage( level = WARN )
@Message( value = "Narrowing proxy to %s - this operation breaks ==" )
void narrowingProxy( Class concreteProxyClass );
@LogMessage( level = TRACE )
@Message( value = "Starting serialization of [%d] arrayHolders entries" )
void serializingArrayHolders( int size );
@LogMessage( level = TRACE )
@Message( value = "Starting serialization of [%d] collectionEntries entries" )
void serializingCollectionEntries( int size );
@LogMessage( level = TRACE )
@Message( value = "Starting serialization of [%d] collectionsByKey entries" )
void serializingCollectionsByKey( int size );
@LogMessage( level = TRACE )
@Message( value = "Starting serialization of [%d] entitiesByKey entries" )
void serializingEntitiesByKey( int size );
@LogMessage( level = TRACE )
@Message( value = "Starting serialization of [%d] entitiesByUniqueKey entries" )
void serializingEntitiesByUniqueKey( int size );
@LogMessage( level = TRACE )
@Message( value = "Starting serialization of [%d] entityEntries entries" )
void serializingEntityEntries( int size );
@LogMessage( level = TRACE )
@Message( value = "Starting serialization of [%d] entitySnapshotsByKey entries" )
void serializingEntitySnapshotsByKey( int size );
@LogMessage( level = TRACE )
@Message( value = "Starting serialization of [%d] nullifiableEntityKey entries" )
void serializingNullifiableEntityKeys( int size );
@LogMessage( level = TRACE )
@Message( value = "Serializing persistent-context" )
void serializingPersistentContext();
@LogMessage( level = TRACE )
@Message( value = "Starting serialization of [%d] proxiesByKey entries" )
void serializingProxiesByKey( int size );
@LogMessage( level = DEBUG )
@Message( value = "setting proxy identifier: %s" )
void settingProxyIdentifier( Serializable id );
}
}

View File

@ -23,15 +23,13 @@
*/
package org.hibernate.engine;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.TRACE;
import java.io.Serializable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.AssertionFailure;
import org.hibernate.CacheMode;
import org.hibernate.HibernateException;
import org.hibernate.LockMode;
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.cache.CacheKey;
import org.hibernate.cache.entry.CacheEntry;
import org.hibernate.event.PostLoadEvent;
@ -42,66 +40,72 @@ import org.hibernate.intercept.LazyPropertyInitializer;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.property.BackrefPropertyAccessor;
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.type.Type;
import org.hibernate.type.TypeHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Functionality relating to Hibernate's two-phase loading process,
* that may be reused by persisters that do not use the Loader
* framework
*
*
* @author Gavin King
*/
public final class TwoPhaseLoad {
private static final Logger log = LoggerFactory.getLogger(TwoPhaseLoad.class);
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
TwoPhaseLoad.class.getPackage().getName());
private TwoPhaseLoad() {}
/**
* Register the "hydrated" state of an entity instance, after the first step of 2-phase loading.
*
*
* Add the "hydrated state" (an array) of an uninitialized entity to the session. We don't try
* to resolve any associations yet, because there might be other entities waiting to be
* read from the JDBC result set we are currently processing
*/
public static void postHydrate(
final EntityPersister persister,
final Serializable id,
final Object[] values,
final EntityPersister persister,
final Serializable id,
final Object[] values,
final Object rowId,
final Object object,
final Object object,
final LockMode lockMode,
final boolean lazyPropertiesAreUnfetched,
final SessionImplementor session)
final boolean lazyPropertiesAreUnfetched,
final SessionImplementor session)
throws HibernateException {
Object version = Versioning.getVersion(values, persister);
session.getPersistenceContext().addEntry(
object,
session.getPersistenceContext().addEntry(
object,
Status.LOADING,
values,
rowId,
id,
version,
lockMode,
true,
persister,
false,
lazyPropertiesAreUnfetched
values,
rowId,
id,
version,
lockMode,
true,
persister,
false,
lazyPropertiesAreUnfetched
);
if ( log.isTraceEnabled() && version!=null ) {
if (LOG.isTraceEnabled() && version != null) {
String versionStr = persister.isVersioned()
? persister.getVersionType().toLoggableString( version, session.getFactory() )
: "null";
log.trace( "Version: " + versionStr );
LOG.version(versionStr);
}
}
/**
* Perform the second step of 2-phase load. Fully initialize the entity
* Perform the second step of 2-phase load. Fully initialize the entity
* instance.
*
* After processing a JDBC result set, we "resolve" all the associations
@ -109,14 +113,14 @@ public final class TwoPhaseLoad {
* "hydrated" into an array
*/
public static void initializeEntity(
final Object entity,
final Object entity,
final boolean readOnly,
final SessionImplementor session,
final PreLoadEvent preLoadEvent,
final PostLoadEvent postLoadEvent) throws HibernateException {
//TODO: Should this be an InitializeEntityEventListener??? (watch out for performance!)
final PersistenceContext persistenceContext = session.getPersistenceContext();
EntityEntry entityEntry = persistenceContext.getEntry(entity);
if ( entityEntry == null ) {
@ -125,13 +129,9 @@ public final class TwoPhaseLoad {
EntityPersister persister = entityEntry.getPersister();
Serializable id = entityEntry.getId();
Object[] hydratedState = entityEntry.getLoadedState();
if ( log.isDebugEnabled() )
log.debug(
"resolving associations for " +
MessageHelper.infoString(persister, id, session.getFactory())
);
if (LOG.isDebugEnabled()) LOG.resolvingAssociations(MessageHelper.infoString(persister, id, session.getFactory()));
Type[] types = persister.getPropertyTypes();
for ( int i = 0; i < hydratedState.length; i++ ) {
final Object value = hydratedState[i];
@ -139,7 +139,7 @@ public final class TwoPhaseLoad {
hydratedState[i] = types[i].resolve( value, session, entity );
}
}
//Must occur after resolving identifiers!
if ( session.isEventSource() ) {
preLoadEvent.setEntity(entity).setState(hydratedState).setId(id).setPersister(persister);
@ -148,33 +148,31 @@ public final class TwoPhaseLoad {
listeners[i].onPreLoad(preLoadEvent);
}
}
persister.setPropertyValues( entity, hydratedState, session.getEntityMode() );
final SessionFactoryImplementor factory = session.getFactory();
if ( persister.hasCache() && session.getCacheMode().isPutEnabled() ) {
if ( log.isDebugEnabled() )
log.debug(
"adding entity to second-level cache: " +
MessageHelper.infoString( persister, id, session.getFactory() )
);
if (LOG.isDebugEnabled()) LOG.addingEntityToSecondLevelCache(MessageHelper.infoString(persister,
id,
session.getFactory()));
Object version = Versioning.getVersion(hydratedState, persister);
CacheEntry entry = new CacheEntry(
hydratedState,
persister,
entityEntry.isLoadedWithLazyPropertiesUnfetched(),
version,
session,
hydratedState,
persister,
entityEntry.isLoadedWithLazyPropertiesUnfetched(),
version,
session,
entity
);
CacheKey cacheKey = new CacheKey(
id,
persister.getIdentifierType(),
persister.getRootEntityName(),
session.getEntityMode(),
session.getFactory()
CacheKey cacheKey = new CacheKey(
id,
persister.getIdentifierType(),
persister.getRootEntityName(),
session.getEntityMode(),
session.getFactory()
);
// explicit handling of caching for rows just inserted and then somehow forced to be read
@ -219,30 +217,30 @@ public final class TwoPhaseLoad {
}
}
if ( isReallyReadOnly ) {
//no need to take a snapshot - this is a
//no need to take a snapshot - this is a
//performance optimization, but not really
//important, except for entities with huge
//important, except for entities with huge
//mutable property values
persistenceContext.setEntryStatus(entityEntry, Status.READ_ONLY);
}
else {
//take a snapshot
TypeHelper.deepCopy(
hydratedState,
persister.getPropertyTypes(),
persister.getPropertyUpdateability(),
hydratedState,
persister.getPropertyTypes(),
persister.getPropertyUpdateability(),
hydratedState, //after setting values to object, entityMode
session
);
persistenceContext.setEntryStatus(entityEntry, Status.MANAGED);
}
persister.afterInitialize(
entity,
entityEntry.isLoadedWithLazyPropertiesUnfetched(),
entity,
entityEntry.isLoadedWithLazyPropertiesUnfetched(),
session
);
if ( session.isEventSource() ) {
postLoadEvent.setEntity(entity).setId(id).setPersister(persister);
PostLoadEventListener[] listeners = session.getListeners().getPostLoadEventListeners();
@ -250,76 +248,95 @@ public final class TwoPhaseLoad {
listeners[i].onPostLoad(postLoadEvent);
}
}
if ( log.isDebugEnabled() )
log.debug(
"done materializing entity " +
MessageHelper.infoString( persister, id, session.getFactory() )
);
if (LOG.isDebugEnabled()) LOG.doneMaterializingEntity(MessageHelper.infoString(persister, id, session.getFactory()));
if ( factory.getStatistics().isStatisticsEnabled() ) {
factory.getStatisticsImplementor().loadEntity( persister.getEntityName() );
}
}
private static boolean useMinimalPuts(SessionImplementor session, EntityEntry entityEntry) {
return ( session.getFactory().getSettings().isMinimalPutsEnabled() &&
return ( session.getFactory().getSettings().isMinimalPutsEnabled() &&
session.getCacheMode()!=CacheMode.REFRESH ) ||
( entityEntry.getPersister().hasLazyProperties() &&
entityEntry.isLoadedWithLazyPropertiesUnfetched() &&
( entityEntry.getPersister().hasLazyProperties() &&
entityEntry.isLoadedWithLazyPropertiesUnfetched() &&
entityEntry.getPersister().isLazyPropertiesCacheable() );
}
/**
* Add an uninitialized instance of an entity class, as a placeholder to ensure object
* Add an uninitialized instance of an entity class, as a placeholder to ensure object
* identity. Must be called before <tt>postHydrate()</tt>.
*
* Create a "temporary" entry for a newly instantiated entity. The entity is uninitialized,
* but we need the mapping from id to instance in order to guarantee uniqueness.
*/
public static void addUninitializedEntity(
final EntityKey key,
final Object object,
final EntityPersister persister,
final EntityKey key,
final Object object,
final EntityPersister persister,
final LockMode lockMode,
final boolean lazyPropertiesAreUnfetched,
final boolean lazyPropertiesAreUnfetched,
final SessionImplementor session
) {
session.getPersistenceContext().addEntity(
object,
Status.LOADING,
null,
key,
null,
lockMode,
true,
persister,
false,
object,
Status.LOADING,
null,
key,
null,
lockMode,
true,
persister,
false,
lazyPropertiesAreUnfetched
);
}
public static void addUninitializedCachedEntity(
final EntityKey key,
final Object object,
final EntityPersister persister,
final EntityKey key,
final Object object,
final EntityPersister persister,
final LockMode lockMode,
final boolean lazyPropertiesAreUnfetched,
final Object version,
final SessionImplementor session
) {
session.getPersistenceContext().addEntity(
object,
Status.LOADING,
null,
key,
version,
lockMode,
true,
persister,
false,
object,
Status.LOADING,
null,
key,
version,
lockMode,
true,
persister,
false,
lazyPropertiesAreUnfetched
);
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "Adding entity to second-level cache: %s" )
void addingEntityToSecondLevelCache( String infoString );
@LogMessage( level = DEBUG )
@Message( value = "Done materializing entity %s" )
void doneMaterializingEntity( String infoString );
@LogMessage( level = DEBUG )
@Message( value = "Resolving associations for %s" )
void resolvingAssociations( String infoString );
@LogMessage( level = TRACE )
@Message( value = "Version: %s" )
void version( String versionStr );
}
}
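The TwoPhaseLoad conversion above illustrates the pattern applied throughout these files: the slf4j logger field gives way to a nested @MessageLogger interface, and an implementation of that interface is obtained through Logger.getMessageLogger. Below is a minimal, self-contained sketch of that pattern as this editor reads it; the class EntityMaterializer, its load method, and its argument are hypothetical stand-ins rather than Hibernate code, and the sketch assumes the jboss-logging-tools annotation processor (or runtime proxy generation) supplies the generated implementation.

import static org.jboss.logging.Logger.Level.DEBUG;

import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;

public class EntityMaterializer {

    // the category string plays the role of the name passed to LoggerFactory.getLogger
    private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(
            Logger.class, EntityMaterializer.class.getName() );

    public void load(String entityInfo) {
        // the %s substitution is performed by the generated implementation,
        // and only when DEBUG is actually enabled
        LOG.doneMaterializingEntity( entityInfo );
    }

    /**
     * Interface defining messages that may be logged by the outer class
     */
    @MessageLogger
    interface Logger extends BasicLogger {

        @LogMessage( level = DEBUG )
        @Message( value = "Done materializing entity %s" )
        void doneMaterializingEntity( String infoString );
    }
}

TwoPhaseLoad still wraps its call in LOG.isDebugEnabled() because building the MessageHelper.infoString argument is itself costly; when the argument is already at hand, the typed call alone is enough.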

View File

@ -24,22 +24,26 @@
*/
package org.hibernate.engine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.jboss.logging.Logger.Level.TRACE;
import org.hibernate.MappingException;
import org.hibernate.id.IdentifierGeneratorHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* A strategy for determining if a version value is a version of
* a new transient instance or a previously persistent transient instance.
* The strategy is determined by the <tt>unsaved-value</tt> attribute in
* the mapping file.
*
*
* @author Gavin King
*/
public class VersionValue {
private static final Logger log = LoggerFactory.getLogger(VersionValue.class);
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
VersionValue.class.getPackage().getName());
private final Object value;
/**
@ -47,14 +51,17 @@ public class VersionValue {
* is null, otherwise assume it is a detached instance.
*/
public static final VersionValue NULL = new VersionValue() {
public final Boolean isUnsaved(Object version) {
log.trace("version unsaved-value strategy NULL");
@Override
public final Boolean isUnsaved(Object version) {
LOG.versionUnsavedValueStrategy("NULL");
return version==null ? Boolean.TRUE : Boolean.FALSE;
}
public Object getDefaultValue(Object currentValue) {
@Override
public Object getDefaultValue(Object currentValue) {
return null;
}
public String toString() {
@Override
public String toString() {
return "VERSION_SAVE_NULL";
}
};
@ -63,14 +70,17 @@ public class VersionValue {
* is null, otherwise defer to the identifier unsaved-value.
*/
public static final VersionValue UNDEFINED = new VersionValue() {
public final Boolean isUnsaved(Object version) {
log.trace("version unsaved-value strategy UNDEFINED");
@Override
public final Boolean isUnsaved(Object version) {
LOG.versionUnsavedValueStrategy("UNDEFINED");
return version==null ? Boolean.TRUE : null;
}
public Object getDefaultValue(Object currentValue) {
@Override
public Object getDefaultValue(Object currentValue) {
return currentValue;
}
public String toString() {
@Override
public String toString() {
return "VERSION_UNDEFINED";
}
};
@ -79,27 +89,26 @@ public class VersionValue {
* is negative, otherwise assume it is a detached instance.
*/
public static final VersionValue NEGATIVE = new VersionValue() {
public final Boolean isUnsaved(Object version) throws MappingException {
log.trace("version unsaved-value strategy NEGATIVE");
@Override
public final Boolean isUnsaved(Object version) throws MappingException {
LOG.versionUnsavedValueStrategy("NEGATIVE");
if (version==null) return Boolean.TRUE;
if (version instanceof Number) {
return ( (Number) version ).longValue() < 0l ? Boolean.TRUE : Boolean.FALSE;
}
else {
throw new MappingException("unsaved-value NEGATIVE may only be used with short, int and long types");
}
if (version instanceof Number) return ((Number)version).longValue() < 0l ? Boolean.TRUE : Boolean.FALSE;
throw new MappingException("unsaved-value NEGATIVE may only be used with short, int and long types");
}
public Object getDefaultValue(Object currentValue) {
@Override
public Object getDefaultValue(Object currentValue) {
return IdentifierGeneratorHelper.getIntegralDataTypeHolder( currentValue.getClass() )
.initialize( -1L )
.makeValue();
}
public String toString() {
@Override
public String toString() {
return "VERSION_NEGATIVE";
}
};
protected VersionValue() {
this.value = null;
}
@ -112,7 +121,7 @@ public class VersionValue {
public VersionValue(Object value) {
this.value = value;
}
/**
* Does the given version belong to a new instance?
*
@ -120,15 +129,31 @@ public class VersionValue {
* @return true is unsaved, false is saved, null is undefined
*/
public Boolean isUnsaved(Object version) throws MappingException {
if ( log.isTraceEnabled() ) log.trace("version unsaved-value: " + value);
LOG.versionUnsavedValue(value);
return version==null || version.equals(value) ? Boolean.TRUE : Boolean.FALSE;
}
public Object getDefaultValue(Object currentValue) {
return value;
}
public String toString() {
@Override
public String toString() {
return "version unsaved-value: " + value;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Version unsaved-value: %s" )
void versionUnsavedValue( Object value );
@LogMessage( level = TRACE )
@Message( value = "Version unsaved-value strategy %s" )
void versionUnsavedValueStrategy( String string );
}
}
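Two details of the VersionValue conversion are worth noting: the category passed to getMessageLogger is now the package name rather than the class name slf4j used, and one parameterized message (versionUnsavedValueStrategy) replaces the three hard-coded trace strings of the NULL, UNDEFINED and NEGATIVE strategies. A hedged, self-contained sketch of that reuse; the package com.example.versioning, the class UnsavedValueStrategies and its methods are hypothetical:

package com.example.versioning;

import static org.jboss.logging.Logger.Level.TRACE;

import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;

public class UnsavedValueStrategies {

    // naming the logger after the package mirrors the convention used above,
    // so log-level configuration applies per package rather than per class
    private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(
            Logger.class, UnsavedValueStrategies.class.getPackage().getName() );

    public Boolean isUnsavedNull(Object version) {
        LOG.versionUnsavedValueStrategy( "NULL" );
        return version == null ? Boolean.TRUE : Boolean.FALSE;
    }

    public Boolean isUnsavedUndefined(Object version) {
        LOG.versionUnsavedValueStrategy( "UNDEFINED" );
        return version == null ? Boolean.TRUE : null;
    }

    /**
     * Interface defining messages that may be logged by the outer class
     */
    @MessageLogger
    interface Logger extends BasicLogger {

        @LogMessage( level = TRACE )
        @Message( value = "Version unsaved-value strategy %s" )
        void versionUnsavedValueStrategy( String strategyName );
    }
}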

View File

@ -24,11 +24,13 @@
*/
package org.hibernate.engine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.jboss.logging.Logger.Level.TRACE;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.type.VersionType;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Utilities for dealing with optimistic locking values.
@ -59,7 +61,8 @@ public final class Versioning {
*/
public static final int OPTIMISTIC_LOCK_DIRTY = 1;
private static final Logger log = LoggerFactory.getLogger( Versioning.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
Versioning.class.getPackage().getName());
/**
* Private constructor disallowing instantiation.
@ -76,7 +79,7 @@ public final class Versioning {
*/
private static Object seed(VersionType versionType, SessionImplementor session) {
Object seed = versionType.seed( session );
if ( log.isTraceEnabled() ) log.trace("Seeding: " + seed);
LOG.seeding(seed);
return seed;
}
@ -110,9 +113,7 @@ public final class Versioning {
return true;
}
else {
if ( log.isTraceEnabled() ) {
log.trace( "using initial version: " + initialVersion );
}
LOG.usingInitialVersion(initialVersion);
return false;
}
}
@ -129,14 +130,8 @@ public final class Versioning {
*/
public static Object increment(Object version, VersionType versionType, SessionImplementor session) {
Object next = versionType.next( version, session );
if ( log.isTraceEnabled() ) {
log.trace(
"Incrementing: " +
versionType.toLoggableString( version, session.getFactory() ) +
" to " +
versionType.toLoggableString( next, session.getFactory() )
);
}
if (LOG.isTraceEnabled()) LOG.incrementing(versionType.toLoggableString(version, session.getFactory()),
versionType.toLoggableString(next, session.getFactory()));
return next;
}
@ -191,5 +186,23 @@ public final class Versioning {
return false;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Incrementing: %s to %s" )
void incrementing( String loggableString,
String loggableString2 );
@LogMessage( level = TRACE )
@Message( value = "Seeding: %s" )
void seeding( Object seed );
@LogMessage( level = TRACE )
@Message( value = "Using initial version: %s" )
void usingInitialVersion( Object initialVersion );
}
}
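The Versioning change keeps an explicit LOG.isTraceEnabled() guard only around the incrementing message, whose arguments come from versionType.toLoggableString and are expensive to produce, while the cheap seeding and usingInitialVersion calls are made unconditionally. A small sketch of that rule of thumb; the class VersionIncrementExample and its renderForLog method are hypothetical stand-ins for the expensive formatting:

import static org.jboss.logging.Logger.Level.TRACE;

import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;

public class VersionIncrementExample {

    private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(
            Logger.class, VersionIncrementExample.class.getName() );

    public long increment(long version) {
        long next = version + 1;
        // the guard is kept because renderForLog does real work; for cheap
        // arguments the typed call can simply be made unconditionally
        if ( LOG.isTraceEnabled() ) {
            LOG.incrementing( renderForLog( version ), renderForLog( next ) );
        }
        return next;
    }

    private String renderForLog(long value) {
        // stand-in for the costly versionType.toLoggableString(...) call
        return String.valueOf( value );
    }

    /**
     * Interface defining messages that may be logged by the outer class
     */
    @MessageLogger
    interface Logger extends BasicLogger {

        @LogMessage( level = TRACE )
        @Message( value = "Incrementing: %s to %s" )
        void incrementing( String from, String to );
    }
}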

View File

@ -23,17 +23,18 @@
*/
package org.hibernate.engine.jdbc;
import static org.jboss.logging.Logger.Level.WARN;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Proxy;
import java.lang.reflect.Method;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.util.JDBCExceptionReporter;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* A proxy for a ResultSet delegate, responsible for locally caching the columnName-to-columnIndex resolution that
@ -43,7 +44,9 @@ import org.hibernate.util.JDBCExceptionReporter;
* @author Gail Badner
*/
public class ResultSetWrapperProxy implements InvocationHandler {
private static final Logger log = LoggerFactory.getLogger( ResultSetWrapperProxy.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
ResultSetWrapperProxy.class.getPackage().getName());
private static final Class[] PROXY_INTERFACES = new Class[] { ResultSet.class };
private final ResultSet rs;
@ -110,14 +113,9 @@ public class ResultSetWrapperProxy implements InvocationHandler {
JDBCExceptionReporter.logExceptions( ex, buf.toString() );
}
catch ( NoSuchMethodException ex ) {
StringBuffer buf = new StringBuffer()
.append( "Exception switching from method: [" )
.append( method )
.append( "] to a method using the column index. Reverting to using: [" )
.append( method )
.append( "]" );
if ( log.isWarnEnabled() ) {
log.warn( buf.toString() );
if (LOG.isEnabled(WARN)) {
StringBuffer buf = new StringBuffer().append("Exception switching from method: [").append(method).append("] to a method using the column index. Reverting to using: [").append(method).append("]");
LOG.missingMethod(buf.toString());
}
}
}
@ -191,4 +189,15 @@ public class ResultSetWrapperProxy implements InvocationHandler {
throw e.getTargetException();
}
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "%s" )
void missingMethod( String string );
}
}
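ResultSetWrapperProxy shows the replacement for slf4j's isWarnEnabled(): BasicLogger exposes isEnabled(Level), and the guard is kept because the warning text is still assembled by hand in a StringBuffer before the typed call. A self-contained sketch of the same shape; the class ColumnLookupFallback is hypothetical, while the message text and the missingMethod signature mirror the code above:

import static org.jboss.logging.Logger.Level.WARN;

import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;

public class ColumnLookupFallback {

    private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(
            Logger.class, ColumnLookupFallback.class.getName() );

    public void reportFallback(String methodName) {
        // BasicLogger.isEnabled(Level) stands in for slf4j's isWarnEnabled();
        // the check matters because the message body is built eagerly below
        if ( LOG.isEnabled( WARN ) ) {
            StringBuffer buf = new StringBuffer()
                    .append( "Exception switching from method: [" )
                    .append( methodName )
                    .append( "] to a method using the column index. Reverting to using: [" )
                    .append( methodName )
                    .append( "]" );
            LOG.missingMethod( buf.toString() );
        }
    }

    /**
     * Interface defining messages that may be logged by the outer class
     */
    @MessageLogger
    interface Logger extends BasicLogger {

        @LogMessage( level = WARN )
        @Message( value = "%s" )
        void missingMethod( String message );
    }
}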

View File

@ -27,10 +27,6 @@ import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.engine.jdbc.batch.spi.Batch;
import org.hibernate.engine.jdbc.batch.spi.BatchObserver;
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
@ -42,7 +38,9 @@ import org.hibernate.engine.jdbc.spi.SQLStatementLogger;
* @author Steve Ebersole
*/
public abstract class AbstractBatchImpl implements Batch {
private static final Logger log = LoggerFactory.getLogger( AbstractBatchImpl.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
AbstractBatchImpl.class.getPackage().getName());
private final SQLStatementLogger statementLogger;
private final SQLExceptionHelper exceptionHelper;
@ -176,8 +174,8 @@ public abstract class AbstractBatchImpl implements Batch {
statement.close();
}
catch ( SQLException e ) {
log.error( "unable to release batch statement..." );
log.error( "sqlexception escaped proxy", e );
LOG.unableToReleaseBatchStatement();
LOG.sqlExceptionEscapedProxy(e.getMessage());
}
}
getStatements().clear();
@ -199,9 +197,7 @@ public abstract class AbstractBatchImpl implements Batch {
}
public void release() {
if ( getStatements() != null && !getStatements().isEmpty() ) {
log.info( "On release of batch it still contained JDBC statements" );
}
if (getStatements() != null && !getStatements().isEmpty()) LOG.batchContainedStatementsOnRelease();
releaseStatements();
observers.clear();
}

View File

@ -23,11 +23,7 @@
*/
package org.hibernate.engine.jdbc.batch.internal;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.engine.jdbc.batch.spi.Batch;
import org.hibernate.engine.jdbc.spi.LogicalConnectionImplementor;
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
import org.hibernate.engine.jdbc.spi.SQLStatementLogger;
@ -37,7 +33,9 @@ import org.hibernate.engine.jdbc.spi.SQLStatementLogger;
* @author Steve Ebersole
*/
public class BatchBuilder {
private static final Logger log = LoggerFactory.getLogger( BatchBuilder.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
BatchBuilder.class.getPackage().getName());
private int size;

View File

@ -29,10 +29,6 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
@ -47,7 +43,9 @@ import org.hibernate.jdbc.Expectation;
* @author Steve Ebersole
*/
public class BatchingBatch extends AbstractBatchImpl {
private static final Logger log = LoggerFactory.getLogger( BatchingBatch.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
BatchingBatch.class.getPackage().getName());
private final int batchSize;
@ -83,7 +81,7 @@ public class BatchingBatch extends AbstractBatchImpl {
statement.addBatch();
}
catch ( SQLException e ) {
log.error( "sqlexception escaped proxy", e );
LOG.error( LOG.sqlExceptionEscapedProxy(), e );
throw getSqlExceptionHelper().convert( e, "could not perform addBatch", sql );
}
List<Expectation> expectations = expectationsBySql.get( sql );
@ -106,7 +104,8 @@ public class BatchingBatch extends AbstractBatchImpl {
/**
* {@inheritDoc}
*/
protected void doExecuteBatch() {
@Override
protected void doExecuteBatch() {
if ( maxBatchPosition == 0 ) {
log.debug( "no batched statements to execute" );
}
@ -120,13 +119,13 @@ public class BatchingBatch extends AbstractBatchImpl {
executeStatements();
}
catch ( RuntimeException re ) {
log.error( "Exception executing batch [{}]", re.getMessage() );
LOG.unableToExecuteBatch(re.getMessage());
throw re;
}
finally {
for ( List<Expectation> expectations : expectationsBySql.values() ) {
expectations.clear();
}
}
maxBatchPosition = 0;
}
}
@ -188,7 +187,8 @@ public class BatchingBatch extends AbstractBatchImpl {
}
}
public void release() {
@Override
public void release() {
expectationsBySql.clear();
maxBatchPosition = 0;
}

View File

@ -25,10 +25,6 @@ package org.hibernate.engine.jdbc.batch.internal;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
import org.hibernate.engine.jdbc.spi.SQLStatementLogger;
import org.hibernate.jdbc.Expectation;
@ -40,7 +36,9 @@ import org.hibernate.jdbc.Expectation;
* @author Steve Ebersole
*/
public class NonBatchingBatch extends AbstractBatchImpl {
private static final Logger log = LoggerFactory.getLogger( NonBatchingBatch.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
NonBatchingBatch.class.getPackage().getName());
protected NonBatchingBatch(Object key,
SQLStatementLogger statementLogger,
@ -65,7 +63,8 @@ public class NonBatchingBatch extends AbstractBatchImpl {
}
}
protected void doExecuteBatch() {
@Override
protected void doExecuteBatch() {
// nothing to do
}
}

View File

@ -25,15 +25,15 @@
package org.hibernate.engine.jdbc.internal;
import java.io.ObjectOutputStream;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.TRACE;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.sql.Connection;
import java.sql.SQLException;
import javax.transaction.TransactionManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.ConnectionReleaseMode;
import org.hibernate.HibernateException;
import org.hibernate.Interceptor;
@ -46,10 +46,19 @@ import org.hibernate.transaction.synchronization.CallbackCoordinator;
import org.hibernate.transaction.synchronization.HibernateSynchronizationImpl;
import org.hibernate.util.JTAHelper;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.exception.JDBCExceptionHelper;
import org.hibernate.transaction.TransactionFactory;
import org.hibernate.transaction.synchronization.CallbackCoordinator;
import org.hibernate.transaction.synchronization.HibernateSynchronizationImpl;
import org.hibernate.util.JTAHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Acts as the mediary between "entity-mode related" sessions in terms of
* their interaction with the JDBC data store.
* Acts as the intermediary between "entity-mode related" sessions in terms of their interaction with the JDBC data store.
*
* @author Steve Ebersole
*/
@ -63,7 +72,8 @@ public class JDBCContextImpl implements ConnectionManagerImpl.Callback, JDBCCont
// ConnectionManager is a "JDBCContext"? A "SessionContext" should
// live in the impl package...
private static final Logger log = LoggerFactory.getLogger( JDBCContextImpl.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
JDBCContext.class.getPackage().getName());
private Context owner;
private ConnectionManagerImpl connectionManager;
@ -154,6 +164,8 @@ public class JDBCContextImpl implements ConnectionManagerImpl.Callback, JDBCCont
throw new SessionException( "Session is closed" );
}
public Connection connection() throws HibernateException {
if (owner.isClosed()) throw new SessionException("Session is closed");
return connectionManager.getConnection();
}
@ -162,72 +174,52 @@ public class JDBCContextImpl implements ConnectionManagerImpl.Callback, JDBCCont
if ( isTransactionCallbackRegistered ) {
return false;
}
else {
isTransactionCallbackRegistered = true;
return true;
}
isTransactionCallbackRegistered = true;
return true;
}
@Override
public boolean registerSynchronizationIfPossible() {
if ( isTransactionCallbackRegistered ) {
// we already have a callback registered; either a local
// (org.hibernate.Transaction) transaction has accepted
// callback responsibilities, or we have previously
// registered a transaction synch.
return true;
}
// we already have a callback registered; either a local
// (org.hibernate.Transaction) transaction has accepted
// callback responsibilities, or we have previously
// registered a transaction synch.
if (isTransactionCallbackRegistered) return true;
boolean localCallbacksOnly = owner.getFactory().getSettings()
.getTransactionFactory()
.areCallbacksLocalToHibernateTransactions();
if ( localCallbacksOnly ) {
// the configured transaction-factory says it only supports
// local callback mode, so no sense attempting to register a
// JTA Synchronization
return false;
}
// the configured transaction-factory says it only supports
// local callback mode, so no sense attempting to register a
// JTA Synchronization
if (localCallbacksOnly) return false;
TransactionManager tm = owner.getFactory().getTransactionManager();
if ( tm == null ) {
// if there is no TM configured, we will not be able to access
// the javax.transaction.Transaction object in order to
// register a synch anyway.
return false;
}
else {
try {
if ( !isTransactionInProgress() ) {
log.trace( "TransactionFactory reported no active transaction; Synchronization not registered" );
return false;
}
else {
javax.transaction.Transaction tx = tm.getTransaction();
if ( JTAHelper.isMarkedForRollback( tx ) ) {
// transactions marked for rollback-only cause some TM impls to throw exceptions
log.debug( "Transaction is marked for rollback; skipping Synchronization registration" );
return false;
}
else {
if ( hibernateTransaction == null ) {
hibernateTransaction = owner.getFactory().getSettings().getTransactionFactory().createTransaction( this, owner );
}
tx.registerSynchronization(
new HibernateSynchronizationImpl( getJtaSynchronizationCallbackCoordinator( tx ) )
);
// if there is no TM configured, we will not be able to access
// the javax.transaction.Transaction object in order to
// register a synch anyway.
if (tm == null) return false;
try {
if (!isTransactionInProgress()) {
LOG.noActiveTransaction();
return false;
}
javax.transaction.Transaction tx = tm.getTransaction();
if (JTAHelper.isMarkedForRollback(tx)) {
// transactions marked for rollback-only cause some TM impls to throw exceptions
LOG.transactionMarkedForRollback();
return false;
}
if (hibernateTransaction == null) hibernateTransaction = owner.getFactory().getSettings().getTransactionFactory().createTransaction(this,
owner);
tx.registerSynchronization(new HibernateSynchronizationImpl(getJtaSynchronizationCallbackCoordinator(tx)));
// tx.registerSynchronization( new CacheSynchronization(owner, this, tx, hibernateTransaction) );
isTransactionCallbackRegistered = true;
log.debug("successfully registered Synchronization");
return true;
}
}
}
catch( HibernateException e ) {
throw e;
}
catch (Exception e) {
throw new TransactionException( "could not register synchronization with JTA TransactionManager", e );
}
}
isTransactionCallbackRegistered = true;
LOG.successfullyRegisteredSynchronization();
return true;
} catch (HibernateException e) {
throw e;
} catch (Exception e) {
throw new TransactionException("could not register synchronization with JTA TransactionManager", e);
}
}
@Override
@ -248,23 +240,23 @@ public class JDBCContextImpl implements ConnectionManagerImpl.Callback, JDBCCont
@Override
public void beforeTransactionCompletion(Transaction tx) {
log.trace( "before transaction completion" );
LOG.beforeTransactionCompletion();
owner.beforeTransactionCompletion(tx);
}
/**
* We cannot rely upon this method being called! It is only
* called if we are using Hibernate Transaction API.
*/
@Override
public void afterTransactionBegin(Transaction tx) {
log.trace( "after transaction begin" );
LOG.afterTransactionBegin();
owner.afterTransactionBegin(tx);
}
@Override
public void afterTransactionCompletion(boolean success, Transaction tx) {
log.trace( "after transaction completion" );
LOG.afterTransactionCompletion();
if ( getFactory().getStatistics().isStatisticsEnabled() ) {
getFactory().getStatisticsImplementor().endTransaction(success);
@ -276,23 +268,23 @@ public class JDBCContextImpl implements ConnectionManagerImpl.Callback, JDBCCont
hibernateTransaction = null;
owner.afterTransactionCompletion(success, tx);
}
/**
* Called after executing a query outside the scope of
* a Hibernate or JTA transaction
*/
@Override
public void afterNontransactionalQuery(boolean success) {
log.trace( "after autocommit" );
LOG.afterAutoCommit();
try {
// check to see if the connection is in auto-commit
// check to see if the connection is in auto-commit
// mode (no connection means aggressive connection
// release outside a JTA transaction context, so MUST
// be autocommit mode)
boolean isAutocommit = connectionManager.isAutoCommit();
connectionManager.afterTransaction();
if ( isAutocommit ) {
owner.afterTransactionCompletion(success, null);
}
@ -366,4 +358,39 @@ public class JDBCContextImpl implements ConnectionManagerImpl.Callback, JDBCCont
);
return jdbcContext;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "After autocommit" )
void afterAutoCommit();
@LogMessage( level = TRACE )
@Message( value = "After transaction begin" )
void afterTransactionBegin();
@LogMessage( level = TRACE )
@Message( value = "After transaction completion" )
void afterTransactionCompletion();
@LogMessage( level = TRACE )
@Message( value = "Before transaction completion" )
void beforeTransactionCompletion();
@LogMessage( level = TRACE )
@Message( value = "TransactionFactory reported no active transaction; Synchronization not registered" )
void noActiveTransaction();
@LogMessage( level = DEBUG )
@Message( value = "Successfully registered Synchronization" )
void successfullyRegisteredSynchronization();
@LogMessage( level = DEBUG )
@Message( value = "Transaction is marked for rollback; skipping Synchronization registration" )
void transactionMarkedForRollback();
}
}
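The rewritten registerSynchronizationIfPossible replaces the nested if/else blocks with early returns before registering a javax.transaction.Synchronization on the current JTA transaction. A rough, self-contained sketch of that control flow against the plain JTA API; the class SynchronizationRegistrar is hypothetical, and the null-transaction and STATUS_MARKED_ROLLBACK checks are simplified stand-ins for the TransactionFactory and JTAHelper checks used above:

import javax.transaction.Status;
import javax.transaction.Synchronization;
import javax.transaction.Transaction;
import javax.transaction.TransactionManager;

public class SynchronizationRegistrar {

    private final TransactionManager transactionManager;

    public SynchronizationRegistrar(TransactionManager transactionManager) {
        this.transactionManager = transactionManager;
    }

    // guard-clause flow: bail out when no TM is configured, no transaction is
    // active, or the transaction is already marked for rollback; otherwise
    // register a callback and report success
    public boolean registerIfPossible() throws Exception {
        if ( transactionManager == null ) {
            return false;
        }
        Transaction tx = transactionManager.getTransaction();
        if ( tx == null || tx.getStatus() == Status.STATUS_MARKED_ROLLBACK ) {
            return false;
        }
        tx.registerSynchronization( new Synchronization() {
            public void beforeCompletion() {
                // before-completion work (e.g. a flush) would happen here
            }
            public void afterCompletion(int status) {
                // after-transaction cleanup would happen here
            }
        } );
        return true;
    }
}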

View File

@ -23,6 +23,9 @@
*/
package org.hibernate.engine.jdbc.internal;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.TRACE;
import static org.jboss.logging.Logger.Level.WARN;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
@ -30,15 +33,16 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.HibernateException;
import org.hibernate.engine.jdbc.spi.InvalidatableWrapper;
import org.hibernate.engine.jdbc.spi.JdbcResourceRegistry;
import org.hibernate.engine.jdbc.spi.JdbcWrapper;
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
import org.hibernate.engine.jdbc.spi.JdbcResourceRegistry;
import org.hibernate.engine.jdbc.spi.InvalidatableWrapper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Logger.Level;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Standard implementation of the {@link org.hibernate.engine.jdbc.spi.JdbcResourceRegistry} contract
@ -46,7 +50,9 @@ import org.hibernate.engine.jdbc.spi.InvalidatableWrapper;
* @author Steve Ebersole
*/
public class JdbcResourceRegistryImpl implements JdbcResourceRegistry {
private static final Logger log = LoggerFactory.getLogger( JdbcResourceRegistryImpl.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
JdbcResourceRegistryImpl.class.getPackage().getName());
private final HashMap<Statement,Set<ResultSet>> xref = new HashMap<Statement,Set<ResultSet>>();
private final Set<ResultSet> unassociatedResultSets = new HashSet<ResultSet>();
@ -59,7 +65,7 @@ public class JdbcResourceRegistryImpl implements JdbcResourceRegistry {
}
public void register(Statement statement) {
log.trace( "registering statement [" + statement + "]" );
LOG.registeringStatement(statement);
if ( xref.containsKey( statement ) ) {
throw new HibernateException( "statement already registered with JDBCContainer" );
}
@ -95,7 +101,7 @@ public class JdbcResourceRegistryImpl implements JdbcResourceRegistry {
}
public void release(Statement statement) {
log.trace( "releasing statement [" + statement + "]" );
LOG.releasingStatement(statement);
Set<ResultSet> resultSets = xref.get( statement );
if ( resultSets != null ) {
for ( ResultSet resultSet : resultSets ) {
@ -108,7 +114,7 @@ public class JdbcResourceRegistryImpl implements JdbcResourceRegistry {
}
public void register(ResultSet resultSet) {
log.trace( "registering result set [" + resultSet + "]" );
LOG.registeringResultSet(resultSet);
Statement statement;
try {
statement = resultSet.getStatement();
@ -117,9 +123,7 @@ public class JdbcResourceRegistryImpl implements JdbcResourceRegistry {
throw exceptionHelper.convert( e, "unable to access statement from resultset" );
}
if ( statement != null ) {
if ( log.isWarnEnabled() && !xref.containsKey( statement ) ) {
log.warn( "resultset's statement was not yet registered" );
}
if (LOG.isEnabled(Level.WARN) && !xref.containsKey(statement)) LOG.unregisteredStatement();
Set<ResultSet> resultSets = xref.get( statement );
if ( resultSets == null ) {
resultSets = new HashSet<ResultSet>();
@ -133,7 +137,7 @@ public class JdbcResourceRegistryImpl implements JdbcResourceRegistry {
}
public void release(ResultSet resultSet) {
log.trace( "releasing result set [{}]", resultSet );
LOG.releasingResultSet(resultSet);
Statement statement;
try {
statement = resultSet.getStatement();
@ -142,9 +146,7 @@ public class JdbcResourceRegistryImpl implements JdbcResourceRegistry {
throw exceptionHelper.convert( e, "unable to access statement from resultset" );
}
if ( statement != null ) {
if ( log.isWarnEnabled() && !xref.containsKey( statement ) ) {
log.warn( "resultset's statement was not registered" );
}
if (LOG.isEnabled(Level.WARN) && !xref.containsKey(statement)) LOG.unregisteredStatement();
Set<ResultSet> resultSets = xref.get( statement );
if ( resultSets != null ) {
resultSets.remove( resultSet );
@ -155,9 +157,7 @@ public class JdbcResourceRegistryImpl implements JdbcResourceRegistry {
}
else {
boolean removed = unassociatedResultSets.remove( resultSet );
if ( !removed ) {
log.warn( "ResultSet had no statement associated with it, but was not yet registered" );
}
if (!removed) LOG.unregisteredResultSetWithoutStatement();
}
close( resultSet );
}
@ -167,7 +167,7 @@ public class JdbcResourceRegistryImpl implements JdbcResourceRegistry {
}
public void releaseResources() {
log.trace( "releasing JDBC container resources [{}]", this );
LOG.releasingJdbcContainerResources(this);
cleanup();
}
@ -243,13 +243,13 @@ public class JdbcResourceRegistryImpl implements JdbcResourceRegistry {
}
public void close() {
log.trace( "closing JDBC container [{}]", this );
LOG.closingJdbcContainer(this);
cleanup();
}
@SuppressWarnings({ "unchecked" })
protected void close(Statement statement) {
log.trace( "closing prepared statement [{}]", statement );
LOG.closingPreparedStatement(statement);
if ( statement instanceof InvalidatableWrapper ) {
InvalidatableWrapper<Statement> wrapper = ( InvalidatableWrapper<Statement> ) statement;
@ -271,7 +271,7 @@ public class JdbcResourceRegistryImpl implements JdbcResourceRegistry {
}
catch( SQLException sqle ) {
// there was a problem "cleaning" the prepared statement
log.debug( "Exception clearing maxRows/queryTimeout [{}]", sqle.getMessage() );
LOG.unableToClearMaxRowsQueryTimeout(sqle.getMessage());
return; // EARLY EXIT!!!
}
statement.close();
@ -280,13 +280,13 @@ public class JdbcResourceRegistryImpl implements JdbcResourceRegistry {
}
}
catch( SQLException sqle ) {
log.debug( "Unable to release statement [{}]", sqle.getMessage() );
LOG.unableToReleaseStatement(sqle.getMessage());
}
}
@SuppressWarnings({ "unchecked" })
protected void close(ResultSet resultSet) {
log.trace( "closing result set [{}]", resultSet );
LOG.closingResultSet(resultSet);
if ( resultSet instanceof InvalidatableWrapper ) {
InvalidatableWrapper<ResultSet> wrapper = (InvalidatableWrapper<ResultSet>) resultSet;
@ -298,8 +298,66 @@ public class JdbcResourceRegistryImpl implements JdbcResourceRegistry {
resultSet.close();
}
catch( SQLException e ) {
log.debug( "Unable to release result set [{}]", e.getMessage() );
LOG.unableToReleaseResultSet(e.getMessage());
}
}
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Closing JDBC container [%s]" )
void closingJdbcContainer( JdbcResourceRegistryImpl jdbcResourceRegistryImpl );
@LogMessage( level = TRACE )
@Message( value = "Closing prepared statement [%s]" )
void closingPreparedStatement( Statement statement );
@LogMessage( level = TRACE )
@Message( value = "Closing result set [%s]" )
void closingResultSet( ResultSet resultSet );
@LogMessage( level = TRACE )
@Message( value = "Registering result set [%s]" )
void registeringResultSet( ResultSet resultSet );
@LogMessage( level = TRACE )
@Message( value = "Registering statement [%s]" )
void registeringStatement( Statement statement );
@LogMessage( level = TRACE )
@Message( value = "Releasing JDBC container resources [%s]" )
void releasingJdbcContainerResources( JdbcResourceRegistryImpl jdbcResourceRegistryImpl );
@LogMessage( level = TRACE )
@Message( value = "Releasing result set [%s]" )
void releasingResultSet( ResultSet resultSet );
@LogMessage( level = TRACE )
@Message( value = "Releasing statement [%s]" )
void releasingStatement( Statement statement );
@LogMessage( level = DEBUG )
@Message( value = "Exception clearing maxRows/queryTimeout [%s]" )
void unableToClearMaxRowsQueryTimeout( String message );
@LogMessage( level = DEBUG )
@Message( value = "Unable to release result set [%s]" )
void unableToReleaseResultSet( String message );
@LogMessage( level = DEBUG )
@Message( value = "Unable to release statement [%s]" )
void unableToReleaseStatement( String message );
@LogMessage( level = WARN )
@Message( value = "ResultSet's statement was not registered" )
void unregisteredStatement();
@LogMessage( level = WARN )
@Message( value = "ResultSet had no statement associated with it, but was not yet registered" )
void unregisteredResultSetWithoutStatement();
}
}
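Aside from the logging changes, JdbcResourceRegistryImpl keeps its xref map tying each registered Statement to the ResultSets opened from it, so both can be released together. A stripped-down sketch of that bookkeeping without the proxy and exception-conversion machinery; the class SimpleResourceRegistry is hypothetical:

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class SimpleResourceRegistry {

    // same shape as the xref map above: each registered Statement owns the
    // ResultSets that were opened from it
    private final Map<Statement, Set<ResultSet>> xref = new HashMap<Statement, Set<ResultSet>>();

    public void register(Statement statement) {
        if ( xref.containsKey( statement ) ) {
            throw new IllegalStateException( "statement already registered" );
        }
        xref.put( statement, new HashSet<ResultSet>() );
    }

    public void register(ResultSet resultSet) throws SQLException {
        Statement statement = resultSet.getStatement();
        Set<ResultSet> resultSets = xref.get( statement );
        if ( resultSets == null ) {
            // mirrors the "statement was not yet registered" WARN path above
            resultSets = new HashSet<ResultSet>();
            xref.put( statement, resultSets );
        }
        resultSets.add( resultSet );
    }

    public void release(Statement statement) throws SQLException {
        Set<ResultSet> resultSets = xref.remove( statement );
        if ( resultSets != null ) {
            for ( ResultSet resultSet : resultSets ) {
                resultSet.close();
            }
        }
        statement.close();
    }
}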

View File

@ -23,6 +23,8 @@
*/
package org.hibernate.engine.jdbc.internal;
import static org.jboss.logging.Logger.Level.INFO;
import static org.jboss.logging.Logger.Level.WARN;
import java.lang.reflect.InvocationTargetException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
@ -32,14 +34,8 @@ import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.Dialect;
import org.hibernate.service.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.service.jdbc.dialect.spi.DialectFactory;
import org.hibernate.engine.jdbc.spi.ExtractedDatabaseMetaData;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
@ -47,10 +43,16 @@ import org.hibernate.engine.jdbc.spi.SQLStatementLogger;
import org.hibernate.engine.jdbc.spi.SchemaNameResolver;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.internal.util.jdbc.TypeInfo;
import org.hibernate.internal.util.jdbc.TypeInfoExtracter;
import org.hibernate.service.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.service.jdbc.dialect.spi.DialectFactory;
import org.hibernate.service.spi.Configurable;
import org.hibernate.service.spi.InjectService;
import org.hibernate.util.ReflectHelper;
import org.hibernate.internal.util.jdbc.TypeInfoExtracter;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Standard implementation of the {@link JdbcServices} contract
@ -58,7 +60,9 @@ import org.hibernate.internal.util.jdbc.TypeInfoExtracter;
* @author Steve Ebersole
*/
public class JdbcServicesImpl implements JdbcServices, Configurable {
private static final Logger log = LoggerFactory.getLogger( JdbcServicesImpl.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
JdbcServicesImpl.class.getPackage().getName());
private ConnectionProvider connectionProvider;
@ -107,19 +111,15 @@ public class JdbcServicesImpl implements JdbcServices, Configurable {
Connection conn = connectionProvider.getConnection();
try {
DatabaseMetaData meta = conn.getMetaData();
log.info( "Database ->\n" +
" name : " + meta.getDatabaseProductName() + '\n' +
" version : " + meta.getDatabaseProductVersion() + '\n' +
" major : " + meta.getDatabaseMajorVersion() + '\n' +
" minor : " + meta.getDatabaseMinorVersion()
);
log.info( "Driver ->\n" +
" name : " + meta.getDriverName() + '\n' +
" version : " + meta.getDriverVersion() + '\n' +
" major : " + meta.getDriverMajorVersion() + '\n' +
" minor : " + meta.getDriverMinorVersion()
);
log.info( "JDBC version : " + meta.getJDBCMajorVersion() + "." + meta.getJDBCMinorVersion() );
LOG.database(meta.getDatabaseProductName(),
meta.getDatabaseProductVersion(),
meta.getDatabaseMajorVersion(),
meta.getDatabaseMinorVersion());
LOG.driver(meta.getDriverName(),
meta.getDriverVersion(),
meta.getDriverMajorVersion(),
meta.getDriverMinorVersion());
LOG.jdbcVersion(meta.getJDBCMajorVersion(), meta.getJDBCMinorVersion());
metaSupportsScrollable = meta.supportsResultSetType( ResultSet.TYPE_SCROLL_INSENSITIVE );
metaSupportsBatchUpdates = meta.supportsBatchUpdates();
@ -144,14 +144,14 @@ public class JdbcServicesImpl implements JdbcServices, Configurable {
}
}
catch ( SQLException sqle ) {
log.warn( "Could not obtain connection metadata", sqle );
LOG.unableToObtainConnectionMetadata(sqle.getMessage());
}
finally {
connectionProvider.closeConnection( conn );
}
}
catch ( SQLException sqle ) {
log.warn( "Could not obtain connection to query metadata", sqle );
LOG.unableToObtainConnectionToQueryMetadata(sqle.getMessage());
dialect = dialectFactory.buildDialect( configValues, null );
}
catch ( UnsupportedOperationException uoe ) {
@ -200,13 +200,13 @@ public class JdbcServicesImpl implements JdbcServices, Configurable {
return (SchemaNameResolver) ReflectHelper.getDefaultConstructor( resolverClass ).newInstance();
}
catch ( ClassNotFoundException e ) {
log.warn( "Unable to locate configured schema name resolver class [" + resolverClassName + "]" + e.toString() );
LOG.unableToLocateConfiguredSchemaNameResolver(resolverClassName, e.toString());
}
catch ( InvocationTargetException e ) {
log.warn( "Unable to instantiate configured schema name resolver [" + resolverClassName + "]" + e.getTargetException().toString() );
LOG.unableToInstantiateConfiguredSchemaNameResolver(resolverClassName, e.getTargetException().toString());
}
catch ( Exception e ) {
log.warn( "Unable to instantiate configured schema name resolver [" + resolverClassName + "]" + e.toString() );
LOG.unableToInstantiateConfiguredSchemaNameResolver(resolverClassName, e.toString());
}
}
return null;
@ -336,4 +336,60 @@ public class JdbcServicesImpl implements JdbcServices, Configurable {
public ExtractedDatabaseMetaData getExtractedMetaDataSupport() {
return extractedMetaDataSupport;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = INFO )
// @formatter:off
@Message( value = "Database ->\n" +
" name : %s\n" +
" version : %s\n" +
" major : %s\n" +
" minor : %s" )
// @formatter:on
void database( String databaseProductName,
String databaseProductVersion,
int databaseMajorVersion,
int databaseMinorVersion );
@LogMessage( level = INFO )
// @formatter:off
@Message( value = "Driver ->\n" +
" name : %s\n" +
" version : %s\n" +
" major : %s\n" +
" minor : %s" )
// @formatter:on
void driver( String driverProductName,
String driverProductVersion,
int driverMajorVersion,
int driverMinorVersion );
@LogMessage( level = INFO )
@Message( value = "JDBC version : %d.%d" )
void jdbcVersion( int jdbcMajorVersion,
int jdbcMinorVersion );
@LogMessage( level = WARN )
@Message( value = "Unable to instantiate configured schema name resolver [%s] %s" )
void unableToInstantiateConfiguredSchemaNameResolver( String resolverClassName,
String message );
@LogMessage( level = WARN )
@Message( value = "Unable to locate configured schema name resolver class [%s] %s" )
void unableToLocateConfiguredSchemaNameResolver( String resolverClassName,
String message );
@LogMessage( level = WARN )
@Message( value = "Could not obtain connection metadata : %s" )
void unableToObtainConnectionMetadata( String message );
@LogMessage( level = WARN )
@Message( value = "Could not obtain connection to query metadata : %s" )
void unableToObtainConnectionToQueryMetadata( String message );
}
}
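JdbcServicesImpl turns the hand-concatenated, multi-line database and driver reports into single typed messages whose layout lives in the @Message value, shielded from the code formatter by the @formatter:off/on markers. A minimal sketch of one such message; the class MetadataReporter and its report method are hypothetical, and the column alignment in the format string is only illustrative:

import static org.jboss.logging.Logger.Level.INFO;

import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;

public class MetadataReporter {

    private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(
            Logger.class, MetadataReporter.class.getName() );

    public void report(String name, String version, int major, int minor) {
        // one typed call replaces the old string-concatenated log.info(...)
        LOG.database( name, version, major, minor );
    }

    /**
     * Interface defining messages that may be logged by the outer class
     */
    @MessageLogger
    interface Logger extends BasicLogger {

        @LogMessage( level = INFO )
        // @formatter:off
        @Message( value = "Database ->\n" +
                          "       name : %s\n" +
                          "    version : %s\n" +
                          "      major : %s\n" +
                          "      minor : %s" )
        // @formatter:on
        void database( String name,
                       String version,
                       int majorVersion,
                       int minorVersion );
    }
}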

View File

@ -30,16 +30,12 @@ import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.ConnectionReleaseMode;
import org.hibernate.HibernateException;
import org.hibernate.JDBCException;
import org.hibernate.engine.jdbc.spi.ConnectionObserver;
import org.hibernate.engine.jdbc.spi.JdbcResourceRegistry;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.jdbc.spi.ConnectionObserver;
import org.hibernate.engine.jdbc.spi.LogicalConnectionImplementor;
import org.hibernate.jdbc.BorrowedConnectionProxy;
import org.hibernate.stat.StatisticsImplementor;
@ -50,7 +46,9 @@ import org.hibernate.stat.StatisticsImplementor;
* @author Steve Ebersole
*/
public class LogicalConnectionImpl implements LogicalConnectionImplementor {
private static final Logger log = LoggerFactory.getLogger( LogicalConnectionImpl.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
LogicalConnectionImpl.class.getPackage().getName());
private Connection physicalConnection;
private Connection borrowedConnection;
@ -200,11 +198,11 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
// no matter what
physicalConnection = null;
isClosed = true;
log.trace( "logical connection closed" );
LOG.closedLogicalConnection();
for ( ConnectionObserver observer : observers ) {
observer.logicalConnectionClosed();
}
}
}
}
/**
@ -245,19 +243,19 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
}
public void afterStatementExecution() {
log.trace( "starting after statement execution processing [{}]", connectionReleaseMode );
LOG.startingAfterStatementExecution(connectionReleaseMode);
if ( connectionReleaseMode == ConnectionReleaseMode.AFTER_STATEMENT ) {
if ( ! releasesEnabled ) {
log.debug( "skipping aggressive release due to manual disabling" );
LOG.skippingAggressiveReleaseDueToManualDisabling();
return;
}
if ( jdbcResourceRegistry.hasRegisteredResources() ) {
log.debug( "skipping aggressive release due to registered resources" );
LOG.skippingAggressiveReleaseDueToRegisteredResources();
return;
}
else if ( borrowedConnection != null ) {
log.debug( "skipping aggresive-release due to borrowed connection" );
}
}
releaseConnection();
}
}
@ -266,7 +264,7 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
if ( connectionReleaseMode == ConnectionReleaseMode.AFTER_STATEMENT ||
connectionReleaseMode == ConnectionReleaseMode.AFTER_TRANSACTION ) {
if ( jdbcResourceRegistry.hasRegisteredResources() ) {
log.info( "forcing container resource cleanup on transaction completion" );
LOG.forcingContainerResourceCleanup();
jdbcResourceRegistry.releaseResources();
}
aggressiveRelease();
@ -274,12 +272,12 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
}
public void disableReleases() {
log.trace( "disabling releases" );
LOG.disablingReleases();
releasesEnabled = false;
}
public void enableReleases() {
log.trace( "(re)enabling releases" );
LOG.enablingReleases();
releasesEnabled = true;
//FIXME: uncomment after new batch stuff is integrated!!!
//afterStatementExecution();
@ -290,10 +288,10 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
*/
public void aggressiveRelease() {
if ( isUserSuppliedConnection ) {
log.debug( "cannot aggressively release user-supplied connection; skipping" );
LOG.unableToAggressivelyReleaseUserSuppliedConnection();
}
else {
log.debug( "aggressively releasing JDBC connection" );
LOG.aggressivelyReleasingJdbcConnection( );
if ( physicalConnection != null ) {
releaseConnection();
}
@ -307,13 +305,13 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
* @throws org.hibernate.JDBCException Indicates problem opening a connection
*/
private void obtainConnection() throws JDBCException {
log.debug( "obtaining JDBC connection" );
LOG.obtainingJdbcConnection();
try {
physicalConnection = getJdbcServices().getConnectionProvider().getConnection();
for ( ConnectionObserver observer : observers ) {
observer.physicalConnectionObtained( physicalConnection );
}
log.debug( "obtained JDBC connection" );
LOG.obtainedJdbcConnection();
}
catch ( SQLException sqle) {
throw getJdbcServices().getSqlExceptionHelper().convert( sqle, "Could not open connection" );
@ -326,7 +324,7 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
* @throws JDBCException Indicates problem closing a connection
*/
private void releaseConnection() throws JDBCException {
log.debug( "releasing JDBC connection" );
LOG.releasingJdbcConnection();
if ( physicalConnection == null ) {
return;
}
@ -337,7 +335,7 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
if ( !isUserSuppliedConnection ) {
getJdbcServices().getConnectionProvider().closeConnection( physicalConnection );
}
log.debug( "released JDBC connection" );
LOG.releasedJdbcConnection();
}
catch (SQLException sqle) {
throw getJdbcServices().getSqlExceptionHelper().convert( sqle, "Could not close connection" );
@ -427,4 +425,72 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
);
}
/**
* Interface defining messages that may be logged by the outer class
*/
/*
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "Aggressively releasing JDBC connection" )
void aggressivelyReleasingJdbcConnection();
@LogMessage( level = TRACE )
@Message( value = "Logical connection closed" )
void closedLogicalConnection();
@LogMessage( level = TRACE )
@Message( value = "Closing logical connection" )
void closingLogicalConnection();
@LogMessage( level = TRACE )
@Message( value = "Disabling releases" )
void disablingReleases();
@LogMessage( level = TRACE )
@Message( value = "(Re)enabling releases" )
void enablingReleases();
@LogMessage( level = INFO )
@Message( value = "Forcing container resource cleanup on transaction completion" )
void forcingContainerResourceCleanup();
@LogMessage( level = DEBUG )
@Message( value = "Obtained JDBC connection" )
void obtainedJdbcConnection();
@LogMessage( level = DEBUG )
@Message( value = "Obtaining JDBC connection" )
void obtainingJdbcConnection();
@LogMessage( level = DEBUG )
@Message( value = "Released JDBC connection" )
void releasedJdbcConnection();
@LogMessage( level = DEBUG )
@Message( value = "Releasing JDBC connection" )
void releasingJdbcConnection();
@LogMessage( level = DEBUG )
@Message( value = "Skipping aggressive release due to manual disabling" )
void skippingAggressiveReleaseDueToManualDisabling();
@LogMessage( level = DEBUG )
@Message( value = "Skipping aggressive release due to registered resources" )
void skippingAggressiveReleaseDueToRegisteredResources();
@LogMessage( level = TRACE )
@Message( value = "Starting after statement execution processing [%s]" )
void startingAfterStatementExecution( ConnectionReleaseMode connectionReleaseMode );
@LogMessage( level = DEBUG )
@Message( value = "Cannot aggressively release user-supplied connection; skipping" )
void unableToAggressivelyReleaseUserSuppliedConnection();
@LogMessage( level = DEBUG )
@Message( value = "Connection provider reports to not support aggressive release; overriding" )
void unsupportedAggressiveRelease();
}
*/
}

View File

@ -23,17 +23,18 @@
*/
package org.hibernate.engine.jdbc.internal.proxy;
import static org.jboss.logging.Logger.Level.TRACE;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.engine.jdbc.spi.JdbcResourceRegistry;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Basic support for building {@link ResultSet}-based proxy handlers
@ -41,7 +42,9 @@ import org.hibernate.engine.jdbc.spi.JdbcServices;
* @author Steve Ebersole
*/
public abstract class AbstractResultSetProxyHandler extends AbstractProxyHandler {
private static final Logger log = LoggerFactory.getLogger( AbstractResultSetProxyHandler.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
AbstractResultSetProxyHandler.class.getPackage().getName());
private ResultSet resultSet;
@ -65,9 +68,10 @@ public abstract class AbstractResultSetProxyHandler extends AbstractProxyHandler
return resultSet;
}
protected Object continueInvocation(Object proxy, Method method, Object[] args) throws Throwable {
@Override
protected Object continueInvocation(Object proxy, Method method, Object[] args) throws Throwable {
String methodName = method.getName();
log.trace( "Handling invocation of ResultSet method [{}]", methodName );
LOG.handlingInvocationOfResultSetMethod(methodName);
// other methods allowed while invalid ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if ( "close".equals( methodName ) ) {
@ -123,4 +127,15 @@ public abstract class AbstractResultSetProxyHandler extends AbstractProxyHandler
resultSet = null;
invalidate();
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Handling invocation of ResultSet method [%s]" )
void handlingInvocationOfResultSetMethod( String methodName );
}
}

View File

@ -23,19 +23,20 @@
*/
package org.hibernate.engine.jdbc.internal.proxy;
import static org.jboss.logging.Logger.Level.TRACE;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.engine.jdbc.spi.JdbcResourceRegistry;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.jdbc.spi.LogicalConnectionImplementor;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Basic support for building {@link Statement}-based proxy handlers
@ -43,7 +44,9 @@ import org.hibernate.engine.jdbc.spi.LogicalConnectionImplementor;
* @author Steve Ebersole
*/
public abstract class AbstractStatementProxyHandler extends AbstractProxyHandler {
private static final Logger log = LoggerFactory.getLogger( AbstractStatementProxyHandler.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
AbstractStatementProxyHandler.class.getPackage().getName());
private ConnectionProxyHandler connectionProxyHandler;
private Connection connectionProxy;
@ -81,9 +84,10 @@ public abstract class AbstractStatementProxyHandler extends AbstractProxyHandler
return statement;
}
protected Object continueInvocation(Object proxy, Method method, Object[] args) throws Throwable {
@Override
protected Object continueInvocation(Object proxy, Method method, Object[] args) throws Throwable {
String methodName = method.getName();
log.trace( "Handling invocation of statement method [{}]", methodName );
LOG.handlingInvocationOfStatementMethod(methodName);
// other methods allowed while invalid ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if ( "close".equals( methodName ) ) {
@ -165,4 +169,15 @@ public abstract class AbstractStatementProxyHandler extends AbstractProxyHandler
statement = null;
invalidate();
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Handling invocation of statement method [%s]" )
void handlingInvocationOfStatementMethod( String methodName );
}
}

View File

@ -32,14 +32,9 @@ import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.TransactionException;
import org.hibernate.engine.jdbc.spi.ConnectionObserver;
import org.hibernate.engine.jdbc.spi.JdbcResourceRegistry;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.jdbc.spi.ConnectionObserver;
import org.hibernate.engine.jdbc.spi.LogicalConnectionImplementor;
import org.hibernate.stat.StatisticsImplementor;
@ -49,7 +44,9 @@ import org.hibernate.stat.StatisticsImplementor;
* @author Steve Ebersole
*/
public class ConnectionProxyHandler extends AbstractProxyHandler implements InvocationHandler, ConnectionObserver {
private static final Logger log = LoggerFactory.getLogger( ConnectionProxyHandler.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
AbstractProxyHandler.class.getPackage().getName());
private LogicalConnectionImplementor logicalConnection;
@ -102,9 +99,10 @@ public class ConnectionProxyHandler extends AbstractProxyHandler implements Invo
return logicalConnection.getResourceRegistry();
}
protected Object continueInvocation(Object proxy, Method method, Object[] args) throws Throwable {
@Override
protected Object continueInvocation(Object proxy, Method method, Object[] args) throws Throwable {
String methodName = method.getName();
log.trace( "Handling invocation of connection method [{}]", methodName );
LOG.handlingInvocationOfConnectionMethod(methodName);
// other methods allowed while invalid ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if ( "close".equals( methodName ) ) {
@ -198,7 +196,7 @@ public class ConnectionProxyHandler extends AbstractProxyHandler implements Invo
}
private void invalidateHandle() {
log.trace( "Invalidating connection handle" );
LOG.invalidatingConnectionHandle();
logicalConnection = null;
invalidate();
}
@ -213,14 +211,14 @@ public class ConnectionProxyHandler extends AbstractProxyHandler implements Invo
* {@inheritDoc}
*/
public void physicalConnectionReleased() {
log.info( "logical connection releasing its physical connection");
LOG.logicalConnectionReleasingPhysicalConnection();
}
/**
* {@inheritDoc}
*/
public void logicalConnectionClosed() {
log.info( "*** logical connection closed ***" );
LOG.logicalConnectionClosed();
invalidateHandle();
}
@ -228,4 +226,29 @@ public class ConnectionProxyHandler extends AbstractProxyHandler implements Invo
StatisticsImplementor getStatisticsImplementorOrNull() {
return getLogicalConnection().getStatisticsImplementor();
}
/**
* Interface defining messages that may be logged by the outer class
*/
/*
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Handling invocation of connection method [%s]" )
void handlingInvocationOfConnectionMethod( String methodName );
@LogMessage( level = TRACE )
@Message( value = "Invalidating connection handle" )
void invalidatingConnectionHandle();
@LogMessage( level = INFO )
@Message( value = "*** Logical connection closed ***" )
void logicalConnectionClosed();
@LogMessage( level = INFO )
@Message( value = "Logical connection releasing its physical connection" )
void logicalConnectionReleasingPhysicalConnection();
}
*/
}
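ConnectionProxyHandler and the other handlers in this package sit behind java.lang.reflect proxies: each call on the JDBC proxy is logged by method name at TRACE and then dispatched, with InvocationTargetExceptions unwrapped to their targets. A self-contained sketch of that shape using a plain dynamic proxy; the class TracingConnectionProxy is hypothetical and System.out stands in for the message logger:

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.sql.Connection;

public class TracingConnectionProxy {

    // wraps a Connection in a dynamic proxy whose handler logs each invoked
    // method name before delegating, the same shape as continueInvocation above
    public static Connection wrap(final Connection target) {
        InvocationHandler handler = new InvocationHandler() {
            public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
                System.out.println( "Handling invocation of connection method [" + method.getName() + "]" );
                try {
                    return method.invoke( target, args );
                }
                catch ( InvocationTargetException e ) {
                    // surface the real cause, as the proxy handlers above do
                    throw e.getTargetException();
                }
            }
        };
        return (Connection) Proxy.newProxyInstance(
                Connection.class.getClassLoader(),
                new Class[] { Connection.class },
                handler );
    }
}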

View File

@ -23,18 +23,22 @@
*/
package org.hibernate.engine.jdbc.spi;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.ERROR;
import static org.jboss.logging.Logger.Level.INFO;
import static org.jboss.logging.Logger.Level.WARN;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.SQLWarning;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.JDBCException;
import org.hibernate.exception.SQLExceptionConverter;
import org.hibernate.exception.SQLStateConverter;
import org.hibernate.exception.ViolatedConstraintNameExtracter;
import org.hibernate.util.StringHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Helper for handling SQLExceptions in various manners.
@ -42,7 +46,9 @@ import org.hibernate.util.StringHelper;
* @author Steve Ebersole
*/
public class SQLExceptionHelper {
private static final Logger log = LoggerFactory.getLogger( SQLExceptionHelper.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
SQLExceptionHelper.class.getPackage().getName());
public static final String DEFAULT_EXCEPTION_MSG = "SQL Exception";
public static final String DEFAULT_WARNING_MSG = "SQL Warning";
@ -123,13 +129,13 @@ public class SQLExceptionHelper {
* @param connection The connection to check for warnings.
*/
public void logAndClearWarnings(Connection connection) {
if ( log.isWarnEnabled() ) {
if (LOG.isEnabled(WARN)) {
try {
logWarnings( connection.getWarnings() );
}
catch ( SQLException sqle ) {
//workaround for WebLogic
log.debug( "could not log warnings", sqle );
LOG.unableToLogWarnings(sqle);
}
}
try {
@ -137,7 +143,7 @@ public class SQLExceptionHelper {
connection.clearWarnings();
}
catch ( SQLException sqle ) {
log.debug( "could not clear warnings", sqle );
LOG.unableToClearWarnings(sqle);
}
}
@ -158,19 +164,16 @@ public class SQLExceptionHelper {
* @param message The message text to use as a preamble.
*/
public void logWarnings(SQLWarning warning, String message) {
if ( log.isWarnEnabled() ) {
if ( log.isDebugEnabled() && warning != null ) {
message = StringHelper.isNotEmpty( message ) ? message : DEFAULT_WARNING_MSG;
log.debug( message, warning );
}
if (LOG.isEnabled(WARN)) {
if (warning != null) LOG.warningPreamble(StringHelper.isNotEmpty(message) ? message : DEFAULT_WARNING_MSG, warning);
while ( warning != null ) {
StringBuffer buf = new StringBuffer( 30 )
.append( "SQL Warning: " )
.append( warning.getErrorCode() )
.append( ", SQLState: " )
.append( warning.getSQLState() );
log.warn( buf.toString() );
log.warn( warning.getMessage() );
LOG.warningProperties(buf.toString());
LOG.warningMessage(warning.getMessage());
warning = warning.getNextWarning();
}
}
@ -192,21 +195,63 @@ public class SQLExceptionHelper {
* @param message The message text to use as a preamble.
*/
public void logExceptions(SQLException sqlException, String message) {
if ( log.isErrorEnabled() ) {
if ( log.isDebugEnabled() ) {
message = StringHelper.isNotEmpty( message ) ? message : DEFAULT_EXCEPTION_MSG;
log.debug( message, sqlException );
}
if (LOG.isEnabled(ERROR)) {
LOG.errorPreamble(StringHelper.isNotEmpty(message) ? message : DEFAULT_EXCEPTION_MSG, sqlException);
while ( sqlException != null ) {
StringBuffer buf = new StringBuffer( 30 )
.append( "SQL Error: " )
.append( sqlException.getErrorCode() )
.append( ", SQLState: " )
.append( sqlException.getSQLState() );
log.warn( buf.toString() );
log.error( sqlException.getMessage() );
LOG.errorProperties(buf.toString());
LOG.errorMessage(sqlException.getMessage());
sqlException = sqlException.getNextException();
}
}
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = INFO )
@Message( value = "On release of batch it still contained JDBC statements" )
void batchContainedStatementsOnRelease();
@LogMessage( level = ERROR )
@Message( value = "%s" )
void errorMessage( String message );
@LogMessage( level = DEBUG )
@Message( value = "%s : %s" )
void errorPreamble( String string,
SQLException sqlException );
@LogMessage( level = WARN )
@Message( value = "%s" )
void errorProperties( String string );
@LogMessage( level = DEBUG )
@Message( value = "Could not clear warnings : %s" )
void unableToClearWarnings( SQLException sqle );
@LogMessage( level = WARN )
@Message( value = "Could not log warnings : %s" )
void unableToLogWarnings( SQLException sqle );
@LogMessage( level = WARN )
@Message( value = "%s" )
void warningMessage( String message );
@LogMessage( level = DEBUG )
@Message( value = "%s : %s" )
void warningPreamble( String message,
SQLWarning warning );
@LogMessage( level = WARN )
@Message( value = "%s" )
void warningProperties( String string );
}
}
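A small, self-contained sketch of the two mechanics used above, with illustrative names: the printf-style conversions in @Message (so %d needs a numeric argument, %s anything stringable) and the Level-based guard that replaces the old log.isWarnEnabled() check.
import static org.jboss.logging.Logger.Level.WARN;
import java.sql.SQLWarning;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
public class WarningLoggingSketch {
    private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(
            Logger.class, WarningLoggingSketch.class.getName() );
    public static void main(String[] args) {
        SQLWarning warning = new SQLWarning( "Deadlock detected", "40001", 1205 );
        // Extending BasicLogger provides isEnabled(Level); the guard skips the walk
        // over the warning chain when WARN is disabled for this category.
        if ( LOG.isEnabled( WARN ) ) {
            // Renders as "SQL Warning: 1205, SQLState: 40001".
            LOG.sqlWarning( warning.getErrorCode(), warning.getSQLState() );
        }
    }
    /**
     * Interface defining messages that may be logged by the outer class
     */
    @MessageLogger
    interface Logger extends BasicLogger {
        @LogMessage( level = WARN )
        @Message( value = "SQL Warning: %d, SQLState: %s" )
        void sqlWarning( int errorCode, String sqlState );
    }
}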

View File

@ -23,10 +23,12 @@
*/
package org.hibernate.engine.jdbc.spi;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.jboss.logging.Logger.Level.DEBUG;
import org.hibernate.jdbc.util.FormatStyle;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Centralize logging for SQL statements.
@ -34,7 +36,9 @@ import org.hibernate.jdbc.util.FormatStyle;
* @author Steve Ebersole
*/
public class SQLStatementLogger {
private static final Logger log = LoggerFactory.getLogger( SQLStatementLogger.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
SQLStatementLogger.class.getPackage().getName());
private boolean logToStdout;
private boolean format;
@ -90,15 +94,20 @@ public class SQLStatementLogger {
*/
public void logStatement(String statement) {
// for now just assume a DML log for formatting
if ( format ) {
if ( logToStdout || log.isDebugEnabled() ) {
statement = FormatStyle.BASIC.getFormatter().format( statement );
}
}
log.debug( statement );
if ( logToStdout ) {
System.out.println( "Hibernate: " + statement );
}
if (format && (logToStdout || LOG.isDebugEnabled())) statement = FormatStyle.BASIC.getFormatter().format(statement);
LOG.statement(statement);
if (logToStdout) System.out.println("Hibernate: " + statement);
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "%s" )
void statement( String statement );
}
}
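A usage sketch for the class above; the two-argument constructor is an assumption inferred from the logToStdout and format fields shown in this diff.
import org.hibernate.engine.jdbc.spi.SQLStatementLogger;
public class StatementLoggingSketch {
    public static void main(String[] args) {
        // Assumed constructor arguments: (logToStdout, format).
        SQLStatementLogger statementLogger = new SQLStatementLogger( true, true );
        // With format on (and DEBUG or stdout enabled) the SQL is pretty-printed first,
        // then logged at DEBUG via LOG.statement(...) and echoed as "Hibernate: ...".
        statementLogger.logStatement( "select u.id, u.name from users u where u.id = 1" );
    }
}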

View File

@ -24,29 +24,32 @@
*/
package org.hibernate.engine.loading;
import java.sql.ResultSet;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.TRACE;
import static org.jboss.logging.Logger.Level.WARN;
import java.io.Serializable;
import java.util.List;
import java.util.Iterator;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.Set;
import java.util.HashSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.collection.PersistentCollection;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.EntityMode;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.hibernate.CacheMode;
import org.hibernate.cache.entry.CollectionCacheEntry;
import org.hibernate.EntityMode;
import org.hibernate.cache.CacheKey;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.engine.CollectionKey;
import org.hibernate.engine.Status;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.cache.entry.CollectionCacheEntry;
import org.hibernate.collection.PersistentCollection;
import org.hibernate.engine.CollectionEntry;
import org.hibernate.engine.CollectionKey;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.engine.Status;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.pretty.MessageHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Represents state associated with the processing of a given {@link ResultSet}
@ -59,7 +62,9 @@ import org.hibernate.engine.SessionFactoryImplementor;
* @author Steve Ebersole
*/
public class CollectionLoadContext {
private static final Logger log = LoggerFactory.getLogger( CollectionLoadContext.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
CollectionLoadContext.class.getPackage().getName());
private final LoadContexts loadContexts;
private final ResultSet resultSet;
@ -109,22 +114,16 @@ public class CollectionLoadContext {
public PersistentCollection getLoadingCollection(final CollectionPersister persister, final Serializable key) {
final EntityMode em = loadContexts.getPersistenceContext().getSession().getEntityMode();
final CollectionKey collectionKey = new CollectionKey( persister, key, em );
if ( log.isTraceEnabled() ) {
log.trace( "starting attempt to find loading collection [" + MessageHelper.collectionInfoString( persister.getRole(), key ) + "]" );
}
if (LOG.isTraceEnabled()) LOG.findingLoadingCollection(MessageHelper.collectionInfoString(persister.getRole(), key));
final LoadingCollectionEntry loadingCollectionEntry = loadContexts.locateLoadingCollectionEntry( collectionKey );
if ( loadingCollectionEntry == null ) {
// look for existing collection as part of the persistence context
PersistentCollection collection = loadContexts.getPersistenceContext().getCollection( collectionKey );
if ( collection != null ) {
if ( collection.wasInitialized() ) {
log.trace( "collection already initialized; ignoring" );
LOG.collectionAlreadyInitialized();
return null; // ignore this row of results! Note the early exit
}
else {
// initialize this collection
log.trace( "collection not yet initialized; initializing" );
}
} else LOG.collectionNotYetInitialized();
}
else {
Object owner = loadContexts.getPersistenceContext().getCollectionOwner( key, persister );
@ -134,14 +133,12 @@ public class CollectionLoadContext {
if ( newlySavedEntity ) {
// important, to account for newly saved entities in query
// todo : some kind of check for new status...
log.trace( "owning entity already loaded; ignoring" );
LOG.owningEntityAlreadyLoaded();
return null;
}
else {
// create one
if ( log.isTraceEnabled() ) {
log.trace( "instantiating new collection [key=" + key + ", rs=" + resultSet + "]" );
}
LOG.instantiatingNewCollection(key, resultSet);
collection = persister.getCollectionType()
.instantiate( loadContexts.getPersistenceContext().getSession(), persister, key );
}
@ -154,13 +151,13 @@ public class CollectionLoadContext {
}
else {
if ( loadingCollectionEntry.getResultSet() == resultSet ) {
log.trace( "found loading collection bound to current result set processing; reading row" );
LOG.foundLoadingCollection();
return loadingCollectionEntry.getCollection();
}
else {
// ignore this row, the collection is in process of
// being loaded somewhere further "up" the stack
log.trace( "collection is already being initialized; ignoring row" );
LOG.collectionAlreadyInitializing();
return null;
}
}
@ -191,9 +188,7 @@ public class CollectionLoadContext {
while ( iter.hasNext() ) {
final CollectionKey collectionKey = (CollectionKey) iter.next();
final LoadingCollectionEntry lce = loadContexts.locateLoadingCollectionEntry( collectionKey );
if ( lce == null) {
log.warn( "In CollectionLoadContext#endLoadingCollections, localLoadingCollectionKeys contained [" + collectionKey + "], but no LoadingCollectionEntry was found in loadContexts" );
}
if (lce == null) LOG.loadingCollectionKeyNotFound(collectionKey);
else if ( lce.getResultSet() == resultSet && lce.getPersister() == persister ) {
if ( matches == null ) {
matches = new ArrayList();
@ -205,9 +200,7 @@ public class CollectionLoadContext {
lce.getCollection()
);
}
if ( log.isTraceEnabled() ) {
log.trace( "removing collection load entry [" + lce + "]" );
}
LOG.removingCollectionLoadEntry(lce);
// todo : i'd much rather have this done from #endLoadingCollection(CollectionPersister,LoadingCollectionEntry)...
loadContexts.unregisterLoadingCollectionXRef( collectionKey );
@ -229,31 +222,23 @@ public class CollectionLoadContext {
private void endLoadingCollections(CollectionPersister persister, List matchedCollectionEntries) {
if ( matchedCollectionEntries == null ) {
if ( log.isDebugEnabled() ) {
log.debug( "no collections were found in result set for role: " + persister.getRole() );
}
LOG.noCollectionFoundInResultSet(persister.getRole());
return;
}
final int count = matchedCollectionEntries.size();
if ( log.isDebugEnabled() ) {
log.debug( count + " collections were found in result set for role: " + persister.getRole() );
}
LOG.collectionsFound(count, persister.getRole());
for ( int i = 0; i < count; i++ ) {
LoadingCollectionEntry lce = ( LoadingCollectionEntry ) matchedCollectionEntries.get( i );
endLoadingCollection( lce, persister );
}
if ( log.isDebugEnabled() ) {
log.debug( count + " collections initialized for role: " + persister.getRole() );
}
LOG.collectionsInitialized(count, persister.getRole());
}
private void endLoadingCollection(LoadingCollectionEntry lce, CollectionPersister persister) {
if ( log.isTraceEnabled() ) {
log.debug( "ending loading collection [" + lce + "]" );
}
LOG.endingLoadingCollection(lce);
final SessionImplementor session = getLoadContext().getPersistenceContext().getSession();
final EntityMode em = session.getEntityMode();
@ -279,10 +264,9 @@ public class CollectionLoadContext {
addCollectionToCache( lce, persister );
}
if ( log.isDebugEnabled() ) {
log.debug( "collection fully initialized: " + MessageHelper.collectionInfoString(persister, lce.getKey(), session.getFactory() ) );
}
if (LOG.isDebugEnabled()) LOG.collectionInitialized(MessageHelper.collectionInfoString(persister,
lce.getKey(),
session.getFactory()));
if ( session.getFactory().getStatistics().isStatisticsEnabled() ) {
session.getFactory().getStatisticsImplementor().loadCollection( persister.getRole() );
}
@ -298,13 +282,11 @@ public class CollectionLoadContext {
final SessionImplementor session = getLoadContext().getPersistenceContext().getSession();
final SessionFactoryImplementor factory = session.getFactory();
if ( log.isDebugEnabled() ) {
log.debug( "Caching collection: " + MessageHelper.collectionInfoString( persister, lce.getKey(), factory ) );
}
if (LOG.isDebugEnabled()) LOG.cachingCollection(MessageHelper.collectionInfoString(persister, lce.getKey(), factory));
if ( !session.getEnabledFilters().isEmpty() && persister.isAffectedByEnabledFilters( session ) ) {
// some filters affecting the collection are enabled on the session, so do not do the put into the cache.
log.debug( "Refusing to add to cache due to enabled filters" );
LOG.notAddingToCache();
// todo : add the notion of enabled filters to the CacheKey to differentiate filtered collections from non-filtered;
// but CacheKey is currently used for both collections and entities; would ideally need to define two separate ones;
// currently this works in conjunction with the check on

@ -344,15 +326,92 @@ public class CollectionLoadContext {
}
void cleanup() {
if ( !localLoadingCollectionKeys.isEmpty() ) {
log.warn( "On CollectionLoadContext#cleanup, localLoadingCollectionKeys contained [" + localLoadingCollectionKeys.size() + "] entries" );
}
if (!localLoadingCollectionKeys.isEmpty()) LOG.localLoadingCollectionKeysCount(localLoadingCollectionKeys.size());
loadContexts.cleanupCollectionXRefs( localLoadingCollectionKeys );
localLoadingCollectionKeys.clear();
}
public String toString() {
@Override
public String toString() {
return super.toString() + "<rs=" + resultSet + ">";
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "Caching collection: %s" )
void cachingCollection( String collectionInfoString );
@LogMessage( level = TRACE )
@Message( value = "Collection already initialized; ignoring" )
void collectionAlreadyInitialized();
@LogMessage( level = TRACE )
@Message( value = "Collection is already being initialized; ignoring row" )
void collectionAlreadyInitializing();
@LogMessage( level = DEBUG )
@Message( value = "Collection fully initialized: %s" )
void collectionInitialized( String collectionInfoString );
@LogMessage( level = TRACE )
@Message( value = "Collection not yet initialized; initializing" )
void collectionNotYetInitialized();
@LogMessage( level = DEBUG )
@Message( value = "%d collections were found in result set for role: %s" )
void collectionsFound( int count,
String role );
@LogMessage( level = DEBUG )
@Message( value = "%d collections initialized for role: %s" )
void collectionsInitialized( int count,
String role );
@LogMessage( level = TRACE )
@Message( value = "Ending loading collection [%s]" )
void endingLoadingCollection( LoadingCollectionEntry lce );
@LogMessage( level = TRACE )
@Message( value = "Starting attempt to find loading collection [%s]" )
void findingLoadingCollection( String collectionInfoString );
@LogMessage( level = TRACE )
@Message( value = "Found loading collection bound to current result set processing; reading row" )
void foundLoadingCollection();
@LogMessage( level = TRACE )
@Message( value = "Instantiating new collection [key=%s, rs=%s]" )
void instantiatingNewCollection( Serializable key,
ResultSet resultSet );
@LogMessage( level = WARN )
@Message( value = "In CollectionLoadContext#endLoadingCollections, localLoadingCollectionKeys contained [%s], but no LoadingCollectionEntry was found in loadContexts" )
void loadingCollectionKeyNotFound( CollectionKey collectionKey );
@LogMessage( level = WARN )
@Message( value = "On CollectionLoadContext#cleanup, localLoadingCollectionKeys contained [%d] entries" )
void localLoadingCollectionKeysCount( int size );
@LogMessage( level = DEBUG )
@Message( value = "No collections were found in result set for role: %s" )
void noCollectionFoundInResultSet( String role );
@LogMessage( level = DEBUG )
@Message( value = "Refusing to add to cache due to enabled filters" )
void notAddingToCache();
@LogMessage( level = TRACE )
@Message( value = "Owning entity already loaded; ignoring" )
void owningEntityAlreadyLoaded();
@LogMessage( level = TRACE )
@Message( value = "Removing collection load entry [%s]" )
void removingCollectionLoadEntry( LoadingCollectionEntry lce );
}
}
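The conversion above keeps an explicit isTraceEnabled() guard only where building the message argument is itself expensive (the MessageHelper string) and drops it where the arguments are cheap, since the generated logger checks the level before formatting. A minimal sketch of that rule, with illustrative names:
import static org.jboss.logging.Logger.Level.TRACE;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
public class TraceGuardSketch {
    private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(
            Logger.class, TraceGuardSketch.class.getName() );
    public static void main(String[] args) {
        // Cheap argument: no guard needed, the generated logger checks the level itself.
        LOG.removingEntry( "entry-1" );
        // Expensive argument: guard first so the description is only built when TRACE is on.
        if ( LOG.isTraceEnabled() ) {
            LOG.findingCollection( describeCollection() );
        }
    }
    private static String describeCollection() {
        // Stands in for MessageHelper.collectionInfoString(...), which walks persister metadata.
        return "[example.Order.lineItems#42]";
    }
    /**
     * Interface defining messages that may be logged by the outer class
     */
    @MessageLogger
    interface Logger extends BasicLogger {
        @LogMessage( level = TRACE )
        @Message( value = "Removing collection load entry [%s]" )
        void removingEntry( String entry );
        @LogMessage( level = TRACE )
        @Message( value = "Starting attempt to find loading collection [%s]" )
        void findingCollection( String collectionInfoString );
    }
}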

View File

@ -24,12 +24,14 @@
*/
package org.hibernate.engine.loading;
import static org.jboss.logging.Logger.Level.WARN;
import java.sql.ResultSet;
import java.util.List;
import java.util.ArrayList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* {@inheritDoc}
@ -37,7 +39,9 @@ import org.slf4j.LoggerFactory;
* @author Steve Ebersole
*/
public class EntityLoadContext {
private static final Logger log = LoggerFactory.getLogger( EntityLoadContext.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
EntityLoadContext.class.getPackage().getName());
private final LoadContexts loadContexts;
private final ResultSet resultSet;
@ -49,15 +53,24 @@ public class EntityLoadContext {
}
void cleanup() {
if ( !hydratingEntities.isEmpty() ) {
log.warn( "On EntityLoadContext#clear, hydratingEntities contained [" + hydratingEntities.size() + "] entries" );
}
if (!hydratingEntities.isEmpty()) LOG.hydratingEntitiesCount(hydratingEntities.size());
hydratingEntities.clear();
}
public String toString() {
@Override
public String toString() {
return super.toString() + "<rs=" + resultSet + ">";
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = WARN )
@Message( value = "On EntityLoadContext#clear, hydratingEntities contained [%d] entries" )
void hydratingEntitiesCount( int size );
}
}
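The interfaces in this commit use plain @Message values. For the i18n side, the same annotations are assumed to accept a project code and message id, which prefix the rendered text and let the values be replaced from translated bundles by jboss-logging-tools. An illustrative sketch only:
import static org.jboss.logging.Logger.Level.WARN;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
// Assumed options on the same annotations: with these, hydratingEntitiesCount(3) would
// render roughly as "HHH00042: On EntityLoadContext#clear, hydratingEntities contained [3] entries".
@MessageLogger( projectCode = "HHH" )
interface I18nSketch {
    @LogMessage( level = WARN )
    @Message( id = 42, value = "On EntityLoadContext#clear, hydratingEntities contained [%d] entries" )
    void hydratingEntitiesCount( int size );
}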

View File

@ -24,24 +24,26 @@
*/
package org.hibernate.engine.loading;
import static org.jboss.logging.Logger.Level.TRACE;
import static org.jboss.logging.Logger.Level.WARN;
import java.io.Serializable;
import java.sql.ResultSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.Iterator;
import java.util.HashMap;
import java.io.Serializable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.util.IdentityMap;
import org.hibernate.engine.PersistenceContext;
import org.hibernate.engine.CollectionKey;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.EntityMode;
import org.hibernate.collection.PersistentCollection;
import org.hibernate.engine.CollectionKey;
import org.hibernate.engine.PersistenceContext;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.EntityMode;
import org.hibernate.util.IdentityMap;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Maps {@link ResultSet result-sets} to specific contextual data
@ -60,7 +62,9 @@ import org.hibernate.EntityMode;
* @author Steve Ebersole
*/
public class LoadContexts {
private static final Logger log = LoggerFactory.getLogger( LoadContexts.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
LoadContexts.class.getPackage().getName());
private final PersistenceContext persistenceContext;
private Map collectionLoadContexts;
@ -129,7 +133,7 @@ public class LoadContexts {
Iterator itr = collectionLoadContexts.values().iterator();
while ( itr.hasNext() ) {
CollectionLoadContext collectionLoadContext = ( CollectionLoadContext ) itr.next();
log.warn( "fail-safe cleanup (collections) : " + collectionLoadContext );
LOG.failSafeCleanup(collectionLoadContext);
collectionLoadContext.cleanup();
}
collectionLoadContexts.clear();
@ -138,7 +142,7 @@ public class LoadContexts {
Iterator itr = entityLoadContexts.values().iterator();
while ( itr.hasNext() ) {
EntityLoadContext entityLoadContext = ( EntityLoadContext ) itr.next();
log.warn( "fail-safe cleanup (entities) : " + entityLoadContext );
LOG.failSafeCleanup(entityLoadContext);
entityLoadContext.cleanup();
}
entityLoadContexts.clear();
@ -187,9 +191,7 @@ public class LoadContexts {
context = ( CollectionLoadContext ) collectionLoadContexts.get( resultSet );
}
if ( context == null ) {
if ( log.isTraceEnabled() ) {
log.trace( "constructing collection load context for result set [" + resultSet + "]" );
}
LOG.constructingCollectionLoadContext(resultSet);
context = new CollectionLoadContext( this, resultSet );
collectionLoadContexts.put( resultSet, context );
}
@ -207,18 +209,16 @@ public class LoadContexts {
public PersistentCollection locateLoadingCollection(CollectionPersister persister, Serializable ownerKey) {
LoadingCollectionEntry lce = locateLoadingCollectionEntry( new CollectionKey( persister, ownerKey, getEntityMode() ) );
if ( lce != null ) {
if ( log.isTraceEnabled() ) {
log.trace( "returning loading collection:" + MessageHelper.collectionInfoString( persister, ownerKey, getSession().getFactory() ) );
}
if (LOG.isTraceEnabled()) LOG.returningLoadingCollection(MessageHelper.collectionInfoString(persister,
ownerKey,
getSession().getFactory()));
return lce.getCollection();
}
else {
// todo : should really move this log statement to CollectionType, where this is used from...
if ( log.isTraceEnabled() ) {
log.trace( "creating collection wrapper:" + MessageHelper.collectionInfoString( persister, ownerKey, getSession().getFactory() ) );
}
return null;
}
// TODO : should really move this log statement to CollectionType, where this is used from...
if (LOG.isTraceEnabled()) LOG.creatingCollectionWrapper(MessageHelper.collectionInfoString(persister,
ownerKey,
getSession().getFactory()));
return null;
}
// loading collection xrefs ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -286,18 +286,10 @@ public class LoadContexts {
if ( xrefLoadingCollectionEntries == null ) {
return null;
}
if ( log.isTraceEnabled() ) {
log.trace( "attempting to locate loading collection entry [" + key + "] in any result-set context" );
}
LOG.locatingLoadingCollectionEntry(key);
LoadingCollectionEntry rtn = ( LoadingCollectionEntry ) xrefLoadingCollectionEntries.get( key );
if ( log.isTraceEnabled() ) {
if ( rtn == null ) {
log.trace( "collection [" + key + "] not located in load context" );
}
else {
log.trace( "collection [" + key + "] located in load context" );
}
}
if (rtn == null) LOG.collectionNotLocated(key);
else LOG.collectionLocated(key);
return rtn;
}
@ -328,4 +320,42 @@ public class LoadContexts {
return context;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Collection [%s] located in load context" )
void collectionLocated( CollectionKey key );
@LogMessage( level = TRACE )
@Message( value = "Collection [%s] not located in load context" )
void collectionNotLocated( CollectionKey key );
@LogMessage( level = TRACE )
@Message( value = "Constructing collection load context for result set [%s]" )
void constructingCollectionLoadContext( ResultSet resultSet );
@LogMessage( level = TRACE )
@Message( value = "Creating collection wrapper: %s" )
void creatingCollectionWrapper( String collectionInfoString );
@LogMessage( level = WARN )
@Message( value = "Fail-safe cleanup (collections) : %s" )
void failSafeCleanup( CollectionLoadContext collectionLoadContext );
@LogMessage( level = WARN )
@Message( value = "Fail-safe cleanup (entities) : %s" )
void failSafeCleanup( EntityLoadContext entityLoadContext );
@LogMessage( level = TRACE )
@Message( value = "Attempting to locate loading collection entry [%s] in any result-set context" )
void locatingLoadingCollectionEntry( CollectionKey key );
@LogMessage( level = TRACE )
@Message( value = "Returning loading collection: %s" )
void returningLoadingCollection( String collectionInfoString );
}
}

View File

@ -23,15 +23,17 @@
*/
package org.hibernate.engine.profile;
import java.util.Map;
import static org.jboss.logging.Logger.Level.TRACE;
import static org.jboss.logging.Logger.Level.WARN;
import java.util.HashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.type.Type;
import org.hibernate.type.BagType;
import org.hibernate.type.Type;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* A 'fetch profile' allows a user to dynamically modify the fetching strategy used for particular associations at
@ -42,7 +44,9 @@ import org.hibernate.type.BagType;
* @author Steve Ebersole
*/
public class FetchProfile {
private static final Logger log = LoggerFactory.getLogger( FetchProfile.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
FetchProfile.class.getPackage().getName());
private final String name;
private Map<String,Fetch> fetches = new HashMap<String,Fetch>();
@ -92,7 +96,7 @@ public class FetchProfile {
public void addFetch(Fetch fetch) {
Type associationType = fetch.getAssociation().getOwner().getPropertyType( fetch.getAssociation().getAssociationPath() );
if ( associationType.isCollectionType() ) {
log.trace( "handling request to add collection fetch [{}]", fetch.getAssociation().getRole() );
LOG.addingFetch(fetch.getAssociation().getRole());
// couple of things for which to account in the case of collection
// join fetches
@ -101,7 +105,7 @@ public class FetchProfile {
// processed collection join fetches
if ( BagType.class.isInstance( associationType ) ) {
if ( containsJoinFetchedCollection ) {
log.warn( "Ignoring bag join fetch [{}] due to prior collection join fetch", fetch.getAssociation().getRole() );
LOG.containsJoinFetchedCollection(fetch.getAssociation().getRole());
return; // EARLY EXIT!!!
}
}
@ -110,10 +114,8 @@ public class FetchProfile {
// fetch where we had already added a bag join fetch previously,
// we need to go back and ignore that previous bag join fetch.
if ( containsJoinFetchedBag ) {
if ( fetches.remove( bagJoinFetch.getAssociation().getRole() ) != bagJoinFetch ) {
// just for safety...
log.warn( "Unable to erase previously added bag join fetch" );
}
// just for safety...
if (fetches.remove(bagJoinFetch.getAssociation().getRole()) != bagJoinFetch) LOG.unableToRemoveBagJoinFetch();
bagJoinFetch = null;
containsJoinFetchedBag = false;
}
@ -168,4 +170,23 @@ public class FetchProfile {
public boolean isContainsJoinFetchedBag() {
return containsJoinFetchedBag;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Handling request to add collection fetch [%s]" )
void addingFetch( String role );
@LogMessage( level = WARN )
@Message( value = "Ignoring bag join fetch [%s] due to prior collection join fetch" )
void containsJoinFetchedCollection( String role );
@LogMessage( level = WARN )
@Message( value = "Unable to erase previously added bag join fetch" )
void unableToRemoveBagJoinFetch();
}
}
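The slf4j calls being replaced here used positional {} placeholders; the message-logger interfaces declare java.util.Formatter conversions instead, so the conversion has to agree with the parameter type. A minimal before/after sketch with illustrative names:
import static org.jboss.logging.Logger.Level.TRACE;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
@MessageLogger
interface PlaceholderSketch {
    // Before (slf4j): log.trace( "handling request to add collection fetch [{}]", role );
    // After (jboss-logging): the placeholder moves onto the interface as a printf-style
    // conversion, %s for a String argument, %d for an int, and so on.
    @LogMessage( level = TRACE )
    @Message( value = "Handling request to add collection fetch [%s]" )
    void addingFetch( String role );
}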

View File

@ -24,34 +24,37 @@
*/
package org.hibernate.engine.query;
import org.hibernate.hql.QuerySplitter;
import org.hibernate.hql.QueryTranslator;
import org.hibernate.hql.ParameterTranslations;
import org.hibernate.hql.FilterTranslator;
import org.hibernate.util.ArrayHelper;
import org.hibernate.util.EmptyIterator;
import org.hibernate.util.JoinedIterator;
import org.hibernate.util.IdentitySet;
import org.hibernate.HibernateException;
import org.hibernate.ScrollableResults;
import org.hibernate.QueryException;
import org.hibernate.type.Type;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.engine.QueryParameters;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.engine.RowSelection;
import org.hibernate.event.EventSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.jboss.logging.Logger.Level.TRACE;
import static org.jboss.logging.Logger.Level.WARN;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.HashSet;
import java.util.List;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.HashMap;
import org.hibernate.HibernateException;
import org.hibernate.QueryException;
import org.hibernate.ScrollableResults;
import org.hibernate.engine.QueryParameters;
import org.hibernate.engine.RowSelection;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.event.EventSource;
import org.hibernate.hql.FilterTranslator;
import org.hibernate.hql.ParameterTranslations;
import org.hibernate.hql.QuerySplitter;
import org.hibernate.hql.QueryTranslator;
import org.hibernate.type.Type;
import org.hibernate.util.ArrayHelper;
import org.hibernate.util.EmptyIterator;
import org.hibernate.util.IdentitySet;
import org.hibernate.util.JoinedIterator;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Defines a query execution plan for an HQL query (or filter).
@ -60,9 +63,10 @@ import java.util.HashMap;
*/
public class HQLQueryPlan implements Serializable {
// TODO : keep seperate notions of QT[] here for shallow/non-shallow queries...
// TODO : keep separate notions of QT[] here for shallow/non-shallow queries...
private static final Logger log = LoggerFactory.getLogger( HQLQueryPlan.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
HQLQueryPlan.class.getPackage().getName());
private final String sourceQuery;
private final QueryTranslator[] translators;
@ -170,8 +174,8 @@ public class HQLQueryPlan implements Serializable {
public List performList(
QueryParameters queryParameters,
SessionImplementor session) throws HibernateException {
if ( log.isTraceEnabled() ) {
log.trace( "find: " + getSourceQuery() );
if (LOG.isTraceEnabled()) {
LOG.find(getSourceQuery());
queryParameters.traceParameters( session.getFactory() );
}
boolean hasLimit = queryParameters.getRowSelection() != null &&
@ -179,7 +183,7 @@ public class HQLQueryPlan implements Serializable {
boolean needsLimit = hasLimit && translators.length > 1;
QueryParameters queryParametersToUse;
if ( needsLimit ) {
log.warn( "firstResult/maxResults specified on polymorphic query; applying in memory!" );
LOG.needsLimit();
RowSelection selection = new RowSelection();
selection.setFetchSize( queryParameters.getRowSelection().getFetchSize() );
selection.setTimeout( queryParameters.getRowSelection().getTimeout() );
@ -229,8 +233,8 @@ public class HQLQueryPlan implements Serializable {
public Iterator performIterate(
QueryParameters queryParameters,
EventSource session) throws HibernateException {
if ( log.isTraceEnabled() ) {
log.trace( "iterate: " + getSourceQuery() );
if (LOG.isTraceEnabled()) {
LOG.iterate(getSourceQuery());
queryParameters.traceParameters( session.getFactory() );
}
if ( translators.length == 0 ) {
@ -255,8 +259,8 @@ public class HQLQueryPlan implements Serializable {
public ScrollableResults performScroll(
QueryParameters queryParameters,
SessionImplementor session) throws HibernateException {
if ( log.isTraceEnabled() ) {
log.trace( "iterate: " + getSourceQuery() );
if (LOG.isTraceEnabled()) {
LOG.iterate(getSourceQuery());
queryParameters.traceParameters( session.getFactory() );
}
if ( translators.length != 1 ) {
@ -271,13 +275,11 @@ public class HQLQueryPlan implements Serializable {
public int performExecuteUpdate(QueryParameters queryParameters, SessionImplementor session)
throws HibernateException {
if ( log.isTraceEnabled() ) {
log.trace( "executeUpdate: " + getSourceQuery() );
if (LOG.isTraceEnabled()) {
LOG.executeUpdate(getSourceQuery());
queryParameters.traceParameters( session.getFactory() );
}
if ( translators.length != 1 ) {
log.warn( "manipulation query [" + getSourceQuery() + "] resulted in [" + translators.length + "] split queries" );
}
if (translators.length != 1) LOG.splitQueries(getSourceQuery(), translators.length);
int result = 0;
for ( int i = 0; i < translators.length; i++ ) {
result += translators[i].executeUpdate( queryParameters, session );
@ -289,9 +291,7 @@ public class HQLQueryPlan implements Serializable {
long start = System.currentTimeMillis();
ParamLocationRecognizer recognizer = ParamLocationRecognizer.parseLocations( hql );
long end = System.currentTimeMillis();
if ( log.isTraceEnabled() ) {
log.trace( "HQL param location recognition took " + (end - start) + " mills (" + hql + ")" );
}
LOG.hqlParamLocationRecognition(end - start, hql);
int ordinalParamCount = parameterTranslations.getOrdinalParameterCount();
int[] locations = ArrayHelper.toIntArray( recognizer.getOrdinalParameterLocationList() );
@ -336,4 +336,37 @@ public class HQLQueryPlan implements Serializable {
System.arraycopy(translators, 0, copy, 0, copy.length);
return copy;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Execute update: %s" )
void executeUpdate( String sourceQuery );
@LogMessage( level = TRACE )
@Message( value = "Find: %s" )
void find( String sourceQuery );
@LogMessage( level = TRACE )
@Message( value = "HQL param location recognition took %d mills (%s)" )
void hqlParamLocationRecognition( long l,
String hql );
@LogMessage( level = TRACE )
@Message( value = "Iterate: %s" )
void iterate( String sourceQuery );
@LogMessage( level = WARN )
@Message( value = "FirstResult/maxResults specified on polymorphic query; applying in memory!" )
void needsLimit();
@LogMessage( level = WARN )
@Message( value = "Manipulation query [%s] resulted in [%d] split queries" )
void splitQueries( String sourceQuery,
int length );
}
}

View File

@ -24,27 +24,29 @@
*/
package org.hibernate.engine.query;
import static org.jboss.logging.Logger.Level.DEBUG;
import java.io.Serializable;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.HibernateException;
import org.hibernate.QueryException;
import org.hibernate.engine.query.sql.NativeSQLQuerySpecification;
import org.hibernate.action.BulkOperationCleanupAction;
import org.hibernate.engine.QueryParameters;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.engine.TypedValue;
import org.hibernate.engine.query.sql.NativeSQLQuerySpecification;
import org.hibernate.event.EventSource;
import org.hibernate.loader.custom.sql.SQLCustomQuery;
import org.hibernate.type.Type;
import org.hibernate.util.ArrayHelper;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Defines a query execution plan for a native-SQL query.
@ -56,7 +58,8 @@ public class NativeSQLQueryPlan implements Serializable {
private final SQLCustomQuery customQuery;
private static final Logger log = LoggerFactory.getLogger(NativeSQLQueryPlan.class);
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
NativeSQLQueryPlan.class.getPackage().getName());
public NativeSQLQueryPlan(
NativeSQLQuerySpecification specification,
@ -154,11 +157,7 @@ public class NativeSQLQueryPlan implements Serializable {
TypedValue typedval = (TypedValue) e.getValue();
int[] locs = getNamedParameterLocs( name );
for (int i = 0; i < locs.length; i++) {
if ( log.isDebugEnabled() ) {
log.debug( "bindNamedParameters() "
+ typedval.getValue() + " -> " + name + " ["
+ (locs[i] + start ) + "]" );
}
LOG.bindNamedParameters(typedval.getValue(), name, locs[i] + start);
typedval.getType().nullSafeSet( ps, typedval.getValue(),
locs[i] + start, session );
}
@ -222,4 +221,16 @@ public class NativeSQLQueryPlan implements Serializable {
return result;
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = DEBUG )
@Message( value = "bindNamedParameters() %s -> %s [%d]" )
void bindNamedParameters( Object value,
String name,
int i );
}
}

View File

@ -23,28 +23,29 @@
*/
package org.hibernate.engine.query;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.util.SimpleMRUCache;
import org.hibernate.util.SoftLimitMRUCache;
import org.hibernate.util.CollectionHelper;
import static org.jboss.logging.Logger.Level.TRACE;
import java.io.Serializable;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.hibernate.MappingException;
import org.hibernate.QueryException;
import org.hibernate.cfg.Environment;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.engine.query.sql.NativeSQLQuerySpecification;
import org.hibernate.QueryException;
import org.hibernate.MappingException;
import org.hibernate.impl.FilterImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Serializable;
import java.util.Map;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Set;
import java.util.HashSet;
import java.util.Collections;
import java.util.Collection;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.util.CollectionHelper;
import org.hibernate.util.SimpleMRUCache;
import org.hibernate.util.SoftLimitMRUCache;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Acts as a cache for compiled query plans, as well as query-parameter metadata.
@ -56,7 +57,8 @@ import java.util.Collection;
*/
public class QueryPlanCache implements Serializable {
private static final Logger log = LoggerFactory.getLogger( QueryPlanCache.class );
private static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class,
QueryPlanCache.class.getPackage().getName());
private SessionFactoryImplementor factory;
@ -118,16 +120,9 @@ public class QueryPlanCache implements Serializable {
HQLQueryPlan plan = ( HQLQueryPlan ) planCache.get ( key );
if ( plan == null ) {
if ( log.isTraceEnabled() ) {
log.trace( "unable to locate HQL query plan in cache; generating (" + queryString + ")" );
}
LOG.unableToLocateHqlQuery(queryString);
plan = new HQLQueryPlan(queryString, shallow, enabledFilters, factory );
}
else {
if ( log.isTraceEnabled() ) {
log.trace( "located HQL query plan in cache (" + queryString + ")" );
}
}
} else LOG.locatedHqlQuery(queryString);
planCache.put( key, plan );
@ -140,16 +135,9 @@ public class QueryPlanCache implements Serializable {
FilterQueryPlan plan = ( FilterQueryPlan ) planCache.get ( key );
if ( plan == null ) {
if ( log.isTraceEnabled() ) {
log.trace( "unable to locate collection-filter query plan in cache; generating (" + collectionRole + " : " + filterString + ")" );
}
LOG.unableToLocateCollectionFilter(collectionRole, filterString);
plan = new FilterQueryPlan( filterString, collectionRole, shallow, enabledFilters, factory );
}
else {
if ( log.isTraceEnabled() ) {
log.trace( "located collection-filter query plan in cache (" + collectionRole + " : " + filterString + ")" );
}
}
} else LOG.locatedCollectionFilter(collectionRole, filterString);
planCache.put( key, plan );
@ -160,16 +148,9 @@ public class QueryPlanCache implements Serializable {
NativeSQLQueryPlan plan = ( NativeSQLQueryPlan ) planCache.get( spec );
if ( plan == null ) {
if ( log.isTraceEnabled() ) {
log.trace( "unable to locate native-sql query plan in cache; generating (" + spec.getQueryString() + ")" );
}
LOG.unableToLocationNativeSqlQueryPlan(spec.getQueryString());
plan = new NativeSQLQueryPlan( spec, factory );
}
else {
if ( log.isTraceEnabled() ) {
log.trace( "located native-sql query plan in cache (" + spec.getQueryString() + ")" );
}
}
} else LOG.locatedNativeSqlQueryPlan(spec.getQueryString());
planCache.put( spec, plan );
return plan;
@ -232,7 +213,8 @@ public class QueryPlanCache implements Serializable {
this.hashCode = hash;
}
public boolean equals(Object o) {
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
@ -248,7 +230,8 @@ public class QueryPlanCache implements Serializable {
}
public int hashCode() {
@Override
public int hashCode() {
return hashCode;
}
}
@ -288,7 +271,8 @@ public class QueryPlanCache implements Serializable {
this.hashCode = hash;
}
public boolean equals(Object o) {
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
@ -303,7 +287,8 @@ public class QueryPlanCache implements Serializable {
}
public int hashCode() {
@Override
public int hashCode() {
return hashCode;
}
}
@ -337,7 +322,8 @@ public class QueryPlanCache implements Serializable {
this.hashCode = hash;
}
public boolean equals(Object o) {
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
@ -354,8 +340,42 @@ public class QueryPlanCache implements Serializable {
}
public int hashCode() {
@Override
public int hashCode() {
return hashCode;
}
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = TRACE )
@Message( value = "Located collection-filter query plan in cache (%s : %s)" )
void locatedCollectionFilter( String collectionRole,
String filterString );
@LogMessage( level = TRACE )
@Message( value = "Located HQL query plan in cache (%s)" )
void locatedHqlQuery( String queryString );
@LogMessage( level = TRACE )
@Message( value = "Located native-sql query plan in cache (%s)" )
void locatedNativeSqlQueryPlan( String queryString );
@LogMessage( level = TRACE )
@Message( value = "Unable to locate collection-filter query plan in cache; generating (%s : %s)" )
void unableToLocateCollectionFilter( String collectionRole,
String filterString );
@LogMessage( level = TRACE )
@Message( value = "Unable to locate HQL query plan in cache; generating (%s)" )
void unableToLocateHqlQuery( String queryString );
@LogMessage( level = TRACE )
@Message( value = "Unable to locate native-sql query plan in cache; generating (%s)" )
void unableToLocationNativeSqlQueryPlan( String queryString );
}
}

View File

@ -24,20 +24,24 @@
*/
package org.hibernate.engine.transaction;
import static org.jboss.logging.Logger.Level.DEBUG;
import static org.jboss.logging.Logger.Level.INFO;
import static org.jboss.logging.Logger.Level.TRACE;
import java.sql.Connection;
import java.sql.SQLException;
import javax.transaction.NotSupportedException;
import javax.transaction.SystemException;
import javax.transaction.Transaction;
import javax.transaction.TransactionManager;
import javax.transaction.SystemException;
import javax.transaction.NotSupportedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.HibernateException;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
import org.hibernate.exception.JDBCExceptionHelper;
import org.hibernate.exception.SQLExceptionConverter;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
/**
* Class which provides the isolation semantics required by
@ -52,7 +56,7 @@ import org.hibernate.exception.SQLExceptionConverter;
*/
public class Isolater {
private static final Logger log = LoggerFactory.getLogger( Isolater.class );
static final Logger LOG = org.jboss.logging.Logger.getMessageLogger(Logger.class, Isolater.class.getPackage().getName());
/**
* Ensures that all processing actually performed by the given work will
@ -116,9 +120,7 @@ public class Isolater {
try {
// First we suspend any current JTA transaction
Transaction surroundingTransaction = transactionManager.suspend();
if ( log.isDebugEnabled() ) {
log.debug( "surrounding JTA transaction suspended [" + surroundingTransaction + "]" );
}
LOG.jtaTransactionSuspended(surroundingTransaction);
boolean hadProblems = false;
try {
@ -137,9 +139,7 @@ public class Isolater {
finally {
try {
transactionManager.resume( surroundingTransaction );
if ( log.isDebugEnabled() ) {
log.debug( "surrounding JTA transaction resumed [" + surroundingTransaction + "]" );
}
LOG.jtaTransactionResumed(surroundingTransaction);
}
catch( Throwable t ) {
// if the actually work had an error use that, otherwise error based on t
@ -170,7 +170,7 @@ public class Isolater {
transactionManager.rollback();
}
catch ( Exception ignore ) {
log.info( "Unable to rollback isolated transaction on error [" + e + "] : [" + ignore + "]" );
LOG.unableToRollbackIsolatedTransaction(e, ignore);
}
}
}
@ -206,7 +206,7 @@ public class Isolater {
session.getFactory().getConnectionProvider().closeConnection( connection );
}
catch ( Throwable ignore ) {
log.info( "Unable to release isolated connection [" + ignore + "]" );
LOG.unableToReleaseIsolatedConnection(ignore);
}
}
}
@ -259,7 +259,7 @@ public class Isolater {
}
}
catch( Exception ignore ) {
log.info( "unable to rollback connection on exception [" + ignore + "]" );
LOG.unableToRollbackConnection(ignore);
}
if ( e instanceof HibernateException ) {
@ -281,14 +281,14 @@ public class Isolater {
connection.setAutoCommit( true );
}
catch( Exception ignore ) {
log.trace( "was unable to reset connection back to auto-commit" );
LOG.unableToResetConnectionToAutoCommit();
}
}
try {
session.getFactory().getConnectionProvider().closeConnection( connection );
}
catch ( Exception ignore ) {
log.info( "Unable to release isolated connection [" + ignore + "]" );
LOG.unableToReleaseIsolatedConnection(ignore);
}
}
}
@ -304,4 +304,40 @@ public class Isolater {
return session.getFactory().getSQLExceptionHelper();
}
}
/**
* Interface defining messages that may be logged by the outer class
*/
@MessageLogger
interface Logger extends BasicLogger {
@LogMessage( level = INFO )
@Message( value = "On release of batch it still contained JDBC statements" )
void batchContainedStatementsOnRelease();
@LogMessage( level = DEBUG )
@Message( value = "Surrounding JTA transaction resumed [%s]" )
void jtaTransactionResumed( Transaction surroundingTransaction );
@LogMessage( level = DEBUG )
@Message( value = "Surrounding JTA transaction suspended [%s]" )
void jtaTransactionSuspended( Transaction surroundingTransaction );
@LogMessage( level = INFO )
@Message( value = "Unable to release isolated connection [%s]" )
void unableToReleaseIsolatedConnection( Throwable ignore );
@LogMessage( level = TRACE )
@Message( value = "Unable to reset connection back to auto-commit" )
void unableToResetConnectionToAutoCommit();
@LogMessage( level = INFO )
@Message( value = "Unable to rollback connection on exception [%s]" )
void unableToRollbackConnection( Exception ignore );
@LogMessage( level = INFO )
@Message( value = "Unable to rollback isolated transaction on error [%s] : [%s]" )
void unableToRollbackIsolatedTransaction( Exception e,
Exception ignore );
}
}
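In the interface above, exceptions are passed as %s format arguments, so only their toString() ends up in the message and the stack trace is dropped. An alternative, stated here as an assumption about the same annotation set: the @Cause annotation marks a parameter as the log record's cause and preserves the stack trace.
import static org.jboss.logging.Logger.Level.INFO;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.Cause;
import org.jboss.logging.LogMessage;
import org.jboss.logging.Message;
import org.jboss.logging.MessageLogger;
@MessageLogger
interface CauseSketch extends BasicLogger {
    // As declared above: the exception is formatted into the message text.
    @LogMessage( level = INFO )
    @Message( value = "Unable to release isolated connection [%s]" )
    void unableToReleaseIsolatedConnection( Throwable ignore );
    // Assumed alternative: @Cause attaches the Throwable to the log record itself,
    // keeping the message text fixed while preserving the stack trace.
    @LogMessage( level = INFO )
    @Message( value = "Unable to release isolated connection" )
    void unableToReleaseIsolatedConnectionWithCause( @Cause Throwable cause );
}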

Some files were not shown because too many files have changed in this diff