HHH-6967 - HHH-6906 - Clean up javadoc warnings

Steve Ebersole 2011-12-20 12:02:59 -06:00
parent 5e8b74d8e8
commit e75b8a77b1
56 changed files with 1121 additions and 806 deletions

View File

@ -246,7 +246,7 @@ public interface Criteria extends CriteriaSpecification {
* @return this (for method chaining)
*
* @throws HibernateException Indicates a problem creating the sub criteria
* @deprecated use {@link #createAlias(String, String, JoinType, Criterion}
* @deprecated use {@link #createAlias(String, String, JoinType, Criterion)}
*/
@Deprecated
public Criteria createAlias(String associationPath, String alias, int joinType, Criterion withClause) throws HibernateException;
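As a hedged illustration of the replacement this deprecation points at, the sketch below uses the JoinType-based overload; the Cat entity, its kittens collection, and the name filter are hypothetical stand-ins for a real mapping.

import java.util.List;

import org.hibernate.Session;
import org.hibernate.criterion.Restrictions;
import org.hibernate.sql.JoinType;

// Hypothetical mapping: Cat with a "kittens" collection.
static List findCatsWithKittenNamed(Session session, String name) {
    return session.createCriteria( Cat.class )
            // non-deprecated overload: JoinType enum plus an optional "with clause" restriction
            .createAlias( "kittens", "kitten", JoinType.LEFT_OUTER_JOIN,
                    Restrictions.eq( "kitten.name", name ) )
            .list();
}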
@ -453,6 +453,7 @@ public interface Criteria extends CriteriaSpecification {
*
* @param readOnly true, entities and proxies loaded by the criteria will be put in read-only mode
* false, entities and proxies loaded by the criteria will be put in modifiable mode
* @return {@code this}, for method chaining
*/
public Criteria setReadOnly(boolean readOnly);

View File

@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@ -20,9 +20,9 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate;
import java.io.Serializable;
import java.util.Iterator;
@ -45,9 +45,11 @@ import org.hibernate.type.Type;
* Instead of implementing this interface directly, it is usually better to extend <tt>EmptyInterceptor</tt>
* and override only the callback methods of interest.
*
* @see SessionFactory#openSession(Interceptor)
* @see SessionBuilder#interceptor(Interceptor)
* @see SharedSessionBuilder#interceptor()
* @see org.hibernate.cfg.Configuration#setInterceptor(Interceptor)
* @see EmptyInterceptor
*
* @author Gavin King
*/
public interface Interceptor {
@ -55,52 +57,122 @@ public interface Interceptor {
* Called just before an object is initialized. The interceptor may change the <tt>state</tt>, which will
* be propagated to the persistent object. Note that when this method is called, <tt>entity</tt> will be
* an empty uninitialized instance of the class.
* <p/>
* NOTE: The indexes across the <tt>state</tt>, <tt>propertyNames</tt> and <tt>types</tt> arrays match.
*
* @return <tt>true</tt> if the user modified the <tt>state</tt> in any way.
* @param entity The entity instance being loaded
* @param id The identifier value being loaded
* @param state The entity state (which will be pushed into the entity instance)
* @param propertyNames The names of the entity properties, corresponding to the <tt>state</tt>.
* @param types The types of the entity properties, corresponding to the <tt>state</tt>.
*
* @return {@code true} if the user modified the <tt>state</tt> in any way.
*
* @throws CallbackException Thrown if the interceptor encounters any problems handling the callback.
*/
public boolean onLoad(Object entity, Serializable id, Object[] state, String[] propertyNames, Type[] types) throws CallbackException;
/**
* Called when an object is detected to be dirty, during a flush. The interceptor may modify the detected
* <tt>currentState</tt>, which will be propagated to both the database and the persistent object.
* Note that not all flushes end in actual synchronization with the database, in which case the
* new <tt>currentState</tt> will be propagated to the object, but not necessarily (immediately) to
* the database. It is strongly recommended that the interceptor <b>not</b> modify the <tt>previousState</tt>.
* <p/>
* NOTE: The indexes across the <tt>currentState</tt>, <tt>previousState</tt>, <tt>propertyNames</tt> and
* <tt>types</tt> arrays match.
*
* @return <tt>true</tt> if the user modified the <tt>currentState</tt> in any way.
* @param entity The entity instance detected as being dirty and being flushed
* @param id The identifier of the entity
* @param currentState The entity's current state
* @param previousState The entity's previous (load time) state.
* @param propertyNames The names of the entity properties
* @param types The types of the entity properties
*
* @return {@code true} if the user modified the <tt>currentState</tt> in any way.
*
* @throws CallbackException Thrown if the interceptor encounters any problems handling the callback.
*/
public boolean onFlushDirty(Object entity, Serializable id, Object[] currentState, Object[] previousState, String[] propertyNames, Type[] types) throws CallbackException;
/**
* Called before an object is saved. The interceptor may modify the <tt>state</tt>, which will be used for
* the SQL <tt>INSERT</tt> and propagated to the persistent object.
*
* @param entity The entity instance whose state is being inserted
* @param id The identifier of the entity
* @param state The state of the entity which will be inserted
* @param propertyNames The names of the entity properties.
* @param types The types of the entity properties
*
* @return <tt>true</tt> if the user modified the <tt>state</tt> in any way.
*
* @throws CallbackException Thrown if the interceptor encounters any problems handling the callback.
*/
public boolean onSave(Object entity, Serializable id, Object[] state, String[] propertyNames, Type[] types) throws CallbackException;
/**
* Called before an object is deleted. It is not recommended that the interceptor modify the <tt>state</tt>.
*
* @param entity The entity instance being deleted
* @param id The identifier of the entity
* @param state The state of the entity
* @param propertyNames The names of the entity properties.
* @param types The types of the entity properties
*
* @throws CallbackException Thrown if the interceptor encounters any problems handling the callback.
*/
public void onDelete(Object entity, Serializable id, Object[] state, String[] propertyNames, Type[] types) throws CallbackException;
/**
* Called before a collection is (re)created.
*
* @param collection The collection instance.
* @param key The collection key value.
*
* @throws CallbackException Thrown if the interceptor encounters any problems handling the callback.
*/
public void onCollectionRecreate(Object collection, Serializable key) throws CallbackException;
/**
* Called before a collection is deleted.
*
* @param collection The collection instance.
* @param key The collection key value.
*
* @throws CallbackException Thrown if the interceptor encounters any problems handling the callback.
*/
public void onCollectionRemove(Object collection, Serializable key) throws CallbackException;
/**
* Called before a collection is updated.
*
* @param collection The collection instance.
* @param key The collection key value.
*
* @throws CallbackException Thrown if the interceptor encounters any problems handling the callback.
*/
public void onCollectionUpdate(Object collection, Serializable key) throws CallbackException;
/**
* Called before a flush
*
* @param entities The entities to be flushed
*
* @throws CallbackException Thrown if the interceptor encounters any problems handling the callback.
*/
public void preFlush(Iterator entities) throws CallbackException;
/**
* Called after a flush that actually ends in execution of the SQL statements required to synchronize
* in-memory state with the database.
*
* @param entities The entities that were flushed.
*
* @throws CallbackException Thrown if the interceptor encounters any problems handling the callback.
*/
public void postFlush(Iterator entities) throws CallbackException;
/**
* Called to distinguish between transient and detached entities. The return value determines the
* state of the entity with respect to the current session.
@ -114,6 +186,7 @@ public interface Interceptor {
* @return Boolean or <tt>null</tt> to choose default behaviour
*/
public Boolean isTransient(Object entity);
/**
* Called from <tt>flush()</tt>. The return value determines whether the entity is updated
* <ul>
@ -121,8 +194,17 @@ public interface Interceptor {
* <li>an empty array - the entity is not dirty
* <li><tt>null</tt> - use Hibernate's default dirty-checking algorithm
* </ul>
* @param entity a persistent entity
* @return array of dirty property indices or <tt>null</tt> to choose default behaviour
*
* @param entity The entity for which to find dirty properties.
* @param id The identifier of the entity
* @param currentState The current entity state as taken from the entity instance
* @param previousState The state of the entity when it was last synchronized (generally when it was loaded)
* @param propertyNames The names of the entity properties.
* @param types The types of the entity properties
*
* @return array of dirty property indices or {@code null} to indicate Hibernate should perform default behaviour
*
* @throws CallbackException Thrown if the interceptor encounters any problems handling the callback.
*/
public int[] findDirty(Object entity, Serializable id, Object[] currentState, Object[] previousState, String[] propertyNames, Type[] types);
/**
@ -134,6 +216,8 @@ public interface Interceptor {
* @param entityMode The type of entity instance to be returned.
* @param id the identifier of the new instance
* @return an instance of the class, or <tt>null</tt> to choose default behaviour
*
* @throws CallbackException Thrown if the interceptor encounters any problems handling the callback.
*/
public Object instantiate(String entityName, EntityMode entityMode, Serializable id) throws CallbackException;
@ -141,6 +225,8 @@ public interface Interceptor {
* Get the entity name for a persistent or transient instance
* @param object an entity instance
* @return the name of the entity
*
* @throws CallbackException Thrown if the interceptor encounters any problems handling the callback.
*/
public String getEntityName(Object object) throws CallbackException;
@ -149,7 +235,8 @@ public interface Interceptor {
* @param entityName the name of the entity
* @param id the instance identifier
* @return a fully initialized entity
* @throws CallbackException
*
* @throws CallbackException Thrown if the interceptor encounters any problems handling the callback.
*/
public Object getEntity(String entityName, Serializable id) throws CallbackException;
@ -157,14 +244,22 @@ public interface Interceptor {
* Called when a Hibernate transaction is begun via the Hibernate <tt>Transaction</tt>
* API. Will not be called if transactions are being controlled via some other
* mechanism (CMT, for example).
*
* @param tx The Hibernate transaction facade object
*/
public void afterTransactionBegin(Transaction tx);
/**
* Called before a transaction is committed (but not before rollback).
*
* @param tx The Hibernate transaction facade object
*/
public void beforeTransactionCompletion(Transaction tx);
/**
* Called after a transaction is committed or rolled back.
*
* @param tx The Hibernate transaction facade object
*/
public void afterTransactionCompletion(Transaction tx);
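As the class javadoc above recommends, these callbacks are usually consumed by extending EmptyInterceptor rather than implementing the interface directly. A minimal hedged sketch, assuming a hypothetical lastUpdated property on the audited entities:

import java.io.Serializable;
import java.util.Date;

import org.hibernate.EmptyInterceptor;
import org.hibernate.type.Type;

public class LastUpdatedInterceptor extends EmptyInterceptor {
    @Override
    public boolean onFlushDirty(
            Object entity,
            Serializable id,
            Object[] currentState,
            Object[] previousState,
            String[] propertyNames,
            Type[] types) {
        // Indexes across currentState, propertyNames and types match, per the contract above.
        for ( int i = 0; i < propertyNames.length; i++ ) {
            if ( "lastUpdated".equals( propertyNames[i] ) ) {
                currentState[i] = new Date();
                return true; // signal that currentState was modified
            }
        }
        return false;
    }
}

Such an interceptor could then be attached per session with something along the lines of sessionFactory.withOptions().interceptor( new LastUpdatedInterceptor() ).openSession(), matching the SessionBuilder#interceptor reference above.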

View File

@ -251,7 +251,7 @@ public interface Session extends SharedSessionContract {
* @throws TransientObjectException if the instance is transient or associated with
* a different session
*/
public Serializable getIdentifier(Object object) throws HibernateException;
public Serializable getIdentifier(Object object);
/**
* Check if this instance is associated with this <tt>Session</tt>.
@ -267,9 +267,8 @@ public interface Session extends SharedSessionContract {
* instances if the association is mapped with <tt>cascade="evict"</tt>.
*
* @param object a persistent instance
* @throws HibernateException
*/
public void evict(Object object) throws HibernateException;
public void evict(Object object);
/**
* Return the persistent instance of the given entity class with the given identifier,
@ -278,12 +277,13 @@ public interface Session extends SharedSessionContract {
* @param theClass a persistent class
* @param id a valid identifier of an existing persistent instance of the class
* @param lockMode the lock level
*
* @return the persistent instance or proxy
* @throws HibernateException
*
* @deprecated LockMode parameter should be replaced with LockOptions
*/
@Deprecated
public Object load(Class theClass, Serializable id, LockMode lockMode) throws HibernateException;
public Object load(Class theClass, Serializable id, LockMode lockMode);
/**
* Return the persistent instance of the given entity class with the given identifier,
@ -293,9 +293,8 @@ public interface Session extends SharedSessionContract {
* @param id a valid identifier of an existing persistent instance of the class
* @param lockOptions contains the lock level
* @return the persistent instance or proxy
* @throws HibernateException
*/
public Object load(Class theClass, Serializable id, LockOptions lockOptions) throws HibernateException;
public Object load(Class theClass, Serializable id, LockOptions lockOptions);
/**
* Return the persistent instance of the given entity class with the given identifier,
@ -304,12 +303,13 @@ public interface Session extends SharedSessionContract {
* @param entityName a persistent class
* @param id a valid identifier of an existing persistent instance of the class
* @param lockMode the lock level
*
* @return the persistent instance or proxy
* @throws HibernateException
*
* @deprecated LockMode parameter should be replaced with LockOptions
*/
@Deprecated
public Object load(String entityName, Serializable id, LockMode lockMode) throws HibernateException;
public Object load(String entityName, Serializable id, LockMode lockMode);
/**
* Return the persistent instance of the given entity class with the given identifier,
@ -318,10 +318,10 @@ public interface Session extends SharedSessionContract {
* @param entityName a persistent class
* @param id a valid identifier of an existing persistent instance of the class
* @param lockOptions contains the lock level
*
* @return the persistent instance or proxy
* @throws HibernateException
*/
public Object load(String entityName, Serializable id, LockOptions lockOptions) throws HibernateException;
public Object load(String entityName, Serializable id, LockOptions lockOptions);
/**
* Return the persistent instance of the given entity class with the given identifier,
@ -334,10 +334,10 @@ public interface Session extends SharedSessionContract {
*
* @param theClass a persistent class
* @param id a valid identifier of an existing persistent instance of the class
*
* @return the persistent instance or proxy
* @throws HibernateException
*/
public Object load(Class theClass, Serializable id) throws HibernateException;
public Object load(Class theClass, Serializable id);
/**
* Return the persistent instance of the given entity class with the given identifier,
@ -350,10 +350,10 @@ public interface Session extends SharedSessionContract {
*
* @param entityName a persistent class
* @param id a valid identifier of an existing persistent instance of the class
*
* @return the persistent instance or proxy
* @throws HibernateException
*/
public Object load(String entityName, Serializable id) throws HibernateException;
public Object load(String entityName, Serializable id);
/**
* Read the persistent state associated with the given identifier into the given transient
@ -361,51 +361,54 @@ public interface Session extends SharedSessionContract {
*
* @param object an "empty" instance of the persistent class
* @param id a valid identifier of an existing persistent instance of the class
* @throws HibernateException
*/
public void load(Object object, Serializable id) throws HibernateException;
public void load(Object object, Serializable id);
/**
* Persist the state of the given detached instance, reusing the current
* identifier value. This operation cascades to associated instances if
* the association is mapped with <tt>cascade="replicate"</tt>.
* the association is mapped with {@code cascade="replicate"}
*
* @param object a detached instance of a persistent class
* @param replicationMode The replication mode to use
*/
public void replicate(Object object, ReplicationMode replicationMode) throws HibernateException;
public void replicate(Object object, ReplicationMode replicationMode);
/**
* Persist the state of the given detached instance, reusing the current
* identifier value. This operation cascades to associated instances if
* the association is mapped with <tt>cascade="replicate"</tt>.
* the association is mapped with {@code cascade="replicate"}
*
* @param entityName The entity name
* @param object a detached instance of a persistent class
* @param replicationMode The replication mode to use
*/
public void replicate(String entityName, Object object, ReplicationMode replicationMode) throws HibernateException;
public void replicate(String entityName, Object object, ReplicationMode replicationMode) ;
/**
* Persist the given transient instance, first assigning a generated identifier. (Or
* using the current value of the identifier property if the <tt>assigned</tt>
* generator is used.) This operation cascades to associated instances if the
* association is mapped with <tt>cascade="save-update"</tt>.
* association is mapped with {@code cascade="save-update"}
*
* @param object a transient instance of a persistent class
*
* @return the generated identifier
* @throws HibernateException
*/
public Serializable save(Object object) throws HibernateException;
public Serializable save(Object object);
/**
* Persist the given transient instance, first assigning a generated identifier. (Or
* using the current value of the identifier property if the <tt>assigned</tt>
* generator is used.) This operation cascades to associated instances if the
* association is mapped with <tt>cascade="save-update"</tt>.
* association is mapped with {@code cascade="save-update"}
*
* @param entityName The entity name
* @param object a transient instance of a persistent class
*
* @return the generated identifier
* @throws HibernateException
*/
public Serializable save(String entityName, Object object) throws HibernateException;
public Serializable save(String entityName, Object object);
/**
* Either {@link #save(Object)} or {@link #update(Object)} the given
@ -413,14 +416,14 @@ public interface Session extends SharedSessionContract {
* manual for discussion of unsaved-value checking).
* <p/>
* This operation cascades to associated instances if the association is mapped
* with <tt>cascade="save-update"</tt>.
* with {@code cascade="save-update"}
*
* @param object a transient or detached instance containing new or updated state
*
* @see Session#save(java.lang.Object)
* @see Session#update(Object object)
* @param object a transient or detached instance containing new or updated state
* @throws HibernateException
*/
public void saveOrUpdate(Object object) throws HibernateException;
public void saveOrUpdate(Object object);
/**
* Either {@link #save(String, Object)} or {@link #update(String, Object)}
@ -428,36 +431,36 @@ public interface Session extends SharedSessionContract {
* (see the manual for discussion of unsaved-value checking).
* <p/>
* This operation cascades to associated instances if the association is mapped
* with <tt>cascade="save-update"</tt>.
* with {@code cascade="save-update"}
*
* @param entityName The entity name
* @param object a transient or detached instance containing new or updated state
*
* @see Session#save(String,Object)
* @see Session#update(String,Object)
* @param object a transient or detached instance containing new or updated state
* @throws HibernateException
*/
public void saveOrUpdate(String entityName, Object object) throws HibernateException;
public void saveOrUpdate(String entityName, Object object);
/**
* Update the persistent instance with the identifier of the given detached
* instance. If there is a persistent instance with the same identifier,
* an exception is thrown. This operation cascades to associated instances
* if the association is mapped with <tt>cascade="save-update"</tt>.
* if the association is mapped with {@code cascade="save-update"}
*
* @param object a detached instance containing updated state
* @throws HibernateException
*/
public void update(Object object) throws HibernateException;
public void update(Object object);
/**
* Update the persistent instance with the identifier of the given detached
* instance. If there is a persistent instance with the same identifier,
* an exception is thrown. This operation cascades to associated instances
* if the association is mapped with <tt>cascade="save-update"</tt>.
* if the association is mapped with {@code cascade="save-update"}
*
* @param entityName The entity name
* @param object a detached instance containing updated state
* @throws HibernateException
*/
public void update(String entityName, Object object) throws HibernateException;
public void update(String entityName, Object object);
/**
* Copy the state of the given object onto the persistent object with the same
@ -466,14 +469,15 @@ public interface Session extends SharedSessionContract {
* given instance is unsaved, save a copy of and return it as a newly persistent
* instance. The given instance does not become associated with the session.
* This operation cascades to associated instances if the association is mapped
* with <tt>cascade="merge"</tt>.<br>
* <br>
* with {@code cascade="merge"}
* <p/>
* The semantics of this method are defined by JSR-220.
*
* @param object a detached instance with state to be copied
*
* @return an updated persistent instance
*/
public Object merge(Object object) throws HibernateException;
public Object merge(Object object);
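A hedged sketch of the detached-instance flow merge is describing; the Item entity and its setDescription property are hypothetical:

import org.hibernate.Session;

// "item" was loaded in an earlier session and is now detached.
static Item rename(Session session, Item item, String newDescription) {
    item.setDescription( newDescription );
    // Copies the detached state onto the managed instance (loading it if necessary)
    // and returns that managed instance; "item" itself remains detached.
    return (Item) session.merge( item );
}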
/**
* Copy the state of the given object onto the persistent object with the same
@ -482,58 +486,59 @@ public interface Session extends SharedSessionContract {
* given instance is unsaved, save a copy of and return it as a newly persistent
* instance. The given instance does not become associated with the session.
* This operation cascades to associated instances if the association is mapped
* with <tt>cascade="merge"</tt>.<br>
* <br>
* with {@code cascade="merge"}
* <p/>
* The semantics of this method are defined by JSR-220.
*
* @param entityName The entity name
* @param object a detached instance with state to be copied
*
* @return an updated persistent instance
*/
public Object merge(String entityName, Object object) throws HibernateException;
public Object merge(String entityName, Object object);
/**
* Make a transient instance persistent. This operation cascades to associated
* instances if the association is mapped with <tt>cascade="persist"</tt>.<br>
* <br>
* instances if the association is mapped with {@code cascade="persist"}
* <p/>
* The semantics of this method are defined by JSR-220.
*
* @param object a transient instance to be made persistent
*/
public void persist(Object object) throws HibernateException;
public void persist(Object object);
/**
* Make a transient instance persistent. This operation cascades to associated
* instances if the association is mapped with <tt>cascade="persist"</tt>.<br>
* <br>
* instances if the association is mapped with {@code cascade="persist"}
* <p/>
* The semantics of this method are defined by JSR-220.
*
* @param entityName The entity name
* @param object a transient instance to be made persistent
*/
public void persist(String entityName, Object object) throws HibernateException;
public void persist(String entityName, Object object);
/**
* Remove a persistent instance from the datastore. The argument may be
* an instance associated with the receiving <tt>Session</tt> or a transient
* instance with an identifier associated with existing persistent state.
* This operation cascades to associated instances if the association is mapped
* with <tt>cascade="delete"</tt>.
* with {@code cascade="delete"}
*
* @param object the instance to be removed
* @throws HibernateException
*/
public void delete(Object object) throws HibernateException;
public void delete(Object object);
/**
* Remove a persistent instance from the datastore. The <b>object</b> argument may be
* an instance associated with the receiving <tt>Session</tt> or a transient
* instance with an identifier associated with existing persistent state.
* This operation cascades to associated instances if the association is mapped
* with <tt>cascade="delete"</tt>.
* with {@code cascade="delete"}
*
* @param entityName The entity name for the instance to be removed.
* @param object the instance to be removed
* @throws HibernateException
*/
public void delete(String entityName, Object object) throws HibernateException;
public void delete(String entityName, Object object);
/**
* Obtain the specified lock level upon the given object. This may be used to
@ -544,11 +549,11 @@ public interface Session extends SharedSessionContract {
*
* @param object a persistent or transient instance
* @param lockMode the lock level
* @throws HibernateException
*
* @deprecated instead call buildLockRequest(LockMode).lock(object)
*/
@Deprecated
public void lock(Object object, LockMode lockMode) throws HibernateException;
public void lock(Object object, LockMode lockMode);
/**
* Obtain the specified lock level upon the given object. This may be used to
@ -559,23 +564,24 @@ public interface Session extends SharedSessionContract {
*
* @param object a persistent or transient instance
* @param lockMode the lock level
* @throws HibernateException
*
* @deprecated instead call buildLockRequest(LockMode).lock(entityName, object)
*/
@SuppressWarnings( {"JavaDoc"})
@Deprecated
public void lock(String entityName, Object object, LockMode lockMode) throws HibernateException;
public void lock(String entityName, Object object, LockMode lockMode);
/**
* Build a LockRequest that specifies the LockMode, pessimistic lock timeout and lock scope.
* timeout and scope is ignored for optimistic locking. After building the LockRequest,
* call LockRequest.lock to perform the requested locking.
*
* Use: session.buildLockRequest().
* setLockMode(LockMode.PESSIMISTIC_WRITE).setTimeOut(1000 * 60).lock(entity);
* <p/>
* Example usage:
* {@code session.buildLockRequest().setLockMode(LockMode.PESSIMISTIC_WRITE).setTimeOut(60000).lock(entity);}
*
* @param lockOptions contains the lock level
*
* @return a lockRequest that can be used to lock the passed object.
* @throws HibernateException
*/
public LockRequest buildLockRequest(LockOptions lockOptions);
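Complementing the fluent example in the javadoc, a hedged variant that passes a pre-built LockOptions instance (entity is assumed to be an instance already associated with the session):

import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.Session;

static void lockPessimistically(Session session, Object entity) {
    LockOptions options = new LockOptions( LockMode.PESSIMISTIC_WRITE );
    session.buildLockRequest( options )
            .setTimeOut( 60 * 1000 )   // same pessimistic-lock timeout as the example above
            .lock( entity );
}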
@ -591,9 +597,8 @@ public interface Session extends SharedSessionContract {
* </ul>
*
* @param object a persistent or detached instance
* @throws HibernateException
*/
public void refresh(Object object) throws HibernateException;
public void refresh(Object object);
/**
* Re-read the state of the given instance from the underlying database. It is
@ -608,9 +613,8 @@ public interface Session extends SharedSessionContract {
*
* @param entityName a persistent class
* @param object a persistent or detached instance
* @throws HibernateException
*/
public void refresh(String entityName, Object object) throws HibernateException;
public void refresh(String entityName, Object object);
/**
* Re-read the state of the given instance from the underlying database, with
@ -620,11 +624,11 @@ public interface Session extends SharedSessionContract {
*
* @param object a persistent or detached instance
* @param lockMode the lock mode to use
* @throws HibernateException
*
* @deprecated LockMode parameter should be replaced with LockOptions
*/
@Deprecated
public void refresh(Object object, LockMode lockMode) throws HibernateException;
public void refresh(Object object, LockMode lockMode);
/**
* Re-read the state of the given instance from the underlying database, with
@ -634,9 +638,8 @@ public interface Session extends SharedSessionContract {
*
* @param object a persistent or detached instance
* @param lockOptions contains the lock mode to use
* @throws HibernateException
*/
public void refresh(Object object, LockOptions lockOptions) throws HibernateException;
public void refresh(Object object, LockOptions lockOptions);
/**
* Re-read the state of the given instance from the underlying database, with
@ -647,17 +650,17 @@ public interface Session extends SharedSessionContract {
* @param entityName a persistent class
* @param object a persistent or detached instance
* @param lockOptions contains the lock mode to use
* @throws HibernateException
*/
public void refresh(String entityName, Object object, LockOptions lockOptions) throws HibernateException;
public void refresh(String entityName, Object object, LockOptions lockOptions);
/**
* Determine the current lock mode of the given object.
*
* @param object a persistent instance
*
* @return the current lock mode
* @throws HibernateException
*/
public LockMode getCurrentLockMode(Object object) throws HibernateException;
public LockMode getCurrentLockMode(Object object);
/**
* Create a {@link Query} instance for the given collection and filter string. Contains an implicit {@code FROM}
@ -685,10 +688,10 @@ public interface Session extends SharedSessionContract {
*
* @param clazz a persistent class
* @param id an identifier
*
* @return a persistent instance or null
* @throws HibernateException
*/
public Object get(Class clazz, Serializable id) throws HibernateException;
public Object get(Class clazz, Serializable id);
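To make the get/load contrast concrete, a small hedged sketch (Cat is again a hypothetical mapped class):

import java.io.Serializable;

import org.hibernate.Session;

static Cat findCat(Session session, Serializable id) {
    // Unlike load(), which may hand back an uninitialized proxy and only fail later,
    // get() returns null immediately when no such persistent instance exists.
    return (Cat) session.get( Cat.class, id );
}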
/**
* Return the persistent instance of the given entity class with the given identifier,
@ -699,12 +702,13 @@ public interface Session extends SharedSessionContract {
* @param clazz a persistent class
* @param id an identifier
* @param lockMode the lock mode
*
* @return a persistent instance or null
* @throws HibernateException
*
* @deprecated LockMode parameter should be replaced with LockOptions
*/
@Deprecated
public Object get(Class clazz, Serializable id, LockMode lockMode) throws HibernateException;
public Object get(Class clazz, Serializable id, LockMode lockMode);
/**
* Return the persistent instance of the given entity class with the given identifier,
@ -715,10 +719,10 @@ public interface Session extends SharedSessionContract {
* @param clazz a persistent class
* @param id an identifier
* @param lockOptions the lock mode
*
* @return a persistent instance or null
* @throws HibernateException
*/
public Object get(Class clazz, Serializable id, LockOptions lockOptions) throws HibernateException;
public Object get(Class clazz, Serializable id, LockOptions lockOptions);
/**
* Return the persistent instance of the given named entity with the given identifier,
@ -727,10 +731,10 @@ public interface Session extends SharedSessionContract {
*
* @param entityName the entity name
* @param id an identifier
*
* @return a persistent instance or null
* @throws HibernateException
*/
public Object get(String entityName, Serializable id) throws HibernateException;
public Object get(String entityName, Serializable id);
/**
* Return the persistent instance of the given entity class with the given identifier,
@ -741,12 +745,13 @@ public interface Session extends SharedSessionContract {
* @param entityName the entity name
* @param id an identifier
* @param lockMode the lock mode
*
* @return a persistent instance or null
* @throws HibernateException
*
* @deprecated LockMode parameter should be replaced with LockOptions
*/
@Deprecated
public Object get(String entityName, Serializable id, LockMode lockMode) throws HibernateException;
public Object get(String entityName, Serializable id, LockMode lockMode);
/**
* Return the persistent instance of the given entity class with the given identifier,
@ -757,19 +762,19 @@ public interface Session extends SharedSessionContract {
* @param entityName the entity name
* @param id an identifier
* @param lockOptions contains the lock mode
*
* @return a persistent instance or null
* @throws HibernateException
*/
public Object get(String entityName, Serializable id, LockOptions lockOptions) throws HibernateException;
public Object get(String entityName, Serializable id, LockOptions lockOptions);
/**
* Return the entity name for a persistent entity
*
* @param object a persistent entity
*
* @return the entity name
* @throws HibernateException
*/
public String getEntityName(Object object) throws HibernateException;
public String getEntityName(Object object);
/**
* Create an {@link IdentifierLoadAccess} instance to retrieve the specified entity type by
@ -823,6 +828,7 @@ public interface Session extends SharedSessionContract {
* Enable the named filter for this current session.
*
* @param filterName The name of the filter to be enabled.
*
* @return The Filter instance representing the enabled filter.
*/
public Filter enableFilter(String filterName);
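A hedged usage sketch; the "effectiveDate" filter and its "asOfDate" parameter are hypothetical and would need a matching filter definition in the mappings:

import java.util.Date;

import org.hibernate.Filter;
import org.hibernate.Session;

static void applyEffectiveDateFilter(Session session) {
    Filter filter = session.enableFilter( "effectiveDate" );
    filter.setParameter( "asOfDate", new Date() );
    // Queries and lazy collection loads issued from this session now carry the
    // filter condition until disableFilter( "effectiveDate" ) is called.
}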
@ -831,6 +837,7 @@ public interface Session extends SharedSessionContract {
* Retrieve a currently enabled filter by name.
*
* @param filterName The name of the filter to be retrieved.
*
* @return The Filter instance representing the enabled filter.
*/
public Filter getEnabledFilter(String filterName);
@ -844,6 +851,8 @@ public interface Session extends SharedSessionContract {
/**
* Get the statistics for this session.
*
* @return The session statistics being collected for this session
*/
public SessionStatistics getStatistics();
@ -854,9 +863,8 @@ public interface Session extends SharedSessionContract {
* entities and proxies that are loaded into the session:
* @see org.hibernate.Session#isDefaultReadOnly()
*
* @param entityOrProxy, an entity or HibernateProxy
* @return true, the entity or proxy is read-only;
* false, the entity or proxy is modifiable.
* @param entityOrProxy an entity or HibernateProxy
* @return {@code true} if the entity or proxy is read-only, {@code false} if the entity or proxy is modifiable.
*/
public boolean isReadOnly(Object entityOrProxy);
@ -876,15 +884,14 @@ public interface Session extends SharedSessionContract {
* and proxies loaded by a Query:
* @see Query#setReadOnly(boolean)
*
* @param entityOrProxy, an entity or HibernateProxy
* @param readOnly, if true, the entity or proxy is made read-only;
* if false, the entity or proxy is made modifiable.
* @param entityOrProxy an entity or HibernateProxy
* @param readOnly {@code true} if the entity or proxy should be made read-only; {@code false} if the entity or
* proxy should be made modifiable
*/
public void setReadOnly(Object entityOrProxy, boolean readOnly);
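A short hedged sketch of the per-instance toggle described above (Cat hypothetical):

import java.io.Serializable;

import org.hibernate.Session;

static Cat loadReadOnly(Session session, Serializable id) {
    Cat cat = (Cat) session.get( Cat.class, id );
    // Dirty checking is skipped for this instance, so in-memory changes to it
    // will not be flushed back to the database.
    session.setReadOnly( cat, true );
    return cat;
}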
/**
* Controller for allowing users to perform JDBC related work using the Connection
* managed by this Session.
* Controller for allowing users to perform JDBC related work using the Connection managed by this Session.
*
* @param work The work to be performed.
* @throws HibernateException Generally indicates wrapped {@link java.sql.SQLException}
@ -892,12 +899,13 @@ public interface Session extends SharedSessionContract {
public void doWork(Work work) throws HibernateException;
/**
* Controller for allowing users to perform JDBC related work using the Connection
* managed by this Session, returning the result from calling <code>work.execute()</code>
* ({@link ReturningWork<T>.execute(Connection)}/
* Controller for allowing users to perform JDBC related work using the Connection managed by this Session. After
* execution returns the result of the {@link ReturningWork#execute} call.
*
* @param work The work to be performed.
* @return the result from calling <code>work.execute()</code>.
*
* @return the result from calling {@link ReturningWork#execute}.
*
* @throws HibernateException Generally indicates wrapped {@link java.sql.SQLException}
*/
public <T> T doReturningWork(ReturningWork<T> work) throws HibernateException;
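For the two JDBC-work controllers above, a hedged sketch using an anonymous ReturningWork (this release line predates Java 8 lambdas):

import java.sql.Connection;
import java.sql.SQLException;

import org.hibernate.Session;
import org.hibernate.jdbc.ReturningWork;

static String readDatabaseVersion(Session session) {
    return session.doReturningWork(
            new ReturningWork<String>() {
                @Override
                public String execute(Connection connection) throws SQLException {
                    // Plain JDBC against the Session-managed connection;
                    // a thrown SQLException is wrapped in a HibernateException.
                    return connection.getMetaData().getDatabaseProductVersion();
                }
            }
    );
}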
@ -912,11 +920,11 @@ public interface Session extends SharedSessionContract {
* For non-user-supplied scenarios, normal transaction management already handles disconnection and reconnection
* automatically.
*
* @return the application-supplied connection or {@literal null}
* @return the application-supplied connection or {@code null}
*
* @see #reconnect(Connection)
*/
Connection disconnect() throws HibernateException;
Connection disconnect();
/**
* Reconnect to the given JDBC connection.
@ -925,7 +933,7 @@ public interface Session extends SharedSessionContract {
*
* @see #disconnect()
*/
void reconnect(Connection connection) throws HibernateException;
void reconnect(Connection connection);
/**
* Is a particular fetch profile enabled on this session?
@ -996,7 +1004,7 @@ public interface Session extends SharedSessionContract {
/**
* Specify the LockMode to be used. The default is LockMode.none.
*
* @param lockMode
* @param lockMode The lock mode to use for this request
*
* @return this LockRequest instance for operation chaining.
*/

View File

@ -47,7 +47,7 @@ public interface Transaction {
/**
* Is this transaction the initiator of any underlying transaction?
*
* @return {@literal true} if this transaction initiated the underlying transaction; {@literal false} otherwise.
* @return {@code true} if this transaction initiated the underlying transaction; {@code false} otherwise.
*/
public boolean isInitiator();
@ -102,7 +102,7 @@ public interface Transaction {
* transaction is active when it is initiated directly through the JDBC {@link java.sql.Connection}, only when
* it is initiated from here.
*
* @return {@literal true} if the transaction is still active; {@literal false} otherwise.
* @return {@code true} if the transaction is still active; {@code false} otherwise.
*
* @throws HibernateException Indicates a problem checking the transaction status.
*/
@ -113,7 +113,8 @@ public interface Transaction {
* <p/>
* Generally speaking this will be the same as {@link #isActive()}.
*
* @return
* @return {@code true} if Hibernate is known to be participating in the underlying transaction; {@code false}
* otherwise.
*/
public boolean isParticipating();
@ -124,10 +125,11 @@ public interface Transaction {
* transaction was committed when the commit was performed directly through the JDBC {@link java.sql.Connection},
* only when the commit was done from this.
*
* @return {@literal true} if the transaction is rolled back; {@literal false} otherwise.
* @return {@code true} if the transaction is rolled back; {@code false} otherwise.
*
* @throws HibernateException Indicates a problem checking the transaction status.
*/
@SuppressWarnings( {"UnusedDeclaration"})
public boolean wasCommitted();
/**
@ -141,6 +143,7 @@ public interface Transaction {
*
* @throws HibernateException Indicates a problem checking the transaction status.
*/
@SuppressWarnings( {"UnusedDeclaration"})
public boolean wasRolledBack();
/**

View File

@ -1,10 +1,10 @@
<!--
~ Hibernate, Relational Persistence for Idiomatic Java
~
~ Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
~ Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
~ indicated by the @author tags or express copyright attribution
~ statements applied by the authors. All third-party contributions are
~ distributed under license by Red Hat Middleware LLC.
~ distributed under license by Red Hat Inc.
~
~ This copyrighted material is made available to anyone wishing to use, modify,
~ copy, or redistribute it subject to the terms and conditions of the GNU
@ -20,43 +20,13 @@
~ Free Software Foundation, Inc.
~ 51 Franklin Street, Fifth Floor
~ Boston, MA 02110-1301 USA
~
-->
<html>
<head>
</head>
<body>
<p>
This package defines Hibernate second level cache service. {@link org.hibernate.cache.spi} defines the
SPI used to integrate with Hibernate internals.
</p>
<p>
The legacy (and now deprecated) approach to caching is defined by the {@link org.hibernate.cache.CacheProvider} and
{@link org.hibernate.cache.Cache} interfaces as well as the {@link org.hibernate.cache.CacheConcurrencyStrategy}
interface along with the various implementations of all these interfaces. In that scheme, a
{@link org.hibernate.cache.CacheProvider} defined how to configure and perform lifecycle operations
in regards to a particular underlying caching library; it also defined how to build {@link org.hibernate.cache.Cache}
instances which in turn defined how to access the "regions" of the underlying cache instance.
For entity and collection data cache regions, {@link org.hibernate.cache.CacheConcurrencyStrategy} wrapped
access to those cache regions to apply transactional/concurrent access semantics.
</p>
<p>
The improved approach is based on {@link org.hibernate.cache.RegionFactory}, the various
{@link org.hibernate.cache.Region} specializations and the two access strategies contracts
({@link org.hibernate.cache.access.EntityRegionAccessStrategy} and
{@link org.hibernate.cache.access.CollectionRegionAccessStrategy}). The general approach here is that
{@link org.hibernate.cache.RegionFactory} defined how to configure and perform lifecycle operations
in regards to a particular underlying caching library (<b>or libraries</b>).
{@link org.hibernate.cache.RegionFactory} also defines how to build specialized
{@link org.hibernate.cache.Region} instances based on the type of data we will be storing in that given
region. The fact that {@link org.hibernate.cache.RegionFactory} is asked to build <b>specialized</b>
regions (as opposed to just general access) is the first <i>improvement</i> over the legacy scheme. The
second <i>improvement</i> is the fact that the regions (well the ones like entity and collection regions
that are responsible for storing {@link org.hibernate.cache.TransactionalDataRegion transactional} data) are
asked to build their own access strategies (see {@link org.hibernate.cache.EntityRegion#buildAccessStrategy}
and {@link org.hibernate.cache.CollectionRegion#buildAccessStrategy}).
This package defines API of the Hibernate second level cache service. The
<a href="{@docRoot}/org/hibernate/cache/spi">org.hibernate.cache.spi</a> package defines the SPI used to
integrate with Hibernate internals.
</p>
</body>
</html>

View File

@ -34,25 +34,24 @@ public interface CacheDataDescription {
/**
* Is the data marked as being mutable?
*
* @return True if the data is mutable; false otherwise.
* @return {@code true} if the data is mutable; {@code false} otherwise.
*/
public boolean isMutable();
/**
* Is the data to be cached considered versioned?
* <p/>
* If true, it is illegal for {@link #getVersionComparator} to return
* null.
*
* @return True if the data is versioned; false otherwise.
* If {@code true}, it is illegal for {@link #getVersionComparator} to return {@code null}.
*
* @return {@code true} if the data is versioned; {@code false} otherwise.
*/
public boolean isVersioned();
/**
* Get the comparator used to compare two different version values.
* <p/>
* May return null <b>if</b> {@link #isVersioned()} returns false.
* @return
* Get the comparator used to compare two different version values. May return {@code null} <b>if</b>
* {@link #isVersioned()} returns false.
*
* @return The comparator for versions, or {@code null}
*/
public Comparator getVersionComparator();
}

View File

@ -24,7 +24,7 @@
package org.hibernate.cache.spi;
/**
* Marker interface for identifying {@link Cache} implementations which are aware of JTA transactions
* Marker interface for identifying {@link org.hibernate.Cache} implementations which are aware of JTA transactions
*
* @author Steve Ebersole
*/

View File

@ -24,8 +24,7 @@
package org.hibernate.cache.spi.access;
/**
* Moved up from inner definition on the now deprecated
* {@link org.hibernate.cache.spi.CacheConcurrencyStrategy}.
* Marker object for use by synchronous concurrency strategies
*
* @author Steve Ebersole
*/

View File

@ -1,10 +1,10 @@
<!--
~ Hibernate, Relational Persistence for Idiomatic Java
~
~ Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
~ Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
~ indicated by the @author tags or express copyright attribution
~ statements applied by the authors. All third-party contributions are
~ distributed under license by Red Hat Middleware LLC.
~ distributed under license by Red Hat Inc.
~
~ This copyrighted material is made available to anyone wishing to use, modify,
~ copy, or redistribute it subject to the terms and conditions of the GNU
@ -20,19 +20,17 @@
~ Free Software Foundation, Inc.
~ 51 Franklin Street, Fifth Floor
~ Boston, MA 02110-1301 USA
~
-->
<html>
<head></head>
<body>
<p>
Defines contracts for transactional and concurrent access to cached
{@link org.hibernate.cache.access.EntityRegionAccessStrategy entity} and
{@link org.hibernate.cache.access.CollectionRegionAccessStrategy collection} data. Transactions pass in a
{@link org.hibernate.cache.spi.access.EntityRegionAccessStrategy entity} and
{@link org.hibernate.cache.spi.access.CollectionRegionAccessStrategy collection} data. Transactions pass in a
timestamp indicating transaction start time which is then used to protect against concurrent access (exactly how
that occurs is based on the actual access-strategy impl used). Two different implementation patterns are provided
for.
for:
<ul>
<li>
A transaction-aware cache implementation might be wrapped by a <i>synchronous</i> access strategy,
@ -45,22 +43,22 @@
item.
</li>
</ul>
The <i>asynchronous</i> access strategies are: {@link org.hibernate.cache.access.AccessType.READ_ONLY read-only},
{@link org.hibernate.cache.access.AccessType.READ_WRITE read-write} and
{@link org.hibernate.cache.access.AccessType.NONSTRICT_READ_WRITE nonstrict-read-write}. The only
<i>synchronous</i> access strategy is {@link org.hibernate.cache.access.AccessType.TRANSACTIONAL transactional}.
The <i>asynchronous</i> access strategies are: {@link org.hibernate.cache.spi.access.AccessType#READ_ONLY read-only},
{@link org.hibernate.cache.spi.access.AccessType#READ_WRITE read-write} and
{@link org.hibernate.cache.spi.access.AccessType#NONSTRICT_READ_WRITE nonstrict-read-write}. The only
<i>synchronous</i> access strategy is {@link org.hibernate.cache.spi.access.AccessType#TRANSACTIONAL transactional}.
</p>
<p>
Note that, for an <i>asynchronous</i> cache, cache invalidation must be a two step process (lock->unlock or
lock->afterUpdate), since this is the only way to guarantee consistency with the database for a nontransactional
cache implementation. For a <i>synchronous</i> cache, cache invalidation is a single step process (evict or update).
Hence, these contracts ({@link org.hibernate.cache.access.EntityRegionAcessStrategy} and
{@link org.hibernate.cache.access.CollectionRegionAccessStrategy}) define a three step process to cater for both
Hence, these contracts ({@link org.hibernate.cache.spi.access.EntityRegionAccessStrategy} and
{@link org.hibernate.cache.spi.access.CollectionRegionAccessStrategy}) define a three step process to cater for both
models (see the individual contracts for details).
</p>
<p>
Note that query result caching does not go through an access strategy; those caches are managed directly against
the underlying {@link org.hibernate.cache.QueryResultsRegion}.
the underlying {@link org.hibernate.cache.spi.QueryResultsRegion}.
</p>
</body>
</html>

View File

@ -1,10 +1,10 @@
<!--
~ Hibernate, Relational Persistence for Idiomatic Java
~
~ Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
~ Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
~ indicated by the @author tags or express copyright attribution
~ statements applied by the authors. All third-party contributions are
~ distributed under license by Red Hat Middleware LLC.
~ distributed under license by Red Hat Inc.
~
~ This copyrighted material is made available to anyone wishing to use, modify,
~ copy, or redistribute it subject to the terms and conditions of the GNU
@ -20,16 +20,12 @@
~ Free Software Foundation, Inc.
~ 51 Franklin Street, Fifth Floor
~ Boston, MA 02110-1301 USA
~
-->
<html>
<head>
</head>
<body>
<p>
This package defines formats for disassembled state
kept in the second level cache.
This package defines formats for disassembled state kept in the second level cache.
</p>
</body>
</html>

View File

@ -0,0 +1,37 @@
<!--
~ Hibernate, Relational Persistence for Idiomatic Java
~
~ Copyright (c) 2011, Red Hat Inc. or third-party contributors as
~ indicated by the @author tags or express copyright attribution
~ statements applied by the authors. All third-party contributions are
~ distributed under license by Red Hat Inc.
~
~ This copyrighted material is made available to anyone wishing to use, modify,
~ copy, or redistribute it subject to the terms and conditions of the GNU
~ Lesser General Public License, as published by the Free Software Foundation.
~
~ This program is distributed in the hope that it will be useful,
~ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
~ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
~ for more details.
~
~ You should have received a copy of the GNU Lesser General Public License
~ along with this distribution; if not, write to:
~ Free Software Foundation, Inc.
~ 51 Franklin Street, Fifth Floor
~ Boston, MA 02110-1301 USA
-->
<html>
<body>
<p>
Defines the Hibernate second level caching SPI.
</p>
<p>
The initial contract here is {@link org.hibernate.cache.spi.RegionFactory} whose implementations are
responsible for configuring and managing lifecycle operations in regards to the particular underlying
caching library. Its other main purpose is to build specializations {@link org.hibernate.cache.spi.Region}
instances based on the type of data we will be storing in that given region.
</p>
</body>
</html>

View File

@ -168,6 +168,7 @@ import org.hibernate.usertype.UserType;
* @author Gavin King
* @see org.hibernate.SessionFactory
*/
@SuppressWarnings( {"UnusedDeclaration"})
public class Configuration implements Serializable {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, Configuration.class.getName());
@ -1507,16 +1508,14 @@ public class Configuration implements Serializable {
RuntimeException originalException = null;
while ( !stopProcess ) {
List<FkSecondPass> failingSecondPasses = new ArrayList<FkSecondPass>();
Iterator<FkSecondPass> it = endOfQueueFkSecondPasses.listIterator();
while ( it.hasNext() ) {
final FkSecondPass pass = it.next();
for ( FkSecondPass pass : endOfQueueFkSecondPasses ) {
try {
pass.doSecondPass( classes );
}
catch ( RecoverableException e ) {
catch (RecoverableException e) {
failingSecondPasses.add( pass );
if ( originalException == null ) {
originalException = ( RuntimeException ) e.getCause();
originalException = (RuntimeException) e.getCause();
}
}
}
@ -1621,7 +1620,7 @@ public class Configuration implements Serializable {
LOG.debug( "Processing foreign key constraints" );
itr = getTableMappings();
Set done = new HashSet();
Set<ForeignKey> done = new HashSet<ForeignKey>();
while ( itr.hasNext() ) {
secondPassCompileForeignKeys( (Table) itr.next(), done );
}
@ -1639,7 +1638,7 @@ public class Configuration implements Serializable {
if ( extendsQueue.size() > 0 ) {
Iterator iterator = extendsQueue.keySet().iterator();
StringBuffer buf = new StringBuffer( "Following super classes referenced in extends not found: " );
StringBuilder buf = new StringBuilder( "Following super classes referenced in extends not found: " );
while ( iterator.hasNext() ) {
final ExtendsQueueEntry entry = ( ExtendsQueueEntry ) iterator.next();
buf.append( entry.getExplicitName() );
@ -1670,7 +1669,7 @@ public class Configuration implements Serializable {
return null;
}
protected void secondPassCompileForeignKeys(Table table, Set done) throws MappingException {
protected void secondPassCompileForeignKeys(Table table, Set<ForeignKey> done) throws MappingException {
table.createForeignKeys();
Iterator iter = table.getForeignKeyIterator();
while ( iter.hasNext() ) {
@ -1714,7 +1713,9 @@ public class Configuration implements Serializable {
* {@link SessionFactory} will be immutable, so changes made to {@code this} {@link Configuration} after
* building the {@link SessionFactory} will not affect it.
*
* @return The build {@link SessionFactory}
* @param serviceRegistry The registry of services to be used in creating this session factory.
*
* @return The built {@link SessionFactory}
*
* @throws HibernateException usually indicates an invalid configuration or invalid mapping information
*/
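A hedged bootstrap sketch for this overload, assuming the ServiceRegistryBuilder of this release line and a hibernate.cfg.xml on the classpath:

import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.service.ServiceRegistryBuilder;

static SessionFactory buildSessionFactory() {
    Configuration configuration = new Configuration().configure();
    ServiceRegistry serviceRegistry = new ServiceRegistryBuilder()
            .applySettings( configuration.getProperties() )
            .buildServiceRegistry();
    return configuration.buildSessionFactory( serviceRegistry );
}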
@ -1776,7 +1777,7 @@ public class Configuration implements Serializable {
}
/**
* Rterieve the configured {@link Interceptor}.
* Retrieve the configured {@link Interceptor}.
*
* @return The current {@link Interceptor}
*/
@ -1787,7 +1788,7 @@ public class Configuration implements Serializable {
/**
* Set the current {@link Interceptor}
*
* @param interceptor The {@link Interceptor} to use for the {@link #buildSessionFactory) built}
* @param interceptor The {@link Interceptor} to use for the {@link #buildSessionFactory built}
* {@link SessionFactory}.
*
* @return this for method chaining
@ -2226,8 +2227,6 @@ public class Configuration implements Serializable {
* @param collectionRole The name of the collection to which we should associate these cache settings
* @param concurrencyStrategy The cache strategy to use
* @param region The name of the cache region to use
*
* @return this for method chaining
*/
public void setCollectionCacheConcurrencyStrategy(String collectionRole, String concurrencyStrategy, String region) {
caches.add( new CacheHolder( collectionRole, concurrencyStrategy, region, false, false ) );
@ -2254,6 +2253,8 @@ public class Configuration implements Serializable {
/**
* Create an object-oriented view of the configuration properties
*
* @param serviceRegistry The registry of services to be used in building these settings.
*
* @return The build settings
*/
public Settings buildSettings(ServiceRegistry serviceRegistry) {
@ -2442,6 +2443,7 @@ public class Configuration implements Serializable {
* Internal implementation of the Mappings interface giving access to the Configuration's internal
* <tt>metadata repository</tt> state ({@link Configuration#classes}, {@link Configuration#tables}, etc).
*/
@SuppressWarnings( {"deprecation", "unchecked"})
protected class MappingsImpl implements ExtendedMappings, Serializable {
private String schemaName;
@ -2824,7 +2826,7 @@ public class Configuration implements Serializable {
}
private String buildTableNameKey(String schema, String catalog, String finalName) {
StringBuffer keyBuilder = new StringBuffer();
StringBuilder keyBuilder = new StringBuilder();
if (schema != null) keyBuilder.append( schema );
keyBuilder.append( ".");
if (catalog != null) keyBuilder.append( catalog );

View File

@ -259,18 +259,6 @@ public class TableBinder {
}
/**
*
* @param schema
* @param catalog
* @param realTableName
* @param logicalName
* @param isAbstract
* @param uniqueConstraints
* @param constraints
* @param denormalizedSuperTable
* @param mappings
* @return
*
* @deprecated Use {@link #buildAndFillTable} instead.
*/
@Deprecated

View File

@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@ -20,21 +20,21 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.criterion;
/**
* Factory class for criterion instances that represent expressions
* involving subqueries.
*
* @see Restriction
* @see Restrictions
* @see Projection
* @see org.hibernate.Criteria
*
* @author Gavin King
* @author Lukasz Antoniak (lukasz dot antoniak at gmail dot com)
*/
@SuppressWarnings( {"UnusedDeclaration"})
public class Subqueries {
public static Criterion exists(DetachedCriteria dc) {

View File

@ -337,20 +337,20 @@ public abstract class Dialect {
/**
* Allows the dialect to override a {@link SqlTypeDescriptor}.
* <p/>
* If <code>sqlTypeDescriptor</code> is a "standard basic" SQL type
* descriptor, then this method uses {@link #getSqlTypeDescriptorOverride}
* to get an optional override based on the SQL code returned by
* If the passed {@code sqlTypeDescriptor} allows itself to be remapped (per
* {@link org.hibernate.type.descriptor.sql.SqlTypeDescriptor#canBeRemapped()}), then this method uses
* {@link #getSqlTypeDescriptorOverride} to get an optional override based on the SQL code returned by
* {@link SqlTypeDescriptor#getSqlType()}.
* <p/>
* If this dialect does not provide an override, then this method
* simply returns <code>sqlTypeDescriptor</code>
* If this dialect does not provide an override or if the {@code sqlTypeDescriptor} does not allow itself to be
* remapped, then this method simply returns the originally passed {@code sqlTypeDescriptor}.
*
* @param sqlTypeDescriptor The {@link SqlTypeDescriptor} to override
* @return The {@link SqlTypeDescriptor} that should be used for this dialect;
* if there is no override, then <code>sqlTypeDescriptor</code> is returned.
* @throws IllegalArgumentException if <code>sqlTypeDescriptor</code> is null.
* if there is no override, then the original {@code sqlTypeDescriptor} is returned.
* @throws IllegalArgumentException if {@code sqlTypeDescriptor} is null.
*
* @see {@link #getSqlTypeDescriptorOverride}
* @see #getSqlTypeDescriptorOverride
*/
public SqlTypeDescriptor remapSqlTypeDescriptor(SqlTypeDescriptor sqlTypeDescriptor) {
if ( sqlTypeDescriptor == null ) {
@ -365,13 +365,11 @@ public abstract class Dialect {
}
/**
* Returns the {@link SqlTypeDescriptor} that should override the
* "standard basic" SQL type descriptor for values of the specified
* column type, or null, if there is no override.
* Returns the {@link SqlTypeDescriptor} that should be used to handle the given JDBC type code. Returns
* {@code null} if there is no override.
*
* @param sqlCode A {@link Types} constant indicating the SQL column type
* @return The {@link SqlTypeDescriptor} that should override the
* "standard basic" SQL type descriptor, or null, if there is no override.
* @return The {@link SqlTypeDescriptor} to use as an override, or {@code null} if there is no override.
*/
protected SqlTypeDescriptor getSqlTypeDescriptorOverride(int sqlCode) {
SqlTypeDescriptor descriptor;
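Sketch of a custom dialect supplying such an override, offered only as illustration; the H2 base class and the CLOB-to-LONGVARCHAR remapping are arbitrary example choices:

import java.sql.Types;

import org.hibernate.dialect.H2Dialect;
import org.hibernate.type.descriptor.sql.LongVarcharTypeDescriptor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;

// Hypothetical dialect: route CLOB columns through the LONGVARCHAR descriptor instead.
public class ClobRemappingDialect extends H2Dialect {
    @Override
    protected SqlTypeDescriptor getSqlTypeDescriptorOverride(int sqlCode) {
        if ( sqlCode == Types.CLOB ) {
            return LongVarcharTypeDescriptor.INSTANCE;
        }
        // no override for any other JDBC type code
        return super.getSqlTypeDescriptorOverride( sqlCode );
    }
}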
@ -395,6 +393,7 @@ public abstract class Dialect {
/**
* The legacy behavior of Hibernate. LOBs are not processed by merge
*/
@SuppressWarnings( {"UnusedDeclaration"})
protected static final LobMergeStrategy LEGACY_LOB_MERGE_STRATEGY = new LobMergeStrategy() {
@Override
public Blob mergeBlob(Blob original, Blob target, SessionImplementor session) {
@ -415,6 +414,7 @@ public abstract class Dialect {
/**
* Merge strategy based on transferring contents based on streams.
*/
@SuppressWarnings( {"UnusedDeclaration"})
protected static final LobMergeStrategy STREAM_XFER_LOB_MERGE_STRATEGY = new LobMergeStrategy() {
@Override
public Blob mergeBlob(Blob original, Blob target, SessionImplementor session) {
@ -533,12 +533,13 @@ public abstract class Dialect {
/**
* Get the name of the Hibernate {@link org.hibernate.type.Type} associated with the given
* {@link java.sql.Types} typecode.
* {@link java.sql.Types} type code.
*
* @param code The {@link java.sql.Types} typecode
* @param code The {@link java.sql.Types} type code
* @return The Hibernate {@link org.hibernate.type.Type} name.
* @throws HibernateException If no mapping was specified for that type.
*/
@SuppressWarnings( {"UnusedDeclaration"})
public String getHibernateTypeName(int code) throws HibernateException {
String result = hibernateTypeNames.get( code );
if ( result == null ) {
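Illustrative lookup, not part of this patch; which JDBC codes have registered names, and the names returned, depend entirely on the dialect's registrations:

import java.sql.Types;

import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.H2Dialect;

public class HibernateTypeNameLookup {
    public static void main(String[] args) {
        Dialect dialect = new H2Dialect();
        // Look up the Hibernate type name registered for a JDBC type code; this throws
        // HibernateException if the dialect registered no mapping for that code.
        System.out.println( dialect.getHibernateTypeName( Types.BLOB ) );
    }
}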
@ -1046,8 +1047,8 @@ public abstract class Dialect {
* Dialect a chance to convert that value based on what the underlying db or driver will expect.
* <p/>
* NOTE: what gets passed into {@link #getLimitString(String,int,int)} is the zero-based offset. Dialects which
* do not {@link #supportsVariableLimit} should take care to perform any needed {@link #convertToFirstRowValue}
* calls prior to injecting the limit values into the SQL string.
* do not {@link #supportsVariableLimit} should take care to perform any needed first-row-conversion calls prior
* to injecting the limit values into the SQL string.
*
* @param zeroBasedFirstResult The user-supplied, zero-based first row offset.
*
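Sketch of the conversion hook described above, assuming a hypothetical database whose native paging counts rows from 1; the H2 base class is an arbitrary example:

import org.hibernate.dialect.H2Dialect;

// Hypothetical dialect whose native paging is 1-based rather than 0-based.
public class OneBasedPagingDialect extends H2Dialect {
    @Override
    public int convertToFirstRowValue(int zeroBasedFirstResult) {
        // translate the engine's zero-based offset into the database's 1-based first-row value
        return zeroBasedFirstResult + 1;
    }
}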
@ -1128,6 +1129,7 @@ public abstract class Dialect {
return getForUpdateString( lockMode, lockOptions.getTimeOut() );
}
@SuppressWarnings( {"deprecation"})
private String getForUpdateString(LockMode lockMode, int timeout){
switch ( lockMode ) {
case UPGRADE:
@ -2171,6 +2173,7 @@ public abstract class Dialect {
* @return Returns {@code true} if the database supports accepting bind params as args, {@code false} otherwise. The
* default is {@code true}.
*/
@SuppressWarnings( {"UnusedDeclaration"})
public boolean supportsBindAsCallableArgument() {
return true;
}

View File

@ -31,9 +31,7 @@ import org.hibernate.sql.CaseFragment;
/**
* A dialect for Oracle 9i databases.
* <p/>
* Unlike the older (deprecated) {@link Oracle9Dialect), this version specifies
* to not use "ANSI join syntax" because 9i does not seem to properly
* handle it in all cases.
* Specifies to not use "ANSI join syntax" because 9i does not seem to properly handle it in all cases.
*
* @author Steve Ebersole
*/
@ -67,7 +65,7 @@ public class Oracle9iDialect extends Oracle8iDialect {
isForUpdate = true;
}
StringBuffer pagingSelect = new StringBuffer( sql.length()+100 );
StringBuilder pagingSelect = new StringBuilder( sql.length() + 100 );
if (hasOffset) {
pagingSelect.append("select * from ( select row_.*, rownum rownum_ from ( ");
}
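For orientation only, the wrapped query built here has roughly this shape when an offset is present; the inner query and placeholder order are illustrative and not verified against this exact revision:

public class OraclePagingShape {
    // Approximate shape of the rownum-based paging wrapper around an arbitrary inner query.
    public static final String PAGED =
            "select * from ( select row_.*, rownum rownum_ from ( "
            + "select e.name from employees e"
            + " ) row_ where rownum <= ?) where rownum_ > ?";
}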

View File

@ -143,8 +143,7 @@ public class SQLServer2005Dialect extends SQLServerDialect {
/**
* Utility method that checks if the given sql query is a select distinct one and if so replaces the distinct select
* with an equivalent simple select with a group by clause. See
* {@link SQLServer2005DialectTestCase#testReplaceDistinctWithGroupBy()}
* with an equivalent simple select with a group by clause.
*
* @param sql an sql query
*/
@ -158,7 +157,7 @@ public class SQLServer2005Dialect extends SQLServerDialect {
/**
* This utility method searches the given sql query for the fields of the select statement and returns them without
* the aliases. See {@link SQLServer2005DialectTestCase#testGetSelectFieldsWithoutAliases()}
* the aliases.
*
* @param sql sql query
*
@ -172,7 +171,7 @@ public class SQLServer2005Dialect extends SQLServerDialect {
}
/**
* Utility method that strips the aliases. See {@link SQLServer2005DialectTestCase#testStripAliases()}
* Utility method that strips the aliases.
*
* @param str string to replace the as statements
*
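Before/after illustration of the distinct-to-group-by rewrite described above; the query is made up and the dialect's exact output formatting is not reproduced:

public class DistinctRewriteExample {
    // A "select distinct" over one column and its group-by equivalent.
    public static final String ORIGINAL  = "select distinct c.region from customers c";
    public static final String REWRITTEN = "select c.region from customers c group by c.region";
}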

View File

@ -72,23 +72,21 @@ import org.hibernate.proxy.LazyInitializer;
import org.hibernate.tuple.ElementWrapper;
/**
* A <tt>PersistenceContext</tt> represents the state of persistent "stuff" which
* Hibernate is tracking. This includes persistent entities, collections,
* as well as proxies generated.
* A <strong>stateful</strong> implementation of the {@link PersistenceContext} contract, meaning that we maintain this
* state throughout the life of the persistence context.
* <p/>
* There is meant to be a one-to-one correspondence between a SessionImpl and
* a PersistentContext. The SessionImpl uses the PersistentContext to track
* the current state of its context. Event-listeners then use the
* PersistentContext to drive their processing.
* IMPL NOTE: There is meant to be a one-to-one correspondence between a {@link org.hibernate.internal.SessionImpl}
* and a PersistenceContext. Event listeners and other Session collaborators then use the PersistenceContext to drive
* their processing.
*
* @author Steve Ebersole
*/
public class StatefulPersistenceContext implements PersistenceContext {
public static final Object NO_ROW = new MarkerObject( "NO_ROW" );
private static final CoreMessageLogger LOG = Logger.getMessageLogger( CoreMessageLogger.class, StatefulPersistenceContext.class.getName() );
public static final Object NO_ROW = new MarkerObject( "NO_ROW" );
private static final int INIT_COLL_SIZE = 8;
private SessionImplementor session;
@ -134,7 +132,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
// Parent entities cache by their child for cascading
// May be empty or not contains all relation
private Map parentsByChild;
private Map<Object,Object> parentsByChild;
private int cascading = 0;
private int loadCounter = 0;
@ -156,36 +154,40 @@ public class StatefulPersistenceContext implements PersistenceContext {
public StatefulPersistenceContext(SessionImplementor session) {
this.session = session;
entitiesByKey = new HashMap( INIT_COLL_SIZE );
entitiesByUniqueKey = new HashMap( INIT_COLL_SIZE );
proxiesByKey = new ReferenceMap( AbstractReferenceMap.HARD, AbstractReferenceMap.WEAK );
entitySnapshotsByKey = new HashMap( INIT_COLL_SIZE );
entitiesByKey = new HashMap<EntityKey, Object>( INIT_COLL_SIZE );
entitiesByUniqueKey = new HashMap<EntityUniqueKey, Object>( INIT_COLL_SIZE );
//noinspection unchecked
proxiesByKey = (Map<EntityKey, Object>) new ReferenceMap( AbstractReferenceMap.HARD, AbstractReferenceMap.WEAK );
entitySnapshotsByKey = new HashMap<EntityKey, Object>( INIT_COLL_SIZE );
entityEntries = IdentityMap.instantiateSequenced( INIT_COLL_SIZE );
collectionEntries = IdentityMap.instantiateSequenced( INIT_COLL_SIZE );
parentsByChild = IdentityMap.instantiateSequenced( INIT_COLL_SIZE );
collectionsByKey = new HashMap( INIT_COLL_SIZE );
collectionsByKey = new HashMap<CollectionKey, PersistentCollection>( INIT_COLL_SIZE );
arrayHolders = new IdentityHashMap<Object, PersistentCollection>( INIT_COLL_SIZE );
nullifiableEntityKeys = new HashSet();
nullifiableEntityKeys = new HashSet<EntityKey>();
initTransientState();
}
private void initTransientState() {
nullAssociations = new HashSet( INIT_COLL_SIZE );
nonlazyCollections = new ArrayList( INIT_COLL_SIZE );
nullAssociations = new HashSet<AssociationKey>( INIT_COLL_SIZE );
nonlazyCollections = new ArrayList<PersistentCollection>( INIT_COLL_SIZE );
}
@Override
public boolean isStateless() {
return false;
}
@Override
public SessionImplementor getSession() {
return session;
}
@Override
public LoadContexts getLoadContexts() {
if ( loadContexts == null ) {
loadContexts = new LoadContexts( this );
@ -193,6 +195,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
return loadContexts;
}
@Override
public void addUnownedCollection(CollectionKey key, PersistentCollection collection) {
if (unownedCollections==null) {
unownedCollections = new HashMap<CollectionKey,PersistentCollection>(8);
@ -200,6 +203,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
unownedCollections.put( key, collection );
}
@Override
public PersistentCollection useUnownedCollection(CollectionKey key) {
if ( unownedCollections == null ) {
return null;
@ -209,10 +213,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
}
/**
* Get the <tt>BatchFetchQueue</tt>, instantiating one if
* necessary.
*/
@Override
public BatchFetchQueue getBatchFetchQueue() {
if (batchFetchQueue==null) {
batchFetchQueue = new BatchFetchQueue(this);
@ -220,6 +221,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
return batchFetchQueue;
}
@Override
public void clear() {
for ( Object o : proxiesByKey.values() ) {
final LazyInitializer li = ((HibernateProxy) o).getHibernateLazyInitializer();
@ -251,24 +253,22 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
}
/**
* {@inheritDoc}
*/
@Override
public boolean isDefaultReadOnly() {
return defaultReadOnly;
}
/**
* {@inheritDoc}
*/
@Override
public void setDefaultReadOnly(boolean defaultReadOnly) {
this.defaultReadOnly = defaultReadOnly;
}
@Override
public boolean hasNonReadOnlyEntities() {
return hasNonReadOnlyEntities;
}
@Override
public void setEntryStatus(EntityEntry entry, Status status) {
entry.setStatus(status);
setHasNonReadOnlyEnties(status);
@ -280,6 +280,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
}
@Override
public void afterTransactionCompletion() {
cleanUpInsertedKeysAfterTransaction();
// Downgrade locks
@ -292,6 +293,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* Get the current state of the entity as known to the underlying
* database, or null if there is no corresponding row
*/
@Override
public Object[] getDatabaseSnapshot(Serializable id, EntityPersister persister)
throws HibernateException {
final EntityKey key = session.generateEntityKey( id, persister );
@ -306,6 +308,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
}
@Override
public Object[] getNaturalIdSnapshot(Serializable id, EntityPersister persister)
throws HibernateException {
if ( !persister.hasNaturalIdentifier() ) {
@ -317,8 +320,8 @@ public class StatefulPersistenceContext implements PersistenceContext {
int[] props = persister.getNaturalIdentifierProperties();
boolean[] updateable = persister.getPropertyUpdateability();
boolean allNatualIdPropsAreUpdateable = true;
for ( int i = 0; i < props.length; i++ ) {
if ( !updateable[ props[i] ] ) {
for ( int prop : props ) {
if ( !updateable[prop] ) {
allNatualIdPropsAreUpdateable = false;
break;
}
@ -354,6 +357,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* @return The cached snapshot
* @throws IllegalStateException if the cached snapshot was == {@link #NO_ROW}.
*/
@Override
public Object[] getCachedDatabaseSnapshot(EntityKey key) {
Object snapshot = entitySnapshotsByKey.get( key );
if ( snapshot == NO_ROW ) {
@ -362,10 +366,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
return ( Object[] ) snapshot;
}
/*public void removeDatabaseSnapshot(EntityKey key) {
entitySnapshotsByKey.remove(key);
}*/
@Override
public void addEntity(EntityKey key, Object entity) {
entitiesByKey.put(key, entity);
getBatchFetchQueue().removeBatchLoadableEntityKey(key);
@ -375,10 +376,12 @@ public class StatefulPersistenceContext implements PersistenceContext {
* Get the entity instance associated with the given
* <tt>EntityKey</tt>
*/
@Override
public Object getEntity(EntityKey key) {
return entitiesByKey.get(key);
}
@Override
public boolean containsEntity(EntityKey key) {
return entitiesByKey.containsKey(key);
}
@ -388,6 +391,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* up other state associated with the entity, all except
* for the <tt>EntityEntry</tt>
*/
@Override
public Object removeEntity(EntityKey key) {
Object entity = entitiesByKey.remove(key);
Iterator iter = entitiesByUniqueKey.values().iterator();
@ -406,6 +410,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Get an entity cached by unique key
*/
@Override
public Object getEntity(EntityUniqueKey euk) {
return entitiesByUniqueKey.get(euk);
}
@ -413,6 +418,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Add an entity to the cache by unique key
*/
@Override
public void addEntity(EntityUniqueKey euk, Object entity) {
entitiesByUniqueKey.put(euk, entity);
}
@ -423,6 +429,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* @param entity The entity for which to locate the EntityEntry.
* @return The EntityEntry for the given entity.
*/
@Override
public EntityEntry getEntry(Object entity) {
return entityEntries.get(entity);
}
@ -430,6 +437,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Remove an entity entry from the session cache
*/
@Override
public EntityEntry removeEntry(Object entity) {
return entityEntries.remove(entity);
}
@ -437,6 +445,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Is there an EntityEntry for this instance?
*/
@Override
public boolean isEntryFor(Object entity) {
return entityEntries.containsKey(entity);
}
@ -444,6 +453,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Get the collection entry for a persistent collection
*/
@Override
public CollectionEntry getCollectionEntry(PersistentCollection coll) {
return collectionEntries.get(coll);
}
@ -451,6 +461,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Adds an entity to the internal caches.
*/
@Override
public EntityEntry addEntity(
final Object entity,
final Status status,
@ -461,11 +472,8 @@ public class StatefulPersistenceContext implements PersistenceContext {
final boolean existsInDatabase,
final EntityPersister persister,
final boolean disableVersionIncrement,
boolean lazyPropertiesAreUnfetched
) {
boolean lazyPropertiesAreUnfetched) {
addEntity( entityKey, entity );
return addEntry(
entity,
status,
@ -486,6 +494,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* Generates an appropriate EntityEntry instance and adds it
* to the event source's internal caches.
*/
@Override
public EntityEntry addEntry(
final Object entity,
final Status status,
@ -519,10 +528,12 @@ public class StatefulPersistenceContext implements PersistenceContext {
return e;
}
@Override
public boolean containsCollection(PersistentCollection collection) {
return collectionEntries.containsKey(collection);
}
@Override
public boolean containsProxy(Object entity) {
return proxiesByKey.containsValue( entity );
}
@ -534,6 +545,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* @return Whether the passed value represented an actual proxy which got initialized.
* @throws MappingException
*/
@Override
public boolean reassociateIfUninitializedProxy(Object value) throws MappingException {
if ( value instanceof ElementWrapper ) {
value = ( (ElementWrapper) value ).getElement();
@ -554,6 +566,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* If a deleted entity instance is re-saved, and it has a proxy, we need to
* reset the identifier of the proxy
*/
@Override
public void reassociateProxy(Object value, Serializable id) throws MappingException {
if ( value instanceof ElementWrapper ) {
value = ( (ElementWrapper) value ).getElement();
@ -591,6 +604,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* an exception if the proxy is uninitialized. If the given object
* is not a proxy, simply return the argument.
*/
@Override
public Object unproxy(Object maybeProxy) throws HibernateException {
if ( maybeProxy instanceof ElementWrapper ) {
maybeProxy = ( (ElementWrapper) maybeProxy ).getElement();
@ -619,6 +633,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* @return The unproxied instance.
* @throws HibernateException
*/
@Override
public Object unproxyAndReassociate(Object maybeProxy) throws HibernateException {
if ( maybeProxy instanceof ElementWrapper ) {
maybeProxy = ( (ElementWrapper) maybeProxy ).getElement();
@ -641,6 +656,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* @param object The entity reference against which to perform the uniqueness check.
* @throws HibernateException
*/
@Override
public void checkUniqueness(EntityKey key, Object object) throws HibernateException {
Object entity = getEntity(key);
if ( entity == object ) {
@ -664,6 +680,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* @return An appropriately narrowed instance.
* @throws HibernateException
*/
@Override
public Object narrowProxy(Object proxy, EntityPersister persister, EntityKey key, Object object)
throws HibernateException {
@ -711,6 +728,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* third argument (the entity associated with the key) if no proxy exists. Init
* the proxy to the target implementation, if necessary.
*/
@Override
public Object proxyFor(EntityPersister persister, EntityKey key, Object impl)
throws HibernateException {
if ( !persister.hasProxy() ) return impl;
@ -728,6 +746,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* argument (the entity associated with the key) if no proxy exists.
* (slower than the form above)
*/
@Override
public Object proxyFor(Object impl) throws HibernateException {
EntityEntry e = getEntry(impl);
return proxyFor( e.getPersister(), e.getEntityKey(), impl );
@ -736,6 +755,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Get the entity that owns this persistent collection
*/
@Override
public Object getCollectionOwner(Serializable key, CollectionPersister collectionPersister) throws MappingException {
return getEntity( session.generateEntityKey( key, collectionPersister.getOwnerEntityPersister() ) );
}
@ -747,6 +767,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* @return the owner, if its entity ID is available from the collection's loaded key
* and the owner entity is in the persistence context; otherwise, returns null
*/
@Override
public Object getLoadedCollectionOwnerOrNull(PersistentCollection collection) {
CollectionEntry ce = getCollectionEntry( collection );
if ( ce.getLoadedPersister() == null ) {
@ -768,6 +789,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* @param collection The persistent collection
* @return the owner ID if available from the collection's loaded key; otherwise, returns null
*/
@Override
public Serializable getLoadedCollectionOwnerIdOrNull(PersistentCollection collection) {
return getLoadedCollectionOwnerIdOrNull( getCollectionEntry( collection ) );
}
@ -790,6 +812,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* add a collection we just loaded up (still needs initializing)
*/
@Override
public void addUninitializedCollection(CollectionPersister persister, PersistentCollection collection, Serializable id) {
CollectionEntry ce = new CollectionEntry(collection, persister, id, flushing);
addCollection(collection, ce, id);
@ -798,6 +821,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* add a detached uninitialized collection
*/
@Override
public void addUninitializedDetachedCollection(CollectionPersister persister, PersistentCollection collection) {
CollectionEntry ce = new CollectionEntry( persister, collection.getKey() );
addCollection( collection, ce, collection.getKey() );
@ -808,6 +832,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* application, with no database state or snapshot)
* @param collection The collection to be associated with the persistence context
*/
@Override
public void addNewCollection(CollectionPersister persister, PersistentCollection collection)
throws HibernateException {
addCollection(collection, persister);
@ -851,6 +876,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* add an (initialized) collection that was created by another session and passed
* into update() (ie. one with a snapshot and existing state on the database)
*/
@Override
public void addInitializedDetachedCollection(CollectionPersister collectionPersister, PersistentCollection collection)
throws HibernateException {
if ( collection.isUnreferenced() ) {
@ -866,6 +892,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* add a collection we just pulled out of the cache (does not need initializing)
*/
@Override
public CollectionEntry addInitializedCollection(CollectionPersister persister, PersistentCollection collection, Serializable id)
throws HibernateException {
CollectionEntry ce = new CollectionEntry(collection, persister, id, flushing);
@ -877,6 +904,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Get the collection instance associated with the <tt>CollectionKey</tt>
*/
@Override
public PersistentCollection getCollection(CollectionKey collectionKey) {
return collectionsByKey.get( collectionKey );
}
@ -885,6 +913,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* Register a collection for non-lazy loading at the end of the
* two-phase load
*/
@Override
public void addNonLazyCollection(PersistentCollection collection) {
nonlazyCollections.add(collection);
}
@ -894,6 +923,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* the current two-phase load (actually, this is a no-op, unless this
* is the "outermost" load)
*/
@Override
public void initializeNonLazyCollections() throws HibernateException {
if ( loadCounter == 0 ) {
LOG.debug( "Initializing non-lazy collections" );
@ -917,6 +947,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Get the <tt>PersistentCollection</tt> object for an array
*/
@Override
public PersistentCollection getCollectionHolder(Object array) {
return arrayHolders.get(array);
}
@ -926,11 +957,13 @@ public class StatefulPersistenceContext implements PersistenceContext {
* Associates a holder with an array - MUST be called after loading
* array, since the array instance is not created until endLoad().
*/
@Override
public void addCollectionHolder(PersistentCollection holder) {
//TODO:refactor + make this method private
arrayHolders.put( holder.getValue(), holder );
}
@Override
public PersistentCollection removeCollectionHolder(Object array) {
return arrayHolders.remove(array);
}
@ -938,6 +971,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Get the snapshot of the pre-flush collection state
*/
@Override
public Serializable getSnapshot(PersistentCollection coll) {
return getCollectionEntry(coll).getSnapshot();
}
@ -947,6 +981,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* which might be a collection wrapper, an array, or an unwrapped
* collection. Return null if there is no entry.
*/
@Override
public CollectionEntry getCollectionEntryOrNull(Object collection) {
PersistentCollection coll;
if ( collection instanceof PersistentCollection ) {
@ -975,6 +1010,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Get an existing proxy by key
*/
@Override
public Object getProxy(EntityKey key) {
return proxiesByKey.get(key);
}
@ -982,6 +1018,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Add a proxy to the session cache
*/
@Override
public void addProxy(EntityKey key, Object proxy) {
proxiesByKey.put(key, proxy);
}
@ -995,6 +1032,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* @param key The key of the entity proxy to be removed
* @return The proxy reference.
*/
@Override
public Object removeProxy(EntityKey key) {
if ( batchFetchQueue != null ) {
batchFetchQueue.removeBatchLoadableEntityKey( key );
@ -1003,35 +1041,15 @@ public class StatefulPersistenceContext implements PersistenceContext {
return proxiesByKey.remove( key );
}
/**
* Record the fact that an entity does not exist in the database
*
* @param key the primary key of the entity
*/
/*public void addNonExistantEntityKey(EntityKey key) {
nonExistantEntityKeys.add(key);
}*/
/**
* Record the fact that an entity does not exist in the database
*
* @param key a unique key of the entity
*/
/*public void addNonExistantEntityUniqueKey(EntityUniqueKey key) {
nonExistentEntityUniqueKeys.add(key);
}*/
/*public void removeNonExist(EntityKey key) {
nonExistantEntityKeys.remove(key);
}*/
/**
* Retrieve the set of EntityKeys representing nullifiable references
*/
@Override
public HashSet getNullifiableEntityKeys() {
return nullifiableEntityKeys;
}
@Override
public Map getEntitiesByKey() {
return entitiesByKey;
}
@ -1040,50 +1058,42 @@ public class StatefulPersistenceContext implements PersistenceContext {
return proxiesByKey;
}
@Override
public Map getEntityEntries() {
return entityEntries;
}
@Override
public Map getCollectionEntries() {
return collectionEntries;
}
@Override
public Map getCollectionsByKey() {
return collectionsByKey;
}
/**
* Do we already know that the entity does not exist in the
* database?
*/
/*public boolean isNonExistant(EntityKey key) {
return nonExistantEntityKeys.contains(key);
}*/
/**
* Do we already know that the entity does not exist in the
* database?
*/
/*public boolean isNonExistant(EntityUniqueKey key) {
return nonExistentEntityUniqueKeys.contains(key);
}*/
@Override
public int getCascadeLevel() {
return cascading;
}
@Override
public int incrementCascadeLevel() {
return ++cascading;
}
@Override
public int decrementCascadeLevel() {
return --cascading;
}
@Override
public boolean isFlushing() {
return flushing;
}
@Override
public void setFlushing(boolean flushing) {
this.flushing = flushing;
}
@ -1091,6 +1101,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Call this before beginning a two-phase load
*/
@Override
public void beforeLoad() {
loadCounter++;
}
@ -1098,13 +1109,16 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Call this after finishing a two-phase load
*/
@Override
public void afterLoad() {
loadCounter--;
}
@Override
public boolean isLoadFinished() {
return loadCounter == 0;
}
/**
* Returns a string representation of the object.
*
@ -1142,6 +1156,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* @return The id of the entityName instance which is said to own the child; null if an appropriate owner not
* located.
*/
@Override
public Serializable getOwnerId(String entityName, String propertyName, Object childEntity, Map mergeMap) {
final String collectionRole = entityName + '.' + propertyName;
final EntityPersister persister = session.getFactory().getEntityPersister( entityName );
@ -1204,11 +1219,10 @@ public class StatefulPersistenceContext implements PersistenceContext {
// NOTE: decided to put this here rather than in the above loop as I was nervous about the performance
// of the loop-in-loop especially considering this is far more likely the 'edge case'
if ( mergeMap != null ) {
Iterator mergeMapItr = mergeMap.entrySet().iterator();
while ( mergeMapItr.hasNext() ) {
final Map.Entry mergeMapEntry = ( Map.Entry ) mergeMapItr.next();
for ( Object o : mergeMap.entrySet() ) {
final Entry mergeMapEntry = (Entry) o;
if ( mergeMapEntry.getKey() instanceof HibernateProxy ) {
final HibernateProxy proxy = ( HibernateProxy ) mergeMapEntry.getKey();
final HibernateProxy proxy = (HibernateProxy) mergeMapEntry.getKey();
if ( persister.isSubclassEntityName( proxy.getHibernateLazyInitializer().getEntityName() ) ) {
boolean found = isFoundInParent(
propertyName,
@ -1253,6 +1267,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* Search the persistence context for an index of the child object,
* given a collection role
*/
@Override
public Object getIndexInOwner(String entity, String property, Object childEntity, Map mergeMap) {
EntityPersister persister = session.getFactory()
@ -1311,8 +1326,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
Object childEntity,
EntityPersister persister,
CollectionPersister collectionPersister,
Object potentialParent
){
Object potentialParent){
Object collection = persister.getPropertyValue( potentialParent, property );
if ( collection!=null && Hibernate.isInitialized(collection) ) {
return collectionPersister.getCollectionType().indexOf(collection, childEntity);
@ -1326,6 +1340,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
* Record the fact that the association belonging to the keyed
* entity is null.
*/
@Override
public void addNullProperty(EntityKey ownerKey, String propertyName) {
nullAssociations.add( new AssociationKey(ownerKey, propertyName) );
}
@ -1333,6 +1348,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
/**
* Is the association property belonging to the keyed entity null?
*/
@Override
public boolean isPropertyNull(EntityKey ownerKey, String propertyName) {
return nullAssociations.contains( new AssociationKey(ownerKey, propertyName) );
}
@ -1341,6 +1357,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
nullAssociations.clear();
}
@Override
public boolean isReadOnly(Object entityOrProxy) {
if ( entityOrProxy == null ) {
throw new AssertionFailure( "object must be non-null." );
@ -1359,6 +1376,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
return isReadOnly;
}
@Override
public void setReadOnly(Object object, boolean readOnly) {
if ( object == null ) {
throw new AssertionFailure( "object must be non-null." );
@ -1404,6 +1422,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
hasNonReadOnlyEntities = hasNonReadOnlyEntities || ! readOnly;
}
@Override
public void replaceDelayedEntityIdentityInsertKeys(EntityKey oldKey, Serializable generatedId) {
Object entity = entitiesByKey.remove( oldKey );
EntityEntry oldEntry = entityEntries.remove( entity );
@ -1514,9 +1533,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
oos.writeInt( nullifiableEntityKeys.size() );
if ( tracing ) LOG.trace("Starting serialization of [" + nullifiableEntityKeys.size() + "] nullifiableEntityKey entries");
Iterator<EntityKey> entityKeyIterator = nullifiableEntityKeys.iterator();
while ( entityKeyIterator.hasNext() ) {
EntityKey entry = entityKeyIterator.next();
for ( EntityKey entry : nullifiableEntityKeys ) {
entry.serialize( oos );
}
}
@ -1540,20 +1557,21 @@ public class StatefulPersistenceContext implements PersistenceContext {
int count = ois.readInt();
if ( tracing ) LOG.trace("Starting deserialization of [" + count + "] entitiesByKey entries");
rtn.entitiesByKey = new HashMap( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
rtn.entitiesByKey = new HashMap<EntityKey,Object>( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) {
rtn.entitiesByKey.put( EntityKey.deserialize( ois, session ), ois.readObject() );
}
count = ois.readInt();
if ( tracing ) LOG.trace("Starting deserialization of [" + count + "] entitiesByUniqueKey entries");
rtn.entitiesByUniqueKey = new HashMap( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
rtn.entitiesByUniqueKey = new HashMap<EntityUniqueKey,Object>( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) {
rtn.entitiesByUniqueKey.put( EntityUniqueKey.deserialize( ois, session ), ois.readObject() );
}
count = ois.readInt();
if ( tracing ) LOG.trace("Starting deserialization of [" + count + "] proxiesByKey entries");
//noinspection unchecked
rtn.proxiesByKey = new ReferenceMap( AbstractReferenceMap.HARD, AbstractReferenceMap.WEAK, count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count, .75f );
for ( int i = 0; i < count; i++ ) {
EntityKey ek = EntityKey.deserialize( ois, session );
@ -1569,7 +1587,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
count = ois.readInt();
if ( tracing ) LOG.trace("Starting deserialization of [" + count + "] entitySnapshotsByKey entries");
rtn.entitySnapshotsByKey = new HashMap( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
rtn.entitySnapshotsByKey = new HashMap<EntityKey,Object>( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) {
rtn.entitySnapshotsByKey.put( EntityKey.deserialize( ois, session ), ois.readObject() );
}
@ -1585,7 +1603,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
count = ois.readInt();
if ( tracing ) LOG.trace("Starting deserialization of [" + count + "] collectionsByKey entries");
rtn.collectionsByKey = new HashMap( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
rtn.collectionsByKey = new HashMap<CollectionKey,PersistentCollection>( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) {
rtn.collectionsByKey.put( CollectionKey.deserialize( ois, session ), (PersistentCollection) ois.readObject() );
}
@ -1609,7 +1627,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
count = ois.readInt();
if ( tracing ) LOG.trace("Starting deserialization of [" + count + "] nullifiableEntityKey entries");
rtn.nullifiableEntityKeys = new HashSet();
rtn.nullifiableEntityKeys = new HashSet<EntityKey>();
for ( int i = 0; i < count; i++ ) {
rtn.nullifiableEntityKeys.add( EntityKey.deserialize( ois, session ) );
}
@ -1622,16 +1640,12 @@ public class StatefulPersistenceContext implements PersistenceContext {
return rtn;
}
/**
* @see org.hibernate.engine.spi.PersistenceContext#addChildParent(java.lang.Object, java.lang.Object)
*/
@Override
public void addChildParent(Object child, Object parent) {
parentsByChild.put(child, parent);
}
/**
* @see org.hibernate.engine.spi.PersistenceContext#removeChildParent(java.lang.Object)
*/
@Override
public void removeChildParent(Object child) {
parentsByChild.remove(child);
}
@ -1639,11 +1653,9 @@ public class StatefulPersistenceContext implements PersistenceContext {
private HashMap<String,List<Serializable>> insertedKeysMap;
/**
* {@inheritDoc}
*/
@Override
public void registerInsertedKey(EntityPersister persister, Serializable id) {
// we only are about regsitering these if the persister defines caching
// we only are worried about registering these if the persister defines caching
if ( persister.hasCache() ) {
if ( insertedKeysMap == null ) {
insertedKeysMap = new HashMap<String, List<Serializable>>();
@ -1658,9 +1670,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
}
}
/**
* {@inheritDoc}
*/
@Override
public boolean wasInsertedDuringTransaction(EntityPersister persister, Serializable id) {
// again, we only really care if the entity is cached
if ( persister.hasCache() ) {

View File

@ -35,6 +35,7 @@ import org.hibernate.engine.jdbc.internal.TypeInfo;
*
* @author Steve Ebersole
*/
@SuppressWarnings( {"UnusedDeclaration"})
public interface ExtractedDatabaseMetaData {
public enum SQLStateType {
@ -47,6 +48,7 @@ public interface ExtractedDatabaseMetaData {
* Did the driver report to supporting scrollable result sets?
*
* @return True if the driver reported to support {@link java.sql.ResultSet#TYPE_SCROLL_INSENSITIVE}.
*
* @see java.sql.DatabaseMetaData#supportsResultSetType
*/
public boolean supportsScrollableResults();
@ -55,6 +57,7 @@ public interface ExtractedDatabaseMetaData {
* Did the driver report to supporting retrieval of generated keys?
*
* @return True if the if the driver reported to support calls to {@link java.sql.Statement#getGeneratedKeys}
*
* @see java.sql.DatabaseMetaData#supportsGetGeneratedKeys
*/
public boolean supportsGetGeneratedKeys();
@ -63,6 +66,7 @@ public interface ExtractedDatabaseMetaData {
* Did the driver report to supporting batched updates?
*
* @return True if the driver supports batched updates
*
* @see java.sql.DatabaseMetaData#supportsBatchUpdates
*/
public boolean supportsBatchUpdates();
@ -71,6 +75,7 @@ public interface ExtractedDatabaseMetaData {
* Did the driver report to support performing DDL within transactions?
*
* @return True if the drivers supports DDL statements within transactions.
*
* @see java.sql.DatabaseMetaData#dataDefinitionIgnoredInTransactions
*/
public boolean supportsDataDefinitionInTransaction();
@ -81,6 +86,7 @@ public interface ExtractedDatabaseMetaData {
*
* @return True if the driver/database performs an implicit commit of transaction when DDL statement is
* performed
*
* @see java.sql.DatabaseMetaData#dataDefinitionCausesTransactionCommit()
*/
public boolean doesDataDefinitionCauseTransactionCommit();
@ -89,6 +95,7 @@ public interface ExtractedDatabaseMetaData {
* Get the list of extra keywords (beyond standard SQL92 keywords) reported by the driver.
*
* @return The extra keywords used by this database.
*
* @see java.sql.DatabaseMetaData#getSQLKeywords()
*/
public Set<String> getExtraKeywords();
@ -98,6 +105,7 @@ public interface ExtractedDatabaseMetaData {
* the X/Open standard or the SQL92 standard.
*
* @return The SQLState strategy reportedly used by this driver/database.
*
* @see java.sql.DatabaseMetaData#getSQLStateType()
*/
public SQLStateType getSqlStateType();
@ -106,6 +114,7 @@ public interface ExtractedDatabaseMetaData {
* Did the driver report that updates to a LOB locator affect a copy of the LOB?
*
* @return True if updates to the state of a LOB locator update only a copy.
*
* @see java.sql.DatabaseMetaData#locatorsUpdateCopy()
*/
public boolean doesLobLocatorUpdateCopy();
@ -127,7 +136,9 @@ public interface ExtractedDatabaseMetaData {
/**
* Set of type info reported by the driver.
*
* @return
* @return The type information obtained from the driver.
*
* @see java.sql.DatabaseMetaData#getTypeInfo()
*/
public LinkedHashSet<TypeInfo> getTypeInfoSet();
}
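Illustrative plain-JDBC probe, not part of this patch, showing which java.sql.DatabaseMetaData calls report the capability flags listed above; the in-memory H2 URL is a made-up example:

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;

public class MetaDataProbe {
    public static void main(String[] args) throws SQLException {
        Connection conn = DriverManager.getConnection( "jdbc:h2:mem:probe" );
        try {
            DatabaseMetaData meta = conn.getMetaData();
            System.out.println( "scrollable results   : " + meta.supportsResultSetType( ResultSet.TYPE_SCROLL_INSENSITIVE ) );
            System.out.println( "generated keys       : " + meta.supportsGetGeneratedKeys() );
            System.out.println( "batch updates        : " + meta.supportsBatchUpdates() );
            System.out.println( "DDL causes commit    : " + meta.dataDefinitionCausesTransactionCommit() );
            System.out.println( "locators update copy : " + meta.locatorsUpdateCopy() );
        }
        finally {
            conn.close();
        }
    }
}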

View File

@ -95,7 +95,7 @@ public interface JdbcCoordinator extends Serializable {
*
* @return The {@link Connection} associated with the managed {@link #getLogicalConnection() logical connection}
*
* @see {@link LogicalConnection#close()}
* @see LogicalConnection#close
*/
public Connection close();

View File

@ -62,15 +62,15 @@ public interface LogicalConnection extends Serializable {
public Connection getConnection();
/**
* Retrieves the shareable connection proxy (see {@link org.hibernate.engine.jdbc.internal.proxy} for details).
* Retrieves the shareable connection proxy.
*
* @return The shareable connection proxy.
*/
public Connection getShareableConnectionProxy();
/**
* Retrieves a distinct connection proxy (see {@link org.hibernate.engine.jdbc.internal.proxy} for details). It
* is distinct in that it is not shared with others unless the caller explicitly shares it.
* Retrieves a distinct connection proxy. It is distinct in that it is not shared with others unless the caller
* explicitly shares it.
*
* @return The distinct connection proxy.
*/

View File

@ -32,12 +32,11 @@ import org.jboss.logging.Logger;
import org.hibernate.internal.CoreMessageLogger;
/**
* {@inheritDoc}
* Tracks information about loading of entities specific to a given result set. These can be hierarchical.
*
* @author Steve Ebersole
*/
public class EntityLoadContext {
private static final CoreMessageLogger LOG = Logger.getMessageLogger( CoreMessageLogger.class, EntityLoadContext.class.getName() );
private final LoadContexts loadContexts;
@ -50,7 +49,9 @@ public class EntityLoadContext {
}
void cleanup() {
if ( !hydratingEntities.isEmpty() ) LOG.hydratingEntitiesCount( hydratingEntities.size() );
if ( !hydratingEntities.isEmpty() ) {
LOG.hydratingEntitiesCount( hydratingEntities.size() );
}
hydratingEntities.clear();
}

View File

@ -42,18 +42,15 @@ import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.pretty.MessageHelper;
/**
* Maps {@link ResultSet result-sets} to specific contextual data
* related to processing that {@link ResultSet result-sets}.
* Maps {@link ResultSet result-sets} to specific contextual data related to processing that result set
* <p/>
* Implementation note: internally an {@link IdentityMap} is used to maintain
* the mappings; {@link IdentityMap} was chosen because I'd rather not be
* dependent upon potentially bad {@link ResultSet#equals} and {ResultSet#hashCode}
* implementations.
* Implementation note: internally an {@link IdentityMap} is used to maintain the mappings mainly because I'd
* rather not be dependent upon potentially bad {@link Object#equals} and {@link Object#hashCode} implementations on
* the JDBC result sets
* <p/>
* Considering the JDBC-redesign work, would further like this contextual info
* not mapped seperately, but available based on the result set being processed.
* This would also allow maintaining a single mapping as we could reliably get
* notification of the result-set closing...
* Considering the JDBC-redesign work, would further like this contextual info not mapped separately, but available
* based on the result set being processed. This would also allow maintaining a single mapping as we could reliably
* get notification of the result-set closing...
*
* @author Steve Ebersole
*/
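Minimal sketch of the identity-keyed mapping idea, using java.util.IdentityHashMap as a stand-in for Hibernate's internal IdentityMap; it is not the class being changed here:

import java.sql.ResultSet;
import java.util.IdentityHashMap;
import java.util.Map;

// Contexts are keyed by result-set *instance*, so a driver's equals()/hashCode()
// implementation never comes into play.
public class ResultSetContexts<C> {
    private final Map<ResultSet, C> contexts = new IdentityHashMap<ResultSet, C>();

    public void register(ResultSet resultSet, C context) {
        contexts.put( resultSet, context );
    }

    public C lookup(ResultSet resultSet) {
        return contexts.get( resultSet );
    }

    public C cleanup(ResultSet resultSet) {
        return contexts.remove( resultSet );
    }
}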
@ -257,7 +254,8 @@ public class LoadContexts {
xrefLoadingCollectionEntries.remove(key);
}
/*package*/Map getLoadingCollectionXRefs() {
@SuppressWarnings( {"UnusedDeclaration"})
Map getLoadingCollectionXRefs() {
return xrefLoadingCollectionEntries;
}
@ -298,6 +296,7 @@ public class LoadContexts {
// Entity load contexts ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// * currently, not yet used...
@SuppressWarnings( {"UnusedDeclaration"})
public EntityLoadContext getEntityLoadContext(ResultSet resultSet) {
EntityLoadContext context = null;
if ( entityLoadContexts == null ) {

View File

@ -40,7 +40,9 @@ import org.hibernate.type.Type;
public interface Mapping {
/**
* Allow access to the id generator factory, though this is only needed/allowed from configuration.
* @return
*
* @return Access to the identifier generator factory
*
* @deprecated temporary solution
*/
public IdentifierGeneratorFactory getIdentifierGeneratorFactory();

View File

@ -36,13 +36,23 @@ import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
/**
* Holds the state of the persistence context, including the
* first-level cache, entries, snapshots, proxies, etc.
* Represents the state of "stuff" Hibernate is tracking, including (not exhaustive):
* <ul>
* <li>entities</li>
* <li>collections</li>
* <li>snapshots</li>
* <li>proxies</li>
* </ul>
* <p/>
* Often referred to as the "first level cache".
*
* @author Gavin King
* @author Steve Ebersole
*/
@SuppressWarnings( {"JavaDoc"})
public interface PersistenceContext {
@SuppressWarnings( {"UnusedDeclaration"})
public boolean isStateless();
/**
@ -61,18 +71,28 @@ public interface PersistenceContext {
/**
* Add a collection which has no owner loaded
*
* @param key The collection key under which to add the collection
* @param collection The collection to add
*/
public void addUnownedCollection(CollectionKey key, PersistentCollection collection);
/**
* Get and remove a collection whose owner is not yet loaded,
* when its owner is being loaded
* Take ownership of a previously unowned collection, if one exists. This method returns {@code null} if no such
* collection was previously added or if it was previously removed.
* <p/>
* This should indicate the owner is being loaded and we are ready to "link" them.
*
* @param key The collection key for which to locate a collection
*
* @return The unowned collection, or {@code null}
*/
public PersistentCollection useUnownedCollection(CollectionKey key);
/**
* Get the <tt>BatchFetchQueue</tt>, instantiating one if
* necessary.
* Get the {@link BatchFetchQueue}, instantiating one if necessary.
*
* @return The batch fetch queue in effect for this persistence context
*/
public BatchFetchQueue getBatchFetchQueue();
@ -84,10 +104,14 @@ public interface PersistenceContext {
/**
* @return false if we know for certain that all the entities are read-only
*/
@SuppressWarnings( {"UnusedDeclaration"})
public boolean hasNonReadOnlyEntities();
/**
* Set the status of an entry
*
* @param entry The entry for which to set the status
* @param status The new status
*/
public void setEntryStatus(EntityEntry entry, Status status);
@ -97,94 +121,155 @@ public interface PersistenceContext {
public void afterTransactionCompletion();
/**
* Get the current state of the entity as known to the underlying
* database, or null if there is no corresponding row
* Get the current state of the entity as known to the underlying database, or null if there is no
* corresponding row
*
* @param id The identifier of the entity for which to grab a snapshot
* @param persister The persister of the entity.
*
* @return The entity's (non-cached) snapshot
*
* @see #getCachedDatabaseSnapshot
*/
public Object[] getDatabaseSnapshot(Serializable id, EntityPersister persister)
throws HibernateException;
public Object[] getDatabaseSnapshot(Serializable id, EntityPersister persister);
/**
* Get the current database state of the entity, using the cached state snapshot if one is available.
*
* @param key The entity key
*
* @return The entity's snapshot, using the cached copy when one is available
*/
public Object[] getCachedDatabaseSnapshot(EntityKey key);
/**
* Get the values of the natural id fields as known to the underlying
* database, or null if the entity has no natural id or there is no
* corresponding row.
* Get the values of the natural id fields as known to the underlying database, or null if the entity has no
* natural id or there is no corresponding row.
*
* @param id The identifier of the entity for which to grab a snapshot
* @param persister The persister of the entity.
*
* @return The current (non-cached) snapshot of the entity's natural id state.
*/
public Object[] getNaturalIdSnapshot(Serializable id, EntityPersister persister)
throws HibernateException;
public Object[] getNaturalIdSnapshot(Serializable id, EntityPersister persister);
/**
* Add a canonical mapping from entity key to entity instance
*
* @param key The key under which to add an entity
* @param entity The entity instance to add
*/
public void addEntity(EntityKey key, Object entity);
/**
* Get the entity instance associated with the given
* <tt>EntityKey</tt>
* Get the entity instance associated with the given key
*
* @param key The key under which to look for an entity
*
* @return The matching entity, or {@code null}
*/
public Object getEntity(EntityKey key);
/**
* Is there an entity with the given key in the persistence context
*
* @param key The key under which to look for an entity
*
* @return {@code true} indicates an entity was found; otherwise {@code false}
*/
public boolean containsEntity(EntityKey key);
/**
* Remove an entity from the session cache, also clear
* up other state associated with the entity, all except
* for the <tt>EntityEntry</tt>
* Remove an entity. Also clears up all other state associated with the entity aside from the {@link EntityEntry}
*
* @param key The key whose matching entity should be removed
*
* @return The matching entity
*/
public Object removeEntity(EntityKey key);
/**
* Get an entity cached by unique key
*/
public Object getEntity(EntityUniqueKey euk);
/**
* Add an entity to the cache by unique key
*
* @param euk The unique (non-primary) key under which to add an entity
* @param entity The entity instance
*/
public void addEntity(EntityUniqueKey euk, Object entity);
/**
* Retreive the EntityEntry representation of the given entity.
* Get an entity cached by unique key
*
* @param entity The entity for which to locate the EntityEntry.
* @return The EntityEntry for the given entity.
* @param euk The unique (non-primary) key under which to look for an entity
*
* @return The located entity
*/
public Object getEntity(EntityUniqueKey euk);
/**
* Retrieve the {@link EntityEntry} representation of the given entity.
*
* @param entity The entity instance for which to locate the corresponding entry
* @return The entry
*/
public EntityEntry getEntry(Object entity);
/**
* Remove an entity entry from the session cache
*
* @param entity The entity instance for which to remove the corresponding entry
* @return The matching entry
*/
public EntityEntry removeEntry(Object entity);
/**
* Is there an EntityEntry for this instance?
* Is there an {@link EntityEntry} registration for this entity instance?
*
* @param entity The entity instance for which to check for an entry
*
* @return {@code true} indicates a matching entry was found.
*/
public boolean isEntryFor(Object entity);
/**
* Get the collection entry for a persistent collection
*
* @param coll The persistent collection instance for which to locate the collection entry
*
* @return The matching collection entry
*/
public CollectionEntry getCollectionEntry(PersistentCollection coll);
/**
* Adds an entity to the internal caches.
*/
public EntityEntry addEntity(final Object entity, final Status status,
final Object[] loadedState, final EntityKey entityKey, final Object version,
final LockMode lockMode, final boolean existsInDatabase,
final EntityPersister persister, final boolean disableVersionIncrement, boolean lazyPropertiesAreUnfetched);
public EntityEntry addEntity(
final Object entity,
final Status status,
final Object[] loadedState,
final EntityKey entityKey,
final Object version,
final LockMode lockMode,
final boolean existsInDatabase,
final EntityPersister persister,
final boolean disableVersionIncrement,
boolean lazyPropertiesAreUnfetched);
/**
* Generates an appropriate EntityEntry instance and adds it
* to the event source's internal caches.
*/
public EntityEntry addEntry(final Object entity, final Status status,
final Object[] loadedState, final Object rowId, final Serializable id,
final Object version, final LockMode lockMode, final boolean existsInDatabase,
final EntityPersister persister, final boolean disableVersionIncrement, boolean lazyPropertiesAreUnfetched);
public EntityEntry addEntry(
final Object entity,
final Status status,
final Object[] loadedState,
final Object rowId,
final Serializable id,
final Object version,
final LockMode lockMode,
final boolean existsInDatabase,
final EntityPersister persister,
final boolean disableVersionIncrement,
boolean lazyPropertiesAreUnfetched);
/**
* Is the given collection associated with this persistence context?
@ -428,6 +513,7 @@ public interface PersistenceContext {
/**
* Is a flush cycle currently in process?
*/
@SuppressWarnings( {"UnusedDeclaration"})
public boolean isFlushing();
/**
@ -526,13 +612,14 @@ public interface PersistenceContext {
/**
* Is the entity or proxy read-only?
* <p/>
* To determine the default read-only/modifiable setting used for entities and proxies that are loaded into the
* session, use {@link org.hibernate.Session#isDefaultReadOnly}
*
* To get the default read-only/modifiable setting used for
* entities and proxies that are loaded into the session:
* @see org.hibernate.Session#isDefaultReadOnly()
* @param entityOrProxy an entity or proxy
*
* @param entityOrProxy
* @return true, the object is read-only; false, the object is modifiable.
* @return {@code true} if the object is read-only; otherwise {@code false} to indicate that the object is
* modifiable.
*/
public boolean isReadOnly(Object entityOrProxy);
@ -551,35 +638,30 @@ public interface PersistenceContext {
* If the entity or proxy already has the specified read-only/modifiable
* setting, then this method does nothing.
*
* To set the default read-only/modifiable setting used for
* entities and proxies that are loaded into this persistence context:
* @see PersistenceContext#setDefaultReadOnly(boolean)
* @see org.hibernate.Session#setDefaultReadOnly(boolean)
* @param entityOrProxy an entity or proxy
* @param readOnly if {@code true}, the entity or proxy is made read-only; otherwise, the entity or proxy is made
* modifiable.
*
* To override this persistence context's read-only/modifiable setting
* for entities and proxies loaded by a Query:
* @see org.hibernate.Query#setReadOnly(boolean)
*
* @param entityOrProxy, an entity or HibernateProxy
* @param readOnly, if true, the entity or proxy is made read-only;
* if false, the entity or proxy is made modifiable.
*
* @see org.hibernate.Session#setReadOnly(Object, boolean)
* @see org.hibernate.Session#setDefaultReadOnly
* @see org.hibernate.Session#setReadOnly
* @see org.hibernate.Query#setReadOnly
*/
public void setReadOnly(Object entityOrProxy, boolean readOnly);
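Illustrative Session-level usage of the read-only controls referenced above, not part of this patch; the entity name and identifier are made up:

import org.hibernate.Session;

public class ReadOnlyExample {
    public static Object loadMostlyReadOnly(Session session) {
        session.setDefaultReadOnly( true );              // entities loaded from here on default to read-only
        Object customer = session.get( "com.example.Customer", Long.valueOf( 1L ) );
        session.setReadOnly( customer, false );          // selectively make this one instance modifiable again
        return customer;
    }
}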
void replaceDelayedEntityIdentityInsertKeys(EntityKey oldKey, Serializable generatedId);
/**
* Put child/parent relation to cache for cascading op
* @param parent
* @param child
* Add a child/parent relation to cache for cascading op
*
* @param child The child of the relationship
* @param parent The parent of the relationship
*/
public void addChildParent(Object parent, Object child);
public void addChildParent(Object child, Object parent);
/**
* Remove child/parent relation from cache
* @param parent
*
* @param child The child to be removed.
*/
public void removeChildParent(Object child);

View File

@ -50,7 +50,6 @@ import org.hibernate.type.Type;
* @author Gavin King
*/
public final class QueryParameters {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, QueryParameters.class.getName());
private Type[] positionalParameterTypes;
@ -228,6 +227,7 @@ public final class QueryParameters {
this.optionalObject = optionalObject;
}
@SuppressWarnings( {"UnusedDeclaration"})
public boolean hasRowSelection() {
return rowSelection != null;
}
@ -252,6 +252,7 @@ public final class QueryParameters {
return resultTransformer;
}
@SuppressWarnings( {"UnusedDeclaration"})
public void setNamedParameters(Map<String,TypedValue> map) {
namedParameters = map;
}
@ -264,6 +265,7 @@ public final class QueryParameters {
positionalParameterValues = objects;
}
@SuppressWarnings( {"UnusedDeclaration"})
public void setRowSelection(RowSelection selection) {
rowSelection = selection;
}
@ -333,6 +335,7 @@ public final class QueryParameters {
return collectionKeys;
}
@SuppressWarnings( {"UnusedDeclaration"})
public void setCollectionKeys(Serializable[] collectionKeys) {
this.collectionKeys = collectionKeys;
}
@ -399,9 +402,16 @@ public final class QueryParameters {
/**
* Should entities and proxies loaded by the Query be put in read-only mode? If the
* read-only/modifiable setting was not initialized
* (i.e., QueryParameters#isReadOnlyInitialized() == false), then the default
* read-only/modifiable setting for the persistence context is returned instead.
* read-only/modifiable setting was not initialized (i.e., QueryParameters#isReadOnlyInitialized() == false),
* then the default read-only/modifiable setting for the persistence context is returned instead.
* <p/>
* The read-only/modifiable setting has no impact on entities/proxies returned by the
* query that existed in the session before the query was executed.
*
* @param session The originating session
*
* @return {@code true} indicates that entities and proxies loaded by the query will be put in read-only mode;
* {@code false} indicates that entities and proxies loaded by the query will be put in modifiable mode
*
* @see QueryParameters#isReadOnlyInitialized()
* @see QueryParameters#setReadOnly(boolean)
@ -410,29 +420,26 @@ public final class QueryParameters {
* The read-only/modifiable setting has no impact on entities/proxies returned by the
* query that existed in the session before the query was executed.
*
* @return true, entities and proxies loaded by the query will be put in read-only mode
* false, entities and proxies loaded by the query will be put in modifiable mode
*/
public boolean isReadOnly(SessionImplementor session) {
return ( isReadOnlyInitialized ?
isReadOnly() :
session.getPersistenceContext().isDefaultReadOnly()
);
return isReadOnlyInitialized
? isReadOnly()
: session.getPersistenceContext().isDefaultReadOnly();
}
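A hedged usage sketch of that fallback, again assuming a hypothetical Order entity: once the query-level setting has been initialized it wins; otherwise the persistence context default applies.

    session.setDefaultReadOnly( false );                  // persistence context default: modifiable
    List orders = session.createQuery( "from Order" )
            .setReadOnly( true )                          // initializes the query-level setting, overriding the default
            .list();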
/**
* Set the read-only/modifiable mode for entities and proxies loaded by the query.
* *
* <p/>
* The read-only/modifiable setting has no impact on entities/proxies returned by the
* query that existed in the session before the query was executed.
*
* @param readOnly if {@code true}, entities and proxies loaded by the query will be put in read-only mode; if
* {@code false}, entities and proxies loaded by the query will be put in modifiable mode
*
* @see QueryParameters#isReadOnlyInitialized()
* @see QueryParameters#isReadOnly(org.hibernate.engine.spi.SessionImplementor)
* @see QueryParameters#setReadOnly(boolean)
* @see org.hibernate.engine.spi.PersistenceContext#isDefaultReadOnly()
*
* The read-only/modifiable setting has no impact on entities/proxies returned by the
* query that existed in the session before the query was executed.
*
* @return true, entities and proxies loaded by the query will be put in read-only mode
* false, entities and proxies loaded by the query will be put in modifiable mode
*/
public void setReadOnly(boolean readOnly) {
this.readOnly = readOnly;
@ -455,8 +462,9 @@ public final class QueryParameters {
processFilters( sql, session.getLoadQueryInfluencers().getEnabledFilters(), session.getFactory() );
}
@SuppressWarnings( {"unchecked"})
public void processFilters(String sql, Map filters, SessionFactoryImplementor factory) {
if ( filters.size() == 0 || sql.indexOf( ParserHelper.HQL_VARIABLE_PREFIX ) < 0 ) {
if ( filters.size() == 0 || !sql.contains( ParserHelper.HQL_VARIABLE_PREFIX ) ) {
// HELLA IMPORTANT OPTIMIZATION!!!
processedPositionalParameterValues = getPositionalParameterValues();
processedPositionalParameterTypes = getPositionalParameterTypes();
@ -469,7 +477,7 @@ public final class QueryParameters {
.append( dialect.closeQuote() )
.toString();
StringTokenizer tokens = new StringTokenizer( sql, symbols, true );
StringBuffer result = new StringBuffer();
StringBuilder result = new StringBuilder();
List parameters = new ArrayList();
List parameterTypes = new ArrayList();
@ -532,6 +540,7 @@ public final class QueryParameters {
return isNaturalKeyLookup;
}
@SuppressWarnings( {"UnusedDeclaration"})
public void setNaturalKeyLookup(boolean isNaturalKeyLookup) {
this.isNaturalKeyLookup = isNaturalKeyLookup;
}

View File

@ -106,7 +106,7 @@ public interface SessionFactoryImplementor extends Mapping, SessionFactory {
/**
* Get the SQL dialect.
* <p/>
* Shorthand for {@link #getJdbcServices().getDialect()}.{@link JdbcServices#getDialect()}
* Shorthand for {@code getJdbcServices().getDialect()}
*
* @return The dialect
*/

View File

@ -24,9 +24,8 @@
*/
package org.hibernate.hql.internal.ast;
/**
* {@inheritDoc}
* Indicates an issue with the encountered with-clause.
*
* @author Steve Ebersole
*/

View File

@ -45,6 +45,8 @@ public interface SelectExpression {
* (e.g. 'as col0_O_')
*
* @param i The index of the select expression in the projection list.
*
* @throws antlr.SemanticException if a semantic error occurs
*/
void setScalarColumnText(int i) throws SemanticException;
@ -52,14 +54,15 @@ public interface SelectExpression {
* Sets the index and text for select expression in the projection list.
*
* @param i The index of the select expression in the projection list.
* @throws SemanticException
*
* @throws SemanticException if a semantic error occurs
*/
void setScalarColumn(int i) throws SemanticException;
/**
* Gets index of the select expression in the projection list.
*
* @returns The index of the select expression in the projection list.
* @return The index of the select expression in the projection list.
*/
int getScalarColumnIndex();
@ -75,12 +78,15 @@ public interface SelectExpression {
*
* @return true if the element is a constructor (e.g. new Foo).
*/
@SuppressWarnings( {"UnusedDeclaration"})
boolean isConstructor();
/**
* Returns true if this select expression represents an entity that can be returned.
*
* @return true if this select expression represents an entity that can be returned.
*
* @throws SemanticException if a semantic error occurs
*/
boolean isReturnableEntity() throws SemanticException;

View File

@ -58,25 +58,13 @@ import org.hibernate.type.Type;
* @author josh
*/
public class LiteralProcessor implements HqlSqlTokenTypes {
/**
* Indicates that Float and Double literal values should
* be treated using the SQL "exact" format (i.e., '.001')
*/
public static final int EXACT = 0;
/**
* Indicates that Float and Double literal values should
* be treated using the SQL "approximate" format (i.e., '1E-3')
*/
public static final int APPROXIMATE = 1;
/**
* In what format should Float and Double literal values be sent
* to the database?
* @see #EXACT, #APPROXIMATE
*/
public static int DECIMAL_LITERAL_FORMAT = EXACT;
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, LiteralProcessor.class.getName());
/**
* In what format should Float and Double literal values be sent to the database?
*/
public static DecimalLiteralFormat DECIMAL_LITERAL_FORMAT = DecimalLiteralFormat.EXACT;
private HqlSqlWalker walker;
public LiteralProcessor(HqlSqlWalker hqlSqlWalker) {
@ -186,6 +174,7 @@ public class LiteralProcessor implements HqlSqlTokenTypes {
try {
LiteralType literalType = ( LiteralType ) type;
Dialect dialect = walker.getSessionFactoryHelper().getFactory().getDialect();
//noinspection unchecked
node.setText( literalType.objectToSQLString( value, dialect ) );
}
catch ( Exception e ) {
@ -279,7 +268,7 @@ public class LiteralProcessor implements HqlSqlTokenTypes {
}
}
BigDecimal number = null;
final BigDecimal number;
try {
number = new BigDecimal( literalValue );
}
@ -287,7 +276,7 @@ public class LiteralProcessor implements HqlSqlTokenTypes {
throw new HibernateException( "Could not parse literal [" + text + "] as big-decimal", t );
}
return formatters[ DECIMAL_LITERAL_FORMAT ].format( number );
return DECIMAL_LITERAL_FORMAT.getFormatter().format( number );
}
@ -296,13 +285,18 @@ public class LiteralProcessor implements HqlSqlTokenTypes {
}
private static class ExactDecimalFormatter implements DecimalFormatter {
public static final ExactDecimalFormatter INSTANCE = new ExactDecimalFormatter();
public String format(BigDecimal number) {
return number.toString();
}
}
private static class ApproximateDecimalFormatter implements DecimalFormatter {
public static final ApproximateDecimalFormatter INSTANCE = new ApproximateDecimalFormatter();
private static final String FORMAT_STRING = "#0.0E0";
public String format(BigDecimal number) {
try {
// TODO : what amount of significant digits need to be supported here?
@ -319,8 +313,30 @@ public class LiteralProcessor implements HqlSqlTokenTypes {
}
}
private static final DecimalFormatter[] formatters = new DecimalFormatter[] {
new ExactDecimalFormatter(),
new ApproximateDecimalFormatter()
public static enum DecimalLiteralFormat {
/**
* Indicates that Float and Double literal values should
* be treated using the SQL "exact" format (i.e., '.001')
*/
EXACT {
@Override
public DecimalFormatter getFormatter() {
return ExactDecimalFormatter.INSTANCE;
}
},
/**
* Indicates that Float and Double literal values should
* be treated using the SQL "approximate" format (i.e., '1E-3')
*/
@SuppressWarnings( {"UnusedDeclaration"})
APPROXIMATE {
@Override
public DecimalFormatter getFormatter() {
return ApproximateDecimalFormatter.INSTANCE;
}
};
public abstract DecimalFormatter getFormatter();
}
}
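A small self-contained sketch of the two rendering styles the enum chooses between; the DecimalFormat pattern below is a simplified stand-in for the FORMAT_STRING above, and the literal value is arbitrary:

    import java.math.BigDecimal;
    import java.text.DecimalFormat;

    public class DecimalLiteralFormatDemo {
        public static void main(String[] args) {
            BigDecimal value = new BigDecimal( "0.001" );
            // "exact" style: EXACT delegates to BigDecimal#toString
            System.out.println( value.toString() );                              // prints 0.001
            // "approximate" style: scientific notation, akin to what APPROXIMATE produces
            System.out.println( new DecimalFormat( "0.0E0" ).format( value ) );  // prints 1.0E-3
        }
    }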

View File

@ -33,15 +33,17 @@ import org.hibernate.service.spi.SessionFactoryServiceRegistry;
* <p/>
* IMPL NOTE: called during session factory initialization (constructor), so not all parts of the passed session factory
* will be available.
*
* @todo : the signature here *will* change, guaranteed
*
* @todo : better name ?
* <p/>
* For more information, see the following jiras:<ul>
* <li><a href="https://hibernate.onjira.com/browse/HHH-5562">HHH-5562</a></li>
* <li><a href="https://hibernate.onjira.com/browse/HHH-6081">HHH-6081</a></li>
* </ul>
*
* @author Steve Ebersole
* @since 4.0
* @jira HHH-5562
* @jira HHH-6081
*
* @todo : the signature here *will* change, guaranteed
* @todo : better name ?
*/
public interface Integrator {

View File

@ -29,7 +29,6 @@ import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.jboss.logging.Logger;
@ -105,11 +104,13 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
// inserts ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@Override
public Serializable insert(Object entity) {
errorIfClosed();
return insert(null, entity);
}
@Override
public Serializable insert(String entityName, Object entity) {
errorIfClosed();
EntityPersister persister = getEntityPersister( entityName, entity );
@ -136,11 +137,13 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
// deletes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@Override
public void delete(Object entity) {
errorIfClosed();
delete(null, entity);
}
@Override
public void delete(String entityName, Object entity) {
errorIfClosed();
EntityPersister persister = getEntityPersister(entityName, entity);
@ -152,11 +155,13 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
// updates ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@Override
public void update(Object entity) {
errorIfClosed();
update(null, entity);
}
@Override
public void update(String entityName, Object entity) {
errorIfClosed();
EntityPersister persister = getEntityPersister(entityName, entity);
@ -178,18 +183,22 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
// loading ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@Override
public Object get(Class entityClass, Serializable id) {
return get( entityClass.getName(), id );
}
@Override
public Object get(Class entityClass, Serializable id, LockMode lockMode) {
return get( entityClass.getName(), id, lockMode );
}
@Override
public Object get(String entityName, Serializable id) {
return get(entityName, id, LockMode.NONE);
}
@Override
public Object get(String entityName, Serializable id, LockMode lockMode) {
errorIfClosed();
Object result = getFactory().getEntityPersister(entityName)
@ -200,18 +209,22 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
return result;
}
@Override
public void refresh(Object entity) {
refresh( bestGuessEntityName( entity ), entity, LockMode.NONE );
}
@Override
public void refresh(String entityName, Object entity) {
refresh( entityName, entity, LockMode.NONE );
}
@Override
public void refresh(Object entity, LockMode lockMode) {
refresh( bestGuessEntityName( entity ), entity, lockMode );
}
@Override
public void refresh(String entityName, Object entity, LockMode lockMode) {
final EntityPersister persister = this.getEntityPersister( entityName, entity );
final Serializable id = persister.getIdentifier( entity, this );
@ -245,25 +258,28 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
UnresolvableObjectException.throwIfNull( result, id, persister.getEntityName() );
}
@Override
public Object immediateLoad(String entityName, Serializable id)
throws HibernateException {
throw new SessionException("proxies cannot be fetched by a stateless session");
}
@Override
public void initializeCollection(
PersistentCollection collection,
boolean writing) throws HibernateException {
throw new SessionException("collections cannot be fetched by a stateless session");
}
@Override
public Object instantiate(
String entityName,
Serializable id) throws HibernateException {
errorIfClosed();
return getFactory().getEntityPersister( entityName )
.instantiate( id, this );
return getFactory().getEntityPersister( entityName ).instantiate( id, this );
}
@Override
public Object internalLoad(
String entityName,
Serializable id,
@ -287,29 +303,34 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
return get( entityName, id );
}
@Override
public Iterator iterate(String query, QueryParameters queryParameters) throws HibernateException {
throw new UnsupportedOperationException();
}
@Override
public Iterator iterateFilter(Object collection, String filter, QueryParameters queryParameters)
throws HibernateException {
throw new UnsupportedOperationException();
}
@Override
public List listFilter(Object collection, String filter, QueryParameters queryParameters)
throws HibernateException {
throw new UnsupportedOperationException();
}
@Override
public boolean isOpen() {
return !isClosed();
}
@Override
public void close() {
managedClose();
}
@Override
public ConnectionReleaseMode getConnectionReleaseMode() {
return factory.getSettings().getConnectionReleaseMode();
}
@ -319,18 +340,22 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
return true;
}
@Override
public boolean isAutoCloseSessionEnabled() {
return factory.getSettings().isAutoCloseSessionEnabled();
}
@Override
public boolean isFlushBeforeCompletionEnabled() {
return true;
}
@Override
public boolean isFlushModeNever() {
return false;
}
@Override
public void managedClose() {
if ( isClosed() ) {
throw new SessionException( "Session was already closed!" );
@ -339,11 +364,13 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
setClosed();
}
@Override
public void managedFlush() {
errorIfClosed();
getTransactionCoordinator().getJdbcCoordinator().executeBatch();
}
@Override
public boolean shouldAutoClose() {
return isAutoCloseSessionEnabled() && !isClosed();
}
@ -368,6 +395,7 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
return sql;
}
@Override
public String bestGuessEntityName(Object object) {
if (object instanceof HibernateProxy) {
object = ( (HibernateProxy) object ).getHibernateLazyInitializer().getImplementation();
@ -375,11 +403,13 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
return guessEntityName(object);
}
@Override
public Connection connection() {
errorIfClosed();
return transactionCoordinator.getJdbcCoordinator().getLogicalConnection().getDistinctConnectionProxy();
}
@Override
public int executeUpdate(String query, QueryParameters queryParameters)
throws HibernateException {
errorIfClosed();
@ -398,18 +428,22 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
return result;
}
@Override
public CacheMode getCacheMode() {
return CacheMode.IGNORE;
}
@Override
public int getDontFlushFromFind() {
return 0;
}
@Override
public Map getEnabledFilters() {
return CollectionHelper.EMPTY_MAP;
}
@Override
public Serializable getContextEntityIdentifier(Object object) {
errorIfClosed();
return null;
@ -419,6 +453,7 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
return EntityMode.POJO;
}
@Override
public EntityPersister getEntityPersister(String entityName, Object object)
throws HibernateException {
errorIfClosed();
@ -430,49 +465,59 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
}
}
@Override
public Object getEntityUsingInterceptor(EntityKey key) throws HibernateException {
errorIfClosed();
return null;
}
@Override
public Type getFilterParameterType(String filterParameterName) {
throw new UnsupportedOperationException();
}
@Override
public Object getFilterParameterValue(String filterParameterName) {
throw new UnsupportedOperationException();
}
@Override
public FlushMode getFlushMode() {
return FlushMode.COMMIT;
}
@Override
public Interceptor getInterceptor() {
return EmptyInterceptor.INSTANCE;
}
@Override
public PersistenceContext getPersistenceContext() {
return temporaryPersistenceContext;
}
@Override
public long getTimestamp() {
throw new UnsupportedOperationException();
}
@Override
public String guessEntityName(Object entity) throws HibernateException {
errorIfClosed();
return entity.getClass().getName();
}
@Override
public boolean isConnected() {
return transactionCoordinator.getJdbcCoordinator().getLogicalConnection().isPhysicallyConnected();
}
@Override
public boolean isTransactionInProgress() {
return transactionCoordinator.isTransactionInProgress();
}
@Override
public void setAutoClear(boolean enabled) {
throw new UnsupportedOperationException();
}
@ -482,19 +527,23 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
throw new UnsupportedOperationException();
}
@Override
public void setCacheMode(CacheMode cm) {
throw new UnsupportedOperationException();
}
@Override
public void setFlushMode(FlushMode fm) {
throw new UnsupportedOperationException();
}
@Override
public Transaction getTransaction() throws HibernateException {
errorIfClosed();
return transactionCoordinator.getTransaction();
}
@Override
public Transaction beginTransaction() throws HibernateException {
errorIfClosed();
Transaction result = getTransaction();
@ -502,22 +551,17 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
return result;
}
@Override
public boolean isEventSource() {
return false;
}
/**
* {@inheritDoc}
*/
public boolean isDefaultReadOnly() {
return false;
}
/**
* {@inheritDoc}
*/
public void setDefaultReadOnly(boolean readOnly) throws HibernateException {
if ( readOnly == true ) {
if ( readOnly ) {
throw new UnsupportedOperationException();
}
}
@ -526,6 +570,7 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
//TODO: COPY/PASTE FROM SessionImpl, pull up!
@Override
public List list(String query, QueryParameters queryParameters) throws HibernateException {
errorIfClosed();
queryParameters.validateParameters();
@ -545,30 +590,35 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
public void afterOperation(boolean success) {
if ( ! transactionCoordinator.isTransactionInProgress() ) {
transactionCoordinator.afterNonTransactionalQuery( success );;
transactionCoordinator.afterNonTransactionalQuery( success );
}
}
@Override
public Criteria createCriteria(Class persistentClass, String alias) {
errorIfClosed();
return new CriteriaImpl( persistentClass.getName(), alias, this );
}
@Override
public Criteria createCriteria(String entityName, String alias) {
errorIfClosed();
return new CriteriaImpl(entityName, alias, this);
}
@Override
public Criteria createCriteria(Class persistentClass) {
errorIfClosed();
return new CriteriaImpl( persistentClass.getName(), this );
}
@Override
public Criteria createCriteria(String entityName) {
errorIfClosed();
return new CriteriaImpl(entityName, this);
}
@Override
public ScrollableResults scroll(CriteriaImpl criteria, ScrollMode scrollMode) {
errorIfClosed();
String entityName = criteria.getEntityOrClassName();
@ -582,6 +632,8 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
return loader.scroll(this, scrollMode);
}
@Override
@SuppressWarnings( {"unchecked"})
public List list(CriteriaImpl criteria) throws HibernateException {
errorIfClosed();
String[] implementors = factory.getImplementors( criteria.getEntityOrClassName() );
@ -624,6 +676,7 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
return ( OuterJoinLoadable ) persister;
}
@Override
public List listCustomQuery(CustomQuery customQuery, QueryParameters queryParameters)
throws HibernateException {
errorIfClosed();
@ -642,6 +695,7 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
return results;
}
@Override
public ScrollableResults scrollCustomQuery(CustomQuery customQuery, QueryParameters queryParameters)
throws HibernateException {
errorIfClosed();
@ -649,52 +703,47 @@ public class StatelessSessionImpl extends AbstractSessionImpl implements Statele
return loader.scroll( queryParameters, this );
}
@Override
public ScrollableResults scroll(String query, QueryParameters queryParameters) throws HibernateException {
errorIfClosed();
HQLQueryPlan plan = getHQLQueryPlan( query, false );
return plan.performScroll( queryParameters, this );
}
@Override
public void afterScrollOperation() {
temporaryPersistenceContext.clear();
}
public void flush() {}
@Override
public void flush() {
}
@Override
public NonFlushedChanges getNonFlushedChanges() {
throw new UnsupportedOperationException();
}
@Override
public void applyNonFlushedChanges(NonFlushedChanges nonFlushedChanges) {
throw new UnsupportedOperationException();
}
@Override
public String getFetchProfile() {
return null;
}
@Override
public LoadQueryInfluencers getLoadQueryInfluencers() {
return LoadQueryInfluencers.NONE;
}
public void registerInsertedKey(EntityPersister persister, Serializable id) {
errorIfClosed();
// nothing to do
}
public boolean wasInsertedDuringTransaction(EntityPersister persister, Serializable id) {
errorIfClosed();
// not in any meaning we need to worry about here.
return false;
}
public void setFetchProfile(String name) {}
protected boolean autoFlushIfRequired(Set querySpaces) throws HibernateException {
// no auto-flushing to support in stateless session
return false;
@Override
public void setFetchProfile(String name) {
}
@Override
public int executeNativeUpdate(NativeSQLQuerySpecification nativeSQLQuerySpecification,
QueryParameters queryParameters) throws HibernateException {
errorIfClosed();

View File

@ -1,7 +1,7 @@
<!--
~ Hibernate, Relational Persistence for Idiomatic Java
~
~ Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
~ Copyright (c) 2011, Red Hat Inc. or third-party contributors as
~ indicated by the @author tags or express copyright attribution
~ statements applied by the authors. All third-party contributions are
~ distributed under license by Red Hat Inc.
@ -26,8 +26,7 @@
<head></head>
<body>
<p>
An internal package containing mostly implementations of central Hibernate APIs of the
{@link org.hibernate} package.
An internal package containing mostly implementations of central Hibernate APIs.
</p>
</body>
</html>

View File

@ -1,10 +1,10 @@
<!--
~ Hibernate, Relational Persistence for Idiomatic Java
~
~ Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
~ Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
~ indicated by the @author tags or express copyright attribution
~ statements applied by the authors. All third-party contributions are
~ distributed under license by Red Hat Middleware LLC.
~ distributed under license by Red Hat Inc.
~
~ This copyrighted material is made available to anyone wishing to use, modify,
~ copy, or redistribute it subject to the terms and conditions of the GNU
@ -20,14 +20,13 @@
~ Free Software Foundation, Inc.
~ 51 Franklin Street, Fifth Floor
~ Boston, MA 02110-1301 USA
~
-->
<html>
<head></head>
<body>
<p>
Essentially defines {@link Work}, {@link ReturningWork} and {@link Expectation} as well as some exceptions
Essentially defines {@link org.hibernate.jdbc.Work}, {@link org.hibernate.jdbc.ReturningWork} and
{@link org.hibernate.jdbc.Expectation} as well as some exceptions
</p>
</body>
</html>

View File

@ -41,8 +41,8 @@ public interface UniqueEntityLoader {
* load the entity state into the given (uninitialized) object.
*
* @deprecated use {@link #load(java.io.Serializable, Object, SessionImplementor, LockOptions)} instead.
* @noinspection JavaDoc
*/
@SuppressWarnings( {"JavaDoc"})
public Object load(Serializable id, Object optionalObject, SessionImplementor session) throws HibernateException;
/**

View File

@ -36,6 +36,7 @@ import org.hibernate.type.Type;
* @see org.hibernate.SessionFactory#getClassMetadata(Class)
* @author Gavin King
*/
@SuppressWarnings( {"JavaDoc"})
public interface ClassMetadata {
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -135,6 +136,7 @@ public interface ClassMetadata {
/**
* Return the values of the mapped properties of the object
*/
@SuppressWarnings( {"UnusedDeclaration"})
public Object[] getPropertyValuesToInsert(Object entity, Map mergeMap, SessionImplementor session)
throws HibernateException;
@ -186,8 +188,8 @@ public interface ClassMetadata {
* Get the identifier of an instance (throw an exception if no identifier property)
*
* @deprecated Use {@link #getIdentifier(Object,SessionImplementor)} instead
* @noinspection JavaDoc
*/
@SuppressWarnings( {"JavaDoc"})
public Serializable getIdentifier(Object object) throws HibernateException;
/**
@ -213,6 +215,7 @@ public interface ClassMetadata {
/**
* Does the class implement the <tt>Lifecycle</tt> interface?
*/
@SuppressWarnings( {"UnusedDeclaration"})
public boolean implementsLifecycle();
/**

View File

@ -56,7 +56,7 @@ public interface AssociationAttributeBinding extends AttributeBinding {
/**
* Temporary. Needed for integration with legacy {@link org.hibernate.mapping} configuration of persisters.
* Temporary. Needed for integration with legacy org.hibernate.mapping configuration of persisters.
*
* @deprecated
*/

View File

@ -29,6 +29,7 @@ package org.hibernate.metamodel.binding;
* @author Gail Badner
* @author Steve Ebersole
*/
@SuppressWarnings( {"JavaDoc", "UnusedDeclaration"})
public interface SingularAssociationAttributeBinding extends SingularAttributeBinding, AssociationAttributeBinding {
/**
* Is this association based on a property reference (non PK column(s) as target of FK)?

View File

@ -41,7 +41,8 @@ public interface Exportable {
/**
* Gets the SQL strings for creating the database object.
*
* @param dialect
* @param dialect The dialect for which to generate the SQL creation strings
*
* @return the SQL strings for creating the database object.
*/
public String[] sqlCreateStrings(Dialect dialect);
@ -49,7 +50,9 @@ public interface Exportable {
/**
* Gets the SQL strings for dropping the database object.
*
* @param dialect@return the SQL strings for dropping the database object.
* @param dialect The dialect for which to generate the SQL drop strings
*
* @return the SQL strings for dropping the database object.
*/
public String[] sqlDropStrings(Dialect dialect);

View File

@ -41,7 +41,7 @@ public class Identifier {
*
* @param name The name
*
* @return
* @return The identifier form of the name.
*/
public static Identifier toIdentifier(String name) {
if ( StringHelper.isEmpty( name ) ) {
@ -101,9 +101,9 @@ public class Identifier {
* enclosed in dialect-specific open- and end-quotes; otherwise,
* simply return the identifier name.
*
* @param dialect
* @return if quoted, identifier name enclosed in dialect-specific
* open- and end-quotes; otherwise, the identifier name.
* @param dialect The dialect whose dialect-specific quoting should be used.
* @return if quoted, identifier name enclosed in dialect-specific open- and end-quotes; otherwise, the
* identifier name.
*/
public String encloseInQuotesIfQuoted(Dialect dialect) {
return isQuoted ?

View File

@ -651,8 +651,8 @@ public interface EntityPersister extends OptimisticCacheSource {
* Get the identifier of an instance (throw an exception if no identifier property)
*
* @deprecated Use {@link #getIdentifier(Object,SessionImplementor)} instead
* @noinspection JavaDoc
*/
@SuppressWarnings( {"JavaDoc"})
public Serializable getIdentifier(Object object) throws HibernateException;
/**

View File

@ -61,7 +61,7 @@ public abstract class AbstractSerializableProxy implements Serializable {
* This method should only be called during deserialization, before associating the
* AbstractLazyInitializer with a session.
*
* @param li, the read-only/modifiable setting to use when
* @param li the read-only/modifiable setting to use when
* associated with a session; null indicates that the default should be used.
* @throws IllegalStateException if isReadOnlySettingAvailable() == true
*/

View File

@ -117,12 +117,12 @@ public interface LazyInitializer {
* detached or its associated session is closed.
*
* To check if the read-only/modifiable setting is available:
* @see org.hibernate.proxy.LazyInitializer#isReadOnlySettingAvailable()
*
* @return true, if this proxy is read-only; false, otherwise
* @throws org.hibernate.TransientObjectException if the proxy is detached (getSession() == null)
* @throws org.hibernate.SessionException if the proxy is associated with a session that is closed
*
* @see org.hibernate.proxy.LazyInitializer#isReadOnlySettingAvailable()
* @see org.hibernate.Session#isReadOnly(Object entityOrProxy)
*/
public boolean isReadOnly();
@ -141,9 +141,9 @@ public interface LazyInitializer {
* @param readOnly if true, the associated proxy is made read-only;
* if false, the associated proxy is made modifiable.
* @throws org.hibernate.TransientObjectException if the proxy is not associated with a session
* @throws org.hibernate.SessionException if the proxy is associated with a sesssion that is closed
* @throws org.hibernate.SessionException if the proxy is associated with a session that is closed
*
* @see {@link org.hibernate.Session#setReadOnly(Object entityOrProxy, boolean readOnly)}
* @see org.hibernate.Session#setReadOnly(Object entityOrProxy, boolean readOnly)
*/
public void setReadOnly(boolean readOnly);
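A hedged sketch relating the Session-level calls to the proxy's LazyInitializer, assuming a hypothetical Order entity and an open session:

    Order proxy = (Order) session.load( Order.class, 1L );                   // uninitialized proxy
    session.setReadOnly( proxy, true );                                       // routes to LazyInitializer#setReadOnly

    LazyInitializer li = ( (HibernateProxy) proxy ).getHibernateLazyInitializer();
    if ( li.isReadOnlySettingAvailable() ) {                                  // attached to an open session
        li.setReadOnly( false );                                              // back to modifiable
    }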

View File

@ -42,10 +42,12 @@ public class InFragment {
public static final String NOT_NULL = "not null";
private String columnName;
private List values = new ArrayList();
private List<Object> values = new ArrayList<Object>();
/**
* @param value, an SQL literal, NULL, or NOT_NULL
* @param value an SQL literal, NULL, or NOT_NULL
*
* @return {@code this}, for method chaining
*/
public InFragment addValue(Object value) {
values.add(value);
@ -110,9 +112,7 @@ public class InFragment {
}
for (Object value : values) {
if (NULL.equals(value)) {
;
} else {
if ( ! NULL.equals(value) ) {
buf.append(value);
buf.append(", ");
}
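A hedged usage sketch; setColumn and toFragmentString are assumed from elsewhere in this class, and the rendered SQL is only indicative:

    String fragment = new InFragment()
            .setColumn( "status" )
            .addValue( "'ACTIVE'" )          // an SQL literal
            .addValue( InFragment.NULL )     // contributes an "is null" alternative
            .toFragmentString();
    // indicative result: (status in ('ACTIVE') or status is null)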

View File

@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2010, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2010-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@ -20,7 +20,6 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.transform;
@ -32,7 +31,7 @@ import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.type.Type;
/**
* A ResultTransformer that is used to transfor tuples to a value(s)
* A ResultTransformer that is used to transform tuples to a value(s)
* that can be cached.
*
* @author Gail Badner
@ -74,16 +73,17 @@ public class CacheableResultTransformer implements ResultTransformer {
* included in the tuple; the number of true values equals
* the length of the tuple that will be transformed;
* must be non-null
*
* @return a CacheableResultTransformer that is used to transform
* tuples to a value(s) that can be cached.
*/
public static CacheableResultTransformer create(ResultTransformer transformer,
public static CacheableResultTransformer create(
ResultTransformer transformer,
String[] aliases,
boolean[] includeInTuple) {
return transformer instanceof TupleSubsetResultTransformer ?
create( ( TupleSubsetResultTransformer ) transformer, aliases, includeInTuple ) :
create( includeInTuple )
;
return transformer instanceof TupleSubsetResultTransformer
? create( ( TupleSubsetResultTransformer ) transformer, aliases, includeInTuple )
: create( includeInTuple );
}
/**
@ -100,10 +100,12 @@ public class CacheableResultTransformer implements ResultTransformer {
* included in the tuple; the number of true values equals
* the length of the tuple that will be transformed;
* must be non-null
*
* @return a CacheableResultTransformer that is used to transform
* tuples to a value(s) that can be cached.
*/
private static CacheableResultTransformer create(TupleSubsetResultTransformer transformer,
private static CacheableResultTransformer create(
TupleSubsetResultTransformer transformer,
String[] aliases,
boolean[] includeInTuple) {
if ( transformer == null ) {
@ -131,6 +133,7 @@ public class CacheableResultTransformer implements ResultTransformer {
* included in the tuple; the number of true values equals
* the length of the tuple that will be transformed;
* must be non-null
*
* @return a CacheableResultTransformer that is used to transform
* tuples to a value(s) that can be cached.
*/
@ -163,9 +166,7 @@ public class CacheableResultTransformer implements ResultTransformer {
}
}
/**
* {@inheritDoc}
*/
@Override
public Object transformTuple(Object[] tuple, String aliases[]) {
if ( aliases != null && aliases.length != tupleLength ) {
throw new IllegalStateException(
@ -191,9 +192,13 @@ public class CacheableResultTransformer implements ResultTransformer {
* @param transformedResults - results that were previously transformed
* @param aliases - the aliases that correspond to the untransformed tuple;
* @param transformer - the transformer for the re-transformation
* @param includeInTuple indicates the indexes of
*
* @return transformedResults, with each element re-transformed (if necessary)
*/
public List retransformResults(List transformedResults,
@SuppressWarnings( {"unchecked"})
public List retransformResults(
List transformedResults,
String aliases[],
ResultTransformer transformer,
boolean[] includeInTuple) {
@ -244,6 +249,7 @@ public class CacheableResultTransformer implements ResultTransformer {
* @param results - results that were previously transformed
* @return results, with each element untransformed (if necessary)
*/
@SuppressWarnings( {"unchecked"})
public List untransformToTuples(List results) {
if ( includeInTransformIndex == null ) {
results = ACTUAL_TRANSFORMER.untransformToTuples(
@ -264,21 +270,13 @@ public class CacheableResultTransformer implements ResultTransformer {
return results;
}
/**
* Returns the result types for the transformed value.
* @param tupleResultTypes
* @return
*/
public Type[] getCachedResultTypes(Type[] tupleResultTypes) {
return tupleLength != tupleSubsetLength ?
index( tupleResultTypes.getClass(), tupleResultTypes ) :
tupleResultTypes
;
return tupleLength != tupleSubsetLength
? index( tupleResultTypes.getClass(), tupleResultTypes )
: tupleResultTypes;
}
/**
* {@inheritDoc}
*/
@Override
public List transformList(List list) {
return list;
}
@ -320,20 +318,10 @@ public class CacheableResultTransformer implements ResultTransformer {
CacheableResultTransformer that = ( CacheableResultTransformer ) o;
if ( tupleLength != that.tupleLength ) {
return false;
}
if ( tupleSubsetLength != that.tupleSubsetLength ) {
return false;
}
if ( !Arrays.equals( includeInTuple, that.includeInTuple ) ) {
return false;
}
if ( !Arrays.equals( includeInTransformIndex, that.includeInTransformIndex ) ) {
return false;
}
return true;
return tupleLength == that.tupleLength
&& tupleSubsetLength == that.tupleSubsetLength
&& Arrays.equals( includeInTuple, that.includeInTuple )
&& Arrays.equals( includeInTransformIndex, that.includeInTransformIndex );
}
@Override

View File

@ -61,8 +61,8 @@ public interface EntityTuplizer extends Tuplizer {
* @throws HibernateException
*
* @deprecated Use {@link #instantiate(Serializable, SessionImplementor)} instead.
* @noinspection JavaDoc
*/
@SuppressWarnings( {"JavaDoc"})
public Object instantiate(Serializable id) throws HibernateException;
/**
@ -108,8 +108,8 @@ public interface EntityTuplizer extends Tuplizer {
* @param id The value to be injected as the identifier.
*
* @deprecated Use {@link #setIdentifier(Object, Serializable, SessionImplementor)} instead.
* @noinspection JavaDoc
*/
@SuppressWarnings( {"JavaDoc"})
public void setIdentifier(Object entity, Serializable id) throws HibernateException;
/**
@ -133,6 +133,7 @@ public interface EntityTuplizer extends Tuplizer {
*
* @deprecated Use {@link #resetIdentifier(Object, Serializable, Object, SessionImplementor)} instead
*/
@SuppressWarnings( {"UnusedDeclaration"})
public void resetIdentifier(Object entity, Serializable currentId, Object currentVersion);
/**
@ -151,7 +152,7 @@ public interface EntityTuplizer extends Tuplizer {
*
* @param entity The entity from which to extract the version value.
* @return The value of the version property, or null if not versioned.
* @throws HibernateException
* @throws HibernateException Indicates a problem accessing the version property
*/
public Object getVersion(Object entity) throws HibernateException;
@ -161,7 +162,7 @@ public interface EntityTuplizer extends Tuplizer {
* @param entity The entity into which to inject the value.
* @param i The property's index.
* @param value The property value to inject.
* @throws HibernateException
* @throws HibernateException Indicates a problem accessing the property
*/
public void setPropertyValue(Object entity, int i, Object value) throws HibernateException;
@ -171,7 +172,7 @@ public interface EntityTuplizer extends Tuplizer {
* @param entity The entity into which to inject the value.
* @param propertyName The name of the property.
* @param value The property value to inject.
* @throws HibernateException
* @throws HibernateException Indicates a problem accessing the property
*/
public void setPropertyValue(Object entity, String propertyName, Object value) throws HibernateException;
@ -182,7 +183,7 @@ public interface EntityTuplizer extends Tuplizer {
* @param mergeMap a map of instances being merged to merged instances
* @param session The session in which the resuest is being made.
* @return The insertable property values.
* @throws HibernateException
* @throws HibernateException Indicates a problem accessing the properties
*/
public Object[] getPropertyValuesToInsert(Object entity, Map mergeMap, SessionImplementor session)
throws HibernateException;
@ -193,7 +194,7 @@ public interface EntityTuplizer extends Tuplizer {
* @param entity The entity from which to extract the property value.
* @param propertyName The name of the property for which to extract the value.
* @return The current value of the given property on the given entity.
* @throws HibernateException
* @throws HibernateException Indicates a problem accessing the property
*/
public Object getPropertyValue(Object entity, String propertyName) throws HibernateException;
@ -252,6 +253,8 @@ public interface EntityTuplizer extends Tuplizer {
/**
* Is it an instrumented POJO?
*
* @return {@code true} if the entity class is instrumented; {@code false} otherwise.
*/
public boolean isInstrumented();

View File

@ -167,14 +167,15 @@ public interface Type extends Serializable {
*/
public Class getReturnedClass();
@SuppressWarnings( {"UnusedDeclaration"})
public boolean isXMLElement();
/**
* Compare two instances of the class mapped by this type for persistence "equality" (equality of persistent
* state) taking a shortcut for entity references.
* <p/>
* For most types this should equate to {@link #equals} check on the values. For associations the implication
* is a bit different. For most types it is conceivable to simply delegate to {@link #isEqual}
* For most types this should equate to an {@link Object#equals equals} check on the values. For associations
* the implication is a bit different. For most types it is conceivable to simply delegate to {@link #isEqual}
*
* @param x The first value
* @param y The second value
@ -222,7 +223,7 @@ public interface Type extends Serializable {
/**
* Get a hash code, consistent with persistence "equality". Again for most types the normal usage is to
* delegate to the value's {@link #hashCode}.
* delegate to the value's {@link Object#hashCode hashCode}.
*
* @param x The value for which to retrieve a hash code
* @return The hash code
@ -233,7 +234,7 @@ public interface Type extends Serializable {
/**
* Get a hash code, consistent with persistence "equality". Again for most types the normal usage is to
* delegate to the value's {@link #hashCode}.
* delegate to the value's {@link Object#hashCode hashCode}.
*
* @param x The value for which to retrieve a hash code
* @param factory The session factory
@ -301,97 +302,109 @@ public interface Type extends Serializable {
throws HibernateException;
/**
* Retrieve an instance of the mapped class from a JDBC resultset. Implementors
* Extract a value of the {@link #getReturnedClass() mapped class} from the JDBC result set. Implementors
* should handle possibility of null values.
*
* @see Type#hydrate(ResultSet, String[], SessionImplementor, Object) alternative, 2-phase property initialization
* @param rs
* @param names the column names
* @param session
* @param rs The result set from which to extract value.
* @param names the column names making up this type value (used to read from the result set)
* @param session The originating session
* @param owner the parent entity
* @return Object
* @throws HibernateException
* @throws SQLException
*
* @return The extracted value
*
* @throws HibernateException An error from Hibernate
* @throws SQLException An error from the JDBC driver
*
* @see Type#hydrate(ResultSet, String[], SessionImplementor, Object) alternative, 2-phase property initialization
*/
public Object nullSafeGet(ResultSet rs, String[] names, SessionImplementor session, Object owner)
throws HibernateException, SQLException;
/**
* Retrieve an instance of the mapped class from a JDBC resultset. Implementations
* should handle possibility of null values. This method might be called if the
* type is known to be a single-column type.
* Extract a value of the {@link #getReturnedClass() mapped class} from the JDBC result set. Implementors
* should handle possibility of null values. This form might be called if the type is known to be a
* single-column type.
*
* @param rs
* @param name the column name
* @param session
* @param rs The result set from which to extract value.
* @param name the column name making up this type value (used to read from the result set)
* @param session The originating session
* @param owner the parent entity
* @return Object
* @throws HibernateException
* @throws SQLException
*
* @return The extracted value
*
* @throws HibernateException An error from Hibernate
* @throws SQLException An error from the JDBC driver
*/
public Object nullSafeGet(ResultSet rs, String name, SessionImplementor session, Object owner)
throws HibernateException, SQLException;
/**
* Write an instance of the mapped class to a prepared statement, ignoring some columns.
* Implementors should handle possibility of null values. A multi-column type should be
* written to parameters starting from <tt>index</tt>.
* @param st
* @param value the object to write
* @param index statement parameter index
* @param settable an array indicating which columns to ignore
* @param session
* Bind a value represented by an instance of the {@link #getReturnedClass() mapped class} to the JDBC prepared
* statement, ignoring some columns as dictated by the 'settable' parameter. Implementors should handle the
* possibility of null values. A multi-column type should bind parameters starting from <tt>index</tt>.
*
* @throws HibernateException
* @throws SQLException
* @param st The JDBC prepared statement to which to bind
* @param value the object to write
* @param index starting parameter bind index
* @param settable an array indicating which columns to bind/ignore
* @param session The originating session
*
* @throws HibernateException An error from Hibernate
* @throws SQLException An error from the JDBC driver
*/
public void nullSafeSet(PreparedStatement st, Object value, int index, boolean[] settable, SessionImplementor session)
throws HibernateException, SQLException;
/**
* Write an instance of the mapped class to a prepared statement. Implementors
* should handle possibility of null values. A multi-column type should be written
* to parameters starting from <tt>index</tt>.
* @param st
* @param value the object to write
* @param index statement parameter index
* @param session
* Bind a value represented by an instance of the {@link #getReturnedClass() mapped class} to the JDBC prepared
* statement. Implementors should handle possibility of null values. A multi-column type should bind parameters
* starting from <tt>index</tt>.
*
* @throws HibernateException
* @throws SQLException
* @param st The JDBC prepared statement to which to bind
* @param value the object to write
* @param index starting parameter bind index
* @param session The originating session
*
* @throws HibernateException An error from Hibernate
* @throws SQLException An error from the JDBC driver
*/
public void nullSafeSet(PreparedStatement st, Object value, int index, SessionImplementor session)
throws HibernateException, SQLException;
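As a fragmentary sketch (not a complete Type implementation) of how a single-column type might handle these two JDBC callbacks, assuming a hypothetical mapping of java.net.URL to a VARCHAR column:

    public Object nullSafeGet(ResultSet rs, String name, SessionImplementor session, Object owner)
            throws HibernateException, SQLException {
        final String text = rs.getString( name );                  // single column read
        try {
            return text == null ? null : new java.net.URL( text );
        }
        catch ( java.net.MalformedURLException e ) {
            throw new HibernateException( "Could not convert column value to URL", e );
        }
    }

    public void nullSafeSet(PreparedStatement st, Object value, int index, SessionImplementor session)
            throws HibernateException, SQLException {
        if ( value == null ) {
            st.setNull( index, java.sql.Types.VARCHAR );            // honor null
        }
        else {
            st.setString( index, value.toString() );                // single parameter bind at 'index'
        }
    }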
/**
* A representation of the value to be embedded in an XML element.
* Generate a representation of the value for logging purposes.
*
* @param value
* @param factory
* @return String
* @throws HibernateException
*/
public void setToXMLNode(Node node, Object value, SessionFactoryImplementor factory)
throws HibernateException;
/**
* A representation of the value to be embedded in a log file.
* @param value The value to be logged
* @param factory The session factory
*
* @param value
* @param factory
* @return String
* @throws HibernateException
* @return The loggable representation
*
* @throws HibernateException An error from Hibernate
*/
public String toLoggableString(Object value, SessionFactoryImplementor factory)
throws HibernateException;
/**
* Parse the XML representation of an instance.
* @param xml
* @param factory
* A representation of the value to be embedded in an XML element.
*
* @return an instance of the type
* @throws HibernateException
* @param node The XML node to which to write the value
* @param value The value to write
* @param factory The session factory
*
* @throws HibernateException An error from Hibernate
*/
public void setToXMLNode(Node node, Object value, SessionFactoryImplementor factory)
throws HibernateException;
/**
* Parse the XML representation of an instance.
*
* @param xml The XML node from which to read the value
* @param factory The session factory
*
* @return an instance of the {@link #getReturnedClass() mapped class}
*
* @throws HibernateException An error from Hibernate
*/
public Object fromXMLNode(Node xml, Mapping factory) throws HibernateException;
@ -403,12 +416,14 @@ public interface Type extends Serializable {
public String getName();
/**
* Return a deep copy of the persistent state, stopping at entities and at
* collections.
* Return a deep copy of the persistent state, stopping at entities and at collections.
*
* @param value generally a collection element or entity field
* @param factory
* @return Object a copy
* @param value The value to be copied
* @param factory The session factory
*
* @return The deep copy
*
* @throws HibernateException An error from Hibernate
*/
public Object deepCopy(Object value, SessionFactoryImplementor factory)
throws HibernateException;
@ -423,20 +438,30 @@ public interface Type extends Serializable {
public boolean isMutable();
/**
* Return a cacheable "disassembled" representation of the object.
* Return a disassembled representation of the object. This is the value Hibernate will use in second level
* caching, so care should be taken to break values down to their simplest forms; for entities especially, this
* means breaking them down into their constituent parts.
*
* @param value the value to cache
* @param session the session
* @param session the originating session
* @param owner optional parent entity object (needed for collections)
*
* @return the disassembled, deep cloned state
*
* @throws HibernateException An error from Hibernate
*/
public Serializable disassemble(Object value, SessionImplementor session, Object owner) throws HibernateException;
/**
* Reconstruct the object from its cached "disassembled" state.
* Reconstruct the object from its disassembled state. This method is the reciprocal of {@link #disassemble}
*
* @param cached the disassembled state from the cache
* @param session the session
* @param session the originating session
* @param owner the parent entity object
* @return the the object
*
* @return the (re)assembled object
*
* @throws HibernateException An error from Hibernate
*/
public Object assemble(Serializable cached, SessionImplementor session, Object owner)
throws HibernateException;
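A hedged illustration of the round trip these two methods form around the second-level cache (variable names are illustrative):

    Serializable cached = type.disassemble( value, session, owner );    // simplest, deep-cloned form for the cache
    // ... the disassembled state is stored in, and later read back from, the second-level cache ...
    Object reassembled = type.assemble( cached, session, owner );       // reciprocal of disassemble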
@ -444,49 +469,75 @@ public interface Type extends Serializable {
/**
* Called before assembling a query result set from the query cache, to allow batch fetching
* of entities missing from the second-level cache.
*
* @param cached The key
* @param session The originating session
*/
public void beforeAssemble(Serializable cached, SessionImplementor session);
/**
* Retrieve an instance of the mapped class, or the identifier of an entity or collection,
* from a JDBC resultset. This is useful for 2-phase property initialization - the second
* phase is a call to <tt>resolveIdentifier()</tt>.
* Extract a value from the JDBC result set. This is useful for 2-phase property initialization - the second
* phase is a call to {@link #resolve}
* This hydrated value will be either:<ul>
* <li>in the case of an entity or collection type, the key</li>
* <li>otherwise, the value itself</li>
* </ul>
*
* @see Type#resolve(Object, SessionImplementor, Object)
* @param rs
* @param names the column names
* @param session the session
* @param rs The JDBC result set
* @param names the column names making up this type value (used to read from the result set)
* @param session The originating session
* @param owner the parent entity
* @return Object an identifier or actual value
* @throws HibernateException
* @throws SQLException
*
* @return An entity or collection key, or an actual value.
*
* @throws HibernateException An error from Hibernate
* @throws SQLException An error from the JDBC driver
*
* @see #resolve
*/
public Object hydrate(ResultSet rs, String[] names, SessionImplementor session, Object owner)
throws HibernateException, SQLException;
/**
* Map identifiers to entities or collections. This is the second phase of 2-phase property
* initialization.
* The second phase of 2-phase loading. Only really pertinent for entities and collections. Here we resolve the
* identifier to an entity or collection instance
*
* @see Type#hydrate(ResultSet, String[], SessionImplementor, Object)
* @param value an identifier or value returned by <tt>hydrate()</tt>
* @param owner the parent entity
* @param session the session
*
* @return the given value, or the value associated with the identifier
* @throws HibernateException
*
* @throws HibernateException An error from Hibernate
*
* @see #hydrate
*/
public Object resolve(Object value, SessionImplementor session, Object owner)
throws HibernateException;
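A hedged sketch of the two-phase sequence a loader follows, per the contract above (variable names are illustrative):

    Object hydrated = type.hydrate( resultSet, columnAliases, session, owner );   // phase 1: raw value, or an entity/collection key
    // ... remaining columns of the row are hydrated before any resolving happens ...
    Object resolved = type.resolve( hydrated, session, owner );                   // phase 2: key resolved to an entity/collection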
/**
* Given a hydrated, but unresolved value, return a value that may be used to
* reconstruct property-ref associations.
* Given a hydrated, but unresolved value, return a value that may be used to reconstruct property-ref
* associations.
*
* @param value The unresolved, hydrated value
* @param session The originating session
* @param owner The value owner
*
* @return The semi-resolved value
*
* @throws HibernateException An error from Hibernate
*/
public Object semiResolve(Object value, SessionImplementor session, Object owner)
throws HibernateException;
/**
* Get the type of a semi-resolved value.
* As part of 2-phase loading, when we perform resolving what is the resolved type for this type? Generally
* speaking the type and its semi-resolved type will be the same. The main deviation from this is in the
* case of an entity where the type would be the entity type and semi-resolved type would be its identifier type
*
* @param factory The session factory
*
* @return The semi-resolved type
*/
public Type getSemiResolvedType(SessionFactoryImplementor factory);
@ -499,15 +550,20 @@ public interface Type extends Serializable {
*
* @param original the value from the detached entity being merged
* @param target the value in the managed entity
* @param session The originating session
* @param owner The owner of the value
* @param copyCache The cache of already copied/replaced values
*
* @return the value to be merged
*
* @throws HibernateException An error from Hibernate
*/
public Object replace(
Object original,
Object target,
SessionImplementor session,
Object owner,
Map copyCache)
throws HibernateException;
Map copyCache) throws HibernateException;
/**
* During merge, replace the existing (target) value in the entity we are merging to
@ -518,7 +574,14 @@ public interface Type extends Serializable {
*
* @param original the value from the detached entity being merged
* @param target the value in the managed entity
* @param session The originating session
* @param owner The owner of the value
* @param copyCache The cache of already copied/replaced values
* @param foreignKeyDirection For associations, which direction does the foreign key point?
*
* @return the value to be merged
*
* @throws HibernateException An error from Hibernate
*/
public Object replace(
Object original,
@ -526,14 +589,16 @@ public interface Type extends Serializable {
SessionImplementor session,
Object owner,
Map copyCache,
ForeignKeyDirection foreignKeyDirection)
throws HibernateException;
ForeignKeyDirection foreignKeyDirection) throws HibernateException;
/**
* Given an instance of the type, return an array of boolean, indicating
* which mapped columns would be null.
*
* @param value an instance of the type
* @param mapping The mapping abstraction
*
* @return array indicating column nullness for a value instance
*/
public boolean[] toColumnNullness(Object value, Mapping mapping);

View File

@ -45,7 +45,7 @@ public interface SqlTypeDescriptor extends Serializable {
/**
* Is this descriptor available for remapping?
*
* @return
* @return {@code true} indicates this descriptor can be remapped; otherwise, {@code false}
*
* @see org.hibernate.type.descriptor.WrapperOptions#remapSqlTypeDescriptor
* @see org.hibernate.dialect.Dialect#remapSqlTypeDescriptor

View File

@ -1,8 +1,10 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2009-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@ -32,7 +34,8 @@ import org.hibernate.Session;
public interface HibernateEntityManager extends EntityManager {
/**
* Retrieve a reference to the Hibernate {@link Session} used by this {@link EntityManager}.
* @return
*
* @return The session
*/
public Session getSession();
}
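A hedged usage sketch; entityManager is an existing JPA EntityManager, and the filter name is purely illustrative:

    Session session = ( (HibernateEntityManager) entityManager ).getSession();
    session.enableFilter( "activeOnly" );    // Session-only API not exposed through plain JPA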

View File

@ -88,17 +88,12 @@ public abstract class AbstractFromImpl<Z,X>
return getAlias();
}
/**
* {@inheritDoc}
*/
@Override
protected boolean canBeDereferenced() {
return true;
}
/**
* {@inheritDoc}
*/
@Override
public void prepareAlias(CriteriaQueryCompiler.RenderingContext renderingContext) {
if ( getAlias() == null ) {
if ( isCorrelated() ) {
@ -121,23 +116,16 @@ public abstract class AbstractFromImpl<Z,X>
return renderProjection( renderingContext );
}
/**
* {@inheritDoc}
*/
@Override
public Attribute<?, ?> getAttribute() {
return null;
}
/**
* {@inheritDoc}
*/
public From<?, Z> getParent() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
@SuppressWarnings({ "unchecked" })
protected Attribute<X, ?> locateAttributeInternal(String name) {
return (Attribute<X, ?>) locateManagedType().getAttribute( name );
@ -170,6 +158,7 @@ public abstract class AbstractFromImpl<Z,X>
}
protected class BasicJoinScope implements JoinScope<X> {
@Override
public void addJoin(Join<X, ?> join) {
if ( joins == null ) {
joins = new LinkedHashSet<Join<X,?>>();
@ -177,6 +166,7 @@ public abstract class AbstractFromImpl<Z,X>
joins.add( join );
}
@Override
public void addFetch(Fetch<X, ?> fetch) {
if ( fetches == null ) {
fetches = new LinkedHashSet<Fetch<X,?>>();
@ -186,6 +176,7 @@ public abstract class AbstractFromImpl<Z,X>
}
protected class CorrelationJoinScope implements JoinScope<X> {
@Override
public void addJoin(Join<X, ?> join) {
if ( joins == null ) {
joins = new LinkedHashSet<Join<X,?>>();
@ -193,28 +184,23 @@ public abstract class AbstractFromImpl<Z,X>
joins.add( join );
}
@Override
public void addFetch(Fetch<X, ?> fetch) {
throw new UnsupportedOperationException( "Cannot define fetch from a subquery correlation" );
}
}
/**
* {@inheritDoc}
*/
@Override
public boolean isCorrelated() {
return getCorrelationParent() != null;
}
/**
* {@inheritDoc}
*/
@Override
public FromImplementor<Z,X> getCorrelationParent() {
return correlationParent;
}
/**
* {@inheritDoc}
*/
@Override
@SuppressWarnings({ "unchecked" })
public FromImplementor<Z, X> correlateTo(CriteriaSubqueryImpl subquery) {
final FromImplementor<Z, X> correlationDelegate = createCorrelationDelegate();
@ -224,6 +210,7 @@ public abstract class AbstractFromImpl<Z,X>
protected abstract FromImplementor<Z, X> createCorrelationDelegate();
@Override
public void prepareCorrelationDelegate(FromImplementor<Z, X> parent) {
this.joinScope = new CorrelationJoinScope();
this.correlationParent = parent;
@ -244,9 +231,7 @@ public abstract class AbstractFromImpl<Z,X>
);
}
/**
* {@inheritDoc}
*/
@Override
@SuppressWarnings({ "unchecked" })
public Set<Join<X, ?>> getJoins() {
return joins == null
@ -254,16 +239,12 @@ public abstract class AbstractFromImpl<Z,X>
: joins;
}
/**
* {@inheritDoc}
*/
@Override
public <Y> Join<X, Y> join(SingularAttribute<? super X, Y> singularAttribute) {
return join( singularAttribute, DEFAULT_JOIN_TYPE );
}
/**
* {@inheritDoc}
*/
@Override
public <Y> Join<X, Y> join(SingularAttribute<? super X, Y> attribute, JoinType jt) {
if ( ! canBeJoinSource() ) {
throw illegalJoin();
@ -294,16 +275,13 @@ public abstract class AbstractFromImpl<Z,X>
jt
);
}
/**
* {@inheritDoc}
*/
@Override
public <Y> CollectionJoin<X, Y> join(CollectionAttribute<? super X, Y> collection) {
return join( collection, DEFAULT_JOIN_TYPE );
}
/**
* {@inheritDoc}
*/
@Override
public <Y> CollectionJoin<X, Y> join(CollectionAttribute<? super X, Y> collection, JoinType jt) {
if ( ! canBeJoinSource() ) {
throw illegalJoin();
@ -331,16 +309,12 @@ public abstract class AbstractFromImpl<Z,X>
);
}
/**
* {@inheritDoc}
*/
@Override
public <Y> SetJoin<X, Y> join(SetAttribute<? super X, Y> set) {
return join( set, DEFAULT_JOIN_TYPE );
}
/**
* {@inheritDoc}
*/
@Override
public <Y> SetJoin<X, Y> join(SetAttribute<? super X, Y> set, JoinType jt) {
if ( ! canBeJoinSource() ) {
throw illegalJoin();
@ -362,16 +336,12 @@ public abstract class AbstractFromImpl<Z,X>
return new SetAttributeJoin<X,Y>( criteriaBuilder(), attributeType, this, set, jt );
}
/**
* {@inheritDoc}
*/
@Override
public <Y> ListJoin<X, Y> join(ListAttribute<? super X, Y> list) {
return join( list, DEFAULT_JOIN_TYPE );
}
/**
* {@inheritDoc}
*/
@Override
public <Y> ListJoin<X, Y> join(ListAttribute<? super X, Y> list, JoinType jt) {
if ( ! canBeJoinSource() ) {
throw illegalJoin();
@ -393,16 +363,12 @@ public abstract class AbstractFromImpl<Z,X>
return new ListAttributeJoin<X,Y>( criteriaBuilder(), attributeType, this, list, jt );
}
/**
* {@inheritDoc}
*/
@Override
public <K, V> MapJoin<X, K, V> join(MapAttribute<? super X, K, V> map) {
return join( map, DEFAULT_JOIN_TYPE );
}
/**
* {@inheritDoc}
*/
@Override
public <K, V> MapJoin<X, K, V> join(MapAttribute<? super X, K, V> map, JoinType jt) {
if ( ! canBeJoinSource() ) {
throw illegalJoin();
@ -424,16 +390,12 @@ public abstract class AbstractFromImpl<Z,X>
return new MapAttributeJoin<X, K, V>( criteriaBuilder(), attributeType, this, map, jt );
}
/**
* {@inheritDoc}
*/
@Override
public <X,Y> Join<X, Y> join(String attributeName) {
return join( attributeName, DEFAULT_JOIN_TYPE );
}
/**
* {@inheritDoc}
*/
@Override
@SuppressWarnings({ "unchecked" })
public <X,Y> Join<X, Y> join(String attributeName, JoinType jt) {
if ( ! canBeJoinSource() ) {
@ -465,16 +427,12 @@ public abstract class AbstractFromImpl<Z,X>
}
}
/**
* {@inheritDoc}
*/
@Override
public <X,Y> CollectionJoin<X, Y> joinCollection(String attributeName) {
return joinCollection( attributeName, DEFAULT_JOIN_TYPE );
}
/**
* {@inheritDoc}
*/
@Override
@SuppressWarnings({ "unchecked" })
public <X,Y> CollectionJoin<X, Y> joinCollection(String attributeName, JoinType jt) {
final Attribute<X,?> attribute = (Attribute<X, ?>) locateAttribute( attributeName );
@ -490,16 +448,12 @@ public abstract class AbstractFromImpl<Z,X>
return (CollectionJoin<X,Y>) join( (CollectionAttribute) attribute, jt );
}
/**
* {@inheritDoc}
*/
@Override
public <X,Y> SetJoin<X, Y> joinSet(String attributeName) {
return joinSet( attributeName, DEFAULT_JOIN_TYPE );
}
/**
* {@inheritDoc}
*/
@Override
@SuppressWarnings({ "unchecked" })
public <X,Y> SetJoin<X, Y> joinSet(String attributeName, JoinType jt) {
final Attribute<X,?> attribute = (Attribute<X, ?>) locateAttribute( attributeName );
@ -515,16 +469,12 @@ public abstract class AbstractFromImpl<Z,X>
return (SetJoin<X,Y>) join( (SetAttribute) attribute, jt );
}
/**
* {@inheritDoc}
*/
@Override
public <X,Y> ListJoin<X, Y> joinList(String attributeName) {
return joinList( attributeName, DEFAULT_JOIN_TYPE );
}
/**
* {@inheritDoc}
*/
@Override
@SuppressWarnings({ "unchecked" })
public <X,Y> ListJoin<X, Y> joinList(String attributeName, JoinType jt) {
final Attribute<X,?> attribute = (Attribute<X, ?>) locateAttribute( attributeName );
@ -540,16 +490,12 @@ public abstract class AbstractFromImpl<Z,X>
return (ListJoin<X,Y>) join( (ListAttribute) attribute, jt );
}
/**
* {@inheritDoc}
*/
@Override
public <X, K, V> MapJoin<X, K, V> joinMap(String attributeName) {
return joinMap( attributeName, DEFAULT_JOIN_TYPE );
}
/**
* {@inheritDoc}
*/
@Override
@SuppressWarnings({ "unchecked" })
public <X, K, V> MapJoin<X, K, V> joinMap(String attributeName, JoinType jt) {
final Attribute<X,?> attribute = (Attribute<X, ?>) locateAttribute( attributeName );
@ -579,9 +525,7 @@ public abstract class AbstractFromImpl<Z,X>
);
}
/**
* {@inheritDoc}
*/
@Override
@SuppressWarnings({ "unchecked" })
public Set<Fetch<X, ?>> getFetches() {
return fetches == null
@ -589,10 +533,12 @@ public abstract class AbstractFromImpl<Z,X>
: fetches;
}
@Override
public <Y> Fetch<X, Y> fetch(SingularAttribute<? super X, Y> singularAttribute) {
return fetch( singularAttribute, DEFAULT_JOIN_TYPE );
}
@Override
public <Y> Fetch<X, Y> fetch(SingularAttribute<? super X, Y> attribute, JoinType jt) {
if ( ! canBeFetchSource() ) {
throw illegalFetch();
@ -603,10 +549,12 @@ public abstract class AbstractFromImpl<Z,X>
return fetch;
}
@Override
public <Y> Fetch<X, Y> fetch(PluralAttribute<? super X, ?, Y> pluralAttribute) {
return fetch( pluralAttribute, DEFAULT_JOIN_TYPE );
}
@Override
public <Y> Fetch<X, Y> fetch(PluralAttribute<? super X, ?, Y> pluralAttribute, JoinType jt) {
if ( ! canBeFetchSource() ) {
throw illegalFetch();
@ -630,10 +578,12 @@ public abstract class AbstractFromImpl<Z,X>
return fetch;
}
@Override
public <X,Y> Fetch<X, Y> fetch(String attributeName) {
return fetch( attributeName, DEFAULT_JOIN_TYPE );
}
@Override
@SuppressWarnings({ "unchecked" })
public <X,Y> Fetch<X, Y> fetch(String attributeName, JoinType jt) {
if ( ! canBeFetchSource() ) {
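The string-based join and fetch overloads above resolve the named attribute against the metamodel and then dispatch to the typed variants. A hedged usage sketch against the standard javax.persistence.criteria API (the Order/LineItem mappings are illustrative only):

    // Illustrative only; Order, LineItem and their attributes are assumed example mappings.
    CriteriaBuilder cb = entityManager.getCriteriaBuilder();
    CriteriaQuery<Order> query = cb.createQuery( Order.class );
    Root<Order> root = query.from( Order.class );                          // a From backed by this hierarchy
    Join<Order, LineItem> items = root.join( "lineItems", JoinType.LEFT );
    root.fetch( "customer" );                                              // fetches resolve the same way
    query.select( root ).where( cb.gt( items.<Integer>get( "quantity" ), 10 ) );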

View File

@ -34,8 +34,10 @@ public class AuditEntityNameRegister {
/**
* Creates a unique (not yet registered) audit entity name by appending consecutive numbers to the base
* name. If the base name is not yet used, it is returned unmodified.
*
* @param baseAuditEntityName The base entity name.
* @return
*
* @return A unique audit entity name
*/
public String createUnique(final String baseAuditEntityName) {
String auditEntityName = baseAuditEntityName;
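The described strategy amounts to probing candidate names until an unused one is found. A minimal sketch, under the assumption that already-registered names are tracked in a java.util.Set:

    // Sketch only; the real register keeps its own record of used names.
    private String createUnique(String baseAuditEntityName, Set<String> registeredNames) {
        String candidate = baseAuditEntityName;
        int counter = 1;
        while ( registeredNames.contains( candidate ) ) {
            candidate = baseAuditEntityName + counter++;    // append consecutive numbers
        }
        return candidate;
    }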

View File

@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@ -73,10 +73,10 @@ public class FirstLevelCache {
/**
* Adds the entityName to the cache. The key is a triple made up of the primaryKey, revision and entity.
* @param id, primaryKey
* @param revision, revision number
* @param entity, object retrieved by envers
* @param entityName, value of the cache
* @param id primaryKey
* @param revision revision number
* @param entity object retrieved by envers
* @param entityName value of the cache
*/
public void putOnEntityNameCache(Object id, Number revision, Object entity, String entityName) {
LOG.debugf("Caching entityName on First Level Cache: - primaryKey:%s - revision:%s - entity:%s -> entityName:%s",
@ -89,9 +89,12 @@ public class FirstLevelCache {
/**
* Gets the entityName from the cache. The key is a triple made up of the primaryKey, revision and entity.
* @param id, primaryKey
* @param revision, revision number
* @param entity, object retrieved by envers
*
* @param id primaryKey
* @param revision revision number
* @param entity object retrieved by envers
*
* @return The appropriate entity name
*/
public String getFromEntityNameCache(Object id, Number revision, Object entity) {
LOG.debugf("Trying to resolve entityName from First Level Cache: - primaryKey:%s - revision:%s - entity:%s",
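A hedged usage sketch of the two methods above (cache is an assumed FirstLevelCache instance; the identifier, revision and entity name are illustrative):

    // Store and later resolve an entity name keyed by (primaryKey, revision, entity):
    cache.putOnEntityNameCache( personId, revisionNumber, personProxy, "com.example.Person_AUD" );
    String entityName = cache.getFromEntityNameCache( personId, revisionNumber, personProxy );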

View File

@ -36,8 +36,9 @@ public interface DialectCheck {
/**
* Does the given dialect match the defined check?
*
* @param dialect
* @return
* @param dialect The dialect against which to check
*
* @return {@code true} if it matches; {@code false} otherwise.
*/
public boolean isMatch(Dialect dialect);
}
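A hypothetical implementation, assuming the check should only match dialects with native sequence support:

    // Hypothetical DialectCheck; Dialect#supportsSequences() is part of the public Dialect contract.
    public class SupportsSequences implements DialectCheck {
        @Override
        public boolean isMatch(Dialect dialect) {
            return dialect.supportsSequences();
        }
    }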

View File

@ -32,7 +32,6 @@ import java.lang.annotation.Target;
* Annotation used to identify a method as a callback to be executed whenever a {@link FailureExpected} is handled.
*
* @author Steve Ebersole
* @see
*/
@Retention( RetentionPolicy.RUNTIME )
@Target( ElementType.METHOD )

View File

@ -12,20 +12,25 @@ ideaModule {
javadocBuildDir = dir( buildDirName + "/documentation/javadocs" )
def List subProjectsToSkipForJavadoc = ['release','documentation'];
def List sourceSetsToSkipForJavadoc = ['test','matrix'];
def copyRightYear = new java.util.GregorianCalendar().get( java.util.Calendar.YEAR );
task aggregateJavadocs(type: Javadoc) {
// exclude any generated sources (this is not working: http://forums.gradle.org/gradle/topics/excluding_generated_source_from_javadoc)
exclude "**/generated-src/**"
// process each project, building up:
// 1) appropriate sources
// 2) classpath
// 3) the package list for groups
Set<String> apiPackages = new HashSet<String>()
Set<String> spiPackages = new HashSet<String>()
Set<String> internalPackages = new HashSet<String>()
parent.subprojects.each{ subProject->
if ( !subProjectsToSkipForJavadoc.contains( subProject.name ) ) {
subProject.sourceSets.each { sourceSet ->
if ( !sourceSetsToSkipForJavadoc.contains( sourceSet.name ) ) {
parent.subprojects.each{ Project subProject->
// skip certain sub-projects
if ( ! ['release','documentation'].contains( subProject.name ) ) {
subProject.sourceSets.each { SourceSet sourceSet ->
// skip certain source sets
if ( ! ['test','matrix'].contains( sourceSet.name ) ) {
source sourceSet.java
if( classpath ) {
@ -43,6 +48,9 @@ task aggregateJavadocs(type: Javadoc) {
else if ( packageName.endsWith( ".spi" ) || packageName.contains( ".spi." ) ) {
spiPackages.add( packageName );
}
else if ( packageName.startsWith( "org.hibernate.testing" ) ) {
// do nothing as testing support is already handled...
}
else {
apiPackages.add( packageName );
}
@ -52,11 +60,12 @@ task aggregateJavadocs(type: Javadoc) {
}
}
// apply standard config
description = "Build the aggregated JavaDocs for all modules"
maxMemory = '512m'
destinationDir = javadocBuildDir.dir
configure( options ) {
overview = new File( projectDir, 'src/javadoc/package.html' )
overview = new File( projectDir, 'src/javadoc/overview.html' )
stylesheetFile = new File( projectDir, 'src/javadoc/stylesheet.css' )
windowTitle = 'Hibernate JavaDocs'
docTitle = "Hibernate JavaDoc ($project.version)"
@ -67,6 +76,10 @@ task aggregateJavadocs(type: Javadoc) {
group( 'SPI', spiPackages.asList() )
group( 'Internal', internalPackages.asList() )
group ( 'Testing Support', ['org.hibernate.testing*'] )
// ugh, http://issues.gradle.org/browse/GRADLE-1563
// tags ["todo:X"]
// work around:
addStringOption( "tag", "todo:X" )
}
}
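The grouping applied in the loop above boils down to a package-name classification. A hedged rendering of that rule in plain Java (the .internal branch is assumed by symmetry with the groups declared at the end of the task):

    // Purely illustrative mirror of the categorization used by aggregateJavadocs.
    static String categorize(String packageName) {
        if ( packageName.endsWith( ".internal" ) || packageName.contains( ".internal." ) ) {
            return "Internal";
        }
        else if ( packageName.endsWith( ".spi" ) || packageName.contains( ".spi." ) ) {
            return "SPI";
        }
        else if ( packageName.startsWith( "org.hibernate.testing" ) ) {
            return "Testing Support";    // handled by its own javadoc group
        }
        return "API";
    }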

View File

@ -74,6 +74,7 @@ This documentation groups packages into the following 3 categories:<ul>
between releases whereas APIs and SPIs are more stable.
</li>
</ul>
Additionally, we highlight a 4th category: testing support.
<hr/>
Complete Hibernate documentation may be found online at <a href="http://docs.jboss.org/hibernate/">http://docs.jboss.org/hibernate/</a>